
Add Exemplar to metricdata package (#3849)

* Add Exemplar to metricdata pkg

* Update histogram Aggregator

* Update opencensus bridge

* Update prometheus exporter

* Update OTLP exporter

* Update stdoutmetric exporter

* Add changes to changelog

* Update fail tests

* Add tests for IgnoreExemplars

* Fix merge
Tyler Yahn, 2023-03-14 07:56:18 -07:00, committed by GitHub
commit 01b8f15a72 (parent b62eb2ca88)
16 changed files with 550 additions and 143 deletions

View File

@@ -11,6 +11,8 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
### Added
- The `WithoutTimestamps` option to `go.opentelemetry.io/otel/exporters/stdout/stdoutmetric` to sets all timestamps to zero. (#3828)
- The new `Exemplar` type is added to `go.opentelemetry.io/otel/sdk/metric/metricdata`.
  Both the `DataPoint` and `HistogramDataPoint` types from that package have a new field of `Exemplars` containing the sampled exemplars for their timeseries. (#3849)
### Changed
@@ -18,6 +20,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.htm
- Optimize memory allocation when creation new metric instruments in `go.opentelemetry.io/otel/sdk/metric`. (#3832)
- Avoid creating new objects on all calls to `WithDeferredSetup` and `SkipContextSetup` in OpenTracing bridge. (#3833)
- The `New` and `Detect` functions from `go.opentelemetry.io/otel/sdk/resource` return errors that wrap underlying errors instead of just containing the underlying error strings. (#3844)
- Both the `Histogram` and `HistogramDataPoint` are redefined with a generic argument of `[N int64 | float64]` in `go.opentelemetry.io/otel/sdk/metric/metricdata`. (#3849)
### Removed
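The entries above describe the shape of the change: `DataPoint` and `HistogramDataPoint` gain an `Exemplars` field, and `Histogram`/`HistogramDataPoint` become generic over `int64 | float64`. A minimal, hypothetical sketch (not a file in this commit) of what a data point carrying exemplars looks like, assuming the `metricdata` types as defined in this change:

package main

import (
	"fmt"
	"time"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/sdk/metric/metricdata"
)

func main() {
	now := time.Now()
	dp := metricdata.DataPoint[int64]{
		Attributes: attribute.NewSet(attribute.String("host", "a")),
		StartTime:  now.Add(-time.Minute),
		Time:       now,
		Value:      42,
		// Exemplars is the field added by this commit; it carries sampled
		// measurements alongside the aggregated value.
		Exemplars: []metricdata.Exemplar[int64]{{
			FilteredAttributes: []attribute.KeyValue{attribute.String("user.id", "123")},
			Time:               now,
			Value:              42,
		}},
	}
	fmt.Println(len(dp.Exemplars))
}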

View File

@@ -127,8 +127,8 @@ func convertNumberDataPoints[N int64 | float64](labelKeys []ocmetricdata.LabelKe
// convertHistogram converts OpenCensus Distribution timeseries to an
// OpenTelemetry Histogram aggregation.
- func convertHistogram(labelKeys []ocmetricdata.LabelKey, ts []*ocmetricdata.TimeSeries) (metricdata.Histogram, error) {
- points := make([]metricdata.HistogramDataPoint, 0, len(ts))
+ func convertHistogram(labelKeys []ocmetricdata.LabelKey, ts []*ocmetricdata.TimeSeries) (metricdata.Histogram[float64], error) {
+ points := make([]metricdata.HistogramDataPoint[float64], 0, len(ts))
var errInfo []string
for _, t := range ts {
attrs, err := convertAttrs(labelKeys, t.LabelValues)
@@ -152,7 +152,7 @@ func convertHistogram(labelKeys []ocmetricdata.LabelKey, ts []*ocmetricdata.Time
continue
}
// TODO: handle exemplars
- points = append(points, metricdata.HistogramDataPoint{
+ points = append(points, metricdata.HistogramDataPoint[float64]{
Attributes: attrs,
StartTime: t.StartTime,
Time: p.Time,
@@ -167,7 +167,7 @@ func convertHistogram(labelKeys []ocmetricdata.LabelKey, ts []*ocmetricdata.Time
if len(errInfo) > 0 {
aggregatedError = fmt.Errorf("%w: %v", errHistogramDataPoint, errInfo)
}
- return metricdata.Histogram{DataPoints: points, Temporality: metricdata.CumulativeTemporality}, aggregatedError
+ return metricdata.Histogram[float64]{DataPoints: points, Temporality: metricdata.CumulativeTemporality}, aggregatedError
}
// convertBucketCounts converts from OpenCensus bucket counts to slice of uint64.

View File

@@ -214,8 +214,8 @@ func TestConvertMetrics(t *testing.T) {
Name: "foo.com/histogram-a",
Description: "a testing histogram",
Unit: "1",
- Data: metricdata.Histogram{
- DataPoints: []metricdata.HistogramDataPoint{
+ Data: metricdata.Histogram[float64]{
+ DataPoints: []metricdata.HistogramDataPoint[float64]{
{
Attributes: attribute.NewSet(attribute.KeyValue{
Key: attribute.Key("a"),
@@ -387,9 +387,9 @@ func TestConvertMetrics(t *testing.T) {
Name: "foo.com/histogram-a",
Description: "a testing histogram",
Unit: "1",
- Data: metricdata.Histogram{
+ Data: metricdata.Histogram[float64]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: []metricdata.HistogramDataPoint{},
+ DataPoints: []metricdata.HistogramDataPoint[float64]{},
},
},
},

View File

@@ -95,7 +95,9 @@ func metric(m metricdata.Metrics) (*mpb.Metric, error) {
out.Data, err = Sum[int64](a)
case metricdata.Sum[float64]:
out.Data, err = Sum[float64](a)
- case metricdata.Histogram:
+ case metricdata.Histogram[int64]:
+ out.Data, err = Histogram(a)
+ case metricdata.Histogram[float64]:
out.Data, err = Histogram(a)
default:
return out, fmt.Errorf("%w: %T", errUnknownAggregation, a)
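Two separate `Histogram` cases are needed because a Go type switch only matches concrete instantiations; the now-generic `Histogram[N int64 | float64]` transform below handles either one. A small self-contained sketch of that pattern, using illustrative stand-in types rather than this package's:

package main

import "fmt"

// histogram is an illustrative stand-in for metricdata.Histogram[N].
type histogram[N int64 | float64] struct{ sum N }

// describe is generic over both instantiations, like the Histogram transform
// function in this file.
func describe[N int64 | float64](h histogram[N]) string {
	return fmt.Sprintf("sum=%v", h.sum)
}

func main() {
	var data any = histogram[float64]{sum: 3.5}
	// A type switch can only name concrete instantiations, so each one gets
	// its own case, even though both cases call the same generic function.
	switch h := data.(type) {
	case histogram[int64]:
		fmt.Println(describe(h))
	case histogram[float64]:
		fmt.Println(describe(h))
	}
}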
@@ -155,7 +157,7 @@ func DataPoints[N int64 | float64](dPts []metricdata.DataPoint[N]) []*mpb.Number
// Histogram returns an OTLP Metric_Histogram generated from h. An error is
// returned with a partial Metric_Histogram if the temporality of h is
// unknown.
- func Histogram(h metricdata.Histogram) (*mpb.Metric_Histogram, error) {
+ func Histogram[N int64 | float64](h metricdata.Histogram[N]) (*mpb.Metric_Histogram, error) {
t, err := Temporality(h.Temporality)
if err != nil {
return nil, err
@@ -170,7 +172,7 @@ func Histogram(h metricdata.Histogram) (*mpb.Metric_Histogram, error) {
// HistogramDataPoints returns a slice of OTLP HistogramDataPoint generated
// from dPts.
- func HistogramDataPoints(dPts []metricdata.HistogramDataPoint) []*mpb.HistogramDataPoint {
+ func HistogramDataPoints[N int64 | float64](dPts []metricdata.HistogramDataPoint[N]) []*mpb.HistogramDataPoint {
out := make([]*mpb.HistogramDataPoint, 0, len(dPts))
for _, dPt := range dPts {
sum := dPt.Sum

View File

@@ -52,7 +52,28 @@ var (
minA, maxA, sumA = 2.0, 4.0, 90.0
minB, maxB, sumB = 4.0, 150.0, 234.0
- otelHDP = []metricdata.HistogramDataPoint{{
+ otelHDPInt64 = []metricdata.HistogramDataPoint[int64]{{
+ Attributes: alice,
+ StartTime: start,
+ Time: end,
+ Count: 30,
+ Bounds: []float64{1, 5},
+ BucketCounts: []uint64{0, 30, 0},
+ Min: metricdata.NewExtrema(minA),
+ Max: metricdata.NewExtrema(maxA),
+ Sum: sumA,
+ }, {
+ Attributes: bob,
+ StartTime: start,
+ Time: end,
+ Count: 3,
+ Bounds: []float64{1, 5},
+ BucketCounts: []uint64{0, 1, 2},
+ Min: metricdata.NewExtrema(minB),
+ Max: metricdata.NewExtrema(maxB),
+ Sum: sumB,
+ }}
+ otelHDPFloat64 = []metricdata.HistogramDataPoint[float64]{{
Attributes: alice,
StartTime: start,
Time: end,
@@ -96,14 +117,18 @@ var (
Max: &maxB,
}}
- otelHist = metricdata.Histogram{
+ otelHistInt64 = metricdata.Histogram[int64]{
Temporality: metricdata.DeltaTemporality,
- DataPoints: otelHDP,
+ DataPoints: otelHDPInt64,
+ }
+ otelHistFloat64 = metricdata.Histogram[float64]{
+ Temporality: metricdata.DeltaTemporality,
+ DataPoints: otelHDPFloat64,
}
invalidTemporality metricdata.Temporality
- otelHistInvalid = metricdata.Histogram{
+ otelHistInvalid = metricdata.Histogram[int64]{
Temporality: invalidTemporality,
- DataPoints: otelHDP,
+ DataPoints: otelHDPInt64,
}
pbHist = &mpb.Histogram{
@@ -215,10 +240,16 @@ var (
Data: otelSumInvalid,
},
{
- Name: "histogram",
+ Name: "int64-histogram",
Description: "Histogram",
Unit: "1",
- Data: otelHist,
+ Data: otelHistInt64,
+ },
+ {
+ Name: "float64-histogram",
+ Description: "Histogram",
+ Unit: "1",
+ Data: otelHistFloat64,
},
{
Name: "invalid-histogram",
@@ -260,7 +291,13 @@ var (
Data: &mpb.Metric_Sum{Sum: pbSumFloat64},
},
{
- Name: "histogram",
+ Name: "int64-histogram",
+ Description: "Histogram",
+ Unit: "1",
+ Data: &mpb.Metric_Histogram{Histogram: pbHist},
+ },
+ {
+ Name: "float64-histogram",
Description: "Histogram",
Unit: "1",
Data: &mpb.Metric_Histogram{Histogram: pbHist},
@@ -327,12 +364,16 @@ func TestTransformations(t *testing.T) {
// errors deep inside the structs).
// DataPoint types.
- assert.Equal(t, pbHDP, HistogramDataPoints(otelHDP))
+ assert.Equal(t, pbHDP, HistogramDataPoints(otelHDPInt64))
+ assert.Equal(t, pbHDP, HistogramDataPoints(otelHDPFloat64))
assert.Equal(t, pbDPtsInt64, DataPoints[int64](otelDPtsInt64))
require.Equal(t, pbDPtsFloat64, DataPoints[float64](otelDPtsFloat64))
// Aggregations.
- h, err := Histogram(otelHist)
+ h, err := Histogram(otelHistInt64)
+ assert.NoError(t, err)
+ assert.Equal(t, &mpb.Metric_Histogram{Histogram: pbHist}, h)
+ h, err = Histogram(otelHistFloat64)
assert.NoError(t, err)
assert.Equal(t, &mpb.Metric_Histogram{Histogram: pbHist}, h)
h, err = Histogram(otelHistInvalid)

View File

@@ -155,7 +155,9 @@ func (c *collector) Collect(ch chan<- prometheus.Metric) {
for _, m := range scopeMetrics.Metrics {
switch v := m.Data.(type) {
- case metricdata.Histogram:
+ case metricdata.Histogram[int64]:
+ addHistogramMetric(ch, v, m, keys, values, c.getName(m), c.metricFamilies)
+ case metricdata.Histogram[float64]:
addHistogramMetric(ch, v, m, keys, values, c.getName(m), c.metricFamilies)
case metricdata.Sum[int64]:
addSumMetric(ch, v, m, keys, values, c.getName(m), c.metricFamilies)
@@ -170,7 +172,7 @@ func (c *collector) Collect(ch chan<- prometheus.Metric) {
}
}
- func addHistogramMetric(ch chan<- prometheus.Metric, histogram metricdata.Histogram, m metricdata.Metrics, ks, vs [2]string, name string, mfs map[string]*dto.MetricFamily) {
+ func addHistogramMetric[N int64 | float64](ch chan<- prometheus.Metric, histogram metricdata.Histogram[N], m metricdata.Metrics, ks, vs [2]string, name string, mfs map[string]*dto.MetricFamily) {
// TODO(https://github.com/open-telemetry/opentelemetry-go/issues/3163): support exemplars
drop, help := validateMetrics(name, m.Description, dto.MetricType_HISTOGRAM.Enum(), mfs)
if drop {

View File

@@ -81,9 +81,9 @@ var (
Name: "latency",
Description: "Time spend processing received requests",
Unit: "ms",
- Data: metricdata.Histogram{
+ Data: metricdata.Histogram[float64]{
Temporality: metricdata.DeltaTemporality,
- DataPoints: []metricdata.HistogramDataPoint{
+ DataPoints: []metricdata.HistogramDataPoint[float64]{
{
Attributes: attribute.NewSet(attribute.String("server", "central")),
StartTime: now,

View File

@@ -138,8 +138,13 @@ func redactAggregationTimestamps(orig metricdata.Aggregation) metricdata.Aggrega
return metricdata.Gauge[int64]{
DataPoints: redactDataPointTimestamps(a.DataPoints),
}
- case metricdata.Histogram:
- return metricdata.Histogram{
+ case metricdata.Histogram[int64]:
+ return metricdata.Histogram[int64]{
+ Temporality: a.Temporality,
+ DataPoints: redactHistogramTimestamps(a.DataPoints),
+ }
+ case metricdata.Histogram[float64]:
+ return metricdata.Histogram[float64]{
Temporality: a.Temporality,
DataPoints: redactHistogramTimestamps(a.DataPoints),
}
@@ -149,10 +154,10 @@ func redactAggregationTimestamps(orig metricdata.Aggregation) metricdata.Aggrega
}
}
- func redactHistogramTimestamps(hdp []metricdata.HistogramDataPoint) []metricdata.HistogramDataPoint {
- out := make([]metricdata.HistogramDataPoint, len(hdp))
+ func redactHistogramTimestamps[T int64 | float64](hdp []metricdata.HistogramDataPoint[T]) []metricdata.HistogramDataPoint[T] {
+ out := make([]metricdata.HistogramDataPoint[T], len(hdp))
for i, dp := range hdp {
- out[i] = metricdata.HistogramDataPoint{
+ out[i] = metricdata.HistogramDataPoint[T]{
Attributes: dp.Attributes,
Count: dp.Count,
Sum: dp.Sum,

View File

@@ -141,12 +141,12 @@ func (s *deltaHistogram[N]) Aggregation() metricdata.Aggregation {
// Do not allow modification of our copy of bounds.
bounds := make([]float64, len(s.bounds))
copy(bounds, s.bounds)
- h := metricdata.Histogram{
+ h := metricdata.Histogram[N]{
Temporality: metricdata.DeltaTemporality,
- DataPoints: make([]metricdata.HistogramDataPoint, 0, len(s.values)),
+ DataPoints: make([]metricdata.HistogramDataPoint[N], 0, len(s.values)),
}
for a, b := range s.values {
- hdp := metricdata.HistogramDataPoint{
+ hdp := metricdata.HistogramDataPoint[N]{
Attributes: a,
StartTime: s.start,
Time: t,
@@ -204,9 +204,9 @@ func (s *cumulativeHistogram[N]) Aggregation() metricdata.Aggregation {
// Do not allow modification of our copy of bounds.
bounds := make([]float64, len(s.bounds))
copy(bounds, s.bounds)
- h := metricdata.Histogram{
+ h := metricdata.Histogram[N]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: make([]metricdata.HistogramDataPoint, 0, len(s.values)),
+ DataPoints: make([]metricdata.HistogramDataPoint[N], 0, len(s.values)),
}
for a, b := range s.values {
// The HistogramDataPoint field values returned need to be copies of
@@ -217,7 +217,7 @@ func (s *cumulativeHistogram[N]) Aggregation() metricdata.Aggregation {
counts := make([]uint64, len(b.counts))
copy(counts, b.counts)
- hdp := metricdata.HistogramDataPoint{
+ hdp := metricdata.HistogramDataPoint[N]{
Attributes: a,
StartTime: s.start,
Time: t,

View File

@@ -49,31 +49,31 @@ func testHistogram[N int64 | float64](t *testing.T) {
}
incr := monoIncr
- eFunc := deltaHistExpecter(incr)
+ eFunc := deltaHistExpecter[N](incr)
t.Run("Delta", tester.Run(NewDeltaHistogram[N](histConf), incr, eFunc))
- eFunc = cumuHistExpecter(incr)
+ eFunc = cumuHistExpecter[N](incr)
t.Run("Cumulative", tester.Run(NewCumulativeHistogram[N](histConf), incr, eFunc))
}
- func deltaHistExpecter(incr setMap) expectFunc {
- h := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
+ func deltaHistExpecter[N int64 | float64](incr setMap) expectFunc {
+ h := metricdata.Histogram[N]{Temporality: metricdata.DeltaTemporality}
return func(m int) metricdata.Aggregation {
- h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(incr))
+ h.DataPoints = make([]metricdata.HistogramDataPoint[N], 0, len(incr))
for a, v := range incr {
- h.DataPoints = append(h.DataPoints, hPoint(a, float64(v), uint64(m)))
+ h.DataPoints = append(h.DataPoints, hPoint[N](a, float64(v), uint64(m)))
}
return h
}
}
- func cumuHistExpecter(incr setMap) expectFunc {
+ func cumuHistExpecter[N int64 | float64](incr setMap) expectFunc {
var cycle int
- h := metricdata.Histogram{Temporality: metricdata.CumulativeTemporality}
+ h := metricdata.Histogram[N]{Temporality: metricdata.CumulativeTemporality}
return func(m int) metricdata.Aggregation {
cycle++
- h.DataPoints = make([]metricdata.HistogramDataPoint, 0, len(incr))
+ h.DataPoints = make([]metricdata.HistogramDataPoint[N], 0, len(incr))
for a, v := range incr {
- h.DataPoints = append(h.DataPoints, hPoint(a, float64(v), uint64(cycle*m)))
+ h.DataPoints = append(h.DataPoints, hPoint[N](a, float64(v), uint64(cycle*m)))
}
return h
}
@@ -81,11 +81,11 @@ func cumuHistExpecter(incr setMap) expectFunc {
// hPoint returns an HistogramDataPoint that started and ended now with multi
// number of measurements values v. It includes a min and max (set to v).
- func hPoint(a attribute.Set, v float64, multi uint64) metricdata.HistogramDataPoint {
+ func hPoint[N int64 | float64](a attribute.Set, v float64, multi uint64) metricdata.HistogramDataPoint[N] {
idx := sort.SearchFloat64s(bounds, v)
counts := make([]uint64, len(bounds)+1)
counts[idx] += multi
- return metricdata.HistogramDataPoint{
+ return metricdata.HistogramDataPoint[N]{
Attributes: a,
StartTime: now(),
Time: now(),
@@ -128,7 +128,7 @@ func testHistImmutableBounds[N int64 | float64](newA func(aggregation.ExplicitBu
assert.Equal(t, cpB, getBounds(a), "modifying the bounds argument should not change the bounds")
a.Aggregate(5, alice)
- hdp := a.Aggregation().(metricdata.Histogram).DataPoints[0]
+ hdp := a.Aggregation().(metricdata.Histogram[N]).DataPoints[0]
hdp.Bounds[1] = 10
assert.Equal(t, cpB, getBounds(a), "modifying the Aggregation bounds should not change the bounds")
}
@@ -155,7 +155,7 @@ func TestHistogramImmutableBounds(t *testing.T) {
func TestCumulativeHistogramImutableCounts(t *testing.T) {
a := NewCumulativeHistogram[int64](histConf)
a.Aggregate(5, alice)
- hdp := a.Aggregation().(metricdata.Histogram).DataPoints[0]
+ hdp := a.Aggregation().(metricdata.Histogram[int64]).DataPoints[0]
cumuH := a.(*cumulativeHistogram[int64])
require.Equal(t, hdp.BucketCounts, cumuH.values[alice].counts)
@@ -173,8 +173,8 @@ func TestDeltaHistogramReset(t *testing.T) {
assert.Nil(t, a.Aggregation())
a.Aggregate(1, alice)
- expect := metricdata.Histogram{Temporality: metricdata.DeltaTemporality}
- expect.DataPoints = []metricdata.HistogramDataPoint{hPoint(alice, 1, 1)}
+ expect := metricdata.Histogram[int64]{Temporality: metricdata.DeltaTemporality}
+ expect.DataPoints = []metricdata.HistogramDataPoint[int64]{hPoint[int64](alice, 1, 1)}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
// The attr set should be forgotten once Aggregations is called.
@@ -183,7 +183,7 @@ func TestDeltaHistogramReset(t *testing.T) {
// Aggregating another set should not affect the original (alice).
a.Aggregate(1, bob)
- expect.DataPoints = []metricdata.HistogramDataPoint{hPoint(bob, 1, 1)}
+ expect.DataPoints = []metricdata.HistogramDataPoint[int64]{hPoint[int64](bob, 1, 1)}
metricdatatest.AssertAggregationsEqual(t, expect, a.Aggregation())
}

View File

@@ -382,9 +382,9 @@ func TestMeterCreatesInstruments(t *testing.T) {
},
want: metricdata.Metrics{
Name: "histogram",
- Data: metricdata.Histogram{
+ Data: metricdata.Histogram[int64]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: []metricdata.HistogramDataPoint{
+ DataPoints: []metricdata.HistogramDataPoint[int64]{
{
Attributes: attribute.Set{},
Count: 1,
@@ -446,9 +446,9 @@ func TestMeterCreatesInstruments(t *testing.T) {
},
want: metricdata.Metrics{
Name: "histogram",
- Data: metricdata.Histogram{
+ Data: metricdata.Histogram[float64]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: []metricdata.HistogramDataPoint{
+ DataPoints: []metricdata.HistogramDataPoint[float64]{
{
Attributes: attribute.Set{},
Count: 1,
@@ -1124,8 +1124,8 @@ func testAttributeFilter(temporality metricdata.Temporality) func(*testing.T) {
},
wantMetric: metricdata.Metrics{
Name: "sfhistogram",
- Data: metricdata.Histogram{
- DataPoints: []metricdata.HistogramDataPoint{
+ Data: metricdata.Histogram[float64]{
+ DataPoints: []metricdata.HistogramDataPoint[float64]{
{
Attributes: attribute.NewSet(attribute.String("foo", "bar")),
Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},
@@ -1206,8 +1206,8 @@ func testAttributeFilter(temporality metricdata.Temporality) func(*testing.T) {
},
wantMetric: metricdata.Metrics{
Name: "sihistogram",
- Data: metricdata.Histogram{
- DataPoints: []metricdata.HistogramDataPoint{
+ Data: metricdata.Histogram[int64]{
+ DataPoints: []metricdata.HistogramDataPoint[int64]{
{
Attributes: attribute.NewSet(attribute.String("foo", "bar")),
Bounds: []float64{0, 5, 10, 25, 50, 75, 100, 250, 500, 750, 1000, 2500, 5000, 7500, 10000},

View File

@@ -59,7 +59,8 @@ type Aggregation interface {
// Gauge represents a measurement of the current value of an instrument.
type Gauge[N int64 | float64] struct {
- // DataPoints reprents individual aggregated measurements with unique Attributes.
+ // DataPoints are the individual aggregated measurements with unique
+ // Attributes.
DataPoints []DataPoint[N]
}
@@ -67,7 +68,8 @@ func (Gauge[N]) privateAggregation() {}
// Sum represents the sum of all measurements of values from an instrument.
type Sum[N int64 | float64] struct {
- // DataPoints reprents individual aggregated measurements with unique Attributes.
+ // DataPoints are the individual aggregated measurements with unique
+ // Attributes.
DataPoints []DataPoint[N]
// Temporality describes if the aggregation is reported as the change from the
// last report time, or the cumulative changes since a fixed start time.
@@ -89,21 +91,25 @@ type DataPoint[N int64 | float64] struct {
Time time.Time `json:",omitempty"`
// Value is the value of this data point.
Value N
+ // Exemplars is the sampled Exemplars collected during the timeseries.
+ Exemplars []Exemplar[N] `json:",omitempty"`
}
// Histogram represents the histogram of all measurements of values from an instrument.
- type Histogram struct {
- // DataPoints reprents individual aggregated measurements with unique Attributes.
- DataPoints []HistogramDataPoint
+ type Histogram[N int64 | float64] struct {
+ // DataPoints are the individual aggregated measurements with unique
+ // Attributes.
+ DataPoints []HistogramDataPoint[N]
// Temporality describes if the aggregation is reported as the change from the
// last report time, or the cumulative changes since a fixed start time.
Temporality Temporality
}
- func (Histogram) privateAggregation() {}
+ func (Histogram[N]) privateAggregation() {}
// HistogramDataPoint is a single histogram data point in a timeseries.
- type HistogramDataPoint struct {
+ type HistogramDataPoint[N int64 | float64] struct {
// Attributes is the set of key value pairs that uniquely identify the
// timeseries.
Attributes attribute.Set
@@ -126,6 +132,9 @@ type HistogramDataPoint struct {
Max Extrema
// Sum is the sum of the values recorded.
Sum float64
+ // Exemplars is the sampled Exemplars collected during the timeseries.
+ Exemplars []Exemplar[N] `json:",omitempty"`
}
// Extrema is the minimum or maximum value of a dataset.
@@ -144,3 +153,22 @@ func NewExtrema(v float64) Extrema {
func (e Extrema) Value() (v float64, defined bool) {
return e.value, e.valid
}
+ // Exemplar is a measurement sampled from a timeseries providing a typical
+ // example.
+ type Exemplar[N int64 | float64] struct {
+ // FilteredAttributes are the attributes recorded with the measurement but
+ // filtered out of the timeseries' aggregated data.
+ FilteredAttributes []attribute.KeyValue
+ // Time is the time when the measurement was recorded.
+ Time time.Time
+ // Value is the measured value.
+ Value N
+ // SpanID is the ID of the span that was active during the measurement. If
+ // no span was active or the span was not sampled this will be empty.
+ SpanID []byte `json:",omitempty"`
+ // TraceID is the ID of the trace the active span belonged to during the
+ // measurement. If no span was active or the span was not sampled this will
+ // be empty.
+ TraceID []byte `json:",omitempty"`
+ }
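The `SpanID` and `TraceID` fields are plain byte slices, so producers are expected to copy the active span's identifiers into them. A hypothetical helper, not part of this commit, sketching that conversion from a sampled `trace.SpanContext`:

package main

import (
	"context"
	"time"

	"go.opentelemetry.io/otel/sdk/metric/metricdata"
	"go.opentelemetry.io/otel/trace"
)

// exemplarFor is an illustrative helper: it copies an active, sampled span's
// IDs into the []byte fields of the new Exemplar type.
func exemplarFor[N int64 | float64](ctx context.Context, v N) metricdata.Exemplar[N] {
	e := metricdata.Exemplar[N]{Time: time.Now(), Value: v}
	if sc := trace.SpanContextFromContext(ctx); sc.IsValid() && sc.IsSampled() {
		tid, sid := sc.TraceID(), sc.SpanID()
		e.TraceID, e.SpanID = tid[:], sid[:]
	}
	return e
}

func main() {
	_ = exemplarFor[int64](context.Background(), 7)
}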

View File

@@ -30,14 +30,18 @@ type Datatypes interface {
metricdata.DataPoint[int64] |
metricdata.Gauge[float64] |
metricdata.Gauge[int64] |
- metricdata.Histogram |
- metricdata.HistogramDataPoint |
+ metricdata.Histogram[float64] |
+ metricdata.Histogram[int64] |
+ metricdata.HistogramDataPoint[float64] |
+ metricdata.HistogramDataPoint[int64] |
metricdata.Extrema |
metricdata.Metrics |
metricdata.ResourceMetrics |
metricdata.ScopeMetrics |
metricdata.Sum[float64] |
- metricdata.Sum[int64]
+ metricdata.Sum[int64] |
+ metricdata.Exemplar[float64] |
+ metricdata.Exemplar[int64]
// Interface types are not allowed in union types, therefore the
// Aggregation and Value type from metricdata are not included here.
@@ -45,6 +49,15 @@ type Datatypes interface {
type config struct {
ignoreTimestamp bool
+ ignoreExemplars bool
+ }
+ func newConfig(opts []Option) config {
+ var cfg config
+ for _, opt := range opts {
+ cfg = opt.apply(cfg)
+ }
+ return cfg
}
// Option allows for fine grain control over how AssertEqual operates.
@@ -66,21 +79,30 @@ func IgnoreTimestamp() Option {
})
}
+ // IgnoreExemplars disables checking if Exemplars are different.
+ func IgnoreExemplars() Option {
+ return fnOption(func(cfg config) config {
+ cfg.ignoreExemplars = true
+ return cfg
+ })
+ }
// AssertEqual asserts that the two concrete data-types from the metricdata
// package are equal.
func AssertEqual[T Datatypes](t *testing.T, expected, actual T, opts ...Option) bool {
t.Helper()
- cfg := config{}
- for _, opt := range opts {
- cfg = opt.apply(cfg)
- }
+ cfg := newConfig(opts)
// Generic types cannot be type asserted. Use an interface instead.
aIface := interface{}(actual)
var r []string
switch e := interface{}(expected).(type) {
+ case metricdata.Exemplar[int64]:
+ r = equalExemplars(e, aIface.(metricdata.Exemplar[int64]), cfg)
+ case metricdata.Exemplar[float64]:
+ r = equalExemplars(e, aIface.(metricdata.Exemplar[float64]), cfg)
case metricdata.DataPoint[int64]:
r = equalDataPoints(e, aIface.(metricdata.DataPoint[int64]), cfg)
case metricdata.DataPoint[float64]:
@@ -89,10 +111,14 @@ func AssertEqual[T Datatypes](t *testing.T, expected, actual T, opts ...Option)
r = equalGauges(e, aIface.(metricdata.Gauge[int64]), cfg)
case metricdata.Gauge[float64]:
r = equalGauges(e, aIface.(metricdata.Gauge[float64]), cfg)
- case metricdata.Histogram:
- r = equalHistograms(e, aIface.(metricdata.Histogram), cfg)
- case metricdata.HistogramDataPoint:
- r = equalHistogramDataPoints(e, aIface.(metricdata.HistogramDataPoint), cfg)
+ case metricdata.Histogram[float64]:
+ r = equalHistograms(e, aIface.(metricdata.Histogram[float64]), cfg)
+ case metricdata.Histogram[int64]:
+ r = equalHistograms(e, aIface.(metricdata.Histogram[int64]), cfg)
+ case metricdata.HistogramDataPoint[float64]:
+ r = equalHistogramDataPoints(e, aIface.(metricdata.HistogramDataPoint[float64]), cfg)
+ case metricdata.HistogramDataPoint[int64]:
+ r = equalHistogramDataPoints(e, aIface.(metricdata.HistogramDataPoint[int64]), cfg)
case metricdata.Extrema:
r = equalExtrema(e, aIface.(metricdata.Extrema), cfg)
case metricdata.Metrics:
@@ -122,11 +148,7 @@ func AssertEqual[T Datatypes](t *testing.T, expected, actual T, opts ...Option)
func AssertAggregationsEqual(t *testing.T, expected, actual metricdata.Aggregation, opts ...Option) bool {
t.Helper()
- cfg := config{}
- for _, opt := range opts {
- cfg = opt.apply(cfg)
- }
+ cfg := newConfig(opts)
if r := equalAggregations(expected, actual, cfg); len(r) > 0 {
t.Error(r)
return false
@@ -141,6 +163,10 @@ func AssertHasAttributes[T Datatypes](t *testing.T, actual T, attrs ...attribute
var reasons []string
switch e := interface{}(actual).(type) {
+ case metricdata.Exemplar[int64]:
+ reasons = hasAttributesExemplars(e, attrs...)
+ case metricdata.Exemplar[float64]:
+ reasons = hasAttributesExemplars(e, attrs...)
case metricdata.DataPoint[int64]:
reasons = hasAttributesDataPoints(e, attrs...)
case metricdata.DataPoint[float64]:
@@ -153,11 +179,15 @@ func AssertHasAttributes[T Datatypes](t *testing.T, actual T, attrs ...attribute
reasons = hasAttributesSum(e, attrs...)
case metricdata.Sum[float64]:
reasons = hasAttributesSum(e, attrs...)
- case metricdata.HistogramDataPoint:
+ case metricdata.HistogramDataPoint[int64]:
+ reasons = hasAttributesHistogramDataPoints(e, attrs...)
+ case metricdata.HistogramDataPoint[float64]:
reasons = hasAttributesHistogramDataPoints(e, attrs...)
case metricdata.Extrema:
// Nothing to check.
- case metricdata.Histogram:
+ case metricdata.Histogram[int64]:
+ reasons = hasAttributesHistogram(e, attrs...)
+ case metricdata.Histogram[float64]:
reasons = hasAttributesHistogram(e, attrs...)
case metricdata.Metrics:
reasons = hasAttributesMetrics(e, attrs...)
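With `IgnoreExemplars` wired through `newConfig`, tests can opt out of exemplar comparison the same way they already ignore timestamps. A short sketch of a test helper using the new option (the package and function names outside `metricdatatest` are illustrative):

package metrictest

import (
	"testing"

	"go.opentelemetry.io/otel/sdk/metric/metricdata"
	"go.opentelemetry.io/otel/sdk/metric/metricdata/metricdatatest"
)

// assertHistogram compares a collected histogram against an expected one,
// ignoring fields that vary from run to run.
func assertHistogram(t *testing.T, want, got metricdata.Histogram[float64]) {
	t.Helper()
	metricdatatest.AssertEqual(t, want, got,
		metricdatatest.IgnoreTimestamp(), // collection timestamps will not match fixtures
		metricdatatest.IgnoreExemplars(), // exemplar contents depend on what was sampled
	)
}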

View File

@@ -38,15 +38,19 @@ func TestFailAssertEqual(t *testing.T) {
t.Run("ResourceMetrics", testFailDatatype(resourceMetricsA, resourceMetricsB))
t.Run("ScopeMetrics", testFailDatatype(scopeMetricsA, scopeMetricsB))
t.Run("Metrics", testFailDatatype(metricsA, metricsB))
- t.Run("Histogram", testFailDatatype(histogramA, histogramB))
+ t.Run("HistogramInt64", testFailDatatype(histogramInt64A, histogramInt64B))
+ t.Run("HistogramFloat64", testFailDatatype(histogramFloat64A, histogramFloat64B))
t.Run("SumInt64", testFailDatatype(sumInt64A, sumInt64B))
t.Run("SumFloat64", testFailDatatype(sumFloat64A, sumFloat64B))
t.Run("GaugeInt64", testFailDatatype(gaugeInt64A, gaugeInt64B))
t.Run("GaugeFloat64", testFailDatatype(gaugeFloat64A, gaugeFloat64B))
- t.Run("HistogramDataPoint", testFailDatatype(histogramDataPointA, histogramDataPointB))
+ t.Run("HistogramDataPointInt64", testFailDatatype(histogramDataPointInt64A, histogramDataPointInt64B))
+ t.Run("HistogramDataPointFloat64", testFailDatatype(histogramDataPointFloat64A, histogramDataPointFloat64B))
t.Run("DataPointInt64", testFailDatatype(dataPointInt64A, dataPointInt64B))
t.Run("DataPointFloat64", testFailDatatype(dataPointFloat64A, dataPointFloat64B))
+ t.Run("ExemplarInt64", testFailDatatype(exemplarInt64A, exemplarInt64B))
+ t.Run("ExemplarFloat64", testFailDatatype(exemplarFloat64A, exemplarFloat64B))
+ t.Run("Extrema", testFailDatatype(minA, minB))
}
func TestFailAssertAggregationsEqual(t *testing.T) {
@@ -57,20 +61,23 @@ func TestFailAssertAggregationsEqual(t *testing.T) {
AssertAggregationsEqual(t, sumFloat64A, sumFloat64B)
AssertAggregationsEqual(t, gaugeInt64A, gaugeInt64B)
AssertAggregationsEqual(t, gaugeFloat64A, gaugeFloat64B)
- AssertAggregationsEqual(t, histogramA, histogramB)
+ AssertAggregationsEqual(t, histogramInt64A, histogramInt64B)
+ AssertAggregationsEqual(t, histogramFloat64A, histogramFloat64B)
}
func TestFailAssertAttribute(t *testing.T) {
+ AssertHasAttributes(t, exemplarInt64A, attribute.Bool("A", false))
+ AssertHasAttributes(t, exemplarFloat64A, attribute.Bool("B", true))
AssertHasAttributes(t, dataPointInt64A, attribute.Bool("A", false))
AssertHasAttributes(t, dataPointFloat64A, attribute.Bool("B", true))
AssertHasAttributes(t, gaugeInt64A, attribute.Bool("A", false))
AssertHasAttributes(t, gaugeFloat64A, attribute.Bool("B", true))
AssertHasAttributes(t, sumInt64A, attribute.Bool("A", false))
AssertHasAttributes(t, sumFloat64A, attribute.Bool("B", true))
- AssertHasAttributes(t, histogramDataPointA, attribute.Bool("A", false))
- AssertHasAttributes(t, histogramDataPointA, attribute.Bool("B", true))
- AssertHasAttributes(t, histogramA, attribute.Bool("A", false))
- AssertHasAttributes(t, histogramA, attribute.Bool("B", true))
+ AssertHasAttributes(t, histogramDataPointInt64A, attribute.Bool("A", false))
+ AssertHasAttributes(t, histogramDataPointFloat64A, attribute.Bool("B", true))
+ AssertHasAttributes(t, histogramInt64A, attribute.Bool("A", false))
+ AssertHasAttributes(t, histogramFloat64A, attribute.Bool("B", true))
AssertHasAttributes(t, metricsA, attribute.Bool("A", false))
AssertHasAttributes(t, metricsA, attribute.Bool("B", true))
AssertHasAttributes(t, resourceMetricsA, attribute.Bool("A", false))

View File

@@ -30,53 +30,110 @@ var (
attrA = attribute.NewSet(attribute.Bool("A", true))
attrB = attribute.NewSet(attribute.Bool("B", true))
+ fltrAttrA = []attribute.KeyValue{attribute.Bool("filter A", true)}
+ fltrAttrB = []attribute.KeyValue{attribute.Bool("filter B", true)}
startA = time.Now()
startB = startA.Add(time.Millisecond)
endA = startA.Add(time.Second)
endB = startB.Add(time.Second)
+ spanIDA = []byte{0, 0, 0, 0, 0, 0, 0, 1}
+ spanIDB = []byte{0, 0, 0, 0, 0, 0, 0, 2}
+ traceIDA = []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}
+ traceIDB = []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2}
+ exemplarInt64A = metricdata.Exemplar[int64]{
+ FilteredAttributes: fltrAttrA,
+ Time: endA,
+ Value: -10,
+ SpanID: spanIDA,
+ TraceID: traceIDA,
+ }
+ exemplarFloat64A = metricdata.Exemplar[float64]{
+ FilteredAttributes: fltrAttrA,
+ Time: endA,
+ Value: -10.0,
+ SpanID: spanIDA,
+ TraceID: traceIDA,
+ }
+ exemplarInt64B = metricdata.Exemplar[int64]{
+ FilteredAttributes: fltrAttrB,
+ Time: endB,
+ Value: 12,
+ SpanID: spanIDB,
+ TraceID: traceIDB,
+ }
+ exemplarFloat64B = metricdata.Exemplar[float64]{
+ FilteredAttributes: fltrAttrB,
+ Time: endB,
+ Value: 12.0,
+ SpanID: spanIDB,
+ TraceID: traceIDB,
+ }
+ exemplarInt64C = metricdata.Exemplar[int64]{
+ FilteredAttributes: fltrAttrA,
+ Time: endB,
+ Value: -10,
+ SpanID: spanIDA,
+ TraceID: traceIDA,
+ }
+ exemplarFloat64C = metricdata.Exemplar[float64]{
+ FilteredAttributes: fltrAttrA,
+ Time: endB,
+ Value: -10.0,
+ SpanID: spanIDA,
+ TraceID: traceIDA,
+ }
dataPointInt64A = metricdata.DataPoint[int64]{
Attributes: attrA,
StartTime: startA,
Time: endA,
Value: -1,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64A},
}
dataPointFloat64A = metricdata.DataPoint[float64]{
Attributes: attrA,
StartTime: startA,
Time: endA,
Value: -1.0,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64A},
}
dataPointInt64B = metricdata.DataPoint[int64]{
Attributes: attrB,
StartTime: startB,
Time: endB,
Value: 2,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64B},
}
dataPointFloat64B = metricdata.DataPoint[float64]{
Attributes: attrB,
StartTime: startB,
Time: endB,
Value: 2.0,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64B},
}
dataPointInt64C = metricdata.DataPoint[int64]{
Attributes: attrA,
StartTime: startB,
Time: endB,
Value: -1,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64C},
}
dataPointFloat64C = metricdata.DataPoint[float64]{
Attributes: attrA,
StartTime: startB,
Time: endB,
Value: -1.0,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64C},
}
minA = metricdata.NewExtrema(-1.)
minB, maxB = metricdata.NewExtrema(3.), metricdata.NewExtrema(99.)
minC = metricdata.NewExtrema(-1.)
- histogramDataPointA = metricdata.HistogramDataPoint{
+ histogramDataPointInt64A = metricdata.HistogramDataPoint[int64]{
Attributes: attrA,
StartTime: startA,
Time: endA,
@@ -85,8 +142,20 @@ var (
BucketCounts: []uint64{1, 1},
Min: minA,
Sum: 2,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64A},
}
- histogramDataPointB = metricdata.HistogramDataPoint{
+ histogramDataPointFloat64A = metricdata.HistogramDataPoint[float64]{
+ Attributes: attrA,
+ StartTime: startA,
+ Time: endA,
+ Count: 2,
+ Bounds: []float64{0, 10},
+ BucketCounts: []uint64{1, 1},
+ Min: minA,
+ Sum: 2,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64A},
+ }
+ histogramDataPointInt64B = metricdata.HistogramDataPoint[int64]{
Attributes: attrB,
StartTime: startB,
Time: endB,
@@ -96,8 +165,21 @@ var (
Max: maxB,
Min: minB,
Sum: 3,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64B},
}
- histogramDataPointC = metricdata.HistogramDataPoint{
+ histogramDataPointFloat64B = metricdata.HistogramDataPoint[float64]{
+ Attributes: attrB,
+ StartTime: startB,
+ Time: endB,
+ Count: 3,
+ Bounds: []float64{0, 10, 100},
+ BucketCounts: []uint64{1, 1, 1},
+ Max: maxB,
+ Min: minB,
+ Sum: 3,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64B},
+ }
+ histogramDataPointInt64C = metricdata.HistogramDataPoint[int64]{
Attributes: attrA,
StartTime: startB,
Time: endB,
@@ -106,6 +188,18 @@ var (
BucketCounts: []uint64{1, 1},
Min: minC,
Sum: 2,
+ Exemplars: []metricdata.Exemplar[int64]{exemplarInt64C},
+ }
+ histogramDataPointFloat64C = metricdata.HistogramDataPoint[float64]{
+ Attributes: attrA,
+ StartTime: startB,
+ Time: endB,
+ Count: 2,
+ Bounds: []float64{0, 10},
+ BucketCounts: []uint64{1, 1},
+ Min: minC,
+ Sum: 2,
+ Exemplars: []metricdata.Exemplar[float64]{exemplarFloat64C},
}
gaugeInt64A = metricdata.Gauge[int64]{
@@ -158,17 +252,29 @@ var (
DataPoints: []metricdata.DataPoint[float64]{dataPointFloat64C},
}
- histogramA = metricdata.Histogram{
+ histogramInt64A = metricdata.Histogram[int64]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: []metricdata.HistogramDataPoint{histogramDataPointA},
+ DataPoints: []metricdata.HistogramDataPoint[int64]{histogramDataPointInt64A},
}
- histogramB = metricdata.Histogram{
+ histogramFloat64A = metricdata.Histogram[float64]{
+ Temporality: metricdata.CumulativeTemporality,
+ DataPoints: []metricdata.HistogramDataPoint[float64]{histogramDataPointFloat64A},
+ }
+ histogramInt64B = metricdata.Histogram[int64]{
Temporality: metricdata.DeltaTemporality,
- DataPoints: []metricdata.HistogramDataPoint{histogramDataPointB},
+ DataPoints: []metricdata.HistogramDataPoint[int64]{histogramDataPointInt64B},
}
- histogramC = metricdata.Histogram{
+ histogramFloat64B = metricdata.Histogram[float64]{
+ Temporality: metricdata.DeltaTemporality,
+ DataPoints: []metricdata.HistogramDataPoint[float64]{histogramDataPointFloat64B},
+ }
+ histogramInt64C = metricdata.Histogram[int64]{
Temporality: metricdata.CumulativeTemporality,
- DataPoints: []metricdata.HistogramDataPoint{histogramDataPointC},
+ DataPoints: []metricdata.HistogramDataPoint[int64]{histogramDataPointInt64C},
+ }
+ histogramFloat64C = metricdata.Histogram[float64]{
+ Temporality: metricdata.CumulativeTemporality,
+ DataPoints: []metricdata.HistogramDataPoint[float64]{histogramDataPointFloat64C},
}
metricsA = metricdata.Metrics{
@@ -224,7 +330,7 @@ func testDatatype[T Datatypes](a, b T, f equalFunc[T]) func(*testing.T) {
AssertEqual(t, a, a)
AssertEqual(t, b, b)
- r := f(a, b, config{})
+ r := f(a, b, newConfig(nil))
assert.Greaterf(t, len(r), 0, "%v == %v", a, b)
}
}
@@ -234,8 +340,20 @@ func testDatatypeIgnoreTime[T Datatypes](a, b T, f equalFunc[T]) func(*testing.T
AssertEqual(t, a, a)
AssertEqual(t, b, b)
- r := f(a, b, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", a, b)
+ c := newConfig([]Option{IgnoreTimestamp()})
+ r := f(a, b, c)
+ assert.Len(t, r, 0, "unexpected inequality")
+ }
+ }
+ func testDatatypeIgnoreExemplars[T Datatypes](a, b T, f equalFunc[T]) func(*testing.T) {
+ return func(t *testing.T) {
+ AssertEqual(t, a, a)
+ AssertEqual(t, b, b)
+ c := newConfig([]Option{IgnoreExemplars()})
+ r := f(a, b, c)
+ assert.Len(t, r, 0, "unexpected inequality")
}
}
@@ -243,30 +361,56 @@ func TestAssertEqual(t *testing.T) {
t.Run("ResourceMetrics", testDatatype(resourceMetricsA, resourceMetricsB, equalResourceMetrics))
t.Run("ScopeMetrics", testDatatype(scopeMetricsA, scopeMetricsB, equalScopeMetrics))
t.Run("Metrics", testDatatype(metricsA, metricsB, equalMetrics))
- t.Run("Histogram", testDatatype(histogramA, histogramB, equalHistograms))
+ t.Run("HistogramInt64", testDatatype(histogramInt64A, histogramInt64B, equalHistograms[int64]))
+ t.Run("HistogramFloat64", testDatatype(histogramFloat64A, histogramFloat64B, equalHistograms[float64]))
t.Run("SumInt64", testDatatype(sumInt64A, sumInt64B, equalSums[int64]))
t.Run("SumFloat64", testDatatype(sumFloat64A, sumFloat64B, equalSums[float64]))
t.Run("GaugeInt64", testDatatype(gaugeInt64A, gaugeInt64B, equalGauges[int64]))
t.Run("GaugeFloat64", testDatatype(gaugeFloat64A, gaugeFloat64B, equalGauges[float64]))
- t.Run("HistogramDataPoint", testDatatype(histogramDataPointA, histogramDataPointB, equalHistogramDataPoints))
+ t.Run("HistogramDataPointInt64", testDatatype(histogramDataPointInt64A, histogramDataPointInt64B, equalHistogramDataPoints[int64]))
+ t.Run("HistogramDataPointFloat64", testDatatype(histogramDataPointFloat64A, histogramDataPointFloat64B, equalHistogramDataPoints[float64]))
t.Run("DataPointInt64", testDatatype(dataPointInt64A, dataPointInt64B, equalDataPoints[int64]))
t.Run("DataPointFloat64", testDatatype(dataPointFloat64A, dataPointFloat64B, equalDataPoints[float64]))
t.Run("Extrema", testDatatype(minA, minB, equalExtrema))
+ t.Run("ExemplarInt64", testDatatype(exemplarInt64A, exemplarInt64B, equalExemplars[int64]))
+ t.Run("ExemplarFloat64", testDatatype(exemplarFloat64A, exemplarFloat64B, equalExemplars[float64]))
}
func TestAssertEqualIgnoreTime(t *testing.T) {
t.Run("ResourceMetrics", testDatatypeIgnoreTime(resourceMetricsA, resourceMetricsC, equalResourceMetrics))
t.Run("ScopeMetrics", testDatatypeIgnoreTime(scopeMetricsA, scopeMetricsC, equalScopeMetrics))
t.Run("Metrics", testDatatypeIgnoreTime(metricsA, metricsC, equalMetrics))
- t.Run("Histogram", testDatatypeIgnoreTime(histogramA, histogramC, equalHistograms))
+ t.Run("HistogramInt64", testDatatypeIgnoreTime(histogramInt64A, histogramInt64C, equalHistograms[int64]))
+ t.Run("HistogramFloat64", testDatatypeIgnoreTime(histogramFloat64A, histogramFloat64C, equalHistograms[float64]))
t.Run("SumInt64", testDatatypeIgnoreTime(sumInt64A, sumInt64C, equalSums[int64]))
t.Run("SumFloat64", testDatatypeIgnoreTime(sumFloat64A, sumFloat64C, equalSums[float64]))
t.Run("GaugeInt64", testDatatypeIgnoreTime(gaugeInt64A, gaugeInt64C, equalGauges[int64]))
t.Run("GaugeFloat64", testDatatypeIgnoreTime(gaugeFloat64A, gaugeFloat64C, equalGauges[float64]))
- t.Run("HistogramDataPoint", testDatatypeIgnoreTime(histogramDataPointA, histogramDataPointC, equalHistogramDataPoints))
+ t.Run("HistogramDataPointInt64", testDatatypeIgnoreTime(histogramDataPointInt64A, histogramDataPointInt64C, equalHistogramDataPoints[int64]))
+ t.Run("HistogramDataPointFloat64", testDatatypeIgnoreTime(histogramDataPointFloat64A, histogramDataPointFloat64C, equalHistogramDataPoints[float64]))
t.Run("DataPointInt64", testDatatypeIgnoreTime(dataPointInt64A, dataPointInt64C, equalDataPoints[int64]))
t.Run("DataPointFloat64", testDatatypeIgnoreTime(dataPointFloat64A, dataPointFloat64C, equalDataPoints[float64]))
t.Run("Extrema", testDatatypeIgnoreTime(minA, minC, equalExtrema))
+ t.Run("ExemplarInt64", testDatatypeIgnoreTime(exemplarInt64A, exemplarInt64C, equalExemplars[int64]))
+ t.Run("ExemplarFloat64", testDatatypeIgnoreTime(exemplarFloat64A, exemplarFloat64C, equalExemplars[float64]))
+ }
+ func TestAssertEqualIgnoreExemplars(t *testing.T) {
+ hdpInt64 := histogramDataPointInt64A
+ hdpInt64.Exemplars = []metricdata.Exemplar[int64]{exemplarInt64B}
+ t.Run("HistogramDataPointInt64", testDatatypeIgnoreExemplars(histogramDataPointInt64A, hdpInt64, equalHistogramDataPoints[int64]))
+ hdpFloat64 := histogramDataPointFloat64A
+ hdpFloat64.Exemplars = []metricdata.Exemplar[float64]{exemplarFloat64B}
+ t.Run("HistogramDataPointFloat64", testDatatypeIgnoreExemplars(histogramDataPointFloat64A, hdpFloat64, equalHistogramDataPoints[float64]))
+ dpInt64 := dataPointInt64A
+ dpInt64.Exemplars = []metricdata.Exemplar[int64]{exemplarInt64B}
+ t.Run("DataPointInt64", testDatatypeIgnoreExemplars(dataPointInt64A, dpInt64, equalDataPoints[int64]))
+ dpFloat64 := dataPointFloat64A
+ dpFloat64.Exemplars = []metricdata.Exemplar[float64]{exemplarFloat64B}
+ t.Run("DataPointFloat64", testDatatypeIgnoreExemplars(dataPointFloat64A, dpFloat64, equalDataPoints[float64]))
}
type unknownAggregation struct {
@@ -279,7 +423,8 @@ func TestAssertAggregationsEqual(t *testing.T) {
AssertAggregationsEqual(t, sumFloat64A, sumFloat64A)
AssertAggregationsEqual(t, gaugeInt64A, gaugeInt64A)
AssertAggregationsEqual(t, gaugeFloat64A, gaugeFloat64A)
- AssertAggregationsEqual(t, histogramA, histogramA)
+ AssertAggregationsEqual(t, histogramInt64A, histogramInt64A)
+ AssertAggregationsEqual(t, histogramFloat64A, histogramFloat64A)
r := equalAggregations(sumInt64A, nil, config{})
assert.Len(t, r, 1, "should return nil comparison mismatch only")
@@ -291,46 +436,56 @@ func TestAssertAggregationsEqual(t *testing.T) {
assert.Len(t, r, 1, "should return with unknown aggregation only")
r = equalAggregations(sumInt64A, sumInt64B, config{})
- assert.Greaterf(t, len(r), 0, "%v == %v", sumInt64A, sumInt64B)
+ assert.Greaterf(t, len(r), 0, "sums should not be equal: %v == %v", sumInt64A, sumInt64B)
r = equalAggregations(sumInt64A, sumInt64C, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", sumInt64A, sumInt64C)
+ assert.Len(t, r, 0, "sums should be equal: %v", r)
r = equalAggregations(sumFloat64A, sumFloat64B, config{})
- assert.Greaterf(t, len(r), 0, "%v == %v", sumFloat64A, sumFloat64B)
+ assert.Greaterf(t, len(r), 0, "sums should not be equal: %v == %v", sumFloat64A, sumFloat64B)
r = equalAggregations(sumFloat64A, sumFloat64C, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", sumFloat64A, sumFloat64C)
+ assert.Len(t, r, 0, "sums should be equal: %v", r)
r = equalAggregations(gaugeInt64A, gaugeInt64B, config{})
- assert.Greaterf(t, len(r), 0, "%v == %v", gaugeInt64A, gaugeInt64B)
+ assert.Greaterf(t, len(r), 0, "gauges should not be equal: %v == %v", gaugeInt64A, gaugeInt64B)
r = equalAggregations(gaugeInt64A, gaugeInt64C, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", gaugeInt64A, gaugeInt64C)
+ assert.Len(t, r, 0, "gauges should be equal: %v", r)
r = equalAggregations(gaugeFloat64A, gaugeFloat64B, config{})
- assert.Greaterf(t, len(r), 0, "%v == %v", gaugeFloat64A, gaugeFloat64B)
+ assert.Greaterf(t, len(r), 0, "gauges should not be equal: %v == %v", gaugeFloat64A, gaugeFloat64B)
r = equalAggregations(gaugeFloat64A, gaugeFloat64C, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", gaugeFloat64A, gaugeFloat64C)
+ assert.Len(t, r, 0, "gauges should be equal: %v", r)
- r = equalAggregations(histogramA, histogramB, config{})
- assert.Greaterf(t, len(r), 0, "%v == %v", histogramA, histogramB)
- r = equalAggregations(histogramA, histogramC, config{ignoreTimestamp: true})
- assert.Equalf(t, len(r), 0, "%v == %v", histogramA, histogramC)
+ r = equalAggregations(histogramInt64A, histogramInt64B, config{})
+ assert.Greaterf(t, len(r), 0, "histograms should not be equal: %v == %v", histogramInt64A, histogramInt64B)
+ r = equalAggregations(histogramInt64A, histogramInt64C, config{ignoreTimestamp: true})
+ assert.Len(t, r, 0, "histograms should be equal: %v", r)
+ r = equalAggregations(histogramFloat64A, histogramFloat64B, config{})
+ assert.Greaterf(t, len(r), 0, "histograms should not be equal: %v == %v", histogramFloat64A, histogramFloat64B)
+ r = equalAggregations(histogramFloat64A, histogramFloat64C, config{ignoreTimestamp: true})
+ assert.Len(t, r, 0, "histograms should be equal: %v", r)
}
func TestAssertAttributes(t *testing.T) {
AssertHasAttributes(t, minA, attribute.Bool("A", true)) // No-op, always pass.
+ AssertHasAttributes(t, exemplarInt64A, attribute.Bool("filter A", true))
+ AssertHasAttributes(t, exemplarFloat64A, attribute.Bool("filter A", true))
AssertHasAttributes(t, dataPointInt64A, attribute.Bool("A", true))
AssertHasAttributes(t, dataPointFloat64A, attribute.Bool("A", true))
AssertHasAttributes(t, gaugeInt64A, attribute.Bool("A", true))
AssertHasAttributes(t, gaugeFloat64A, attribute.Bool("A", true))
AssertHasAttributes(t, sumInt64A, attribute.Bool("A", true))
AssertHasAttributes(t, sumFloat64A, attribute.Bool("A", true))
- AssertHasAttributes(t, histogramDataPointA, attribute.Bool("A", true))
+ AssertHasAttributes(t, histogramDataPointInt64A, attribute.Bool("A", true))
AssertHasAttributes(t, histogramA, attribute.Bool("A", true)) AssertHasAttributes(t, histogramDataPointFloat64A, attribute.Bool("A", true))
AssertHasAttributes(t, histogramInt64A, attribute.Bool("A", true))
AssertHasAttributes(t, histogramFloat64A, attribute.Bool("A", true))
AssertHasAttributes(t, metricsA, attribute.Bool("A", true)) AssertHasAttributes(t, metricsA, attribute.Bool("A", true))
AssertHasAttributes(t, scopeMetricsA, attribute.Bool("A", true)) AssertHasAttributes(t, scopeMetricsA, attribute.Bool("A", true))
AssertHasAttributes(t, resourceMetricsA, attribute.Bool("A", true)) AssertHasAttributes(t, resourceMetricsA, attribute.Bool("A", true))
@@ -343,8 +498,10 @@ func TestAssertAttributes(t *testing.T) {
assert.Equal(t, len(r), 0, "sumInt64A has A=True") assert.Equal(t, len(r), 0, "sumInt64A has A=True")
r = hasAttributesAggregation(sumFloat64A, attribute.Bool("A", true)) r = hasAttributesAggregation(sumFloat64A, attribute.Bool("A", true))
assert.Equal(t, len(r), 0, "sumFloat64A has A=True") assert.Equal(t, len(r), 0, "sumFloat64A has A=True")
r = hasAttributesAggregation(histogramA, attribute.Bool("A", true)) r = hasAttributesAggregation(histogramInt64A, attribute.Bool("A", true))
assert.Equal(t, len(r), 0, "histogramA has A=True") assert.Equal(t, len(r), 0, "histogramInt64A has A=True")
r = hasAttributesAggregation(histogramFloat64A, attribute.Bool("A", true))
assert.Equal(t, len(r), 0, "histogramFloat64A has A=True")
r = hasAttributesAggregation(gaugeInt64A, attribute.Bool("A", false)) r = hasAttributesAggregation(gaugeInt64A, attribute.Bool("A", false))
assert.Greater(t, len(r), 0, "gaugeInt64A does not have A=False") assert.Greater(t, len(r), 0, "gaugeInt64A does not have A=False")
@@ -354,8 +511,10 @@ func TestAssertAttributes(t *testing.T) {
assert.Greater(t, len(r), 0, "sumInt64A does not have A=False") assert.Greater(t, len(r), 0, "sumInt64A does not have A=False")
r = hasAttributesAggregation(sumFloat64A, attribute.Bool("A", false)) r = hasAttributesAggregation(sumFloat64A, attribute.Bool("A", false))
assert.Greater(t, len(r), 0, "sumFloat64A does not have A=False") assert.Greater(t, len(r), 0, "sumFloat64A does not have A=False")
r = hasAttributesAggregation(histogramA, attribute.Bool("A", false)) r = hasAttributesAggregation(histogramInt64A, attribute.Bool("A", false))
assert.Greater(t, len(r), 0, "histogramA does not have A=False") assert.Greater(t, len(r), 0, "histogramInt64A does not have A=False")
r = hasAttributesAggregation(histogramFloat64A, attribute.Bool("A", false))
assert.Greater(t, len(r), 0, "histogramFloat64A does not have A=False")
r = hasAttributesAggregation(gaugeInt64A, attribute.Bool("B", true)) r = hasAttributesAggregation(gaugeInt64A, attribute.Bool("B", true))
assert.Greater(t, len(r), 0, "gaugeInt64A does not have Attribute B") assert.Greater(t, len(r), 0, "gaugeInt64A does not have Attribute B")
@@ -365,22 +524,26 @@ func TestAssertAttributes(t *testing.T) {
assert.Greater(t, len(r), 0, "sumInt64A does not have Attribute B") assert.Greater(t, len(r), 0, "sumInt64A does not have Attribute B")
r = hasAttributesAggregation(sumFloat64A, attribute.Bool("B", true)) r = hasAttributesAggregation(sumFloat64A, attribute.Bool("B", true))
assert.Greater(t, len(r), 0, "sumFloat64A does not have Attribute B") assert.Greater(t, len(r), 0, "sumFloat64A does not have Attribute B")
r = hasAttributesAggregation(histogramA, attribute.Bool("B", true)) r = hasAttributesAggregation(histogramInt64A, attribute.Bool("B", true))
assert.Greater(t, len(r), 0, "histogramA does not have Attribute B") assert.Greater(t, len(r), 0, "histogramIntA does not have Attribute B")
r = hasAttributesAggregation(histogramFloat64A, attribute.Bool("B", true))
assert.Greater(t, len(r), 0, "histogramFloatA does not have Attribute B")
} }
func TestAssertAttributesFail(t *testing.T) { func TestAssertAttributesFail(t *testing.T) {
fakeT := &testing.T{} fakeT := &testing.T{}
assert.False(t, AssertHasAttributes(fakeT, dataPointInt64A, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, dataPointInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, dataPointFloat64A, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, dataPointFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, exemplarInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, exemplarFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, gaugeInt64A, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, gaugeInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, gaugeFloat64A, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, gaugeFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, sumInt64A, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, sumInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, sumFloat64A, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, sumFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, histogramDataPointA, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, histogramDataPointInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, histogramDataPointA, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, histogramDataPointFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, histogramA, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, histogramInt64A, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, histogramA, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, histogramFloat64A, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, metricsA, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, metricsA, attribute.Bool("A", false)))
assert.False(t, AssertHasAttributes(fakeT, metricsA, attribute.Bool("B", true))) assert.False(t, AssertHasAttributes(fakeT, metricsA, attribute.Bool("B", true)))
assert.False(t, AssertHasAttributes(fakeT, resourceMetricsA, attribute.Bool("A", false))) assert.False(t, AssertHasAttributes(fakeT, resourceMetricsA, attribute.Bool("A", false)))

View File

@@ -131,8 +131,14 @@ func equalAggregations(a, b metricdata.Aggregation, cfg config) (reasons []strin
reasons = append(reasons, "Sum[float64] not equal:") reasons = append(reasons, "Sum[float64] not equal:")
reasons = append(reasons, r...) reasons = append(reasons, r...)
} }
case metricdata.Histogram: case metricdata.Histogram[int64]:
r := equalHistograms(v, b.(metricdata.Histogram), cfg) r := equalHistograms(v, b.(metricdata.Histogram[int64]), cfg)
if len(r) > 0 {
reasons = append(reasons, "Histogram not equal:")
reasons = append(reasons, r...)
}
case metricdata.Histogram[float64]:
r := equalHistograms(v, b.(metricdata.Histogram[float64]), cfg)
if len(r) > 0 { if len(r) > 0 {
reasons = append(reasons, "Histogram not equal:") reasons = append(reasons, "Histogram not equal:")
reasons = append(reasons, r...) reasons = append(reasons, r...)
@@ -195,7 +201,7 @@ func equalSums[N int64 | float64](a, b metricdata.Sum[N], cfg config) (reasons [
// //
// The DataPoints each Histogram contains are compared based on containing the // The DataPoints each Histogram contains are compared based on containing the
// same HistogramDataPoint, not the order they are stored in. // same HistogramDataPoint, not the order they are stored in.
func equalHistograms(a, b metricdata.Histogram, cfg config) (reasons []string) { func equalHistograms[N int64 | float64](a, b metricdata.Histogram[N], cfg config) (reasons []string) {
if a.Temporality != b.Temporality { if a.Temporality != b.Temporality {
reasons = append(reasons, notEqualStr("Temporality", a.Temporality, b.Temporality)) reasons = append(reasons, notEqualStr("Temporality", a.Temporality, b.Temporality))
} }
@@ -203,7 +209,7 @@ func equalHistograms(a, b metricdata.Histogram, cfg config) (reasons []string) {
r := compareDiff(diffSlices( r := compareDiff(diffSlices(
a.DataPoints, a.DataPoints,
b.DataPoints, b.DataPoints,
func(a, b metricdata.HistogramDataPoint) bool { func(a, b metricdata.HistogramDataPoint[N]) bool {
r := equalHistogramDataPoints(a, b, cfg) r := equalHistogramDataPoints(a, b, cfg)
return len(r) == 0 return len(r) == 0
}, },
@@ -237,12 +243,26 @@ func equalDataPoints[N int64 | float64](a, b metricdata.DataPoint[N], cfg config
if a.Value != b.Value { if a.Value != b.Value {
reasons = append(reasons, notEqualStr("Value", a.Value, b.Value)) reasons = append(reasons, notEqualStr("Value", a.Value, b.Value))
} }
if !cfg.ignoreExemplars {
r := compareDiff(diffSlices(
a.Exemplars,
b.Exemplars,
func(a, b metricdata.Exemplar[N]) bool {
r := equalExemplars(a, b, cfg)
return len(r) == 0
},
))
if r != "" {
reasons = append(reasons, fmt.Sprintf("Exemplars not equal:\n%s", r))
}
}
return reasons return reasons
} }
// equalHistogramDataPoints returns reasons HistogramDataPoints are not equal. // equalHistogramDataPoints returns reasons HistogramDataPoints are not equal.
// If they are equal, the returned reasons will be empty. // If they are equal, the returned reasons will be empty.
func equalHistogramDataPoints(a, b metricdata.HistogramDataPoint, cfg config) (reasons []string) { // nolint: revive // Intentional internal control flag func equalHistogramDataPoints[N int64 | float64](a, b metricdata.HistogramDataPoint[N], cfg config) (reasons []string) { // nolint: revive // Intentional internal control flag
if !a.Attributes.Equals(&b.Attributes) { if !a.Attributes.Equals(&b.Attributes) {
reasons = append(reasons, notEqualStr( reasons = append(reasons, notEqualStr(
"Attributes", "Attributes",
@@ -276,6 +296,19 @@ func equalHistogramDataPoints(a, b metricdata.HistogramDataPoint, cfg config) (r
if a.Sum != b.Sum { if a.Sum != b.Sum {
reasons = append(reasons, notEqualStr("Sum", a.Sum, b.Sum)) reasons = append(reasons, notEqualStr("Sum", a.Sum, b.Sum))
} }
if !cfg.ignoreExemplars {
r := compareDiff(diffSlices(
a.Exemplars,
b.Exemplars,
func(a, b metricdata.Exemplar[N]) bool {
r := equalExemplars(a, b, cfg)
return len(r) == 0
},
))
if r != "" {
reasons = append(reasons, fmt.Sprintf("Exemplars not equal:\n%s", r))
}
}
return reasons return reasons
} }
@@ -312,6 +345,82 @@ func eqExtrema(a, b metricdata.Extrema) bool {
return aV == bV return aV == bV
} }
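// equalKeyValue returns true if the two []attribute.KeyValue are equal,
// comparing keys and typed values element-wise and in order.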
func equalKeyValue(a, b []attribute.KeyValue) bool {
// Comparison of []attribute.KeyValue as a comparable requires Go >= 1.20.
// To support Go < 1.20 use this function instead.
if len(a) != len(b) {
return false
}
for i, v := range a {
if v.Key != b[i].Key {
return false
}
if v.Value.Type() != b[i].Value.Type() {
return false
}
switch v.Value.Type() {
case attribute.BOOL:
if v.Value.AsBool() != b[i].Value.AsBool() {
return false
}
case attribute.INT64:
if v.Value.AsInt64() != b[i].Value.AsInt64() {
return false
}
case attribute.FLOAT64:
if v.Value.AsFloat64() != b[i].Value.AsFloat64() {
return false
}
case attribute.STRING:
if v.Value.AsString() != b[i].Value.AsString() {
return false
}
case attribute.BOOLSLICE:
if ok := equalSlices(v.Value.AsBoolSlice(), b[i].Value.AsBoolSlice()); !ok {
return false
}
case attribute.INT64SLICE:
if ok := equalSlices(v.Value.AsInt64Slice(), b[i].Value.AsInt64Slice()); !ok {
return false
}
case attribute.FLOAT64SLICE:
if ok := equalSlices(v.Value.AsFloat64Slice(), b[i].Value.AsFloat64Slice()); !ok {
return false
}
case attribute.STRINGSLICE:
if ok := equalSlices(v.Value.AsStringSlice(), b[i].Value.AsStringSlice()); !ok {
return false
}
default:
// We control all types passed to this, panic to signal developers
// early they changed things in an incompatible way.
panic(fmt.Sprintf("unknown attribute value type: %s", v.Value.Type()))
}
}
return true
}
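// equalExemplars returns reasons exemplars are not equal. If they are
// equal, the returned reasons will be empty.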
func equalExemplars[N int64 | float64](a, b metricdata.Exemplar[N], cfg config) (reasons []string) {
if !equalKeyValue(a.FilteredAttributes, b.FilteredAttributes) {
reasons = append(reasons, notEqualStr("FilteredAttributes", a.FilteredAttributes, b.FilteredAttributes))
}
if !cfg.ignoreTimestamp {
if !a.Time.Equal(b.Time) {
reasons = append(reasons, notEqualStr("Time", a.Time.UnixNano(), b.Time.UnixNano()))
}
}
if a.Value != b.Value {
reasons = append(reasons, notEqualStr("Value", a.Value, b.Value))
}
if !equalSlices(a.SpanID, b.SpanID) {
reasons = append(reasons, notEqualStr("SpanID", a.SpanID, b.SpanID))
}
if !equalSlices(a.TraceID, b.TraceID) {
reasons = append(reasons, notEqualStr("TraceID", a.TraceID, b.TraceID))
}
return reasons
}
func diffSlices[T any](a, b []T, equal func(T, T) bool) (extraA, extraB []T) { func diffSlices[T any](a, b []T, equal func(T, T) bool) (extraA, extraB []T) {
visited := make([]bool, len(b)) visited := make([]bool, len(b))
for i := 0; i < len(a); i++ { for i := 0; i < len(a); i++ {
@@ -372,6 +481,21 @@ func missingAttrStr(name string) string {
return fmt.Sprintf("missing attribute %s", name) return fmt.Sprintf("missing attribute %s", name)
} }
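// hasAttributesExemplars returns reasons the exemplar's FilteredAttributes
// are missing or do not match the provided attrs. If all attrs are present
// and equal, the returned reasons will be empty.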
func hasAttributesExemplars[T int64 | float64](exemplar metricdata.Exemplar[T], attrs ...attribute.KeyValue) (reasons []string) {
s := attribute.NewSet(exemplar.FilteredAttributes...)
for _, attr := range attrs {
val, ok := s.Value(attr.Key)
if !ok {
reasons = append(reasons, missingAttrStr(string(attr.Key)))
continue
}
if val != attr.Value {
reasons = append(reasons, notEqualStr(string(attr.Key), attr.Value.Emit(), val.Emit()))
}
}
return reasons
}
func hasAttributesDataPoints[T int64 | float64](dp metricdata.DataPoint[T], attrs ...attribute.KeyValue) (reasons []string) { func hasAttributesDataPoints[T int64 | float64](dp metricdata.DataPoint[T], attrs ...attribute.KeyValue) (reasons []string) {
for _, attr := range attrs { for _, attr := range attrs {
val, ok := dp.Attributes.Value(attr.Key) val, ok := dp.Attributes.Value(attr.Key)
@@ -408,7 +532,7 @@ func hasAttributesSum[T int64 | float64](sum metricdata.Sum[T], attrs ...attribu
return reasons return reasons
} }
func hasAttributesHistogramDataPoints(dp metricdata.HistogramDataPoint, attrs ...attribute.KeyValue) (reasons []string) { func hasAttributesHistogramDataPoints[T int64 | float64](dp metricdata.HistogramDataPoint[T], attrs ...attribute.KeyValue) (reasons []string) {
for _, attr := range attrs { for _, attr := range attrs {
val, ok := dp.Attributes.Value(attr.Key) val, ok := dp.Attributes.Value(attr.Key)
if !ok { if !ok {
@@ -422,7 +546,7 @@ func hasAttributesHistogramDataPoints(dp metricdata.HistogramDataPoint, attrs ..
return reasons return reasons
} }
func hasAttributesHistogram(histogram metricdata.Histogram, attrs ...attribute.KeyValue) (reasons []string) { func hasAttributesHistogram[T int64 | float64](histogram metricdata.Histogram[T], attrs ...attribute.KeyValue) (reasons []string) {
for n, dp := range histogram.DataPoints { for n, dp := range histogram.DataPoints {
reas := hasAttributesHistogramDataPoints(dp, attrs...) reas := hasAttributesHistogramDataPoints(dp, attrs...)
if len(reas) > 0 { if len(reas) > 0 {
@@ -443,7 +567,9 @@ func hasAttributesAggregation(agg metricdata.Aggregation, attrs ...attribute.Key
reasons = hasAttributesSum(agg, attrs...) reasons = hasAttributesSum(agg, attrs...)
case metricdata.Sum[float64]: case metricdata.Sum[float64]:
reasons = hasAttributesSum(agg, attrs...) reasons = hasAttributesSum(agg, attrs...)
case metricdata.Histogram: case metricdata.Histogram[int64]:
reasons = hasAttributesHistogram(agg, attrs...)
case metricdata.Histogram[float64]:
reasons = hasAttributesHistogram(agg, attrs...) reasons = hasAttributesHistogram(agg, attrs...)
default: default:
reasons = []string{fmt.Sprintf("unknown aggregation %T", agg)} reasons = []string{fmt.Sprintf("unknown aggregation %T", agg)}