
Bump go.opentelemetry.io/proto/otlp from 0.7.0 to 0.8.0 in /exporters/otlp (#1872)

* Bump go.opentelemetry.io/proto/otlp in /exporters/otlp

Bumps [go.opentelemetry.io/proto/otlp](https://github.com/open-telemetry/opentelemetry-proto-go) from 0.7.0 to 0.8.0.
- [Release notes](https://github.com/open-telemetry/opentelemetry-proto-go/releases)
- [Commits](https://github.com/open-telemetry/opentelemetry-proto-go/compare/v0.7.0...v0.8.0)

Signed-off-by: dependabot[bot] <support@github.com>

* Update go.sum for the examples that use OTLP

* Upgrade OTLP usage (see the API sketch after the change summary below)

* Add error when sink gets unknown metric type

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Tyler Yahn <codingalias@gmail.com>
Co-authored-by: Tyler Yahn <MrAlias@users.noreply.github.com>
Co-authored-by: Anthony Mirabella <a9@aneurysm9.com>
Authored by dependabot[bot] on 2021-05-10 14:20:07 -04:00; committed by GitHub
parent 696af78754
commit 1e3fa3a3e0
8 changed files with 248 additions and 296 deletions
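
For orientation, the breaking change in go.opentelemetry.io/proto/otlp v0.8.0 is that the kind-specific metric messages from v0.7.0 (IntGauge/DoubleGauge, IntSum/DoubleSum, IntHistogram/DoubleHistogram and their IntDataPoint/DoubleDataPoint points) are deprecated in favor of unified Gauge, Sum, Histogram and Summary messages whose points carry the value in a NumberDataPoint oneof (AsInt or AsDouble). The sketch below shows the new shape for a cumulative, monotonic integer Sum; the metric name, value and timestamps are illustrative, not taken from this change.

package main

import (
	"fmt"

	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

func main() {
	// A single integer point expressed through the v0.8.0 NumberDataPoint oneof.
	point := &metricpb.NumberDataPoint{
		StartTimeUnixNano: 1620000000000000000, // illustrative timestamps
		TimeUnixNano:      1620000001000000000,
		Value:             &metricpb.NumberDataPoint_AsInt{AsInt: 11},
	}

	m := &metricpb.Metric{
		Name: "int64-count", // illustrative name
		Data: &metricpb.Metric_Sum{
			Sum: &metricpb.Sum{
				IsMonotonic:            true,
				AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
				DataPoints:             []*metricpb.NumberDataPoint{point},
			},
		},
	}

	fmt.Println(m.GetSum().GetIsMonotonic(), len(m.GetSum().GetDataPoints()))
}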

View File

@ -15,7 +15,6 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
@ -31,9 +30,9 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@ -52,8 +51,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
go.opentelemetry.io/proto/otlp v0.7.0 h1:rwOQPCuKAKmwGKq2aVNnYIibI6wnV7EvzgfTCzcdGg8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.8.0 h1:Y0huTcGvPnmeIUuwORZk07LQ2q2CbrXHBkUidvGIoL0=
go.opentelemetry.io/proto/otlp v0.8.0/go.mod h1:4i41ohS2vg3FjjjRpBNqfT/voGvIxREH17c6djRtXx8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -100,7 +99,6 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.37.0 h1:uSZWeQJX5j11bIQ4AJoj+McDBo29cY1MCoC1wO3ts+c=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=

View File

@ -60,7 +60,6 @@ github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4s
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
@ -96,8 +95,9 @@ github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvq
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@ -290,8 +290,8 @@ go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mI
go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opentelemetry.io/proto/otlp v0.7.0 h1:rwOQPCuKAKmwGKq2aVNnYIibI6wnV7EvzgfTCzcdGg8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.8.0 h1:Y0huTcGvPnmeIUuwORZk07LQ2q2CbrXHBkUidvGIoL0=
go.opentelemetry.io/proto/otlp v0.8.0/go.mod h1:4i41ohS2vg3FjjjRpBNqfT/voGvIxREH17c6djRtXx8=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
@ -412,7 +412,6 @@ google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQ
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.37.0 h1:uSZWeQJX5j11bIQ4AJoj+McDBo29cY1MCoC1wO3ts+c=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=

View File

@ -17,7 +17,7 @@ require (
go.opentelemetry.io/otel/sdk/export/metric v0.20.0
go.opentelemetry.io/otel/sdk/metric v0.20.0
go.opentelemetry.io/otel/trace v0.20.0
go.opentelemetry.io/proto/otlp v0.7.0
go.opentelemetry.io/proto/otlp v0.8.0
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013
google.golang.org/grpc v1.37.0
google.golang.org/protobuf v1.26.0

View File

@ -15,7 +15,6 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
@ -31,9 +30,9 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0 h1:LUVKkCeviFUMKqHa4tXIIij/lbhnMbP7Fn5wKdKkRh4=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@ -52,8 +51,8 @@ github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
go.opentelemetry.io/proto/otlp v0.7.0 h1:rwOQPCuKAKmwGKq2aVNnYIibI6wnV7EvzgfTCzcdGg8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.8.0 h1:Y0huTcGvPnmeIUuwORZk07LQ2q2CbrXHBkUidvGIoL0=
go.opentelemetry.io/proto/otlp v0.8.0/go.mod h1:4i41ohS2vg3FjjjRpBNqfT/voGvIxREH17c6djRtXx8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -100,7 +99,6 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.37.0 h1:uSZWeQJX5j11bIQ4AJoj+McDBo29cY1MCoC1wO3ts+c=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=

View File

@ -34,6 +34,7 @@ import (
"go.opentelemetry.io/otel/sdk/resource"
sdktrace "go.opentelemetry.io/otel/sdk/trace"
commonpb "go.opentelemetry.io/proto/otlp/common/v1"
metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)
// RunEndToEndTest can be used by protocol driver tests to validate
@ -208,52 +209,34 @@ func RunEndToEndTest(ctx context.Context, t *testing.T, exp *otlp.Exporter, mcTr
}
seen[m.Name] = struct{}{}
switch data.iKind {
case metric.CounterInstrumentKind, metric.ValueObserverInstrumentKind:
var dp []*metricpb.NumberDataPoint
switch data.iKind {
case metric.CounterInstrumentKind:
switch data.nKind {
case number.Int64Kind:
if dp := m.GetIntSum().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, data.val, dp[0].Value, "invalid value for %q", m.Name)
}
case number.Float64Kind:
if dp := m.GetDoubleSum().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, float64(data.val), dp[0].Value, "invalid value for %q", m.Name)
}
default:
assert.Failf(t, "invalid number kind", data.nKind.String())
}
require.NotNil(t, m.GetSum())
dp = m.GetSum().GetDataPoints()
case metric.ValueObserverInstrumentKind:
require.NotNil(t, m.GetGauge())
dp = m.GetGauge().GetDataPoints()
}
if assert.Len(t, dp, 1) {
switch data.nKind {
case number.Int64Kind:
if dp := m.GetIntGauge().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, data.val, dp[0].Value, "invalid value for %q", m.Name)
}
v := &metricpb.NumberDataPoint_AsInt{AsInt: data.val}
assert.Equal(t, v, dp[0].Value, "invalid value for %q", m.Name)
case number.Float64Kind:
if dp := m.GetDoubleGauge().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, float64(data.val), dp[0].Value, "invalid value for %q", m.Name)
v := &metricpb.NumberDataPoint_AsDouble{AsDouble: float64(data.val)}
assert.Equal(t, v, dp[0].Value, "invalid value for %q", m.Name)
}
default:
assert.Failf(t, "invalid number kind", data.nKind.String())
}
case metric.ValueRecorderInstrumentKind:
switch data.nKind {
case number.Int64Kind:
assert.NotNil(t, m.GetIntHistogram())
if dp := m.GetIntHistogram().DataPoints; assert.Len(t, dp, 1) {
count := dp[0].Count
assert.Equal(t, uint64(1), count, "invalid count for %q", m.Name)
assert.Equal(t, int64(data.val*int64(count)), dp[0].Sum, "invalid sum for %q (value %d)", m.Name, data.val)
}
case number.Float64Kind:
assert.NotNil(t, m.GetDoubleHistogram())
if dp := m.GetDoubleHistogram().DataPoints; assert.Len(t, dp, 1) {
require.NotNil(t, m.GetSummary())
if dp := m.GetSummary().DataPoints; assert.Len(t, dp, 1) {
count := dp[0].Count
assert.Equal(t, uint64(1), count, "invalid count for %q", m.Name)
assert.Equal(t, float64(data.val*int64(count)), dp[0].Sum, "invalid sum for %q (value %d)", m.Name, data.val)
}
default:
assert.Failf(t, "invalid number kind", data.nKind.String())
}
default:
assert.Failf(t, "invalid metrics kind", data.iKind.String())
}
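
The reworked end-to-end assertions above compare against the oneof wrapper values (NumberDataPoint_AsInt / NumberDataPoint_AsDouble) directly. A consumer that only wants the numeric value can switch on the wrapper type instead; this is a sketch with a helper name of our choosing, not code from this commit.

package sketch

import (
	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

// numberAsFloat reads a NumberDataPoint value regardless of which oneof
// variant it carries. Illustrative helper, not part of this change.
func numberAsFloat(dp *metricpb.NumberDataPoint) float64 {
	switch v := dp.Value.(type) {
	case *metricpb.NumberDataPoint_AsInt:
		return float64(v.AsInt)
	case *metricpb.NumberDataPoint_AsDouble:
		return v.AsDouble
	default:
		return 0 // unset or unknown variant
	}
}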

View File

@ -200,19 +200,17 @@ func sink(ctx context.Context, in <-chan result) ([]*metricpb.ResourceMetrics, e
continue
}
switch res.Metric.Data.(type) {
case *metricpb.Metric_IntGauge:
m.GetIntGauge().DataPoints = append(m.GetIntGauge().DataPoints, res.Metric.GetIntGauge().DataPoints...)
case *metricpb.Metric_IntHistogram:
m.GetIntHistogram().DataPoints = append(m.GetIntHistogram().DataPoints, res.Metric.GetIntHistogram().DataPoints...)
case *metricpb.Metric_IntSum:
m.GetIntSum().DataPoints = append(m.GetIntSum().DataPoints, res.Metric.GetIntSum().DataPoints...)
case *metricpb.Metric_DoubleGauge:
m.GetDoubleGauge().DataPoints = append(m.GetDoubleGauge().DataPoints, res.Metric.GetDoubleGauge().DataPoints...)
case *metricpb.Metric_DoubleHistogram:
m.GetDoubleHistogram().DataPoints = append(m.GetDoubleHistogram().DataPoints, res.Metric.GetDoubleHistogram().DataPoints...)
case *metricpb.Metric_DoubleSum:
m.GetDoubleSum().DataPoints = append(m.GetDoubleSum().DataPoints, res.Metric.GetDoubleSum().DataPoints...)
case *metricpb.Metric_Gauge:
m.GetGauge().DataPoints = append(m.GetGauge().DataPoints, res.Metric.GetGauge().DataPoints...)
case *metricpb.Metric_Sum:
m.GetSum().DataPoints = append(m.GetSum().DataPoints, res.Metric.GetSum().DataPoints...)
case *metricpb.Metric_Histogram:
m.GetHistogram().DataPoints = append(m.GetHistogram().DataPoints, res.Metric.GetHistogram().DataPoints...)
case *metricpb.Metric_Summary:
m.GetSummary().DataPoints = append(m.GetSummary().DataPoints, res.Metric.GetSummary().DataPoints...)
default:
err := fmt.Sprintf("unsupported metric type: %T", res.Metric.Data)
errStrings = append(errStrings, err)
}
}
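
The default branch added above is what the "Add error when sink gets unknown metric type" bullet refers to: instead of silently dropping a payload the sink does not recognize, it records a formatted error string and keeps aggregating. The same pattern in isolation (function and parameter names are ours, not the exact exporter code):

package sketch

import (
	"fmt"

	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

// mergeMetric appends src's data points into dst when the payload type is
// known, and records an error string for anything else.
func mergeMetric(dst, src *metricpb.Metric, errStrings *[]string) {
	switch src.Data.(type) {
	case *metricpb.Metric_Gauge:
		dst.GetGauge().DataPoints = append(dst.GetGauge().DataPoints, src.GetGauge().DataPoints...)
	case *metricpb.Metric_Sum:
		dst.GetSum().DataPoints = append(dst.GetSum().DataPoints, src.GetSum().DataPoints...)
	case *metricpb.Metric_Histogram:
		dst.GetHistogram().DataPoints = append(dst.GetHistogram().DataPoints, src.GetHistogram().DataPoints...)
	case *metricpb.Metric_Summary:
		dst.GetSummary().DataPoints = append(dst.GetSummary().DataPoints, src.GetSummary().DataPoints...)
	default:
		*errStrings = append(*errStrings, fmt.Sprintf("unsupported metric type: %T", src.Data))
	}
}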
@ -317,43 +315,39 @@ func gaugeArray(record export.Record, points []aggregation.Point) (*metricpb.Met
pbLabels := stringKeyValues(labels.Iter())
ndp := make([]*metricpb.NumberDataPoint, 0, len(points))
switch nk := desc.NumberKind(); nk {
case number.Int64Kind:
var pts []*metricpb.IntDataPoint
for _, s := range points {
pts = append(pts, &metricpb.IntDataPoint{
for _, p := range points {
ndp = append(ndp, &metricpb.NumberDataPoint{
Labels: pbLabels,
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Value: s.Number.CoerceToInt64(nk),
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: p.Number.CoerceToInt64(nk),
},
})
}
m.Data = &metricpb.Metric_IntGauge{
IntGauge: &metricpb.IntGauge{
DataPoints: pts,
},
}
case number.Float64Kind:
var pts []*metricpb.DoubleDataPoint
for _, s := range points {
pts = append(pts, &metricpb.DoubleDataPoint{
for _, p := range points {
ndp = append(ndp, &metricpb.NumberDataPoint{
Labels: pbLabels,
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Value: s.Number.CoerceToFloat64(nk),
Value: &metricpb.NumberDataPoint_AsDouble{
AsDouble: p.Number.CoerceToFloat64(nk),
},
})
}
m.Data = &metricpb.Metric_DoubleGauge{
DoubleGauge: &metricpb.DoubleGauge{
DataPoints: pts,
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, nk)
}
m.Data = &metricpb.Metric_Gauge{
Gauge: &metricpb.Gauge{
DataPoints: ndp,
},
}
return m, nil
}
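
After this change both branches of gaugeArray fill the same []*metricpb.NumberDataPoint slice; only the oneof wrapper differs by number kind. The per-point conversion can be pictured as a helper along these lines (a sketch assuming the v0.20.0 go.opentelemetry.io/otel/metric/number import path; the helper itself is ours, the real code inlines it per branch):

package sketch

import (
	"fmt"

	"go.opentelemetry.io/otel/metric/number"
	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

// newNumberDataPoint converts one aggregated value into a v0.8.0
// NumberDataPoint, choosing the AsInt or AsDouble wrapper by number kind.
func newNumberDataPoint(nk number.Kind, n number.Number, start, end uint64) (*metricpb.NumberDataPoint, error) {
	dp := &metricpb.NumberDataPoint{
		StartTimeUnixNano: start,
		TimeUnixNano:      end,
	}
	switch nk {
	case number.Int64Kind:
		dp.Value = &metricpb.NumberDataPoint_AsInt{AsInt: n.CoerceToInt64(nk)}
	case number.Float64Kind:
		dp.Value = &metricpb.NumberDataPoint_AsDouble{AsDouble: n.CoerceToFloat64(nk)}
	default:
		return nil, fmt.Errorf("unknown number kind: %v", nk)
	}
	return dp, nil
}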
@ -369,11 +363,13 @@ func gaugePoint(record export.Record, num number.Number, start, end time.Time) (
switch n := desc.NumberKind(); n {
case number.Int64Kind:
m.Data = &metricpb.Metric_IntGauge{
IntGauge: &metricpb.IntGauge{
DataPoints: []*metricpb.IntDataPoint{
m.Data = &metricpb.Metric_Gauge{
Gauge: &metricpb.Gauge{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: num.CoerceToInt64(n),
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: num.CoerceToInt64(n),
},
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
@ -382,11 +378,13 @@ func gaugePoint(record export.Record, num number.Number, start, end time.Time) (
},
}
case number.Float64Kind:
m.Data = &metricpb.Metric_DoubleGauge{
DoubleGauge: &metricpb.DoubleGauge{
DataPoints: []*metricpb.DoubleDataPoint{
m.Data = &metricpb.Metric_Gauge{
Gauge: &metricpb.Gauge{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: num.CoerceToFloat64(n),
Value: &metricpb.NumberDataPoint_AsDouble{
AsDouble: num.CoerceToFloat64(n),
},
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
@ -423,13 +421,15 @@ func sumPoint(record export.Record, num number.Number, start, end time.Time, ek
switch n := desc.NumberKind(); n {
case number.Int64Kind:
m.Data = &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
m.Data = &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: monotonic,
AggregationTemporality: exportKindToTemporality(ek),
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: num.CoerceToInt64(n),
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: num.CoerceToInt64(n),
},
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
@ -438,13 +438,15 @@ func sumPoint(record export.Record, num number.Number, start, end time.Time, ek
},
}
case number.Float64Kind:
m.Data = &metricpb.Metric_DoubleSum{
DoubleSum: &metricpb.DoubleSum{
m.Data = &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: monotonic,
AggregationTemporality: exportKindToTemporality(ek),
DataPoints: []*metricpb.DoubleDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: num.CoerceToFloat64(n),
Value: &metricpb.NumberDataPoint_AsDouble{
AsDouble: num.CoerceToFloat64(n),
},
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
@ -490,46 +492,29 @@ func minMaxSumCount(record export.Record, a aggregation.MinMaxSumCount) (*metric
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
}
buckets := []uint64{min.AsRaw(), max.AsRaw()}
bounds := []float64{0.0, 100.0}
switch n := desc.NumberKind(); n {
case number.Int64Kind:
m.Data = &metricpb.Metric_IntHistogram{
IntHistogram: &metricpb.IntHistogram{
DataPoints: []*metricpb.IntHistogramDataPoint{
Data: &metricpb.Metric_Summary{
Summary: &metricpb.Summary{
DataPoints: []*metricpb.SummaryDataPoint{
{
Sum: sum.CoerceToInt64(n),
Sum: sum.CoerceToFloat64(desc.NumberKind()),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: buckets,
ExplicitBounds: bounds,
},
},
},
}
case number.Float64Kind:
m.Data = &metricpb.Metric_DoubleHistogram{
DoubleHistogram: &metricpb.DoubleHistogram{
DataPoints: []*metricpb.DoubleHistogramDataPoint{
QuantileValues: []*metricpb.SummaryDataPoint_ValueAtQuantile{
{
Sum: sum.CoerceToFloat64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: buckets,
ExplicitBounds: bounds,
Quantile: 0.0,
Value: min.CoerceToFloat64(desc.NumberKind()),
},
{
Quantile: 1.0,
Value: max.CoerceToFloat64(desc.NumberKind()),
},
},
},
},
},
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, n)
}
return m, nil
}
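
With the typed histogram messages no longer used here, the MinMaxSumCount aggregation is now encoded as a Summary: Sum is coerced to float64, Count carries the observation count, and the minimum and maximum are reported as the quantile 0.0 and 1.0 values. A reader-side sketch for recovering min and max (helper name is ours):

package sketch

import (
	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

// minMaxFromSummary recovers the min and max that minMaxSumCount encodes as
// the quantile 0.0 and 1.0 entries of a SummaryDataPoint. Illustrative only.
func minMaxFromSummary(dp *metricpb.SummaryDataPoint) (min, max float64) {
	for _, q := range dp.GetQuantileValues() {
		switch q.GetQuantile() {
		case 0.0:
			min = q.GetValue()
		case 1.0:
			max = q.GetValue()
		}
	}
	return min, max
}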
@ -570,15 +555,12 @@ func histogramPoint(record export.Record, ek export.ExportKind, a aggregation.Hi
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
}
switch n := desc.NumberKind(); n {
case number.Int64Kind:
m.Data = &metricpb.Metric_IntHistogram{
IntHistogram: &metricpb.IntHistogram{
Data: &metricpb.Metric_Histogram{
Histogram: &metricpb.Histogram{
AggregationTemporality: exportKindToTemporality(ek),
DataPoints: []*metricpb.IntHistogramDataPoint{
DataPoints: []*metricpb.HistogramDataPoint{
{
Sum: sum.CoerceToInt64(n),
Sum: sum.CoerceToFloat64(desc.NumberKind()),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
@ -588,28 +570,8 @@ func histogramPoint(record export.Record, ek export.ExportKind, a aggregation.Hi
},
},
},
}
case number.Float64Kind:
m.Data = &metricpb.Metric_DoubleHistogram{
DoubleHistogram: &metricpb.DoubleHistogram{
AggregationTemporality: exportKindToTemporality(ek),
DataPoints: []*metricpb.DoubleHistogramDataPoint{
{
Sum: sum.CoerceToFloat64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: counts,
ExplicitBounds: boundaries,
},
},
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, n)
}
return m, nil
}
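
Because HistogramDataPoint stores Sum as a float64 in v0.8.0, the former int and float branches of histogramPoint collapse into one; only the sum coercion changes. The resulting metric shape looks like this (all field values below are illustrative):

package sketch

import (
	metricpb "go.opentelemetry.io/proto/otlp/metrics/v1"
)

// histogramMetricSketch shows the single shape histogramPoint now emits for
// both number kinds; the name, counts and bounds are illustrative.
func histogramMetricSketch() *metricpb.Metric {
	return &metricpb.Metric{
		Name: "valuerecorder",
		Data: &metricpb.Metric_Histogram{
			Histogram: &metricpb.Histogram{
				AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
				DataPoints: []*metricpb.HistogramDataPoint{{
					Sum:            11, // coerced to float64 for both Int64Kind and Float64Kind
					Count:          2,
					BucketCounts:   []uint64{1, 1},
					ExplicitBounds: []float64{10},
				}},
			},
		},
	}
}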

View File

@ -129,12 +129,10 @@ func TestMinMaxSumCountDatapoints(t *testing.T) {
assert.NoError(t, mmsc.Update(context.Background(), 1, &desc))
assert.NoError(t, mmsc.Update(context.Background(), 10, &desc))
require.NoError(t, mmsc.SynchronizedMove(ckpt, &desc))
expected := []*metricpb.IntHistogramDataPoint{
expected := []*metricpb.SummaryDataPoint{
{
Count: 2,
Sum: 11,
ExplicitBounds: []float64{0.0, 100.0},
BucketCounts: []uint64{1, 10},
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
@ -143,16 +141,28 @@ func TestMinMaxSumCountDatapoints(t *testing.T) {
Value: "1",
},
},
QuantileValues: []*metricpb.SummaryDataPoint_ValueAtQuantile{
{
Quantile: 0.0,
Value: 1.0,
},
{
Quantile: 1.0,
Value: 10.0,
},
},
},
}
record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd)
m, err := minMaxSumCount(record, ckpt.(aggregation.MinMaxSumCount))
if assert.NoError(t, err) {
assert.Nil(t, m.GetIntGauge())
assert.Equal(t, expected, m.GetIntHistogram().DataPoints)
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Nil(t, m.GetGauge())
assert.Nil(t, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Equal(t, expected, m.GetSummary().DataPoints)
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}
@ -179,13 +189,11 @@ func TestSumIntDataPoints(t *testing.T) {
require.NoError(t, err)
if m, err := sumPoint(record, value, record.StartTime(), record.EndTime(), export.CumulativeExportKind, true); assert.NoError(t, err) {
assert.Nil(t, m.GetIntGauge())
assert.Nil(t, m.GetIntHistogram())
assert.Equal(t, &metricpb.IntSum{
assert.Nil(t, m.GetGauge())
assert.Equal(t, &metricpb.Sum{
AggregationTemporality: otelCumulative,
IsMonotonic: true,
DataPoints: []*metricpb.IntDataPoint{{
Value: 1,
DataPoints: []*metricpb.NumberDataPoint{{
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
@ -194,9 +202,16 @@ func TestSumIntDataPoints(t *testing.T) {
Value: "1",
},
},
}}}, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: 1,
},
}},
}, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Nil(t, m.GetSummary())
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}
@ -213,16 +228,14 @@ func TestSumFloatDataPoints(t *testing.T) {
require.NoError(t, err)
if m, err := sumPoint(record, value, record.StartTime(), record.EndTime(), export.DeltaExportKind, false); assert.NoError(t, err) {
assert.Nil(t, m.GetIntGauge())
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Equal(t, &metricpb.DoubleSum{
assert.Nil(t, m.GetGauge())
assert.Equal(t, &metricpb.Sum{
IsMonotonic: false,
AggregationTemporality: otelDelta,
DataPoints: []*metricpb.DoubleDataPoint{{
Value: 1,
DataPoints: []*metricpb.NumberDataPoint{{
Value: &metricpb.NumberDataPoint_AsDouble{
AsDouble: 1.0,
},
StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()),
Labels: []*commonpb.StringKeyValue{
@ -231,7 +244,12 @@ func TestSumFloatDataPoints(t *testing.T) {
Value: "1",
},
},
}}}, m.GetDoubleSum())
}}}, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Nil(t, m.GetSummary())
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}
@ -248,8 +266,7 @@ func TestLastValueIntDataPoints(t *testing.T) {
require.NoError(t, err)
if m, err := gaugePoint(record, value, time.Time{}, timestamp); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.IntDataPoint{{
Value: 100,
assert.Equal(t, []*metricpb.NumberDataPoint{{
StartTimeUnixNano: 0,
TimeUnixNano: uint64(timestamp.UnixNano()),
Labels: []*commonpb.StringKeyValue{
@ -258,12 +275,16 @@ func TestLastValueIntDataPoints(t *testing.T) {
Value: "1",
},
},
}}, m.GetIntGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Nil(t, m.GetDoubleSum())
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: 100,
},
}}, m.GetGauge().DataPoints)
assert.Nil(t, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Nil(t, m.GetSummary())
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}
@ -280,8 +301,7 @@ func TestExactIntDataPoints(t *testing.T) {
require.NoError(t, err)
if m, err := gaugeArray(record, pts); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.IntDataPoint{{
Value: 100,
assert.Equal(t, []*metricpb.NumberDataPoint{{
StartTimeUnixNano: toNanos(intervalStart),
TimeUnixNano: toNanos(intervalEnd),
Labels: []*commonpb.StringKeyValue{
@ -290,12 +310,16 @@ func TestExactIntDataPoints(t *testing.T) {
Value: "1",
},
},
}}, m.GetIntGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Nil(t, m.GetDoubleSum())
Value: &metricpb.NumberDataPoint_AsInt{
AsInt: 100,
},
}}, m.GetGauge().DataPoints)
assert.Nil(t, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Nil(t, m.GetSummary())
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}
@ -312,8 +336,10 @@ func TestExactFloatDataPoints(t *testing.T) {
require.NoError(t, err)
if m, err := gaugeArray(record, pts); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.DoubleDataPoint{{
Value: 100,
assert.Equal(t, []*metricpb.NumberDataPoint{{
Value: &metricpb.NumberDataPoint_AsDouble{
AsDouble: 100,
},
StartTimeUnixNano: toNanos(intervalStart),
TimeUnixNano: toNanos(intervalEnd),
Labels: []*commonpb.StringKeyValue{
@ -322,12 +348,13 @@ func TestExactFloatDataPoints(t *testing.T) {
Value: "1",
},
},
}}, m.GetDoubleGauge().DataPoints)
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetIntGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Nil(t, m.GetDoubleSum())
}}, m.GetGauge().DataPoints)
assert.Nil(t, m.GetSum())
assert.Nil(t, m.GetHistogram())
assert.Nil(t, m.GetSummary())
assert.Nil(t, m.GetIntGauge()) // nolint
assert.Nil(t, m.GetIntSum()) // nolint
assert.Nil(t, m.GetIntHistogram()) // nolint
}
}

View File

@ -162,19 +162,19 @@ func TestNoGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -208,10 +208,10 @@ func TestValuerecorderMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "valuerecorder",
Data: &metricpb.Metric_IntHistogram{
IntHistogram: &metricpb.IntHistogram{
Data: &metricpb.Metric_Histogram{
Histogram: &metricpb.Histogram{
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntHistogramDataPoint{
DataPoints: []*metricpb.HistogramDataPoint{
{
Labels: []*commonpb.StringKeyValue{
{
@ -281,19 +281,19 @@ func TestCountInt64MetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -331,13 +331,13 @@ func TestCountFloat64MetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "float64-count",
Data: &metricpb.Metric_DoubleSum{
DoubleSum: &metricpb.DoubleSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.DoubleDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsDouble{AsDouble: 11.0},
Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
@ -352,7 +352,7 @@ func TestCountFloat64MetricGroupingExport(t *testing.T) {
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsDouble{AsDouble: 11.0},
Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
@ -424,25 +424,25 @@ func TestResourceMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -462,13 +462,13 @@ func TestResourceMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -562,25 +562,25 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -599,13 +599,13 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -623,13 +623,13 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -653,13 +653,13 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "int64-count",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: true,
AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -716,13 +716,13 @@ func TestStatelessExportKind(t *testing.T) {
Metrics: []*metricpb.Metric{
{
Name: "instrument",
Data: &metricpb.Metric_IntSum{
IntSum: &metricpb.IntSum{
Data: &metricpb.Metric_Sum{
Sum: &metricpb.Sum{
IsMonotonic: k.monotonic,
AggregationTemporality: k.aggTemporality,
DataPoints: []*metricpb.IntDataPoint{
DataPoints: []*metricpb.NumberDataPoint{
{
Value: 11,
Value: &metricpb.NumberDataPoint_AsInt{AsInt: 11},
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
@ -832,42 +832,27 @@ func runMetricExportTests(t *testing.T, opts []otlp.ExporterOption, rs []record,
assert.Equal(t, expected.Unit, g[i].Unit)
assert.Equal(t, expected.Description, g[i].Description)
switch g[i].Data.(type) {
case *metricpb.Metric_IntGauge:
assert.ElementsMatch(t, expected.GetIntGauge().DataPoints, g[i].GetIntGauge().DataPoints)
case *metricpb.Metric_IntHistogram:
case *metricpb.Metric_Gauge:
assert.ElementsMatch(t, expected.GetGauge().GetDataPoints(), g[i].GetGauge().GetDataPoints())
case *metricpb.Metric_Sum:
assert.Equal(t,
expected.GetIntHistogram().GetAggregationTemporality(),
g[i].GetIntHistogram().GetAggregationTemporality(),
)
assert.ElementsMatch(t, expected.GetIntHistogram().DataPoints, g[i].GetIntHistogram().DataPoints)
case *metricpb.Metric_IntSum:
assert.Equal(t,
expected.GetIntSum().GetAggregationTemporality(),
g[i].GetIntSum().GetAggregationTemporality(),
expected.GetSum().GetAggregationTemporality(),
g[i].GetSum().GetAggregationTemporality(),
)
assert.Equal(t,
expected.GetIntSum().GetIsMonotonic(),
g[i].GetIntSum().GetIsMonotonic(),
expected.GetSum().GetIsMonotonic(),
g[i].GetSum().GetIsMonotonic(),
)
assert.ElementsMatch(t, expected.GetIntSum().DataPoints, g[i].GetIntSum().DataPoints)
case *metricpb.Metric_DoubleGauge:
assert.ElementsMatch(t, expected.GetDoubleGauge().DataPoints, g[i].GetDoubleGauge().DataPoints)
case *metricpb.Metric_DoubleHistogram:
assert.Equal(t,
expected.GetDoubleHistogram().GetAggregationTemporality(),
g[i].GetDoubleHistogram().GetAggregationTemporality(),
assert.ElementsMatch(t, expected.GetSum().GetDataPoints(), g[i].GetSum().GetDataPoints())
case *metricpb.Metric_Histogram:
assert.Equal(
t,
expected.GetHistogram().GetAggregationTemporality(),
g[i].GetHistogram().GetAggregationTemporality(),
)
assert.ElementsMatch(t, expected.GetDoubleHistogram().DataPoints, g[i].GetDoubleHistogram().DataPoints)
case *metricpb.Metric_DoubleSum:
assert.Equal(t,
expected.GetDoubleSum().GetAggregationTemporality(),
g[i].GetDoubleSum().GetAggregationTemporality(),
)
assert.Equal(t,
expected.GetDoubleSum().GetIsMonotonic(),
g[i].GetDoubleSum().GetIsMonotonic(),
)
assert.ElementsMatch(t, expected.GetDoubleSum().DataPoints, g[i].GetDoubleSum().DataPoints)
assert.ElementsMatch(t, expected.GetHistogram().GetDataPoints(), g[i].GetHistogram().GetDataPoints())
case *metricpb.Metric_Summary:
assert.ElementsMatch(t, expected.GetSummary().GetDataPoints(), g[i].GetSummary().GetDataPoints())
default:
assert.Failf(t, "unknown data type", g[i].Name)
}