1
0
mirror of https://github.com/open-telemetry/opentelemetry-go.git synced 2025-02-03 13:11:53 +02:00

Update proto v0.5.0 (#1230)

* Updating version of OTLP to 0.5.0

* updating trace transform to use latest protos

* update otlp span test

* forgot one test

* finishing the rest of the implementation

In this change:
- MetricDescriptor is no longer a member of the metric
- splitting Sum, Gauge, Histogram by Int/Float
- SummaryDataPoints are no longer around, MinMaxSumCount is now a Histogram

* update changelog

* Update CHANGELOG.md

Move changes to the Unreleased section. This is to account for the recent release.

Co-authored-by: Tyler Yahn <MrAlias@users.noreply.github.com>
This commit is contained in:
alrex 2020-10-08 20:07:39 -07:00 committed by GitHub
parent 27c84d689d
commit 25ccf5a08a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 5969 additions and 2079 deletions

View File

@ -16,6 +16,7 @@ This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)
- `ErrorOption` has been changed to an interface to conform with project design standards which included adding a `NewErrorConfig` function. - `ErrorOption` has been changed to an interface to conform with project design standards which included adding a `NewErrorConfig` function.
- `EmptySpanContext` is removed. - `EmptySpanContext` is removed.
- Move the `go.opentelemetry.io/otel/api/trace/tracetest` package into `go.opentelemetry.io/otel/oteltest`. (#1229) - Move the `go.opentelemetry.io/otel/api/trace/tracetest` package into `go.opentelemetry.io/otel/oteltest`. (#1229)
- OTLP Exporter supports OTLP v0.5.0. (#1230)
## [0.13.0] - 2020-10-08 ## [0.13.0] - 2020-10-08

@ -1 +1 @@
Subproject commit e43e1abc40428a6ee98e3bfd79bec1dfa2ed18cd Subproject commit 313a868be259dce6c6516dd417d3ad5fd3321acf

View File

@ -0,0 +1,577 @@
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: opentelemetry/proto/collector/logs/v1/logs_service.proto
package v1
import (
context "context"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
v1 "go.opentelemetry.io/otel/exporters/otlp/internal/opentelemetry-proto-gen/logs/v1"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
io "io"
math "math"
math_bits "math/bits"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf

// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
// ExportLogsServiceRequest is the request message for the
// LogsService Export RPC, carrying a batch of ResourceLogs.
type ExportLogsServiceRequest struct {
	// An array of ResourceLogs.
	// For data coming from a single resource this array will typically contain one
	// element. Intermediary nodes (such as OpenTelemetry Collector) that receive
	// data from multiple origins typically batch the data before forwarding further and
	// in that case this array will contain multiple elements.
	ResourceLogs []*v1.ResourceLogs `protobuf:"bytes,1,rep,name=resource_logs,json=resourceLogs,proto3" json:"resource_logs,omitempty"`
	// XXX_* fields are internal bookkeeping used by the gogo/protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}
// Reset clears the message to its zero value.
func (m *ExportLogsServiceRequest) Reset() { *m = ExportLogsServiceRequest{} }

// String returns a compact, human-readable text form of the message.
func (m *ExportLogsServiceRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*ExportLogsServiceRequest) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's
// index path within it.
func (*ExportLogsServiceRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_8e3bf87aaa43acd4, []int{0}
}

// The XXX_* methods below are internal hooks called by the gogo/protobuf
// runtime; they delegate to the hand-rolled fast-path implementations
// defined later in this file.

func (m *ExportLogsServiceRequest) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ExportLogsServiceRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		// Deterministic output goes through the reflection-based marshaler.
		return xxx_messageInfo_ExportLogsServiceRequest.Marshal(b, m, deterministic)
	} else {
		// Fast path: serialize into the existing capacity of b.
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ExportLogsServiceRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ExportLogsServiceRequest.Merge(m, src)
}
func (m *ExportLogsServiceRequest) XXX_Size() int {
	return m.Size()
}
func (m *ExportLogsServiceRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ExportLogsServiceRequest.DiscardUnknown(m)
}

var xxx_messageInfo_ExportLogsServiceRequest proto.InternalMessageInfo
// GetResourceLogs returns the ResourceLogs slice; it is safe to call on
// a nil receiver, in which case it returns nil.
func (m *ExportLogsServiceRequest) GetResourceLogs() []*v1.ResourceLogs {
	if m == nil {
		return nil
	}
	return m.ResourceLogs
}
// ExportLogsServiceResponse is the (empty) response message for the
// LogsService Export RPC.
type ExportLogsServiceResponse struct {
	// XXX_* fields are internal bookkeeping used by the gogo/protobuf runtime.
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *ExportLogsServiceResponse) Reset() { *m = ExportLogsServiceResponse{} }

// String returns a compact, human-readable text form of the message.
func (m *ExportLogsServiceResponse) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*ExportLogsServiceResponse) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and this message's
// index path within it.
func (*ExportLogsServiceResponse) Descriptor() ([]byte, []int) {
	return fileDescriptor_8e3bf87aaa43acd4, []int{1}
}
// The XXX_* methods below are internal hooks called by the gogo/protobuf
// runtime; they delegate to the hand-rolled fast-path implementations
// defined later in this file.

func (m *ExportLogsServiceResponse) XXX_Unmarshal(b []byte) error {
	return m.Unmarshal(b)
}
func (m *ExportLogsServiceResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	if deterministic {
		// Deterministic output goes through the reflection-based marshaler.
		return xxx_messageInfo_ExportLogsServiceResponse.Marshal(b, m, deterministic)
	} else {
		// Fast path: serialize into the existing capacity of b.
		b = b[:cap(b)]
		n, err := m.MarshalToSizedBuffer(b)
		if err != nil {
			return nil, err
		}
		return b[:n], nil
	}
}
func (m *ExportLogsServiceResponse) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ExportLogsServiceResponse.Merge(m, src)
}
func (m *ExportLogsServiceResponse) XXX_Size() int {
	return m.Size()
}
func (m *ExportLogsServiceResponse) XXX_DiscardUnknown() {
	xxx_messageInfo_ExportLogsServiceResponse.DiscardUnknown(m)
}

var xxx_messageInfo_ExportLogsServiceResponse proto.InternalMessageInfo
// init registers the message types with the gogo/protobuf type registry
// under their fully-qualified proto names.
func init() {
	proto.RegisterType((*ExportLogsServiceRequest)(nil), "opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest")
	proto.RegisterType((*ExportLogsServiceResponse)(nil), "opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse")
}

// init registers the compressed file descriptor for this .proto file.
func init() {
	proto.RegisterFile("opentelemetry/proto/collector/logs/v1/logs_service.proto", fileDescriptor_8e3bf87aaa43acd4)
}
// fileDescriptor_8e3bf87aaa43acd4 holds the gzip-compressed
// FileDescriptorProto for logs_service.proto. Generated data — do not
// edit by hand.
var fileDescriptor_8e3bf87aaa43acd4 = []byte{
	// 290 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x92, 0x41, 0x4a, 0x03, 0x31,
	0x14, 0x86, 0x0d, 0x42, 0x17, 0xa9, 0x82, 0xcc, 0xaa, 0x56, 0x18, 0x64, 0x40, 0xa9, 0x8b, 0x26,
	0xb4, 0x6e, 0xdc, 0x29, 0x05, 0x77, 0x22, 0x65, 0xdc, 0x75, 0x53, 0x74, 0x78, 0x0c, 0x23, 0x31,
	0x2f, 0xbe, 0xa4, 0x83, 0x1e, 0xc2, 0x23, 0xb8, 0xf5, 0x2c, 0x2e, 0x3d, 0x82, 0xcc, 0x49, 0x64,
	0x92, 0x22, 0x53, 0x1d, 0x61, 0x70, 0x15, 0xf2, 0xf2, 0x7f, 0xff, 0xff, 0x3f, 0x08, 0x3f, 0x43,
	0x03, 0xda, 0x81, 0x82, 0x07, 0x70, 0xf4, 0x2c, 0x0d, 0xa1, 0x43, 0x99, 0xa1, 0x52, 0x90, 0x39,
	0x24, 0xa9, 0x30, 0xb7, 0xb2, 0x9c, 0xf8, 0x73, 0x69, 0x81, 0xca, 0x22, 0x03, 0xe1, 0x45, 0xd1,
	0xd1, 0x06, 0x19, 0x86, 0xe2, 0x9b, 0x14, 0x35, 0x21, 0xca, 0xc9, 0xf0, 0xb8, 0x2d, 0xa0, 0x69,
	0x1b, 0xc8, 0xe4, 0x9e, 0x0f, 0x2e, 0x9f, 0x0c, 0x92, 0xbb, 0xc2, 0xdc, 0xde, 0x84, 0xa4, 0x14,
	0x1e, 0x57, 0x60, 0x5d, 0x74, 0xcd, 0x77, 0x09, 0x2c, 0xae, 0x28, 0x83, 0x65, 0x8d, 0x0c, 0xd8,
	0xe1, 0xf6, 0xa8, 0x3f, 0x3d, 0x11, 0x6d, 0x15, 0xd6, 0xc1, 0x22, 0x5d, 0x13, 0xb5, 0x5f, 0xba,
	0x43, 0x8d, 0x5b, 0x72, 0xc0, 0xf7, 0x5b, 0xb2, 0xac, 0x41, 0x6d, 0x61, 0xfa, 0xca, 0x78, 0xbf,
	0x31, 0x8f, 0x5e, 0x18, 0xef, 0x05, 0x75, 0x74, 0x2e, 0x3a, 0xed, 0x2c, 0xfe, 0x5a, 0x64, 0x78,
	0xf1, 0x7f, 0x83, 0xd0, 0x2e, 0xd9, 0x9a, 0xbd, 0xb1, 0xf7, 0x2a, 0x66, 0x1f, 0x55, 0xcc, 0x3e,
	0xab, 0x98, 0xf1, 0x51, 0x81, 0xdd, 0x4c, 0x67, 0x7b, 0x0d, 0xbf, 0x79, 0xad, 0x99, 0xb3, 0xc5,
	0x22, 0xff, 0x49, 0x17, 0x28, 0xd1, 0x81, 0x92, 0xe0, 0x2b, 0x00, 0x59, 0x89, 0x4e, 0x19, 0x59,
	0x68, 0x07, 0xa4, 0x6f, 0x95, 0xdc, 0x50, 0x8f, 0x7d, 0xd6, 0x38, 0x07, 0xfd, 0xfb, 0xcf, 0xdc,
	0xf5, 0xfc, 0xe3, 0xe9, 0x57, 0x00, 0x00, 0x00, 0xff, 0xff, 0x46, 0x1c, 0xa2, 0x18, 0x63, 0x02,
	0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// LogsServiceClient is the client API for LogsService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type LogsServiceClient interface {
	// For performance reasons, it is recommended to keep this RPC
	// alive for the entire life of the application.
	Export(ctx context.Context, in *ExportLogsServiceRequest, opts ...grpc.CallOption) (*ExportLogsServiceResponse, error)
}

// logsServiceClient is the concrete LogsServiceClient backed by a gRPC
// client connection.
type logsServiceClient struct {
	cc *grpc.ClientConn
}

// NewLogsServiceClient wraps cc in a LogsServiceClient.
func NewLogsServiceClient(cc *grpc.ClientConn) LogsServiceClient {
	return &logsServiceClient{cc}
}
// Export invokes the LogsService/Export unary method over the underlying
// connection and returns the decoded response.
func (c *logsServiceClient) Export(ctx context.Context, in *ExportLogsServiceRequest, opts ...grpc.CallOption) (*ExportLogsServiceResponse, error) {
	out := new(ExportLogsServiceResponse)
	err := c.cc.Invoke(ctx, "/opentelemetry.proto.collector.logs.v1.LogsService/Export", in, out, opts...)
	if err != nil {
		return nil, err
	}
	return out, nil
}
// LogsServiceServer is the server API for LogsService service.
type LogsServiceServer interface {
	// For performance reasons, it is recommended to keep this RPC
	// alive for the entire life of the application.
	Export(context.Context, *ExportLogsServiceRequest) (*ExportLogsServiceResponse, error)
}

// UnimplementedLogsServiceServer can be embedded to have forward compatible implementations.
type UnimplementedLogsServiceServer struct {
}

// Export always fails with codes.Unimplemented.
func (*UnimplementedLogsServiceServer) Export(ctx context.Context, req *ExportLogsServiceRequest) (*ExportLogsServiceResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Export not implemented")
}
// RegisterLogsServiceServer registers srv with the gRPC server s under
// the LogsService service descriptor.
func RegisterLogsServiceServer(s *grpc.Server, srv LogsServiceServer) {
	s.RegisterService(&_LogsService_serviceDesc, srv)
}

// _LogsService_Export_Handler decodes an incoming Export request,
// optionally routes it through the configured unary interceptor, and
// dispatches it to srv.Export.
func _LogsService_Export_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(ExportLogsServiceRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		// No interceptor configured: call the implementation directly.
		return srv.(LogsServiceServer).Export(ctx, in)
	}
	info := &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/opentelemetry.proto.collector.logs.v1.LogsService/Export",
	}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(LogsServiceServer).Export(ctx, req.(*ExportLogsServiceRequest))
	}
	return interceptor(ctx, in, info, handler)
}
// _LogsService_serviceDesc describes the LogsService service for gRPC
// registration: a single unary method (Export) and no streams.
var _LogsService_serviceDesc = grpc.ServiceDesc{
	ServiceName: "opentelemetry.proto.collector.logs.v1.LogsService",
	HandlerType: (*LogsServiceServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "Export",
			Handler:    _LogsService_Export_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "opentelemetry/proto/collector/logs/v1/logs_service.proto",
}
// Marshal serializes the message into a freshly allocated buffer of
// exactly m.Size() bytes.
func (m *ExportLogsServiceRequest) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes the message into dAtA, which must hold at least
// m.Size() bytes, and returns the number of bytes written.
func (m *ExportLogsServiceRequest) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer serializes the message into the tail of dAtA,
// writing fields back-to-front, and returns the number of bytes written.
func (m *ExportLogsServiceRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	// Unknown fields are preserved verbatim at the very end of the output.
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	if len(m.ResourceLogs) > 0 {
		// Elements are emitted in reverse so the final buffer reads in
		// ascending order front-to-back.
		for iNdEx := len(m.ResourceLogs) - 1; iNdEx >= 0; iNdEx-- {
			{
				size, err := m.ResourceLogs[iNdEx].MarshalToSizedBuffer(dAtA[:i])
				if err != nil {
					return 0, err
				}
				i -= size
				i = encodeVarintLogsService(dAtA, i, uint64(size))
			}
			i--
			dAtA[i] = 0xa // tag: field 1, wire type 2 (length-delimited)
		}
	}
	return len(dAtA) - i, nil
}
// Marshal serializes the message into a freshly allocated buffer of
// exactly m.Size() bytes.
func (m *ExportLogsServiceResponse) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalToSizedBuffer(dAtA[:size])
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo serializes the message into dAtA, which must hold at least
// m.Size() bytes, and returns the number of bytes written.
func (m *ExportLogsServiceResponse) MarshalTo(dAtA []byte) (int, error) {
	size := m.Size()
	return m.MarshalToSizedBuffer(dAtA[:size])
}
// MarshalToSizedBuffer serializes the message into the tail of dAtA; the
// message has no declared fields, so the only content is any preserved
// unknown fields.
func (m *ExportLogsServiceResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) {
	i := len(dAtA)
	_ = i
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		i -= len(m.XXX_unrecognized)
		copy(dAtA[i:], m.XXX_unrecognized)
	}
	return len(dAtA) - i, nil
}
// encodeVarintLogsService writes v as a protobuf base-128 varint ending
// just before offset, and returns the position of its first byte.
func encodeVarintLogsService(dAtA []byte, offset int, v uint64) int {
	offset -= sovLogsService(v)
	base := offset
	// Emit 7 bits per byte, setting the continuation bit on all but the last.
	for v >= 0x80 {
		dAtA[offset] = byte(v) | 0x80
		v >>= 7
		offset++
	}
	dAtA[offset] = byte(v)
	return base
}
// Size returns the serialized size of the message in bytes, matching
// exactly what MarshalToSizedBuffer will write. A nil message has size 0.
func (m *ExportLogsServiceRequest) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if len(m.ResourceLogs) > 0 {
		for _, e := range m.ResourceLogs {
			l = e.Size()
			// 1 byte of tag + varint length prefix + payload.
			n += 1 + l + sovLogsService(uint64(l))
		}
	}
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// Size returns the serialized size of the message in bytes; with no
// declared fields this is just the length of any preserved unknown
// fields. A nil message has size 0.
func (m *ExportLogsServiceResponse) Size() (n int) {
	if m == nil {
		return 0
	}
	var l int
	_ = l
	if m.XXX_unrecognized != nil {
		n += len(m.XXX_unrecognized)
	}
	return n
}
// sovLogsService returns the number of bytes needed to encode x as a
// protobuf base-128 varint (one byte per 7 significant bits, min 1).
func sovLogsService(x uint64) (n int) {
	return (math_bits.Len64(x|1) + 6) / 7
}

// sozLogsService returns the varint size of x after zigzag encoding,
// which maps small negative values to small unsigned ones.
func sozLogsService(x uint64) (n int) {
	zz := (x << 1) ^ uint64(int64(x)>>63)
	return sovLogsService(zz)
}
// Unmarshal decodes the wire-format bytes in dAtA into m, appending to
// any already-present ResourceLogs and preserving unknown fields in
// XXX_unrecognized.
func (m *ExportLogsServiceRequest) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number + wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowLogsService
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// End-group tag with no matching start-group is invalid here.
			return fmt.Errorf("proto: ExportLogsServiceRequest: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ExportLogsServiceRequest: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field ResourceLogs", wireType)
			}
			// Read the length prefix of the embedded ResourceLogs message.
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowLogsService
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= int(b&0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthLogsService
			}
			postIndex := iNdEx + msglen
			if postIndex < 0 {
				// iNdEx + msglen overflowed.
				return ErrInvalidLengthLogsService
			}
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.ResourceLogs = append(m.ResourceLogs, &v1.ResourceLogs{})
			if err := m.ResourceLogs[len(m.ResourceLogs)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			// Unknown field: measure it, then stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipLogsService(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthLogsService
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthLogsService
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// Unmarshal decodes the wire-format bytes in dAtA into m; since the
// message has no declared fields, every field encountered is preserved
// in XXX_unrecognized.
func (m *ExportLogsServiceResponse) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Read the field tag (field number + wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowLogsService
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= uint64(b&0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			// End-group tag with no matching start-group is invalid here.
			return fmt.Errorf("proto: ExportLogsServiceResponse: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ExportLogsServiceResponse: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		default:
			// Unknown field: measure it, then stash the raw bytes.
			iNdEx = preIndex
			skippy, err := skipLogsService(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthLogsService
			}
			if (iNdEx + skippy) < 0 {
				return ErrInvalidLengthLogsService
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipLogsService returns the number of bytes occupied by the wire-format
// field beginning at dAtA[0] (including any nested groups), so callers
// can skip or preserve unknown fields verbatim.
func skipLogsService(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	depth := 0 // current group-nesting depth
	for iNdEx < l {
		// Read the field tag varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowLogsService
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint: scan past continuation bytes.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowLogsService
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
		case 1:
			// Fixed 64-bit value.
			iNdEx += 8
		case 2:
			// Length-delimited: read the length varint, then skip the payload.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowLogsService
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if length < 0 {
				return 0, ErrInvalidLengthLogsService
			}
			iNdEx += length
		case 3:
			// Start-group marker.
			depth++
		case 4:
			// End-group marker must match a prior start-group.
			if depth == 0 {
				return 0, ErrUnexpectedEndOfGroupLogsService
			}
			depth--
		case 5:
			// Fixed 32-bit value.
			iNdEx += 4
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
		if iNdEx < 0 {
			return 0, ErrInvalidLengthLogsService
		}
		if depth == 0 {
			// Finished the top-level field and all nested groups.
			return iNdEx, nil
		}
	}
	return 0, io.ErrUnexpectedEOF
}
var (
	// ErrInvalidLengthLogsService reports a negative or overflowing length prefix.
	ErrInvalidLengthLogsService = fmt.Errorf("proto: negative length found during unmarshaling")
	// ErrIntOverflowLogsService reports a varint wider than 64 bits.
	ErrIntOverflowLogsService = fmt.Errorf("proto: integer overflow")
	// ErrUnexpectedEndOfGroupLogsService reports an end-group tag with no matching start-group.
	ErrUnexpectedEndOfGroupLogsService = fmt.Errorf("proto: unexpected end of group")
)

File diff suppressed because it is too large Load Diff

View File

@ -35,39 +35,39 @@ const (
Span_SPAN_KIND_UNSPECIFIED Span_SpanKind = 0 Span_SPAN_KIND_UNSPECIFIED Span_SpanKind = 0
// Indicates that the span represents an internal operation within an application, // Indicates that the span represents an internal operation within an application,
// as opposed to an operations happening at the boundaries. Default value. // as opposed to an operations happening at the boundaries. Default value.
Span_INTERNAL Span_SpanKind = 1 Span_SPAN_KIND_INTERNAL Span_SpanKind = 1
// Indicates that the span covers server-side handling of an RPC or other // Indicates that the span covers server-side handling of an RPC or other
// remote network request. // remote network request.
Span_SERVER Span_SpanKind = 2 Span_SPAN_KIND_SERVER Span_SpanKind = 2
// Indicates that the span describes a request to some remote service. // Indicates that the span describes a request to some remote service.
Span_CLIENT Span_SpanKind = 3 Span_SPAN_KIND_CLIENT Span_SpanKind = 3
// Indicates that the span describes a producer sending a message to a broker. // Indicates that the span describes a producer sending a message to a broker.
// Unlike CLIENT and SERVER, there is often no direct critical path latency relationship // Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
// between producer and consumer spans. A PRODUCER span ends when the message was accepted // between producer and consumer spans. A PRODUCER span ends when the message was accepted
// by the broker while the logical processing of the message might span a much longer time. // by the broker while the logical processing of the message might span a much longer time.
Span_PRODUCER Span_SpanKind = 4 Span_SPAN_KIND_PRODUCER Span_SpanKind = 4
// Indicates that the span describes consumer receiving a message from a broker. // Indicates that the span describes consumer receiving a message from a broker.
// Like the PRODUCER kind, there is often no direct critical path latency relationship // Like the PRODUCER kind, there is often no direct critical path latency relationship
// between producer and consumer spans. // between producer and consumer spans.
Span_CONSUMER Span_SpanKind = 5 Span_SPAN_KIND_CONSUMER Span_SpanKind = 5
) )
var Span_SpanKind_name = map[int32]string{ var Span_SpanKind_name = map[int32]string{
0: "SPAN_KIND_UNSPECIFIED", 0: "SPAN_KIND_UNSPECIFIED",
1: "INTERNAL", 1: "SPAN_KIND_INTERNAL",
2: "SERVER", 2: "SPAN_KIND_SERVER",
3: "CLIENT", 3: "SPAN_KIND_CLIENT",
4: "PRODUCER", 4: "SPAN_KIND_PRODUCER",
5: "CONSUMER", 5: "SPAN_KIND_CONSUMER",
} }
var Span_SpanKind_value = map[string]int32{ var Span_SpanKind_value = map[string]int32{
"SPAN_KIND_UNSPECIFIED": 0, "SPAN_KIND_UNSPECIFIED": 0,
"INTERNAL": 1, "SPAN_KIND_INTERNAL": 1,
"SERVER": 2, "SPAN_KIND_SERVER": 2,
"CLIENT": 3, "SPAN_KIND_CLIENT": 3,
"PRODUCER": 4, "SPAN_KIND_PRODUCER": 4,
"CONSUMER": 5, "SPAN_KIND_CONSUMER": 5,
} }
func (x Span_SpanKind) String() string { func (x Span_SpanKind) String() string {
@ -83,63 +83,63 @@ func (Span_SpanKind) EnumDescriptor() ([]byte, []int) {
type Status_StatusCode int32 type Status_StatusCode int32
const ( const (
Status_Ok Status_StatusCode = 0 Status_STATUS_CODE_OK Status_StatusCode = 0
Status_Cancelled Status_StatusCode = 1 Status_STATUS_CODE_CANCELLED Status_StatusCode = 1
Status_UnknownError Status_StatusCode = 2 Status_STATUS_CODE_UNKNOWN_ERROR Status_StatusCode = 2
Status_InvalidArgument Status_StatusCode = 3 Status_STATUS_CODE_INVALID_ARGUMENT Status_StatusCode = 3
Status_DeadlineExceeded Status_StatusCode = 4 Status_STATUS_CODE_DEADLINE_EXCEEDED Status_StatusCode = 4
Status_NotFound Status_StatusCode = 5 Status_STATUS_CODE_NOT_FOUND Status_StatusCode = 5
Status_AlreadyExists Status_StatusCode = 6 Status_STATUS_CODE_ALREADY_EXISTS Status_StatusCode = 6
Status_PermissionDenied Status_StatusCode = 7 Status_STATUS_CODE_PERMISSION_DENIED Status_StatusCode = 7
Status_ResourceExhausted Status_StatusCode = 8 Status_STATUS_CODE_RESOURCE_EXHAUSTED Status_StatusCode = 8
Status_FailedPrecondition Status_StatusCode = 9 Status_STATUS_CODE_FAILED_PRECONDITION Status_StatusCode = 9
Status_Aborted Status_StatusCode = 10 Status_STATUS_CODE_ABORTED Status_StatusCode = 10
Status_OutOfRange Status_StatusCode = 11 Status_STATUS_CODE_OUT_OF_RANGE Status_StatusCode = 11
Status_Unimplemented Status_StatusCode = 12 Status_STATUS_CODE_UNIMPLEMENTED Status_StatusCode = 12
Status_InternalError Status_StatusCode = 13 Status_STATUS_CODE_INTERNAL_ERROR Status_StatusCode = 13
Status_Unavailable Status_StatusCode = 14 Status_STATUS_CODE_UNAVAILABLE Status_StatusCode = 14
Status_DataLoss Status_StatusCode = 15 Status_STATUS_CODE_DATA_LOSS Status_StatusCode = 15
Status_Unauthenticated Status_StatusCode = 16 Status_STATUS_CODE_UNAUTHENTICATED Status_StatusCode = 16
) )
var Status_StatusCode_name = map[int32]string{ var Status_StatusCode_name = map[int32]string{
0: "Ok", 0: "STATUS_CODE_OK",
1: "Cancelled", 1: "STATUS_CODE_CANCELLED",
2: "UnknownError", 2: "STATUS_CODE_UNKNOWN_ERROR",
3: "InvalidArgument", 3: "STATUS_CODE_INVALID_ARGUMENT",
4: "DeadlineExceeded", 4: "STATUS_CODE_DEADLINE_EXCEEDED",
5: "NotFound", 5: "STATUS_CODE_NOT_FOUND",
6: "AlreadyExists", 6: "STATUS_CODE_ALREADY_EXISTS",
7: "PermissionDenied", 7: "STATUS_CODE_PERMISSION_DENIED",
8: "ResourceExhausted", 8: "STATUS_CODE_RESOURCE_EXHAUSTED",
9: "FailedPrecondition", 9: "STATUS_CODE_FAILED_PRECONDITION",
10: "Aborted", 10: "STATUS_CODE_ABORTED",
11: "OutOfRange", 11: "STATUS_CODE_OUT_OF_RANGE",
12: "Unimplemented", 12: "STATUS_CODE_UNIMPLEMENTED",
13: "InternalError", 13: "STATUS_CODE_INTERNAL_ERROR",
14: "Unavailable", 14: "STATUS_CODE_UNAVAILABLE",
15: "DataLoss", 15: "STATUS_CODE_DATA_LOSS",
16: "Unauthenticated", 16: "STATUS_CODE_UNAUTHENTICATED",
} }
var Status_StatusCode_value = map[string]int32{ var Status_StatusCode_value = map[string]int32{
"Ok": 0, "STATUS_CODE_OK": 0,
"Cancelled": 1, "STATUS_CODE_CANCELLED": 1,
"UnknownError": 2, "STATUS_CODE_UNKNOWN_ERROR": 2,
"InvalidArgument": 3, "STATUS_CODE_INVALID_ARGUMENT": 3,
"DeadlineExceeded": 4, "STATUS_CODE_DEADLINE_EXCEEDED": 4,
"NotFound": 5, "STATUS_CODE_NOT_FOUND": 5,
"AlreadyExists": 6, "STATUS_CODE_ALREADY_EXISTS": 6,
"PermissionDenied": 7, "STATUS_CODE_PERMISSION_DENIED": 7,
"ResourceExhausted": 8, "STATUS_CODE_RESOURCE_EXHAUSTED": 8,
"FailedPrecondition": 9, "STATUS_CODE_FAILED_PRECONDITION": 9,
"Aborted": 10, "STATUS_CODE_ABORTED": 10,
"OutOfRange": 11, "STATUS_CODE_OUT_OF_RANGE": 11,
"Unimplemented": 12, "STATUS_CODE_UNIMPLEMENTED": 12,
"InternalError": 13, "STATUS_CODE_INTERNAL_ERROR": 13,
"Unavailable": 14, "STATUS_CODE_UNAVAILABLE": 14,
"DataLoss": 15, "STATUS_CODE_DATA_LOSS": 15,
"Unauthenticated": 16, "STATUS_CODE_UNAUTHENTICATED": 16,
} }
func (x Status_StatusCode) String() string { func (x Status_StatusCode) String() string {
@ -725,7 +725,7 @@ func (m *Status) GetCode() Status_StatusCode {
if m != nil { if m != nil {
return m.Code return m.Code
} }
return Status_Ok return Status_STATUS_CODE_OK
} }
func (m *Status) GetMessage() string { func (m *Status) GetMessage() string {
@ -751,72 +751,75 @@ func init() {
} }
var fileDescriptor_5c407ac9c675a601 = []byte{ var fileDescriptor_5c407ac9c675a601 = []byte{
// 1031 bytes of a gzipped FileDescriptorProto // 1088 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xcb, 0x6e, 0x23, 0x45, 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0x4f, 0x6f, 0xe3, 0xc4,
0x14, 0x9d, 0xf6, 0x3b, 0xd7, 0x8f, 0x74, 0x6a, 0x5e, 0x3d, 0x19, 0xc8, 0x58, 0xd6, 0x48, 0x18, 0x1b, 0x5e, 0xb7, 0x4e, 0xda, 0x7d, 0xdb, 0x66, 0xbd, 0xb3, 0xfb, 0xdb, 0xba, 0xff, 0xf3, 0x0b,
0x46, 0x63, 0x93, 0x20, 0xa4, 0x41, 0x02, 0x81, 0xc7, 0xee, 0x20, 0x2b, 0xc1, 0x31, 0xe5, 0x78, 0x2b, 0x11, 0x58, 0x6d, 0x42, 0x8b, 0x90, 0x16, 0x09, 0x04, 0xae, 0x3d, 0xed, 0x5a, 0x75, 0xed,
0x16, 0x6c, 0xac, 0x8a, 0xab, 0xf0, 0x94, 0xd2, 0xae, 0xb2, 0xaa, 0xcb, 0xc6, 0x59, 0xf0, 0x0b, 0x30, 0xb6, 0xcb, 0xc2, 0xc5, 0x72, 0x9b, 0x51, 0x65, 0x35, 0x19, 0x47, 0xb6, 0x53, 0xb5, 0x07,
0xfc, 0x06, 0x5f, 0xc1, 0x0e, 0x09, 0x96, 0x6c, 0xd8, 0x22, 0x94, 0x1d, 0x7f, 0x81, 0xaa, 0xba, 0xbe, 0x02, 0x37, 0x0e, 0x48, 0x7c, 0x1c, 0x24, 0x38, 0x72, 0xe1, 0x8a, 0x50, 0xbf, 0x08, 0x68,
0x3b, 0x13, 0x47, 0x89, 0x33, 0x9b, 0x6c, 0xec, 0xaa, 0x7b, 0xcf, 0xb9, 0xe7, 0x9e, 0xba, 0xd5, 0xc6, 0x4e, 0x1b, 0x87, 0x36, 0xdd, 0x4b, 0x2f, 0xc9, 0xcc, 0xfb, 0x3e, 0xcf, 0xfb, 0xbc, 0xff,
0x6e, 0x43, 0x5d, 0xce, 0x98, 0xd0, 0x2c, 0x60, 0x53, 0xa6, 0xd5, 0x59, 0x73, 0xa6, 0xa4, 0x96, 0x46, 0x09, 0x34, 0xe3, 0x01, 0x65, 0x19, 0xed, 0xd1, 0x3e, 0xcd, 0x92, 0xcb, 0xf6, 0x20, 0x89,
0x4d, 0xad, 0xc8, 0x98, 0x35, 0x17, 0xbb, 0xd1, 0xa2, 0x61, 0x83, 0xe8, 0x83, 0x15, 0x64, 0x14, 0xb3, 0xb8, 0x9d, 0x25, 0xe1, 0x09, 0x6d, 0x9f, 0x6f, 0xe7, 0x87, 0x96, 0x30, 0xa2, 0xf5, 0x12,
0x6c, 0x44, 0x80, 0xc5, 0xee, 0xf6, 0x27, 0xd7, 0xd5, 0x19, 0xcb, 0xe9, 0x54, 0x0a, 0x53, 0x28, 0x32, 0x37, 0xb6, 0x72, 0xc0, 0xf9, 0xf6, 0xea, 0xc7, 0xb7, 0xc5, 0x39, 0x89, 0xfb, 0xfd, 0x98,
0x5a, 0x45, 0xa4, 0xed, 0xc6, 0x75, 0x58, 0xc5, 0x42, 0x39, 0x57, 0x91, 0x6c, 0xb2, 0x8e, 0xf0, 0xf1, 0x40, 0xf9, 0x29, 0x27, 0xad, 0xb6, 0x6e, 0xc3, 0x26, 0x34, 0x8d, 0x87, 0x49, 0x2e, 0x3b,
0xb5, 0xbf, 0x1d, 0x28, 0xe3, 0x38, 0x34, 0x98, 0x11, 0x11, 0x22, 0x1f, 0x0a, 0x09, 0xc6, 0x73, 0x3a, 0xe7, 0xf8, 0xc6, 0x9f, 0x12, 0x2c, 0x91, 0xc2, 0xe4, 0x0e, 0x42, 0x96, 0x22, 0x0c, 0xf3,
0xaa, 0x4e, 0xbd, 0xb8, 0xf7, 0x71, 0xe3, 0xba, 0xf6, 0x2e, 0x0a, 0x2d, 0x76, 0x1b, 0x49, 0x05, 0x23, 0x8c, 0x2a, 0xd5, 0xa5, 0xe6, 0xc2, 0xce, 0x47, 0xad, 0xdb, 0xd2, 0xbb, 0x0e, 0x74, 0xbe,
0x7c, 0x41, 0x45, 0x3f, 0xc3, 0x87, 0x5c, 0x84, 0x5a, 0xcd, 0xa7, 0x4c, 0x68, 0xa2, 0xb9, 0x14, 0xdd, 0x1a, 0x45, 0x20, 0xd7, 0x54, 0xf4, 0x03, 0x6c, 0x44, 0x2c, 0xcd, 0x92, 0x61, 0x9f, 0xb2,
0xa3, 0x80, 0x9f, 0x28, 0xa2, 0xce, 0x46, 0xa1, 0xd1, 0xf1, 0x52, 0xd5, 0x74, 0xbd, 0xb8, 0xf7, 0x2c, 0xcc, 0xa2, 0x98, 0x05, 0xbd, 0xe8, 0x38, 0x09, 0x93, 0xcb, 0x20, 0xe5, 0x3a, 0xea, 0x4c,
0x45, 0x63, 0x9d, 0xf5, 0x46, 0x77, 0xb5, 0xc4, 0x61, 0x54, 0xc1, 0x36, 0x8a, 0x9f, 0xf2, 0x9b, 0x7d, 0xb6, 0xb9, 0xb0, 0xf3, 0x79, 0x6b, 0x5a, 0xe9, 0x2d, 0xb3, 0x1c, 0xc2, 0xca, 0x23, 0x88,
0x93, 0xb5, 0x3f, 0x1c, 0x78, 0xba, 0x86, 0x8c, 0x04, 0x3c, 0xbe, 0xa1, 0xbd, 0xd8, 0xf4, 0xe7, 0x44, 0xc9, 0x5a, 0x74, 0xb7, 0xb3, 0xf1, 0x9b, 0x04, 0x6b, 0x53, 0xc8, 0x88, 0xc1, 0xf2, 0x1d,
0xd7, 0x36, 0x16, 0x9f, 0xf5, 0x8d, 0x9d, 0xe1, 0x47, 0xd7, 0x37, 0x85, 0x5e, 0x41, 0xf6, 0xb2, 0xe9, 0x15, 0x45, 0x7f, 0x76, 0x6b, 0x62, 0x45, 0xaf, 0xef, 0xcc, 0x8c, 0xbc, 0xb8, 0x3d, 0x29,
0xed, 0xda, 0x7a, 0xdb, 0xa6, 0x47, 0x1c, 0x11, 0x6a, 0xff, 0x6d, 0x40, 0xc6, 0xec, 0xd1, 0x13, 0xf4, 0x06, 0x2a, 0xe3, 0x65, 0x37, 0xa6, 0x97, 0xcd, 0x73, 0x24, 0x39, 0xa1, 0xf1, 0x0b, 0x80,
0x28, 0x58, 0xc0, 0x88, 0x53, 0xdb, 0x63, 0x09, 0xe7, 0xed, 0xbe, 0x4b, 0xd1, 0x63, 0xc8, 0x1b, 0xcc, 0xef, 0x68, 0x05, 0xe6, 0x05, 0x20, 0x88, 0xba, 0x22, 0xc7, 0x45, 0x32, 0x27, 0xee, 0x66,
0xb0, 0xc9, 0xa4, 0x6c, 0x26, 0x67, 0xb6, 0x5d, 0x8a, 0x9e, 0x41, 0x31, 0xe2, 0x84, 0x9a, 0x68, 0x17, 0x2d, 0xc3, 0x1c, 0x07, 0x73, 0xcf, 0x8c, 0xf0, 0x54, 0xf9, 0xd5, 0xec, 0xa2, 0x2d, 0x58,
0xe6, 0xa5, 0xab, 0x4e, 0x7d, 0x03, 0x83, 0x0d, 0x0d, 0x4c, 0x04, 0x3d, 0x87, 0xca, 0x8c, 0x28, 0xc8, 0x39, 0x69, 0x16, 0x66, 0x54, 0x9d, 0xad, 0x4b, 0xcd, 0xc7, 0x04, 0x84, 0xc9, 0xe5, 0x16,
0x26, 0xf4, 0x28, 0x29, 0x90, 0xb1, 0x05, 0x4a, 0x51, 0x74, 0x10, 0x95, 0x41, 0x90, 0x11, 0x64, 0xf4, 0x12, 0x6a, 0x83, 0x30, 0xa1, 0x2c, 0x0b, 0x46, 0x01, 0x64, 0x11, 0x60, 0x31, 0xb7, 0xba,
0xca, 0xbc, 0xac, 0xe5, 0xdb, 0x35, 0xfa, 0x1a, 0x32, 0xa7, 0x5c, 0x50, 0x2f, 0x57, 0x75, 0xea, 0x79, 0x18, 0x04, 0x32, 0x0b, 0xfb, 0x54, 0xad, 0x08, 0xbe, 0x38, 0xa3, 0xaf, 0x40, 0x3e, 0x8b,
0x95, 0xbd, 0x17, 0xb7, 0x1b, 0xb2, 0x1f, 0x07, 0x5c, 0x50, 0x6c, 0x89, 0xa8, 0x09, 0x0f, 0x42, 0x58, 0x57, 0xad, 0xd6, 0xa5, 0x66, 0x6d, 0xe7, 0xd5, 0xfd, 0x05, 0x89, 0x8f, 0x83, 0x88, 0x75,
0x4d, 0x94, 0x1e, 0x69, 0x3e, 0x65, 0xa3, 0xb9, 0xe0, 0xcb, 0x91, 0x20, 0x42, 0x7a, 0xf9, 0xaa, 0x89, 0x20, 0xa2, 0x36, 0x3c, 0x4f, 0xb3, 0x30, 0xc9, 0x82, 0x2c, 0xea, 0xd3, 0x60, 0xc8, 0xa2,
0x53, 0xcf, 0xe1, 0x2d, 0x9b, 0x3b, 0xe6, 0x53, 0x36, 0x14, 0x7c, 0xd9, 0x23, 0x42, 0xa2, 0x17, 0x8b, 0x80, 0x85, 0x2c, 0x56, 0xe7, 0xea, 0x52, 0xb3, 0x4a, 0x9e, 0x0a, 0x9f, 0x17, 0xf5, 0xa9,
0x80, 0x98, 0xa0, 0x57, 0xe1, 0x05, 0x0b, 0xdf, 0x64, 0x82, 0xae, 0x80, 0xbf, 0x05, 0x20, 0x5a, 0xcf, 0xa2, 0x0b, 0x3b, 0x64, 0x31, 0x7a, 0x05, 0x88, 0xb2, 0xee, 0x24, 0x7c, 0x5e, 0xc0, 0x9f,
0x2b, 0x7e, 0x32, 0xd7, 0x2c, 0xf4, 0x36, 0xec, 0xa9, 0x7f, 0x74, 0xcb, 0x4c, 0x0f, 0xd8, 0xd9, 0x50, 0xd6, 0x2d, 0x81, 0xf7, 0x01, 0xc2, 0x2c, 0x4b, 0xa2, 0xe3, 0x61, 0x46, 0x53, 0xf5, 0xb1,
0x1b, 0x12, 0xcc, 0x19, 0xbe, 0x44, 0x45, 0xaf, 0xc0, 0xa3, 0x4a, 0xce, 0x66, 0x8c, 0x8e, 0xde, 0xe8, 0xfa, 0x87, 0xf7, 0xcc, 0xf4, 0x80, 0x5e, 0x1e, 0x85, 0xbd, 0x21, 0x25, 0x63, 0x54, 0xf4,
0x45, 0x47, 0x63, 0x39, 0x17, 0xda, 0x83, 0xaa, 0x53, 0x2f, 0xe3, 0x47, 0x71, 0xbe, 0x75, 0x91, 0x06, 0xd4, 0x6e, 0x12, 0x0f, 0x06, 0xb4, 0x1b, 0xdc, 0x58, 0x83, 0x93, 0x78, 0xc8, 0x32, 0x15,
0x6e, 0x9b, 0x2c, 0xfa, 0x06, 0x72, 0x6c, 0xc1, 0x84, 0x0e, 0xbd, 0xa2, 0x95, 0xaf, 0xbf, 0xc7, 0xea, 0x52, 0x73, 0x89, 0xbc, 0x28, 0xfc, 0xda, 0xb5, 0x5b, 0xe7, 0x5e, 0xf4, 0x35, 0x54, 0xe9,
0x19, 0xf9, 0x86, 0x80, 0x63, 0x1e, 0xfa, 0x14, 0x1e, 0x24, 0xda, 0x51, 0x24, 0xd6, 0x2d, 0x59, 0x39, 0x65, 0x59, 0xaa, 0x2e, 0x08, 0xf9, 0xe6, 0x7b, 0xf4, 0x08, 0x73, 0x02, 0x29, 0x78, 0xe8,
0x5d, 0x14, 0xe7, 0x2c, 0x27, 0xd6, 0xfc, 0x0a, 0xb2, 0x01, 0x17, 0xa7, 0xa1, 0x57, 0x5e, 0xe3, 0x13, 0x78, 0x3e, 0xd2, 0xce, 0x2d, 0x85, 0xee, 0xa2, 0xd0, 0x45, 0x85, 0x4f, 0x70, 0x0a, 0xcd,
0x78, 0x55, 0xf2, 0x90, 0x8b, 0x53, 0x1c, 0xb1, 0x50, 0x03, 0xee, 0x27, 0x82, 0x36, 0x10, 0xeb, 0x2f, 0xa1, 0xd2, 0x8b, 0xd8, 0x59, 0xaa, 0x2e, 0x4d, 0xa9, 0xb8, 0x2c, 0x69, 0x45, 0xec, 0x8c,
0x55, 0xac, 0xde, 0x56, 0x9c, 0x32, 0x84, 0x58, 0xee, 0x4b, 0xc8, 0x99, 0x9b, 0x35, 0x0f, 0xbd, 0xe4, 0x2c, 0xd4, 0x82, 0x67, 0x23, 0x41, 0x61, 0x28, 0xf4, 0x6a, 0x42, 0xef, 0x69, 0xe1, 0xe2,
0x4d, 0xfb, 0xd4, 0x3c, 0xbf, 0x45, 0xcf, 0x62, 0x71, 0xcc, 0xd9, 0xfe, 0xdd, 0x81, 0xac, 0x6d, 0x84, 0x42, 0xee, 0x0b, 0xa8, 0xf2, 0xcd, 0x1a, 0xa6, 0xea, 0x13, 0xf1, 0x6a, 0x5e, 0xde, 0xa3,
0xde, 0x5c, 0xc3, 0x2b, 0x63, 0x75, 0xec, 0x58, 0x4b, 0xfa, 0xf2, 0x4c, 0x93, 0x6b, 0x98, 0xba, 0x27, 0xb0, 0xa4, 0xe0, 0xac, 0xfe, 0x2a, 0x41, 0x45, 0x24, 0xcf, 0xd7, 0x70, 0x62, 0xac, 0x92,
0x74, 0x0d, 0x57, 0xe7, 0x9c, 0xbe, 0x9b, 0x39, 0x67, 0xd6, 0xcd, 0x79, 0xfb, 0x1f, 0x07, 0x32, 0x18, 0xeb, 0x62, 0x36, 0x3e, 0xd3, 0xd1, 0x1a, 0xce, 0x8c, 0xad, 0x61, 0x79, 0xce, 0xb3, 0x0f,
0xe6, 0x4c, 0xee, 0xe6, 0x09, 0x5d, 0x35, 0x98, 0xb9, 0x1b, 0x83, 0xd9, 0x75, 0x06, 0x6b, 0x13, 0x33, 0x67, 0x79, 0xda, 0x9c, 0x57, 0xff, 0x92, 0x40, 0xe6, 0x3d, 0x79, 0x98, 0x17, 0x5a, 0x2e,
0x28, 0x24, 0xcf, 0x2e, 0x7a, 0x02, 0x0f, 0x07, 0xfd, 0x56, 0x6f, 0x74, 0xd0, 0xed, 0x75, 0x46, 0x50, 0x7e, 0x98, 0x02, 0x2b, 0xd3, 0x0a, 0x6c, 0xfc, 0x2c, 0xc1, 0xfc, 0xe8, 0xf1, 0xa2, 0x15,
0xc3, 0xde, 0xa0, 0xef, 0xb7, 0xbb, 0xfb, 0x5d, 0xbf, 0xe3, 0xde, 0x43, 0x25, 0x28, 0x74, 0x7b, 0xf8, 0x9f, 0xdb, 0xd1, 0xec, 0xe0, 0xc0, 0xb4, 0x8d, 0xc0, 0xb7, 0xdd, 0x0e, 0xd6, 0xcd, 0x3d,
0xc7, 0x3e, 0xee, 0xb5, 0x0e, 0x5d, 0x07, 0x01, 0xe4, 0x06, 0x3e, 0x7e, 0xe3, 0x63, 0x37, 0x65, 0x13, 0x1b, 0xca, 0x23, 0xf4, 0x02, 0xd0, 0x8d, 0xcb, 0xb4, 0x3d, 0x4c, 0x6c, 0xcd, 0x52, 0x24,
0xd6, 0xed, 0xc3, 0xae, 0xdf, 0x3b, 0x76, 0xd3, 0x06, 0xd5, 0xc7, 0x47, 0x9d, 0x61, 0xdb, 0xc7, 0xf4, 0x1c, 0x94, 0x1b, 0xbb, 0x8b, 0xc9, 0x11, 0x26, 0xca, 0x4c, 0xd9, 0xaa, 0x5b, 0x26, 0xb6,
0x6e, 0xc6, 0xec, 0xda, 0x47, 0xbd, 0xc1, 0xf0, 0x3b, 0x1f, 0xbb, 0xd9, 0xda, 0xaf, 0x69, 0xc8, 0x3d, 0x65, 0xb6, 0x1c, 0xa3, 0x43, 0x1c, 0xc3, 0xd7, 0x31, 0x51, 0xe4, 0xb2, 0x5d, 0x77, 0x6c,
0x45, 0x77, 0x04, 0xb5, 0x21, 0x33, 0x96, 0x34, 0x7a, 0x05, 0x55, 0xf6, 0x9a, 0xef, 0x73, 0xaf, 0xd7, 0x3f, 0xc4, 0x44, 0xa9, 0x34, 0xfe, 0x91, 0xa1, 0x9a, 0xaf, 0x15, 0xd2, 0x41, 0x3e, 0x89,
0xe2, 0xaf, 0xb6, 0xa4, 0x0c, 0x5b, 0x32, 0xf2, 0x20, 0x3f, 0x65, 0x61, 0x48, 0x26, 0xc9, 0x9d, 0xbb, 0xf9, 0xaf, 0x56, 0x6d, 0xa7, 0xfd, 0x3e, 0xab, 0x58, 0x7c, 0xe9, 0x71, 0x97, 0x12, 0x41,
0x49, 0xb6, 0xb5, 0xdf, 0x52, 0x00, 0xef, 0xe0, 0x28, 0x07, 0xa9, 0xa3, 0x53, 0xf7, 0x1e, 0x2a, 0x46, 0x2a, 0xcc, 0xf5, 0x69, 0x9a, 0x86, 0xa7, 0xa3, 0x35, 0x1b, 0x5d, 0x1b, 0x3f, 0xc9, 0x00,
0xc3, 0x46, 0x9b, 0x88, 0x31, 0x0b, 0x02, 0x46, 0x5d, 0x07, 0xb9, 0x50, 0x1a, 0x8a, 0x53, 0x21, 0x37, 0x70, 0x84, 0xa0, 0xe6, 0x7a, 0x9a, 0xe7, 0xbb, 0x81, 0xee, 0x18, 0x38, 0x70, 0x0e, 0x94,
0x7f, 0x12, 0xbe, 0x52, 0x52, 0xb9, 0x29, 0x74, 0x1f, 0x36, 0xbb, 0x62, 0x41, 0x02, 0x4e, 0x5b, 0x47, 0xa2, 0x37, 0x63, 0x36, 0x5d, 0xb3, 0x75, 0x6c, 0x59, 0xd8, 0x50, 0x24, 0xb4, 0x01, 0x2b,
0x6a, 0x62, 0x7f, 0xe6, 0xdd, 0x34, 0x7a, 0x00, 0x6e, 0x87, 0x11, 0x1a, 0x70, 0xc1, 0xfc, 0xe5, 0xe3, 0x2e, 0xdf, 0x3e, 0xb0, 0x9d, 0x6f, 0xed, 0x00, 0x13, 0xe2, 0xf0, 0x66, 0xd4, 0x61, 0x7d,
0x98, 0x31, 0xca, 0x68, 0x64, 0xad, 0x27, 0xf5, 0xbe, 0x9c, 0x0b, 0xea, 0x66, 0xd1, 0x16, 0x94, 0xdc, 0x6d, 0xda, 0x47, 0x9a, 0x65, 0x1a, 0x81, 0x46, 0xf6, 0xfd, 0xc3, 0xbc, 0x31, 0xff, 0x87,
0x5b, 0x81, 0x62, 0x84, 0x9e, 0xf9, 0x4b, 0x1e, 0xea, 0xd0, 0xcd, 0x19, 0x5a, 0x9f, 0xa9, 0x29, 0x8d, 0x71, 0x84, 0x81, 0x35, 0xc3, 0x32, 0x6d, 0x1c, 0xe0, 0x77, 0x3a, 0xc6, 0x06, 0x36, 0x14,
0x0f, 0x43, 0x2e, 0x45, 0x87, 0x09, 0xce, 0xa8, 0x9b, 0x47, 0x0f, 0x61, 0x2b, 0x79, 0x9d, 0xfa, 0x79, 0x52, 0xde, 0x76, 0xbc, 0x60, 0xcf, 0xf1, 0x6d, 0x43, 0xa9, 0xa0, 0x4d, 0x58, 0x1d, 0x77,
0xcb, 0xb7, 0x64, 0x1e, 0x6a, 0x46, 0xdd, 0x02, 0x7a, 0x04, 0x68, 0x9f, 0xf0, 0x80, 0xd1, 0xbe, 0x69, 0x16, 0xc1, 0x9a, 0xf1, 0x5d, 0x80, 0xdf, 0x99, 0xae, 0xe7, 0x2a, 0xd5, 0xc9, 0xe8, 0x1d,
0x62, 0x63, 0x29, 0x28, 0x37, 0x6f, 0x17, 0x77, 0x03, 0x15, 0x21, 0xdf, 0x3a, 0x91, 0xca, 0x80, 0x4c, 0x0e, 0x4d, 0xd7, 0x35, 0x1d, 0x3b, 0x30, 0xb0, 0xcd, 0xa7, 0x3b, 0x87, 0x1a, 0xb0, 0x39,
0x00, 0x55, 0x00, 0x8e, 0xe6, 0xfa, 0xe8, 0x47, 0x4c, 0xc4, 0x84, 0xb9, 0x45, 0x23, 0x3a, 0x14, 0x0e, 0x21, 0xd8, 0x75, 0x7c, 0xa2, 0xf3, 0x04, 0xde, 0x6a, 0xbe, 0xeb, 0x61, 0x43, 0x99, 0x47,
0x7c, 0x3a, 0x33, 0xc7, 0x26, 0x0c, 0xa4, 0x64, 0x42, 0x5d, 0xa1, 0x99, 0x12, 0x24, 0x88, 0x3c, 0x1f, 0xc0, 0xd6, 0x38, 0x66, 0x4f, 0x33, 0x2d, 0xcc, 0xc7, 0x88, 0x75, 0xc7, 0x36, 0x4c, 0xcf,
0x95, 0xd1, 0x26, 0x14, 0x87, 0x82, 0x2c, 0x08, 0x0f, 0xc8, 0x49, 0xc0, 0xdc, 0x8a, 0xe9, 0xbc, 0x74, 0x6c, 0xe5, 0x31, 0x5a, 0x86, 0x67, 0xa5, 0x5c, 0x76, 0x1d, 0xc2, 0xd9, 0x80, 0xd6, 0x41,
0x43, 0x34, 0x39, 0x94, 0x61, 0xe8, 0x6e, 0x1a, 0xcb, 0x43, 0x41, 0xe6, 0xfa, 0x2d, 0x13, 0x9a, 0x2d, 0xb5, 0xd4, 0xf7, 0x02, 0x67, 0x2f, 0x20, 0x9a, 0xbd, 0x8f, 0x95, 0x85, 0xff, 0x76, 0xd0,
0x8f, 0x89, 0x29, 0xe3, 0xbe, 0xfe, 0xc5, 0xf9, 0xf3, 0x7c, 0xc7, 0xf9, 0xeb, 0x7c, 0xc7, 0xf9, 0x3c, 0xec, 0x58, 0x98, 0x77, 0x07, 0x1b, 0xca, 0xe2, 0x64, 0x85, 0xa3, 0xf5, 0x2b, 0x3a, 0xbc,
0xf7, 0x7c, 0xc7, 0x81, 0x67, 0x5c, 0xae, 0x9d, 0xd0, 0x6b, 0x38, 0x36, 0xab, 0xbe, 0x09, 0xf6, 0x84, 0xd6, 0x60, 0xb9, 0x4c, 0xd7, 0x8e, 0x34, 0xd3, 0xd2, 0x76, 0x2d, 0xac, 0xd4, 0x26, 0x3b,
0x9d, 0x1f, 0xbe, 0x9f, 0x5c, 0x85, 0x73, 0xd9, 0x94, 0x9a, 0x05, 0x4d, 0xb6, 0x9c, 0x19, 0x33, 0x67, 0x68, 0x9e, 0x16, 0x58, 0x8e, 0xeb, 0x2a, 0x4f, 0xd0, 0x16, 0xac, 0x4d, 0xf0, 0x7c, 0xef,
0x2a, 0x6c, 0x4a, 0x1d, 0xcc, 0x9a, 0x3c, 0x6e, 0xb5, 0xb9, 0x82, 0x7e, 0x69, 0x8b, 0xbf, 0x9c, 0x2d, 0xb6, 0x3d, 0x53, 0xd7, 0xb8, 0xb0, 0xb2, 0xfb, 0xa3, 0xf4, 0xfb, 0xd5, 0xa6, 0xf4, 0xc7,
0x30, 0x71, 0xf1, 0x77, 0xea, 0x24, 0x67, 0x63, 0x9f, 0xfd, 0x1f, 0x00, 0x00, 0xff, 0xff, 0x47, 0xd5, 0xa6, 0xf4, 0xf7, 0xd5, 0xa6, 0x04, 0x5b, 0x51, 0x3c, 0x75, 0xf3, 0x76, 0xc1, 0xe3, 0xa7,
0x96, 0xd8, 0x1b, 0x75, 0x09, 0x00, 0x00, 0x0e, 0x37, 0x76, 0xa4, 0xef, 0xbf, 0x39, 0x9d, 0x84, 0x47, 0x71, 0x3b, 0xce, 0x68, 0xaf, 0x4d,
0x2f, 0x06, 0x71, 0x92, 0xd1, 0x24, 0x6d, 0xc7, 0x59, 0x6f, 0xd0, 0x8e, 0x58, 0x46, 0x13, 0x16,
0xf6, 0xda, 0x25, 0xf4, 0x6b, 0x11, 0xfc, 0xf5, 0x29, 0x65, 0xd7, 0xff, 0x2c, 0x8f, 0xab, 0xc2,
0xf6, 0xe9, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcd, 0xf7, 0xd2, 0xe8, 0x80, 0x0a, 0x00, 0x00,
} }
func (m *ResourceSpans) Marshal() (dAtA []byte, err error) { func (m *ResourceSpans) Marshal() (dAtA []byte, err error) {

View File

@ -193,23 +193,26 @@ func sink(ctx context.Context, in <-chan result) ([]*metricpb.ResourceMetrics, e
rb.InstrumentationLibraryBatches[res.InstrumentationLibrary] = mb rb.InstrumentationLibraryBatches[res.InstrumentationLibrary] = mb
} }
mID := res.Metric.GetMetricDescriptor().String() mID := res.Metric.GetName()
m, ok := mb[mID] m, ok := mb[mID]
if !ok { if !ok {
mb[mID] = res.Metric mb[mID] = res.Metric
continue continue
} }
if len(res.Metric.Int64DataPoints) > 0 { switch res.Metric.Data.(type) {
m.Int64DataPoints = append(m.Int64DataPoints, res.Metric.Int64DataPoints...) case *metricpb.Metric_IntGauge:
} m.GetIntGauge().DataPoints = append(m.GetIntGauge().DataPoints, res.Metric.GetIntGauge().DataPoints...)
if len(res.Metric.DoubleDataPoints) > 0 { case *metricpb.Metric_IntHistogram:
m.DoubleDataPoints = append(m.DoubleDataPoints, res.Metric.DoubleDataPoints...) m.GetIntHistogram().DataPoints = append(m.GetIntHistogram().DataPoints, res.Metric.GetIntHistogram().DataPoints...)
} case *metricpb.Metric_IntSum:
if len(res.Metric.HistogramDataPoints) > 0 { m.GetIntSum().DataPoints = append(m.GetIntSum().DataPoints, res.Metric.GetIntSum().DataPoints...)
m.HistogramDataPoints = append(m.HistogramDataPoints, res.Metric.HistogramDataPoints...) case *metricpb.Metric_DoubleGauge:
} m.GetDoubleGauge().DataPoints = append(m.GetDoubleGauge().DataPoints, res.Metric.GetDoubleGauge().DataPoints...)
if len(res.Metric.SummaryDataPoints) > 0 { case *metricpb.Metric_DoubleHistogram:
m.SummaryDataPoints = append(m.SummaryDataPoints, res.Metric.SummaryDataPoints...) m.GetDoubleHistogram().DataPoints = append(m.GetDoubleHistogram().DataPoints, res.Metric.GetDoubleHistogram().DataPoints...)
case *metricpb.Metric_DoubleSum:
m.GetDoubleSum().DataPoints = append(m.GetDoubleSum().DataPoints, res.Metric.GetDoubleSum().DataPoints...)
default:
} }
} }
@ -284,13 +287,57 @@ func Record(r export.Record) (*metricpb.Metric, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
return scalar(r, value, time.Time{}, tm) return gauge(r, value, time.Time{}, tm)
default: default:
return nil, fmt.Errorf("%w: %T", ErrUnimplementedAgg, agg) return nil, fmt.Errorf("%w: %T", ErrUnimplementedAgg, agg)
} }
} }
func gauge(record export.Record, num metric.Number, start, end time.Time) (*metricpb.Metric, error) {
desc := record.Descriptor()
labels := record.Labels()
m := &metricpb.Metric{
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
}
switch n := desc.NumberKind(); n {
case metric.Int64NumberKind:
m.Data = &metricpb.Metric_IntGauge{
IntGauge: &metricpb.IntGauge{
DataPoints: []*metricpb.IntDataPoint{
{
Value: num.CoerceToInt64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
},
},
},
}
case metric.Float64NumberKind:
m.Data = &metricpb.Metric_DoubleGauge{
DoubleGauge: &metricpb.DoubleGauge{
DataPoints: []*metricpb.DoubleDataPoint{
{
Value: num.CoerceToFloat64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
},
},
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, n)
}
return m, nil
}
// scalar transforms a Sum or LastValue Aggregator into an OTLP Metric. // scalar transforms a Sum or LastValue Aggregator into an OTLP Metric.
// For LastValue (Gauge), use start==time.Time{}. // For LastValue (Gauge), use start==time.Time{}.
func scalar(record export.Record, num metric.Number, start, end time.Time) (*metricpb.Metric, error) { func scalar(record export.Record, num metric.Number, start, end time.Time) (*metricpb.Metric, error) {
@ -298,32 +345,36 @@ func scalar(record export.Record, num metric.Number, start, end time.Time) (*met
labels := record.Labels() labels := record.Labels()
m := &metricpb.Metric{ m := &metricpb.Metric{
MetricDescriptor: &metricpb.MetricDescriptor{ Name: desc.Name(),
Name: desc.Name(), Description: desc.Description(),
Description: desc.Description(), Unit: string(desc.Unit()),
Unit: string(desc.Unit()),
},
} }
switch n := desc.NumberKind(); n { switch n := desc.NumberKind(); n {
case metric.Int64NumberKind: case metric.Int64NumberKind:
m.MetricDescriptor.Type = metricpb.MetricDescriptor_INT64 m.Data = &metricpb.Metric_IntSum{
m.Int64DataPoints = []*metricpb.Int64DataPoint{ IntSum: &metricpb.IntSum{
{ DataPoints: []*metricpb.IntDataPoint{
Value: num.CoerceToInt64(n), {
Labels: stringKeyValues(labels.Iter()), Value: num.CoerceToInt64(n),
StartTimeUnixNano: toNanos(start), Labels: stringKeyValues(labels.Iter()),
TimeUnixNano: toNanos(end), StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
},
},
}, },
} }
case metric.Float64NumberKind: case metric.Float64NumberKind:
m.MetricDescriptor.Type = metricpb.MetricDescriptor_DOUBLE m.Data = &metricpb.Metric_DoubleSum{
m.DoubleDataPoints = []*metricpb.DoubleDataPoint{ DoubleSum: &metricpb.DoubleSum{
{ DataPoints: []*metricpb.DoubleDataPoint{
Value: num.CoerceToFloat64(n), {
Labels: stringKeyValues(labels.Iter()), Value: num.CoerceToFloat64(n),
StartTimeUnixNano: toNanos(start), Labels: stringKeyValues(labels.Iter()),
TimeUnixNano: toNanos(end), StartTimeUnixNano: toNanos(start),
TimeUnixNano: toNanos(end),
},
},
}, },
} }
default: default:
@ -360,34 +411,52 @@ func minMaxSumCount(record export.Record, a aggregation.MinMaxSumCount) (*metric
return nil, err return nil, err
} }
numKind := desc.NumberKind() m := &metricpb.Metric{
return &metricpb.Metric{ Name: desc.Name(),
MetricDescriptor: &metricpb.MetricDescriptor{ Description: desc.Description(),
Name: desc.Name(), Unit: string(desc.Unit()),
Description: desc.Description(), }
Unit: string(desc.Unit()),
Type: metricpb.MetricDescriptor_SUMMARY, buckets := []uint64{min.AsRaw(), max.AsRaw()}
}, bounds := []float64{0.0, 100.0}
SummaryDataPoints: []*metricpb.SummaryDataPoint{
{ switch n := desc.NumberKind(); n {
Labels: stringKeyValues(labels.Iter()), case metric.Int64NumberKind:
Count: uint64(count), m.Data = &metricpb.Metric_IntHistogram{
Sum: sum.CoerceToFloat64(numKind), IntHistogram: &metricpb.IntHistogram{
PercentileValues: []*metricpb.SummaryDataPoint_ValueAtPercentile{ DataPoints: []*metricpb.IntHistogramDataPoint{
{ {
Percentile: 0.0, Sum: sum.CoerceToInt64(n),
Value: min.CoerceToFloat64(numKind), Labels: stringKeyValues(labels.Iter()),
}, StartTimeUnixNano: toNanos(record.StartTime()),
{ TimeUnixNano: toNanos(record.EndTime()),
Percentile: 100.0, Count: uint64(count),
Value: max.CoerceToFloat64(numKind), BucketCounts: buckets,
ExplicitBounds: bounds,
}, },
}, },
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
}, },
}, }
}, nil case metric.Float64NumberKind:
m.Data = &metricpb.Metric_DoubleHistogram{
DoubleHistogram: &metricpb.DoubleHistogram{
DataPoints: []*metricpb.DoubleHistogramDataPoint{
{
Sum: sum.CoerceToFloat64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: buckets,
ExplicitBounds: bounds,
},
},
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, n)
}
return m, nil
} }
func histogramValues(a aggregation.Histogram) (boundaries []float64, counts []float64, err error) { func histogramValues(a aggregation.Histogram) (boundaries []float64, counts []float64, err error) {
@ -422,33 +491,53 @@ func histogram(record export.Record, a aggregation.Histogram) (*metricpb.Metric,
return nil, err return nil, err
} }
buckets := make([]*metricpb.HistogramDataPoint_Bucket, len(counts)) buckets := make([]uint64, len(counts))
for i := 0; i < len(counts); i++ { for i := 0; i < len(counts); i++ {
buckets[i] = &metricpb.HistogramDataPoint_Bucket{ buckets[i] = uint64(counts[i])
Count: uint64(counts[i]), }
m := &metricpb.Metric{
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
}
switch n := desc.NumberKind(); n {
case metric.Int64NumberKind:
m.Data = &metricpb.Metric_IntHistogram{
IntHistogram: &metricpb.IntHistogram{
DataPoints: []*metricpb.IntHistogramDataPoint{
{
Sum: sum.CoerceToInt64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: buckets,
ExplicitBounds: boundaries,
},
},
},
} }
case metric.Float64NumberKind:
m.Data = &metricpb.Metric_DoubleHistogram{
DoubleHistogram: &metricpb.DoubleHistogram{
DataPoints: []*metricpb.DoubleHistogramDataPoint{
{
Sum: sum.CoerceToFloat64(n),
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
BucketCounts: buckets,
ExplicitBounds: boundaries,
},
},
},
}
default:
return nil, fmt.Errorf("%w: %v", ErrUnknownValueType, n)
} }
numKind := desc.NumberKind() return m, nil
return &metricpb.Metric{
MetricDescriptor: &metricpb.MetricDescriptor{
Name: desc.Name(),
Description: desc.Description(),
Unit: string(desc.Unit()),
Type: metricpb.MetricDescriptor_HISTOGRAM,
},
HistogramDataPoints: []*metricpb.HistogramDataPoint{
{
Labels: stringKeyValues(labels.Iter()),
StartTimeUnixNano: toNanos(record.StartTime()),
TimeUnixNano: toNanos(record.EndTime()),
Count: uint64(count),
Sum: sum.CoerceToFloat64(numKind),
Buckets: buckets,
ExplicitBounds: boundaries,
},
},
}, nil
} }
// stringKeyValues transforms a label iterator into an OTLP StringKeyValues. // stringKeyValues transforms a label iterator into an OTLP StringKeyValues.

View File

@ -39,7 +39,6 @@ import (
"go.opentelemetry.io/otel/sdk/metric/aggregator/sum" "go.opentelemetry.io/otel/sdk/metric/aggregator/sum"
sumAgg "go.opentelemetry.io/otel/sdk/metric/aggregator/sum" sumAgg "go.opentelemetry.io/otel/sdk/metric/aggregator/sum"
"go.opentelemetry.io/otel/sdk/resource" "go.opentelemetry.io/otel/sdk/resource"
"go.opentelemetry.io/otel/unit"
) )
var ( var (
@ -117,65 +116,6 @@ func TestMinMaxSumCountValue(t *testing.T) {
} }
} }
func TestMinMaxSumCountMetricDescriptor(t *testing.T) {
tests := []struct {
name string
metricKind metric.Kind
description string
unit unit.Unit
numberKind metric.NumberKind
labels []label.KeyValue
expected *metricpb.MetricDescriptor
}{
{
"mmsc-test-a",
metric.ValueRecorderKind,
"test-a-description",
unit.Dimensionless,
metric.Int64NumberKind,
[]label.KeyValue{},
&metricpb.MetricDescriptor{
Name: "mmsc-test-a",
Description: "test-a-description",
Unit: "1",
Type: metricpb.MetricDescriptor_SUMMARY,
},
},
{
"mmsc-test-b",
metric.CounterKind, // This shouldn't change anything.
"test-b-description",
unit.Bytes,
metric.Float64NumberKind, // This shouldn't change anything.
[]label.KeyValue{label.String("A", "1")},
&metricpb.MetricDescriptor{
Name: "mmsc-test-b",
Description: "test-b-description",
Unit: "By",
Type: metricpb.MetricDescriptor_SUMMARY,
},
},
}
ctx := context.Background()
mmsc, ckpt := metrictest.Unslice2(minmaxsumcount.New(2, &metric.Descriptor{}))
if !assert.NoError(t, mmsc.Update(ctx, 1, &metric.Descriptor{})) {
return
}
require.NoError(t, mmsc.SynchronizedMove(ckpt, &metric.Descriptor{}))
for _, test := range tests {
desc := metric.NewDescriptor(test.name, test.metricKind, test.numberKind,
metric.WithDescription(test.description),
metric.WithUnit(test.unit))
labels := label.NewSet(test.labels...)
record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd)
got, err := minMaxSumCount(record, ckpt.(aggregation.MinMaxSumCount))
if assert.NoError(t, err) {
assert.Equal(t, test.expected, got.MetricDescriptor)
}
}
}
func TestMinMaxSumCountDatapoints(t *testing.T) { func TestMinMaxSumCountDatapoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind) desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind)
labels := label.NewSet() labels := label.NewSet()
@ -184,20 +124,12 @@ func TestMinMaxSumCountDatapoints(t *testing.T) {
assert.NoError(t, mmsc.Update(context.Background(), 1, &desc)) assert.NoError(t, mmsc.Update(context.Background(), 1, &desc))
assert.NoError(t, mmsc.Update(context.Background(), 10, &desc)) assert.NoError(t, mmsc.Update(context.Background(), 10, &desc))
require.NoError(t, mmsc.SynchronizedMove(ckpt, &desc)) require.NoError(t, mmsc.SynchronizedMove(ckpt, &desc))
expected := []*metricpb.SummaryDataPoint{ expected := []*metricpb.IntHistogramDataPoint{
{ {
Count: 2, Count: 2,
Sum: 11, Sum: 11,
PercentileValues: []*metricpb.SummaryDataPoint_ValueAtPercentile{ ExplicitBounds: []float64{0.0, 100.0},
{ BucketCounts: []uint64{1, 10},
Percentile: 0.0,
Value: 1,
},
{
Percentile: 100.0,
Value: 10,
},
},
StartTimeUnixNano: uint64(intervalStart.UnixNano()), StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()), TimeUnixNano: uint64(intervalEnd.UnixNano()),
}, },
@ -205,10 +137,11 @@ func TestMinMaxSumCountDatapoints(t *testing.T) {
record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd) record := export.NewRecord(&desc, &labels, nil, ckpt.Aggregation(), intervalStart, intervalEnd)
m, err := minMaxSumCount(record, ckpt.(aggregation.MinMaxSumCount)) m, err := minMaxSumCount(record, ckpt.(aggregation.MinMaxSumCount))
if assert.NoError(t, err) { if assert.NoError(t, err) {
assert.Equal(t, []*metricpb.Int64DataPoint(nil), m.Int64DataPoints) assert.Nil(t, m.GetIntGauge())
assert.Equal(t, []*metricpb.DoubleDataPoint(nil), m.DoubleDataPoints) assert.Equal(t, expected, m.GetIntHistogram().DataPoints)
assert.Equal(t, []*metricpb.HistogramDataPoint(nil), m.HistogramDataPoints) assert.Nil(t, m.GetIntSum())
assert.Equal(t, expected, m.SummaryDataPoints) assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
} }
} }
@ -222,62 +155,7 @@ func TestMinMaxSumCountPropagatesErrors(t *testing.T) {
assert.Equal(t, aggregation.ErrNoData, err) assert.Equal(t, aggregation.ErrNoData, err)
} }
func TestSumMetricDescriptor(t *testing.T) { func TestSumIntDataPoints(t *testing.T) {
tests := []struct {
name string
metricKind metric.Kind
description string
unit unit.Unit
numberKind metric.NumberKind
labels []label.KeyValue
expected *metricpb.MetricDescriptor
}{
{
"sum-test-a",
metric.CounterKind,
"test-a-description",
unit.Dimensionless,
metric.Int64NumberKind,
[]label.KeyValue{},
&metricpb.MetricDescriptor{
Name: "sum-test-a",
Description: "test-a-description",
Unit: "1",
Type: metricpb.MetricDescriptor_INT64,
},
},
{
"sum-test-b",
metric.ValueObserverKind, // This shouldn't change anything.
"test-b-description",
unit.Milliseconds,
metric.Float64NumberKind,
[]label.KeyValue{label.String("A", "1")},
&metricpb.MetricDescriptor{
Name: "sum-test-b",
Description: "test-b-description",
Unit: "ms",
Type: metricpb.MetricDescriptor_DOUBLE,
},
},
}
for _, test := range tests {
desc := metric.NewDescriptor(test.name, test.metricKind, test.numberKind,
metric.WithDescription(test.description),
metric.WithUnit(test.unit),
)
labels := label.NewSet(test.labels...)
emptyAgg := &sumAgg.New(1)[0]
record := export.NewRecord(&desc, &labels, nil, emptyAgg, intervalStart, intervalEnd)
got, err := scalar(record, 0, time.Time{}, time.Time{})
if assert.NoError(t, err) {
assert.Equal(t, test.expected, got.MetricDescriptor)
}
}
}
func TestSumInt64DataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind) desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind)
labels := label.NewSet() labels := label.NewSet()
s, ckpt := metrictest.Unslice2(sumAgg.New(2)) s, ckpt := metrictest.Unslice2(sumAgg.New(2))
@ -290,18 +168,19 @@ func TestSumInt64DataPoints(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
if m, err := scalar(record, value, record.StartTime(), record.EndTime()); assert.NoError(t, err) { if m, err := scalar(record, value, record.StartTime(), record.EndTime()); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.Int64DataPoint{{ assert.Nil(t, m.GetIntGauge())
assert.Nil(t, m.GetIntHistogram())
assert.Equal(t, []*metricpb.IntDataPoint{{
Value: 1, Value: 1,
StartTimeUnixNano: uint64(intervalStart.UnixNano()), StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()), TimeUnixNano: uint64(intervalEnd.UnixNano()),
}}, m.Int64DataPoints) }}, m.GetIntSum().DataPoints)
assert.Equal(t, []*metricpb.DoubleDataPoint(nil), m.DoubleDataPoints) assert.Nil(t, m.GetDoubleGauge())
assert.Equal(t, []*metricpb.HistogramDataPoint(nil), m.HistogramDataPoints) assert.Nil(t, m.GetDoubleHistogram())
assert.Equal(t, []*metricpb.SummaryDataPoint(nil), m.SummaryDataPoints)
} }
} }
func TestSumFloat64DataPoints(t *testing.T) { func TestSumFloatDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Float64NumberKind) desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Float64NumberKind)
labels := label.NewSet() labels := label.NewSet()
s, ckpt := metrictest.Unslice2(sumAgg.New(2)) s, ckpt := metrictest.Unslice2(sumAgg.New(2))
@ -314,18 +193,20 @@ func TestSumFloat64DataPoints(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
if m, err := scalar(record, value, record.StartTime(), record.EndTime()); assert.NoError(t, err) { if m, err := scalar(record, value, record.StartTime(), record.EndTime()); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.Int64DataPoint(nil), m.Int64DataPoints) assert.Nil(t, m.GetIntGauge())
assert.Nil(t, m.GetIntHistogram())
assert.Nil(t, m.GetIntSum())
assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Equal(t, []*metricpb.DoubleDataPoint{{ assert.Equal(t, []*metricpb.DoubleDataPoint{{
Value: 1, Value: 1,
StartTimeUnixNano: uint64(intervalStart.UnixNano()), StartTimeUnixNano: uint64(intervalStart.UnixNano()),
TimeUnixNano: uint64(intervalEnd.UnixNano()), TimeUnixNano: uint64(intervalEnd.UnixNano()),
}}, m.DoubleDataPoints) }}, m.GetDoubleSum().DataPoints)
assert.Equal(t, []*metricpb.HistogramDataPoint(nil), m.HistogramDataPoints)
assert.Equal(t, []*metricpb.SummaryDataPoint(nil), m.SummaryDataPoints)
} }
} }
func TestLastValueInt64DataPoints(t *testing.T) { func TestLastValueIntDataPoints(t *testing.T) {
desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind) desc := metric.NewDescriptor("", metric.ValueRecorderKind, metric.Int64NumberKind)
labels := label.NewSet() labels := label.NewSet()
s, ckpt := metrictest.Unslice2(lvAgg.New(2)) s, ckpt := metrictest.Unslice2(lvAgg.New(2))
@ -337,15 +218,17 @@ func TestLastValueInt64DataPoints(t *testing.T) {
value, timestamp, err := sum.LastValue() value, timestamp, err := sum.LastValue()
require.NoError(t, err) require.NoError(t, err)
if m, err := scalar(record, value, time.Time{}, timestamp); assert.NoError(t, err) { if m, err := gauge(record, value, time.Time{}, timestamp); assert.NoError(t, err) {
assert.Equal(t, []*metricpb.Int64DataPoint{{ assert.Equal(t, []*metricpb.IntDataPoint{{
Value: 100, Value: 100,
StartTimeUnixNano: 0, StartTimeUnixNano: 0,
TimeUnixNano: uint64(timestamp.UnixNano()), TimeUnixNano: uint64(timestamp.UnixNano()),
}}, m.Int64DataPoints) }}, m.GetIntGauge().DataPoints)
assert.Equal(t, []*metricpb.DoubleDataPoint(nil), m.DoubleDataPoints) assert.Nil(t, m.GetIntHistogram())
assert.Equal(t, []*metricpb.HistogramDataPoint(nil), m.HistogramDataPoints) assert.Nil(t, m.GetIntSum())
assert.Equal(t, []*metricpb.SummaryDataPoint(nil), m.SummaryDataPoints) assert.Nil(t, m.GetDoubleGauge())
assert.Nil(t, m.GetDoubleHistogram())
assert.Nil(t, m.GetDoubleSum())
} }
} }

View File

@ -129,9 +129,9 @@ func status(status codes.Code, message string) *tracepb.Status {
var c tracepb.Status_StatusCode var c tracepb.Status_StatusCode
switch status { switch status {
case codes.Error: case codes.Error:
c = tracepb.Status_UnknownError c = tracepb.Status_STATUS_CODE_UNKNOWN_ERROR
default: default:
c = tracepb.Status_Ok c = tracepb.Status_STATUS_CODE_OK
} }
return &tracepb.Status{ return &tracepb.Status{
Code: c, Code: c,
@ -196,15 +196,15 @@ func spanEvents(es []export.Event) []*tracepb.Span_Event {
func spanKind(kind otel.SpanKind) tracepb.Span_SpanKind { func spanKind(kind otel.SpanKind) tracepb.Span_SpanKind {
switch kind { switch kind {
case otel.SpanKindInternal: case otel.SpanKindInternal:
return tracepb.Span_INTERNAL return tracepb.Span_SPAN_KIND_INTERNAL
case otel.SpanKindClient: case otel.SpanKindClient:
return tracepb.Span_CLIENT return tracepb.Span_SPAN_KIND_CLIENT
case otel.SpanKindServer: case otel.SpanKindServer:
return tracepb.Span_SERVER return tracepb.Span_SPAN_KIND_SERVER
case otel.SpanKindProducer: case otel.SpanKindProducer:
return tracepb.Span_PRODUCER return tracepb.Span_SPAN_KIND_PRODUCER
case otel.SpanKindConsumer: case otel.SpanKindConsumer:
return tracepb.Span_CONSUMER return tracepb.Span_SPAN_KIND_CONSUMER
default: default:
return tracepb.Span_SPAN_KIND_UNSPECIFIED return tracepb.Span_SPAN_KIND_UNSPECIFIED
} }

View File

@ -41,23 +41,23 @@ func TestSpanKind(t *testing.T) {
}{ }{
{ {
otel.SpanKindInternal, otel.SpanKindInternal,
tracepb.Span_INTERNAL, tracepb.Span_SPAN_KIND_INTERNAL,
}, },
{ {
otel.SpanKindClient, otel.SpanKindClient,
tracepb.Span_CLIENT, tracepb.Span_SPAN_KIND_CLIENT,
}, },
{ {
otel.SpanKindServer, otel.SpanKindServer,
tracepb.Span_SERVER, tracepb.Span_SPAN_KIND_SERVER,
}, },
{ {
otel.SpanKindProducer, otel.SpanKindProducer,
tracepb.Span_PRODUCER, tracepb.Span_SPAN_KIND_PRODUCER,
}, },
{ {
otel.SpanKindConsumer, otel.SpanKindConsumer,
tracepb.Span_CONSUMER, tracepb.Span_SPAN_KIND_CONSUMER,
}, },
{ {
otel.SpanKind(-1), otel.SpanKind(-1),
@ -162,17 +162,17 @@ func TestStatus(t *testing.T) {
{ {
codes.Ok, codes.Ok,
"test Ok", "test Ok",
tracepb.Status_Ok, tracepb.Status_STATUS_CODE_OK,
}, },
{ {
codes.Unset, codes.Unset,
"test Unset", "test Unset",
tracepb.Status_Ok, tracepb.Status_STATUS_CODE_OK,
}, },
{ {
codes.Error, codes.Error,
"test Error", "test Error",
tracepb.Status_UnknownError, tracepb.Status_STATUS_CODE_UNKNOWN_ERROR,
}, },
} { } {
expected := &tracepb.Status{Code: test.otlpStatus, Message: test.message} expected := &tracepb.Status{Code: test.otlpStatus, Message: test.message}
@ -267,7 +267,7 @@ func TestSpanData(t *testing.T) {
SpanId: []byte{0xFF, 0xFE, 0xFD, 0xFC, 0xFB, 0xFA, 0xF9, 0xF8}, SpanId: []byte{0xFF, 0xFE, 0xFD, 0xFC, 0xFB, 0xFA, 0xF9, 0xF8},
ParentSpanId: []byte{0xEF, 0xEE, 0xED, 0xEC, 0xEB, 0xEA, 0xE9, 0xE8}, ParentSpanId: []byte{0xEF, 0xEE, 0xED, 0xEC, 0xEB, 0xEA, 0xE9, 0xE8},
Name: spanData.Name, Name: spanData.Name,
Kind: tracepb.Span_SERVER, Kind: tracepb.Span_SPAN_KIND_SERVER,
StartTimeUnixNano: uint64(startTime.UnixNano()), StartTimeUnixNano: uint64(startTime.UnixNano()),
EndTimeUnixNano: uint64(endTime.UnixNano()), EndTimeUnixNano: uint64(endTime.UnixNano()),
Status: status(spanData.StatusCode, spanData.StatusMessage), Status: status(spanData.StatusCode, spanData.StatusMessage),

View File

@ -26,7 +26,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
commonpb "go.opentelemetry.io/otel/exporters/otlp/internal/opentelemetry-proto-gen/common/v1" commonpb "go.opentelemetry.io/otel/exporters/otlp/internal/opentelemetry-proto-gen/common/v1"
metricpb "go.opentelemetry.io/otel/exporters/otlp/internal/opentelemetry-proto-gen/metrics/v1"
"go.opentelemetry.io/otel/label" "go.opentelemetry.io/otel/label"
"go.opentelemetry.io/otel/api/metric" "go.opentelemetry.io/otel/api/metric"
@ -238,37 +237,58 @@ func newExporterEndToEndTest(t *testing.T, additionalOpts []otlp.ExporterOption)
assert.Len(t, metrics, len(instruments), "not enough metrics exported") assert.Len(t, metrics, len(instruments), "not enough metrics exported")
seen := make(map[string]struct{}, len(instruments)) seen := make(map[string]struct{}, len(instruments))
for _, m := range metrics { for _, m := range metrics {
desc := m.GetMetricDescriptor() data, ok := instruments[m.Name]
data, ok := instruments[desc.Name]
if !ok { if !ok {
assert.Failf(t, "unknown metrics", desc.Name) assert.Failf(t, "unknown metrics", m.Name)
continue continue
} }
seen[desc.Name] = struct{}{} seen[m.Name] = struct{}{}
switch data.iKind { switch data.iKind {
case metric.CounterKind, metric.ValueObserverKind: case metric.CounterKind:
switch data.nKind { switch data.nKind {
case metricapi.Int64NumberKind: case metricapi.Int64NumberKind:
assert.Equal(t, metricpb.MetricDescriptor_INT64.String(), desc.GetType().String()) if dp := m.GetIntSum().DataPoints; assert.Len(t, dp, 1) {
if dp := m.GetInt64DataPoints(); assert.Len(t, dp, 1) { assert.Equal(t, data.val, dp[0].Value, "invalid value for %q", m.Name)
assert.Equal(t, data.val, dp[0].Value, "invalid value for %q", desc.Name)
} }
case metricapi.Float64NumberKind: case metricapi.Float64NumberKind:
assert.Equal(t, metricpb.MetricDescriptor_DOUBLE.String(), desc.GetType().String()) if dp := m.GetDoubleSum().DataPoints; assert.Len(t, dp, 1) {
if dp := m.GetDoubleDataPoints(); assert.Len(t, dp, 1) { assert.Equal(t, float64(data.val), dp[0].Value, "invalid value for %q", m.Name)
assert.Equal(t, float64(data.val), dp[0].Value, "invalid value for %q", desc.Name) }
default:
assert.Failf(t, "invalid number kind", data.nKind.String())
}
case metric.ValueObserverKind:
switch data.nKind {
case metricapi.Int64NumberKind:
if dp := m.GetIntGauge().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, data.val, dp[0].Value, "invalid value for %q", m.Name)
}
case metricapi.Float64NumberKind:
if dp := m.GetDoubleGauge().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, float64(data.val), dp[0].Value, "invalid value for %q", m.Name)
} }
default: default:
assert.Failf(t, "invalid number kind", data.nKind.String()) assert.Failf(t, "invalid number kind", data.nKind.String())
} }
case metric.ValueRecorderKind: case metric.ValueRecorderKind:
assert.Equal(t, metricpb.MetricDescriptor_SUMMARY.String(), desc.GetType().String()) switch data.nKind {
m.GetSummaryDataPoints() case metricapi.Int64NumberKind:
if dp := m.GetSummaryDataPoints(); assert.Len(t, dp, 1) { assert.NotNil(t, m.GetIntHistogram())
count := dp[0].Count if dp := m.GetIntHistogram().DataPoints; assert.Len(t, dp, 1) {
assert.Equal(t, uint64(1), count, "invalid count for %q", desc.Name) count := dp[0].Count
assert.Equal(t, float64(data.val*int64(count)), dp[0].Sum, "invalid sum for %q (value %d)", desc.Name, data.val) assert.Equal(t, uint64(1), count, "invalid count for %q", m.Name)
assert.Equal(t, int64(data.val*int64(count)), dp[0].Sum, "invalid sum for %q (value %d)", m.Name, data.val)
}
case metricapi.Float64NumberKind:
assert.NotNil(t, m.GetDoubleHistogram())
if dp := m.GetDoubleHistogram().DataPoints; assert.Len(t, dp, 1) {
count := dp[0].Count
assert.Equal(t, uint64(1), count, "invalid count for %q", m.Name)
assert.Equal(t, float64(data.val*int64(count)), dp[0].Sum, "invalid sum for %q (value %d)", m.Name, data.val)
}
default:
assert.Failf(t, "invalid number kind", data.nKind.String())
} }
default: default:
assert.Failf(t, "invalid metrics kind", data.iKind.String()) assert.Failf(t, "invalid metrics kind", data.iKind.String())

View File

@ -109,11 +109,6 @@ var (
testHistogramBoundaries = []float64{2.0, 4.0, 8.0} testHistogramBoundaries = []float64{2.0, 4.0, 8.0}
md = &metricpb.MetricDescriptor{
Name: "int64-count",
Type: metricpb.MetricDescriptor_INT64,
}
cpu1Labels = []*commonpb.StringKeyValue{ cpu1Labels = []*commonpb.StringKeyValue{
{ {
Key: "CPU", Key: "CPU",
@ -189,19 +184,25 @@ func TestNoGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu2Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -229,50 +230,48 @@ func TestValuerecorderMetricGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: &metricpb.MetricDescriptor{ Name: "valuerecorder",
Name: "valuerecorder", Data: &metricpb.Metric_IntHistogram{
Type: metricpb.MetricDescriptor_HISTOGRAM, IntHistogram: &metricpb.IntHistogram{
}, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
HistogramDataPoints: []*metricpb.HistogramDataPoint{ DataPoints: []*metricpb.IntHistogramDataPoint{
{
Labels: []*commonpb.StringKeyValue{
{ {
Key: "CPU", Labels: []*commonpb.StringKeyValue{
Value: "1", {
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
Count: 2,
Sum: 11,
ExplicitBounds: testHistogramBoundaries,
BucketCounts: []uint64{1, 0, 0, 1},
}, },
{ {
Key: "host", Labels: []*commonpb.StringKeyValue{
Value: "test.com", {
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
Count: 2,
Sum: 11,
ExplicitBounds: testHistogramBoundaries,
BucketCounts: []uint64{1, 0, 0, 1},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
}, },
}, },
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
Count: 2,
Sum: 11,
ExplicitBounds: testHistogramBoundaries,
Buckets: []*metricpb.HistogramDataPoint_Bucket{
{Count: 1}, {Count: 0}, {Count: 0}, {Count: 1},
},
},
{
Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
Count: 2,
Sum: 11,
ExplicitBounds: testHistogramBoundaries,
Buckets: []*metricpb.HistogramDataPoint_Bucket{
{Count: 1}, {Count: 0}, {Count: 0}, {Count: 1},
},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
}, },
}, },
}, },
@ -282,9 +281,6 @@ func TestValuerecorderMetricGroupingExport(t *testing.T) {
}, },
} }
runMetricExportTests(t, []record{r, r}, expected) runMetricExportTests(t, []record{r, r}, expected)
//changing the number kind should make no difference.
r.nKind = metric.Float64NumberKind
runMetricExportTests(t, []record{r, r}, expected)
} }
func TestCountInt64MetricGroupingExport(t *testing.T) { func TestCountInt64MetricGroupingExport(t *testing.T) {
@ -306,19 +302,25 @@ func TestCountInt64MetricGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu1Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -349,40 +351,43 @@ func TestCountFloat64MetricGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: &metricpb.MetricDescriptor{ Name: "float64-count",
Name: "float64-count", Data: &metricpb.Metric_DoubleSum{
Type: metricpb.MetricDescriptor_DOUBLE, DoubleSum: &metricpb.DoubleSum{
}, IsMonotonic: true,
DoubleDataPoints: []*metricpb.DoubleDataPoint{ AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
{ DataPoints: []*metricpb.DoubleDataPoint{
Value: 11,
Labels: []*commonpb.StringKeyValue{
{ {
Key: "CPU", Value: 11,
Value: "1", Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
}, },
{ {
Key: "host", Value: 11,
Value: "test.com", Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
}, },
}, },
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
{
Value: 11,
Labels: []*commonpb.StringKeyValue{
{
Key: "CPU",
Value: "1",
},
{
Key: "host",
Value: "test.com",
},
},
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
}, },
}, },
}, },
@ -438,25 +443,31 @@ func TestResourceMetricGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu1Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu2Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -470,13 +481,19 @@ func TestResourceMetricGroupingExport(t *testing.T) {
{ {
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -563,25 +580,31 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
}, },
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu1Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
}, Value: 11,
{ Labels: cpu1Labels,
Value: 11, StartTimeUnixNano: startTime(),
Labels: cpu2Labels, TimeUnixNano: pointTime(),
StartTimeUnixNano: startTime(), },
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu2Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -594,13 +617,19 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
}, },
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -612,13 +641,19 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
}, },
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -636,13 +671,19 @@ func TestResourceInstLibMetricGroupingExport(t *testing.T) {
}, },
Metrics: []*metricpb.Metric{ Metrics: []*metricpb.Metric{
{ {
MetricDescriptor: md, Name: "int64-count",
Int64DataPoints: []*metricpb.Int64DataPoint{ Data: &metricpb.Metric_IntSum{
{ IntSum: &metricpb.IntSum{
Value: 11, IsMonotonic: true,
Labels: cpu1Labels, AggregationTemporality: metricpb.AggregationTemporality_AGGREGATION_TEMPORALITY_CUMULATIVE,
StartTimeUnixNano: startTime(), DataPoints: []*metricpb.IntDataPoint{
TimeUnixNano: pointTime(), {
Value: 11,
Labels: cpu1Labels,
StartTimeUnixNano: startTime(),
TimeUnixNano: pointTime(),
},
},
}, },
}, },
}, },
@ -739,14 +780,25 @@ func runMetricExportTest(t *testing.T, exp *Exporter, rs []record, expected []me
continue continue
} }
for i, expected := range ilm.GetMetrics() { for i, expected := range ilm.GetMetrics() {
assert.Equal(t, expected.GetMetricDescriptor(), g[i].GetMetricDescriptor()) assert.Equal(t, expected.Name, g[i].Name)
// Compare each list directly because there is no order assert.Equal(t, expected.Unit, g[i].Unit)
// guarantee with the concurrent processing design of the exporter assert.Equal(t, expected.Description, g[i].Description)
// and ElementsMatch does not apply to contained slices. switch g[i].Data.(type) {
assert.ElementsMatch(t, expected.GetInt64DataPoints(), g[i].GetInt64DataPoints()) case *metricpb.Metric_IntGauge:
assert.ElementsMatch(t, expected.GetDoubleDataPoints(), g[i].GetDoubleDataPoints()) assert.ElementsMatch(t, expected.GetIntGauge().DataPoints, g[i].GetIntGauge().DataPoints)
assert.ElementsMatch(t, expected.GetHistogramDataPoints(), g[i].GetHistogramDataPoints()) case *metricpb.Metric_IntHistogram:
assert.ElementsMatch(t, expected.GetSummaryDataPoints(), g[i].GetSummaryDataPoints()) assert.ElementsMatch(t, expected.GetIntHistogram().DataPoints, g[i].GetIntHistogram().DataPoints)
case *metricpb.Metric_IntSum:
assert.ElementsMatch(t, expected.GetIntSum().DataPoints, g[i].GetIntSum().DataPoints)
case *metricpb.Metric_DoubleGauge:
assert.ElementsMatch(t, expected.GetDoubleGauge().DataPoints, g[i].GetDoubleGauge().DataPoints)
case *metricpb.Metric_DoubleHistogram:
assert.ElementsMatch(t, expected.GetDoubleHistogram().DataPoints, g[i].GetDoubleHistogram().DataPoints)
case *metricpb.Metric_DoubleSum:
assert.ElementsMatch(t, expected.GetDoubleSum().DataPoints, g[i].GetDoubleSum().DataPoints)
default:
assert.Failf(t, "unknown data type", g[i].Name)
}
} }
} }
} }

View File

@ -196,7 +196,7 @@ func TestExportSpans(t *testing.T) {
TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1}, TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1},
SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1}, SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1},
Name: "parent process", Name: "parent process",
Kind: tracepb.Span_SERVER, Kind: tracepb.Span_SPAN_KIND_SERVER,
StartTimeUnixNano: uint64(startTime.UnixNano()), StartTimeUnixNano: uint64(startTime.UnixNano()),
EndTimeUnixNano: uint64(endTime.UnixNano()), EndTimeUnixNano: uint64(endTime.UnixNano()),
Attributes: []*commonpb.KeyValue{ Attributes: []*commonpb.KeyValue{
@ -218,7 +218,7 @@ func TestExportSpans(t *testing.T) {
}, },
}, },
Status: &tracepb.Status{ Status: &tracepb.Status{
Code: tracepb.Status_Ok, Code: tracepb.Status_STATUS_CODE_OK,
Message: "Ok", Message: "Ok",
}, },
}, },
@ -227,7 +227,7 @@ func TestExportSpans(t *testing.T) {
SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 2}, SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 2},
ParentSpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1}, ParentSpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1},
Name: "internal process", Name: "internal process",
Kind: tracepb.Span_INTERNAL, Kind: tracepb.Span_SPAN_KIND_INTERNAL,
StartTimeUnixNano: uint64(startTime.UnixNano()), StartTimeUnixNano: uint64(startTime.UnixNano()),
EndTimeUnixNano: uint64(endTime.UnixNano()), EndTimeUnixNano: uint64(endTime.UnixNano()),
Attributes: []*commonpb.KeyValue{ Attributes: []*commonpb.KeyValue{
@ -249,7 +249,7 @@ func TestExportSpans(t *testing.T) {
}, },
}, },
Status: &tracepb.Status{ Status: &tracepb.Status{
Code: tracepb.Status_Ok, Code: tracepb.Status_STATUS_CODE_OK,
Message: "Ok", Message: "Ok",
}, },
}, },
@ -265,7 +265,7 @@ func TestExportSpans(t *testing.T) {
TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2}, TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2},
SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1}, SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1},
Name: "secondary parent process", Name: "secondary parent process",
Kind: tracepb.Span_SERVER, Kind: tracepb.Span_SPAN_KIND_SERVER,
StartTimeUnixNano: uint64(startTime.UnixNano()), StartTimeUnixNano: uint64(startTime.UnixNano()),
EndTimeUnixNano: uint64(endTime.UnixNano()), EndTimeUnixNano: uint64(endTime.UnixNano()),
Attributes: []*commonpb.KeyValue{ Attributes: []*commonpb.KeyValue{
@ -287,7 +287,7 @@ func TestExportSpans(t *testing.T) {
}, },
}, },
Status: &tracepb.Status{ Status: &tracepb.Status{
Code: tracepb.Status_Ok, Code: tracepb.Status_STATUS_CODE_OK,
Message: "Ok", Message: "Ok",
}, },
}, },
@ -319,7 +319,7 @@ func TestExportSpans(t *testing.T) {
TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2}, TraceId: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2},
SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1}, SpanId: []byte{0, 0, 0, 0, 0, 0, 0, 1},
Name: "parent process", Name: "parent process",
Kind: tracepb.Span_SERVER, Kind: tracepb.Span_SPAN_KIND_SERVER,
StartTimeUnixNano: uint64(startTime.UnixNano()), StartTimeUnixNano: uint64(startTime.UnixNano()),
EndTimeUnixNano: uint64(endTime.UnixNano()), EndTimeUnixNano: uint64(endTime.UnixNano()),
Attributes: []*commonpb.KeyValue{ Attributes: []*commonpb.KeyValue{
@ -341,7 +341,7 @@ func TestExportSpans(t *testing.T) {
}, },
}, },
Status: &tracepb.Status{ Status: &tracepb.Status{
Code: tracepb.Status_UnknownError, Code: tracepb.Status_STATUS_CODE_UNKNOWN_ERROR,
Message: "Unauthenticated", Message: "Unauthenticated",
}, },
}, },