From f3373d8b2e16d90cc9b901985518e6686395558e Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Tue, 5 Dec 2023 17:12:21 -0500 Subject: [PATCH 1/5] Add support send/recv chunk for checkin. --- elastic-agent-client.proto | 30 +- pkg/client/client_v2.go | 187 +++++- pkg/client/reader.go | 13 +- .../elastic-agent-client-deprecated.pb.go | 2 +- pkg/proto/elastic-agent-client-future.pb.go | 2 +- .../elastic-agent-client-future_grpc.pb.go | 2 +- pkg/proto/elastic-agent-client.pb.go | 615 +++++++++++------- pkg/proto/elastic-agent-client_grpc.pb.go | 6 +- 8 files changed, 596 insertions(+), 261 deletions(-) diff --git a/elastic-agent-client.proto b/elastic-agent-client.proto index 22aa027..c052f0d 100644 --- a/elastic-agent-client.proto +++ b/elastic-agent-client.proto @@ -38,7 +38,7 @@ service ElasticAgent { // // Use of the source field allows the input configurations to evolve without needing to modify // the control protocol itself. In some cases commonly used or important fields are extracted as - // a dedicated message type, but these definitions do not comletely define the contents of the + // a dedicated message type, but these definitions do not completely define the contents of the // source field which is free to contain additional fields. rpc CheckinV2(stream CheckinObserved) returns (stream CheckinExpected); @@ -62,6 +62,12 @@ service ElasticAgent { rpc Checkin(stream StateObserved) returns (stream StateExpected); } +// Features that the connection between the client and the server supports. +enum ConnectionSupports { + // Checkin chunking support. + CheckinChunking = 0; +} + // State codes for the current state. enum State { // STARTING is an optional observed state indicating the unit is doing work to start before @@ -282,6 +288,13 @@ message CheckinExpected { // Index or revision of the expected component configuration. When the expected configuration // changes the agent will increment this number and the Component field will be populated. uint64 component_idx = 6; + + // When a units timestamp is provided then the set of units could not all fit inside this single message + // and it was split across multiple messages. Each message chunk must have the same units timestamp, in + // the case that the client gets a new message with a different timestamp and its newer than the other + // timestamp then it should take that new message chunk as a start of a new message set. To finish the a + // set of messages with the same timestamp, the last chunk should be an empty set of units. + google.protobuf.Timestamp units_timestamp = 7; } // Observed status for a unit. @@ -338,6 +351,17 @@ message CheckinObserved { // Index or revision of the currently component configuration. uint64 component_idx = 6; + + // When a units timestamp is provided then the set of units could not all fit inside this single message + // and it was split across multiple messages. Each message chunk must have the same units timestamp, in + // the case that the client gets a new message with a different timestamp and its newer than the other + // timestamp then it should take that new message chunk as a start of a new message set. To finish the a + // set of messages with the same timestamp, the last chunk should be an empty set of units. + google.protobuf.Timestamp units_timestamp = 7; + + // Supports provides information to the agent about extra features this client supports. Should always be included + // on first checkin, and not again unless upon reconnect. 
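  // For example (illustrative): a client that implements chunked check-ins sends
  // supports = [CheckinChunking] in the first CheckinObserved message after connecting,
  // and omits the field on later check-ins over the same connection.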
+ repeated ConnectionSupports supports = 8; } // A action request is streamed from the Elastic Agent to the application so an action can be performed @@ -449,4 +473,8 @@ message ConnInfo { bytes peer_key = 6; // Allowed services that spawned process can use. (only used in V2) repeated ConnInfoServices services = 7; + // Supports provides information to the client about extra features this server supports. + repeated ConnectionSupports supports = 8; + // Maximum message size that the client can use. + uint32 max_message_size = 9; } diff --git a/pkg/client/client_v2.go b/pkg/client/client_v2.go index eef813a..5bf360b 100644 --- a/pkg/client/client_v2.go +++ b/pkg/client/client_v2.go @@ -10,6 +10,7 @@ import ( "encoding/json" "errors" "fmt" + protobuf "google.golang.org/protobuf/proto" "io" "runtime" "runtime/pprof" @@ -24,6 +25,9 @@ import ( "github.com/elastic/elastic-agent-client/v7/pkg/utils" ) +// DefaultMaxMessageSize is the maximum message size that is allowed to be sent. +const DefaultMaxMessageSize = 1024 * 1024 * 4 // copied from the gRPC default + type ( // UnitChangedType defines types for when units are adjusted. UnitChangedType int @@ -181,10 +185,49 @@ type V2 interface { RegisterOptionalDiagnosticHook(paramTag string, name string, description string, filename string, contentType string, hook DiagnosticHook) } +// v2options hold the client options. +type v2options struct { + maxMessageSize int + chunkingAllowed bool + dialOptions []grpc.DialOption +} + +// DialOptions returns the dial options for the GRPC connection. +func (o *v2options) DialOptions() []grpc.DialOption { + opts := make([]grpc.DialOption, 0, len(o.dialOptions)+1) + opts = append(opts, o.dialOptions...) + opts = append(opts, grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(o.maxMessageSize), grpc.MaxCallSendMsgSize(o.maxMessageSize))) + return opts +} + +// V2ClientOption is an option that can be used when creating the client. +type V2ClientOption func(*v2options) + +// WithMaxMessageSize sets the maximum message size. +func WithMaxMessageSize(size int) V2ClientOption { + return func(o *v2options) { + o.maxMessageSize = size + } +} + +// WithChunking sets if the client can use chunking with the server. +func WithChunking(enabled bool) V2ClientOption { + return func(o *v2options) { + o.chunkingAllowed = enabled + } +} + +// WithGRPCDialOptions allows the setting of GRPC dial options. +func WithGRPCDialOptions(opts ...grpc.DialOption) V2ClientOption { + return func(o *v2options) { + o.dialOptions = append(o.dialOptions, opts...) + } +} + // clientV2 manages the state and communication to the Elastic Agent over the V2 control protocol. type clientV2 struct { target string - opts []grpc.DialOption + opts v2options token string agentInfoMu sync.RWMutex @@ -228,10 +271,16 @@ type clientV2 struct { } // NewV2 creates a client connection to Elastic Agent over the V2 control protocol. -func NewV2(target string, token string, versionInfo VersionInfo, opts ...grpc.DialOption) V2 { +func NewV2(target string, token string, versionInfo VersionInfo, opts ...V2ClientOption) V2 { + var options v2options + options.maxMessageSize = DefaultMaxMessageSize + for _, o := range opts { + o(&options) + } + c := &clientV2{ target: target, - opts: opts, + opts: options, token: token, versionInfo: versionInfo, stateChangeObservedCh: make(chan struct{}, 1), @@ -247,7 +296,7 @@ func NewV2(target string, token string, versionInfo VersionInfo, opts ...grpc.Di // Start starts the connection to Elastic Agent. 
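// newChunkedClient is a minimal illustrative sketch of using the options above; the
// address, token, and version values are placeholders, and transportOpt stands in for a
// real credentials dial option supplied by the caller.
func newChunkedClient(transportOpt grpc.DialOption) V2 {
	return NewV2(
		"localhost:6789",
		"example-token",
		VersionInfo{Name: "example", Version: "8.12.0"},
		WithChunking(true),
		WithMaxMessageSize(2*DefaultMaxMessageSize),
		WithGRPCDialOptions(transportOpt), // e.g. grpc.WithTransportCredentials(...)
	)
}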
func (c *clientV2) Start(ctx context.Context) error { c.ctx, c.cancel = context.WithCancel(ctx) - conn, err := grpc.DialContext(ctx, c.target, c.opts...) + conn, err := grpc.DialContext(ctx, c.target, c.opts.DialOptions()...) if err != nil { return err } @@ -405,8 +454,8 @@ func (c *clientV2) checkinRoundTrip() { go func() { defer wg.Done() defer close(readerDone) - expected, err := checkinClient.Recv() - for ; err == nil; expected, err = checkinClient.Recv() { + expected, err := recvExpectedChunked(checkinClient, c.opts.chunkingAllowed) + for ; err == nil; expected, err = recvExpectedChunked(checkinClient, c.opts.chunkingAllowed) { c.applyExpected(expected) } if !errors.Is(err, io.EOF) { @@ -433,6 +482,9 @@ func (c *clientV2) checkinWriter( t := time.NewTicker(c.minCheckTimeout) defer t.Stop() + // Always resent the version information on restart of the loop. + c.versionInfoSent = false + // Keep sending until the call returns an error for c.sendObserved(checkinClient) == nil { @@ -479,8 +531,13 @@ func (c *clientV2) sendObserved(client proto.ElasticAgent_CheckinV2Client) error Version: c.versionInfo.Version, Meta: c.versionInfo.Meta, } + // supports information is sent when version information is set, + // this ensures that its always sent once per connected loop + if c.opts.chunkingAllowed { + msg.Supports = []proto.ConnectionSupports{proto.ConnectionSupports_CheckinChunking} + } } - err := client.Send(msg) + err := sendObservedChunked(client, msg, c.opts.chunkingAllowed, c.opts.maxMessageSize) if err != nil && !errors.Is(err, io.EOF) { c.errCh <- err } else { @@ -1027,3 +1084,119 @@ func inExpected(unit *Unit, expected []*proto.UnitExpected) bool { } return false } + +func recvExpectedChunked(client proto.ElasticAgent_CheckinV2Client, chunk bool) (*proto.CheckinExpected, error) { + if chunk { + var initialMsg *proto.CheckinExpected + for { + msg, err := client.Recv() + if err != nil { + return nil, err + } + if msg.UnitsTimestamp == nil { + // all included in a single message + return msg, nil + } + if initialMsg == nil { + // first message in batch + initialMsg = msg + } else if initialMsg.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { + // only used if the new timestamp is newer + if initialMsg.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { + // not newer so we ignore the message + continue + } + // different batch; restart + initialMsg = msg + } + if len(msg.Units) == 0 { + // ending match message + return initialMsg, nil + } + initialMsg.Units = append(initialMsg.Units, msg.Units...) 
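			// For example (illustrative): chunks sharing the same units timestamp arrive
			// with units [u1, u2], then [u3, u4], then an empty set. The first chunk becomes
			// initialMsg, the second is appended here to yield [u1, u2, u3, u4], and the
			// empty terminating chunk is caught by the len(msg.Units) == 0 check above,
			// which returns the combined message.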
+ } + } + return client.Recv() +} + +func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.CheckinObserved, chunk bool, maxSize int) error { + if !chunk { + // chunking is disabled + return client.Send(msg) + } + s := protobuf.Size(msg) + if s <= maxSize { + // fits so no chunking needed + return client.Send(msg) + } + // doesn't fit; chunk the message + // this is done by dividing the units into two; keep dividing each chunk into two until it fits + // a timestamp is needed to ensure all chunks have a timestamp + msgs, err := observedChunked(msg, maxSize, 2) + if err != nil { + return err + } + for _, msg := range msgs { + if err := client.Send(msg); err != nil { + return err + } + } + return nil +} + +func observedChunked(msg *proto.CheckinObserved, maxSize int, divider int) ([]*proto.CheckinObserved, error) { + timestamp := time.Now() + chunkSize := len(msg.Units) / divider + if chunkSize < 0 { + return nil, fmt.Errorf("unable to chunk proto.CheckinObserved a single unit is greater than the max %d size", maxSize) + } + msgs := make([]*proto.CheckinObserved, 0, divider+1) + for i := 0; i < divider; i++ { + if i == 0 { + // first message all fields are set; except units is made smaller + m := shallowCopyCheckinObserved(msg) + m.Units = make([]*proto.UnitObserved, chunkSize) + copy(m.Units, msg.Units[0:chunkSize]) + msg.UnitsTimestamp = timestamppb.New(timestamp) + if protobuf.Size(m) > maxSize { + // too large increase divider + return observedChunked(msg, maxSize, divider*2) + } + msgs = append(msgs, m) + continue + } + if i == divider-1 { + // last message; chunk size needs to take into account rounding division where the last chunk + // might need to include an extra unit + chunkSize = chunkSize + len(msg.Units) - (chunkSize * divider) + } + m := &proto.CheckinObserved{} + m.Token = msg.Token + m.Units = make([]*proto.UnitObserved, chunkSize) + copy(m.Units, msg.Units[i*chunkSize:(i*chunkSize)+chunkSize]) + m.UnitsTimestamp = timestamppb.New(timestamp) + if protobuf.Size(m) > maxSize { + // too large increase divider + return observedChunked(msg, maxSize, divider*2) + } + msgs = append(msgs, m) + } + msgs = append(msgs, &proto.CheckinObserved{ + Token: msg.Token, + Units: []*proto.UnitObserved{}, + UnitsTimestamp: timestamppb.New(timestamp), + }) + return msgs, nil +} + +func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { + return &proto.CheckinObserved{ + Token: msg.Token, + Units: msg.Units, + VersionInfo: msg.VersionInfo, + FeaturesIdx: msg.FeaturesIdx, + ComponentIdx: msg.ComponentIdx, + UnitsTimestamp: msg.UnitsTimestamp, + Supports: msg.Supports, + } +} diff --git a/pkg/client/reader.go b/pkg/client/reader.go index 2995543..8654ab8 100644 --- a/pkg/client/reader.go +++ b/pkg/client/reader.go @@ -63,7 +63,7 @@ func NewFromReader(reader io.Reader, impl StateInterface, actions ...Action) (Cl } // NewV2FromReader creates a new V2 client reading the connection information from the io.Reader. 
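// A minimal usage sketch (illustrative; assumes the process was spawned by the Elastic
// Agent, which writes the ConnInfo to the process's stdin):
//
//	c, services, err := NewV2FromReader(os.Stdin, VersionInfo{Name: "example", Version: "8.12.0"})
//	if err != nil {
//		return err
//	}
//	// services lists the ConnInfoServices this connection may use; chunking support and
//	// the maximum message size advertised in ConnInfo are applied to the client automatically.
//	err = c.Start(ctx)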
-func NewV2FromReader(reader io.Reader, ver VersionInfo, opts ...grpc.DialOption) (V2, []Service, error) { +func NewV2FromReader(reader io.Reader, ver VersionInfo, opts ...V2ClientOption) (V2, []Service, error) { connInfo := &proto.ConnInfo{} data, err := ioutil.ReadAll(reader) if err != nil { @@ -87,11 +87,20 @@ func NewV2FromReader(reader io.Reader, ver VersionInfo, opts ...grpc.DialOption) Certificates: []tls.Certificate{cert}, RootCAs: caCertPool, }) + for _, s := range connInfo.Supports { + if s == proto.ConnectionSupports_CheckinChunking { + opts = append(opts, WithChunking(true)) + } + } + if connInfo.MaxMessageSize > 0 { + opts = append(opts, WithMaxMessageSize(int(connInfo.MaxMessageSize))) + } + opts = append(opts, WithGRPCDialOptions(grpc.WithTransportCredentials(trans))) client := NewV2( connInfo.Addr, connInfo.Token, ver, - append(opts, grpc.WithTransportCredentials(trans))..., + opts..., ) services := make([]Service, 0, len(connInfo.Services)) for _, srv := range connInfo.Services { diff --git a/pkg/proto/elastic-agent-client-deprecated.pb.go b/pkg/proto/elastic-agent-client-deprecated.pb.go index 92a655e..edec4d9 100644 --- a/pkg/proto/elastic-agent-client-deprecated.pb.go +++ b/pkg/proto/elastic-agent-client-deprecated.pb.go @@ -7,7 +7,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 -// protoc v4.23.2 +// protoc v4.23.4 // source: elastic-agent-client-deprecated.proto package proto diff --git a/pkg/proto/elastic-agent-client-future.pb.go b/pkg/proto/elastic-agent-client-future.pb.go index 3b2188b..05be556 100644 --- a/pkg/proto/elastic-agent-client-future.pb.go +++ b/pkg/proto/elastic-agent-client-future.pb.go @@ -7,7 +7,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 -// protoc v4.23.2 +// protoc v4.23.4 // source: elastic-agent-client-future.proto package proto diff --git a/pkg/proto/elastic-agent-client-future_grpc.pb.go b/pkg/proto/elastic-agent-client-future_grpc.pb.go index 8894e37..2575a00 100644 --- a/pkg/proto/elastic-agent-client-future_grpc.pb.go +++ b/pkg/proto/elastic-agent-client-future_grpc.pb.go @@ -5,7 +5,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.2.0 -// - protoc v4.23.2 +// - protoc v4.23.4 // source: elastic-agent-client-future.proto package proto diff --git a/pkg/proto/elastic-agent-client.pb.go b/pkg/proto/elastic-agent-client.pb.go index a1518c2..3678907 100644 --- a/pkg/proto/elastic-agent-client.pb.go +++ b/pkg/proto/elastic-agent-client.pb.go @@ -5,7 +5,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.28.1 -// protoc v4.23.2 +// protoc v4.23.4 // source: elastic-agent-client.proto package proto @@ -26,6 +26,51 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +// Features that the connection between the client and the server supports. +type ConnectionSupports int32 + +const ( + // Checkin chunking support. + ConnectionSupports_CheckinChunking ConnectionSupports = 0 +) + +// Enum value maps for ConnectionSupports. 
+var ( + ConnectionSupports_name = map[int32]string{ + 0: "CheckinChunking", + } + ConnectionSupports_value = map[string]int32{ + "CheckinChunking": 0, + } +) + +func (x ConnectionSupports) Enum() *ConnectionSupports { + p := new(ConnectionSupports) + *p = x + return p +} + +func (x ConnectionSupports) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ConnectionSupports) Descriptor() protoreflect.EnumDescriptor { + return file_elastic_agent_client_proto_enumTypes[0].Descriptor() +} + +func (ConnectionSupports) Type() protoreflect.EnumType { + return &file_elastic_agent_client_proto_enumTypes[0] +} + +func (x ConnectionSupports) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ConnectionSupports.Descriptor instead. +func (ConnectionSupports) EnumDescriptor() ([]byte, []int) { + return file_elastic_agent_client_proto_rawDescGZIP(), []int{0} +} + // State codes for the current state. type State int32 @@ -85,11 +130,11 @@ func (x State) String() string { } func (State) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[0].Descriptor() + return file_elastic_agent_client_proto_enumTypes[1].Descriptor() } func (State) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[0] + return &file_elastic_agent_client_proto_enumTypes[1] } func (x State) Number() protoreflect.EnumNumber { @@ -98,7 +143,7 @@ func (x State) Number() protoreflect.EnumNumber { // Deprecated: Use State.Descriptor instead. func (State) EnumDescriptor() ([]byte, []int) { - return file_elastic_agent_client_proto_rawDescGZIP(), []int{0} + return file_elastic_agent_client_proto_rawDescGZIP(), []int{1} } // Type of unit. @@ -132,11 +177,11 @@ func (x UnitType) String() string { } func (UnitType) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[1].Descriptor() + return file_elastic_agent_client_proto_enumTypes[2].Descriptor() } func (UnitType) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[1] + return &file_elastic_agent_client_proto_enumTypes[2] } func (x UnitType) Number() protoreflect.EnumNumber { @@ -145,7 +190,7 @@ func (x UnitType) Number() protoreflect.EnumNumber { // Deprecated: Use UnitType.Descriptor instead. func (UnitType) EnumDescriptor() ([]byte, []int) { - return file_elastic_agent_client_proto_rawDescGZIP(), []int{1} + return file_elastic_agent_client_proto_rawDescGZIP(), []int{2} } // Log level for the unit. @@ -188,11 +233,11 @@ func (x UnitLogLevel) String() string { } func (UnitLogLevel) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[2].Descriptor() + return file_elastic_agent_client_proto_enumTypes[3].Descriptor() } func (UnitLogLevel) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[2] + return &file_elastic_agent_client_proto_enumTypes[3] } func (x UnitLogLevel) Number() protoreflect.EnumNumber { @@ -201,7 +246,7 @@ func (x UnitLogLevel) Number() protoreflect.EnumNumber { // Deprecated: Use UnitLogLevel.Descriptor instead. func (UnitLogLevel) EnumDescriptor() ([]byte, []int) { - return file_elastic_agent_client_proto_rawDescGZIP(), []int{2} + return file_elastic_agent_client_proto_rawDescGZIP(), []int{3} } // Services that the client is allowed to use over the connection. 
@@ -249,11 +294,11 @@ func (x ConnInfoServices) String() string { } func (ConnInfoServices) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[3].Descriptor() + return file_elastic_agent_client_proto_enumTypes[4].Descriptor() } func (ConnInfoServices) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[3] + return &file_elastic_agent_client_proto_enumTypes[4] } func (x ConnInfoServices) Number() protoreflect.EnumNumber { @@ -262,7 +307,7 @@ func (x ConnInfoServices) Number() protoreflect.EnumNumber { // Deprecated: Use ConnInfoServices.Descriptor instead. func (ConnInfoServices) EnumDescriptor() ([]byte, []int) { - return file_elastic_agent_client_proto_rawDescGZIP(), []int{3} + return file_elastic_agent_client_proto_rawDescGZIP(), []int{4} } // Type of action being performed. @@ -300,11 +345,11 @@ func (x ActionRequest_Type) String() string { } func (ActionRequest_Type) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[4].Descriptor() + return file_elastic_agent_client_proto_enumTypes[5].Descriptor() } func (ActionRequest_Type) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[4] + return &file_elastic_agent_client_proto_enumTypes[5] } func (x ActionRequest_Type) Number() protoreflect.EnumNumber { @@ -354,11 +399,11 @@ func (x ActionRequest_Level) String() string { } func (ActionRequest_Level) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[5].Descriptor() + return file_elastic_agent_client_proto_enumTypes[6].Descriptor() } func (ActionRequest_Level) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[5] + return &file_elastic_agent_client_proto_enumTypes[6] } func (x ActionRequest_Level) Number() protoreflect.EnumNumber { @@ -403,11 +448,11 @@ func (x ActionResponse_Status) String() string { } func (ActionResponse_Status) Descriptor() protoreflect.EnumDescriptor { - return file_elastic_agent_client_proto_enumTypes[6].Descriptor() + return file_elastic_agent_client_proto_enumTypes[7].Descriptor() } func (ActionResponse_Status) Type() protoreflect.EnumType { - return &file_elastic_agent_client_proto_enumTypes[6] + return &file_elastic_agent_client_proto_enumTypes[7] } func (x ActionResponse_Status) Number() protoreflect.EnumNumber { @@ -1425,6 +1470,12 @@ type CheckinExpected struct { // Index or revision of the expected component configuration. When the expected configuration // changes the agent will increment this number and the Component field will be populated. ComponentIdx uint64 `protobuf:"varint,6,opt,name=component_idx,json=componentIdx,proto3" json:"component_idx,omitempty"` + // When a units timestamp is provided then the set of units could not all fit inside this single message + // and it was split across multiple messages. Each message chunk must have the same units timestamp, in + // the case that the client gets a new message with a different timestamp and its newer than the other + // timestamp then it should take that new message chunk as a start of a new message set. To finish the a + // set of messages with the same timestamp, the last chunk should be an empty set of units. 
+ UnitsTimestamp *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=units_timestamp,json=unitsTimestamp,proto3" json:"units_timestamp,omitempty"` } func (x *CheckinExpected) Reset() { @@ -1501,6 +1552,13 @@ func (x *CheckinExpected) GetComponentIdx() uint64 { return 0 } +func (x *CheckinExpected) GetUnitsTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.UnitsTimestamp + } + return nil +} + // Observed status for a unit. // // Contains the currently applied `config_state_idx` (0 in the case of initial start, 1 is the first @@ -1688,6 +1746,15 @@ type CheckinObserved struct { FeaturesIdx uint64 `protobuf:"varint,5,opt,name=features_idx,json=featuresIdx,proto3" json:"features_idx,omitempty"` // Index or revision of the currently component configuration. ComponentIdx uint64 `protobuf:"varint,6,opt,name=component_idx,json=componentIdx,proto3" json:"component_idx,omitempty"` + // When a units timestamp is provided then the set of units could not all fit inside this single message + // and it was split across multiple messages. Each message chunk must have the same units timestamp, in + // the case that the client gets a new message with a different timestamp and its newer than the other + // timestamp then it should take that new message chunk as a start of a new message set. To finish the a + // set of messages with the same timestamp, the last chunk should be an empty set of units. + UnitsTimestamp *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=units_timestamp,json=unitsTimestamp,proto3" json:"units_timestamp,omitempty"` + // Supports provides information to the agent about extra features this client supports. Should always be included + // on first checkin, and not again unless upon reconnect. + Supports []ConnectionSupports `protobuf:"varint,8,rep,packed,name=supports,proto3,enum=proto.ConnectionSupports" json:"supports,omitempty"` } func (x *CheckinObserved) Reset() { @@ -1757,6 +1824,20 @@ func (x *CheckinObserved) GetComponentIdx() uint64 { return 0 } +func (x *CheckinObserved) GetUnitsTimestamp() *timestamppb.Timestamp { + if x != nil { + return x.UnitsTimestamp + } + return nil +} + +func (x *CheckinObserved) GetSupports() []ConnectionSupports { + if x != nil { + return x.Supports + } + return nil +} + // A action request is streamed from the Elastic Agent to the application so an action can be performed // by the connected application. type ActionRequest struct { @@ -2064,6 +2145,10 @@ type ConnInfo struct { PeerKey []byte `protobuf:"bytes,6,opt,name=peer_key,json=peerKey,proto3" json:"peer_key,omitempty"` // Allowed services that spawned process can use. (only used in V2) Services []ConnInfoServices `protobuf:"varint,7,rep,packed,name=services,proto3,enum=proto.ConnInfoServices" json:"services,omitempty"` + // Supports provides information to the client about extra features this server supports. + Supports []ConnectionSupports `protobuf:"varint,8,rep,packed,name=supports,proto3,enum=proto.ConnectionSupports" json:"supports,omitempty"` + // Maximum message size that the client can use. 
+ MaxMessageSize uint32 `protobuf:"varint,9,opt,name=max_message_size,json=maxMessageSize,proto3" json:"max_message_size,omitempty"` } func (x *ConnInfo) Reset() { @@ -2147,6 +2232,20 @@ func (x *ConnInfo) GetServices() []ConnInfoServices { return nil } +func (x *ConnInfo) GetSupports() []ConnectionSupports { + if x != nil { + return x.Supports + } + return nil +} + +func (x *ConnInfo) GetMaxMessageSize() uint32 { + if x != nil { + return x.MaxMessageSize + } + return 0 +} + var File_elastic_agent_client_proto protoreflect.FileDescriptor var file_elastic_agent_client_proto_rawDesc = []byte{ @@ -2274,7 +2373,7 @@ var file_elastic_agent_client_proto_rawDesc = []byte{ 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x20, 0x0a, 0x0c, 0x67, 0x6f, 0x5f, 0x6d, 0x61, 0x78, 0x5f, 0x70, 0x72, 0x6f, 0x63, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x67, 0x6f, 0x4d, 0x61, 0x78, 0x50, 0x72, 0x6f, 0x63, 0x73, 0x22, - 0x99, 0x02, 0x0a, 0x0f, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x45, 0x78, 0x70, 0x65, 0x63, + 0xde, 0x02, 0x0a, 0x0f, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x45, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x12, 0x29, 0x0a, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x6e, 0x69, 0x74, 0x45, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x12, 0x36, @@ -2291,146 +2390,167 @@ var file_elastic_agent_client_proto_rawDesc = []byte{ 0x6f, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x09, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x78, 0x22, 0xde, 0x01, 0x0a, 0x0c, - 0x55, 0x6e, 0x69, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x23, 0x0a, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x55, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x12, 0x28, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x5f, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0e, 0x63, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x64, 0x78, 0x12, 0x22, 0x0a, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, - 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x31, 0x0a, 0x07, 0x70, 0x61, 0x79, - 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, - 0x75, 0x63, 0x74, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x22, 0xc4, 0x01, 0x0a, - 0x1a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, - 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 
0x61, 0x6d, 0x65, 0x12, - 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3f, 0x0a, 0x04, 0x6d, 0x65, 0x74, - 0x61, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x56, - 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x45, - 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x1a, 0x37, 0x0a, 0x09, 0x4d, 0x65, - 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, - 0x02, 0x38, 0x01, 0x22, 0x86, 0x02, 0x0a, 0x0f, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, - 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x29, 0x0a, - 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x6e, 0x69, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, - 0x64, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x12, 0x49, 0x0a, 0x0c, 0x76, 0x65, 0x72, 0x73, - 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, - 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, - 0x6f, 0x48, 0x00, 0x52, 0x0b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, - 0x88, 0x01, 0x01, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x5f, - 0x69, 0x64, 0x78, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, 0x66, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x49, 0x64, 0x78, 0x12, 0x23, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, - 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0c, 0x63, - 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x78, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, - 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x4a, 0x04, 0x08, 0x04, - 0x10, 0x05, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x22, 0xc3, 0x02, 0x0a, - 0x0d, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x12, - 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x75, 0x6e, - 0x69, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x6e, 0x69, - 0x74, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x09, 0x75, 0x6e, 0x69, 0x74, 0x5f, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, - 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x75, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, - 0x65, 0x12, 0x2d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, 0x32, - 
0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x12, 0x30, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, - 0x65, 0x6c, 0x22, 0x23, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x43, 0x55, - 0x53, 0x54, 0x4f, 0x4d, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x49, 0x41, 0x47, 0x4e, 0x4f, - 0x53, 0x54, 0x49, 0x43, 0x53, 0x10, 0x01, 0x22, 0x29, 0x0a, 0x05, 0x4c, 0x65, 0x76, 0x65, 0x6c, - 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4c, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, 0x4d, - 0x50, 0x4f, 0x4e, 0x45, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x4e, 0x49, 0x54, - 0x10, 0x02, 0x22, 0xe5, 0x01, 0x0a, 0x1a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x61, - 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x55, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, - 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, 0x65, - 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, - 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, - 0x12, 0x38, 0x0a, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x18, 0x06, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, - 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x22, 0xea, 0x01, 0x0a, 0x0e, 0x41, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, - 0x6b, 0x65, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x02, 0x69, 0x64, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x73, - 0x75, 0x6c, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, 0x6c, - 0x74, 0x12, 0x41, 0x0a, 0x0a, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x18, - 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, - 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x55, 0x6e, - 0x69, 0x74, 0x52, 0x65, 
0x73, 0x75, 0x6c, 0x74, 0x52, 0x0a, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, - 0x73, 0x74, 0x69, 0x63, 0x22, 0x21, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, - 0x0a, 0x07, 0x53, 0x55, 0x43, 0x43, 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x46, - 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x22, 0xdb, 0x01, 0x0a, 0x08, 0x43, 0x6f, 0x6e, 0x6e, - 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x61, 0x64, 0x64, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x61, 0x64, 0x64, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, - 0x65, 0x72, 0x76, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, - 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, - 0x17, 0x0a, 0x07, 0x63, 0x61, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x06, 0x63, 0x61, 0x43, 0x65, 0x72, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x65, 0x65, 0x72, - 0x5f, 0x63, 0x65, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x70, 0x65, 0x65, - 0x72, 0x43, 0x65, 0x72, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6b, 0x65, - 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x65, 0x65, 0x72, 0x4b, 0x65, 0x79, - 0x12, 0x33, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, 0x03, - 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x49, - 0x6e, 0x66, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x08, 0x73, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x73, 0x2a, 0x68, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0c, - 0x0a, 0x08, 0x53, 0x54, 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, - 0x43, 0x4f, 0x4e, 0x46, 0x49, 0x47, 0x55, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0b, 0x0a, - 0x07, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x45, - 0x47, 0x52, 0x41, 0x44, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, - 0x45, 0x44, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x49, 0x4e, 0x47, - 0x10, 0x05, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x06, 0x2a, - 0x21, 0x0a, 0x08, 0x55, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x49, - 0x4e, 0x50, 0x55, 0x54, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4f, 0x55, 0x54, 0x50, 0x55, 0x54, - 0x10, 0x01, 0x2a, 0x43, 0x0a, 0x0c, 0x55, 0x6e, 0x69, 0x74, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x00, 0x12, 0x08, 0x0a, - 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, - 0x02, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, - 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x04, 0x2a, 0x50, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x49, - 0x6e, 0x66, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x43, - 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x68, 0x65, 0x63, - 0x6b, 0x69, 0x6e, 0x56, 0x32, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x72, 0x65, - 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x10, 0x03, - 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x10, 0x04, 0x32, 0xc6, 0x01, 0x0a, 0x0c, 0x45, 0x6c, - 0x61, 0x73, 0x74, 0x69, 0x63, 0x41, 0x67, 0x65, 
0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x09, 0x43, 0x68, - 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x56, 0x32, 0x12, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x1a, - 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x45, - 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x28, 0x01, 0x30, 0x01, 0x12, 0x3a, 0x0a, 0x07, 0x41, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, - 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x1a, 0x14, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x28, 0x01, 0x30, 0x01, 0x12, 0x39, 0x0a, 0x07, 0x43, 0x68, 0x65, 0x63, 0x6b, - 0x69, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x1a, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x28, 0x01, - 0x30, 0x01, 0x42, 0x14, 0x5a, 0x0f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0xf8, 0x01, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x78, 0x12, 0x43, 0x0a, 0x0f, 0x75, + 0x6e, 0x69, 0x74, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x07, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x0e, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x22, 0xde, 0x01, 0x0a, 0x0c, 0x55, 0x6e, 0x69, 0x74, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, + 0x64, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x23, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, + 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x28, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x0e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x53, 0x74, 0x61, 0x74, 0x65, 0x49, 0x64, 0x78, + 0x12, 0x22, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x31, + 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x53, 0x74, 0x72, 0x75, 0x63, 0x74, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, + 0x64, 0x22, 0xc4, 0x01, 0x0a, 0x1a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, + 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 
0x69, 0x6f, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3f, + 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, + 0x72, 0x76, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x2e, + 0x4d, 0x65, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x1a, + 0x37, 0x0a, 0x09, 0x4d, 0x65, 0x74, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x82, 0x03, 0x0a, 0x0f, 0x43, 0x68, 0x65, + 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x12, 0x29, 0x0a, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55, 0x6e, 0x69, 0x74, 0x4f, 0x62, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x52, 0x05, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x12, 0x49, 0x0a, + 0x0c, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, + 0x6f, 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x0b, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x88, 0x01, 0x01, 0x12, 0x21, 0x0a, 0x0c, 0x66, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x05, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b, + 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x49, 0x64, 0x78, 0x12, 0x23, 0x0a, 0x0d, 0x63, + 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x78, 0x18, 0x06, 0x20, 0x01, + 0x28, 0x04, 0x52, 0x0c, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x78, + 0x12, 0x43, 0x0a, 0x0f, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, + 0x61, 0x6d, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x75, 0x6e, 0x69, 0x74, 0x73, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x35, 0x0a, 0x08, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, + 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, + 0x74, 0x73, 0x52, 0x08, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x42, 0x0f, 0x0a, 0x0d, + 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x69, 0x6e, 0x66, 0x6f, 0x4a, 0x04, 0x08, + 0x04, 0x10, 0x05, 0x52, 0x08, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x22, 0xc3, 0x02, + 0x0a, 0x0d, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, + 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, + 
0x61, 0x6d, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x03, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x06, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x17, 0x0a, 0x07, 0x75, + 0x6e, 0x69, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x75, 0x6e, + 0x69, 0x74, 0x49, 0x64, 0x12, 0x2c, 0x0a, 0x09, 0x75, 0x6e, 0x69, 0x74, 0x5f, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x0f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, + 0x55, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x08, 0x75, 0x6e, 0x69, 0x74, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x2d, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x12, 0x30, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, + 0x76, 0x65, 0x6c, 0x22, 0x23, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0a, 0x0a, 0x06, 0x43, + 0x55, 0x53, 0x54, 0x4f, 0x4d, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x44, 0x49, 0x41, 0x47, 0x4e, + 0x4f, 0x53, 0x54, 0x49, 0x43, 0x53, 0x10, 0x01, 0x22, 0x29, 0x0a, 0x05, 0x4c, 0x65, 0x76, 0x65, + 0x6c, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4c, 0x4c, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x4f, + 0x4d, 0x50, 0x4f, 0x4e, 0x45, 0x4e, 0x54, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x4e, 0x49, + 0x54, 0x10, 0x02, 0x22, 0xe5, 0x01, 0x0a, 0x1a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, + 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x55, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x6e, 0x61, + 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x5f, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6f, 0x6e, 0x74, + 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, + 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, + 0x74, 0x12, 0x38, 0x0a, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, + 0x52, 0x09, 0x67, 0x65, 0x6e, 0x65, 0x72, 0x61, 0x74, 0x65, 0x64, 0x22, 0xea, 0x01, 0x0a, 0x0e, + 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, + 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x02, 0x69, 0x64, 0x12, 0x34, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0e, 
0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, + 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, + 0x73, 0x75, 0x6c, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x73, 0x75, + 0x6c, 0x74, 0x12, 0x41, 0x0a, 0x0a, 0x64, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, + 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x69, 0x61, 0x67, 0x6e, 0x6f, 0x73, 0x74, 0x69, 0x63, 0x55, + 0x6e, 0x69, 0x74, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x0a, 0x64, 0x69, 0x61, 0x67, 0x6e, + 0x6f, 0x73, 0x74, 0x69, 0x63, 0x22, 0x21, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, + 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x43, 0x43, 0x45, 0x53, 0x53, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, + 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x22, 0xbc, 0x02, 0x0a, 0x08, 0x43, 0x6f, 0x6e, + 0x6e, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x61, 0x64, 0x64, 0x72, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x61, 0x64, 0x64, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, + 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, + 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, + 0x12, 0x17, 0x0a, 0x07, 0x63, 0x61, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x06, 0x63, 0x61, 0x43, 0x65, 0x72, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x70, 0x65, 0x65, + 0x72, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x70, 0x65, + 0x65, 0x72, 0x43, 0x65, 0x72, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x6b, + 0x65, 0x79, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x65, 0x65, 0x72, 0x4b, 0x65, + 0x79, 0x12, 0x33, 0x0a, 0x08, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x18, 0x07, 0x20, + 0x03, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, + 0x49, 0x6e, 0x66, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x52, 0x08, 0x73, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x35, 0x0a, 0x08, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, + 0x74, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0e, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x75, 0x70, 0x70, 0x6f, + 0x72, 0x74, 0x73, 0x52, 0x08, 0x73, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x12, 0x28, 0x0a, + 0x10, 0x6d, 0x61, 0x78, 0x5f, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x7a, + 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0e, 0x6d, 0x61, 0x78, 0x4d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x2a, 0x29, 0x0a, 0x12, 0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x75, 0x70, 0x70, 0x6f, 0x72, 0x74, 0x73, 0x12, 0x13, 0x0a, + 0x0f, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x43, 0x68, 0x75, 0x6e, 0x6b, 0x69, 0x6e, 0x67, + 0x10, 0x00, 0x2a, 0x68, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x0c, 0x0a, 0x08, 0x53, + 0x54, 0x41, 0x52, 0x54, 0x49, 0x4e, 0x47, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x43, 0x4f, 0x4e, + 0x46, 0x49, 0x47, 0x55, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x48, 0x45, + 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x02, 0x12, 
0x0c, 0x0a, 0x08, 0x44, 0x45, 0x47, 0x52, 0x41, + 0x44, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, + 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x49, 0x4e, 0x47, 0x10, 0x05, 0x12, + 0x0b, 0x0a, 0x07, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x06, 0x2a, 0x21, 0x0a, 0x08, + 0x55, 0x6e, 0x69, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x09, 0x0a, 0x05, 0x49, 0x4e, 0x50, 0x55, + 0x54, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x4f, 0x55, 0x54, 0x50, 0x55, 0x54, 0x10, 0x01, 0x2a, + 0x43, 0x0a, 0x0c, 0x55, 0x6e, 0x69, 0x74, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, + 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, + 0x52, 0x4e, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x09, + 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, + 0x43, 0x45, 0x10, 0x04, 0x2a, 0x50, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x49, 0x6e, 0x66, 0x6f, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x68, 0x65, 0x63, + 0x6b, 0x69, 0x6e, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, + 0x56, 0x32, 0x10, 0x01, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x10, 0x02, 0x12, + 0x0c, 0x0a, 0x08, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x10, 0x03, 0x12, 0x07, 0x0a, + 0x03, 0x4c, 0x6f, 0x67, 0x10, 0x04, 0x32, 0xc6, 0x01, 0x0a, 0x0c, 0x45, 0x6c, 0x61, 0x73, 0x74, + 0x69, 0x63, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x3f, 0x0a, 0x09, 0x43, 0x68, 0x65, 0x63, 0x6b, + 0x69, 0x6e, 0x56, 0x32, 0x12, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, + 0x63, 0x6b, 0x69, 0x6e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x64, 0x1a, 0x16, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x45, 0x78, 0x70, 0x65, + 0x63, 0x74, 0x65, 0x64, 0x28, 0x01, 0x30, 0x01, 0x12, 0x3a, 0x0a, 0x07, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x12, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x1a, 0x14, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x28, 0x01, 0x30, 0x01, 0x12, 0x39, 0x0a, 0x07, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x69, 0x6e, 0x12, + 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x4f, 0x62, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x64, 0x1a, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x45, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x28, 0x01, 0x30, 0x01, 0x42, + 0x14, 0x5a, 0x0f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x3b, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0xf8, 0x01, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2445,94 +2565,99 @@ func file_elastic_agent_client_proto_rawDescGZIP() []byte { return file_elastic_agent_client_proto_rawDescData } -var file_elastic_agent_client_proto_enumTypes = make([]protoimpl.EnumInfo, 7) +var file_elastic_agent_client_proto_enumTypes = make([]protoimpl.EnumInfo, 8) var file_elastic_agent_client_proto_msgTypes = make([]protoimpl.MessageInfo, 23) var file_elastic_agent_client_proto_goTypes = []interface{}{ - (State)(0), // 0: proto.State - (UnitType)(0), // 1: proto.UnitType - (UnitLogLevel)(0), // 2: proto.UnitLogLevel - (ConnInfoServices)(0), // 3: proto.ConnInfoServices - (ActionRequest_Type)(0), // 4: proto.ActionRequest.Type - 
(ActionRequest_Level)(0), // 5: proto.ActionRequest.Level - (ActionResponse_Status)(0), // 6: proto.ActionResponse.Status - (*Package)(nil), // 7: proto.Package - (*Meta)(nil), // 8: proto.Meta - (*DataStream)(nil), // 9: proto.DataStream - (*Stream)(nil), // 10: proto.Stream - (*UnitExpectedConfig)(nil), // 11: proto.UnitExpectedConfig - (*UnitExpected)(nil), // 12: proto.UnitExpected - (*CheckinAgentInfo)(nil), // 13: proto.CheckinAgentInfo - (*Features)(nil), // 14: proto.Features - (*FQDNFeature)(nil), // 15: proto.FQDNFeature - (*ElasticAPMTLS)(nil), // 16: proto.ElasticAPMTLS - (*ElasticAPM)(nil), // 17: proto.ElasticAPM - (*APMConfig)(nil), // 18: proto.APMConfig - (*Component)(nil), // 19: proto.Component - (*ComponentLimits)(nil), // 20: proto.ComponentLimits - (*CheckinExpected)(nil), // 21: proto.CheckinExpected - (*UnitObserved)(nil), // 22: proto.UnitObserved - (*CheckinObservedVersionInfo)(nil), // 23: proto.CheckinObservedVersionInfo - (*CheckinObserved)(nil), // 24: proto.CheckinObserved - (*ActionRequest)(nil), // 25: proto.ActionRequest - (*ActionDiagnosticUnitResult)(nil), // 26: proto.ActionDiagnosticUnitResult - (*ActionResponse)(nil), // 27: proto.ActionResponse - (*ConnInfo)(nil), // 28: proto.ConnInfo - nil, // 29: proto.CheckinObservedVersionInfo.MetaEntry - (*structpb.Struct)(nil), // 30: google.protobuf.Struct - (*timestamppb.Timestamp)(nil), // 31: google.protobuf.Timestamp - (*StateObserved)(nil), // 32: proto.StateObserved - (*StateExpected)(nil), // 33: proto.StateExpected + (ConnectionSupports)(0), // 0: proto.ConnectionSupports + (State)(0), // 1: proto.State + (UnitType)(0), // 2: proto.UnitType + (UnitLogLevel)(0), // 3: proto.UnitLogLevel + (ConnInfoServices)(0), // 4: proto.ConnInfoServices + (ActionRequest_Type)(0), // 5: proto.ActionRequest.Type + (ActionRequest_Level)(0), // 6: proto.ActionRequest.Level + (ActionResponse_Status)(0), // 7: proto.ActionResponse.Status + (*Package)(nil), // 8: proto.Package + (*Meta)(nil), // 9: proto.Meta + (*DataStream)(nil), // 10: proto.DataStream + (*Stream)(nil), // 11: proto.Stream + (*UnitExpectedConfig)(nil), // 12: proto.UnitExpectedConfig + (*UnitExpected)(nil), // 13: proto.UnitExpected + (*CheckinAgentInfo)(nil), // 14: proto.CheckinAgentInfo + (*Features)(nil), // 15: proto.Features + (*FQDNFeature)(nil), // 16: proto.FQDNFeature + (*ElasticAPMTLS)(nil), // 17: proto.ElasticAPMTLS + (*ElasticAPM)(nil), // 18: proto.ElasticAPM + (*APMConfig)(nil), // 19: proto.APMConfig + (*Component)(nil), // 20: proto.Component + (*ComponentLimits)(nil), // 21: proto.ComponentLimits + (*CheckinExpected)(nil), // 22: proto.CheckinExpected + (*UnitObserved)(nil), // 23: proto.UnitObserved + (*CheckinObservedVersionInfo)(nil), // 24: proto.CheckinObservedVersionInfo + (*CheckinObserved)(nil), // 25: proto.CheckinObserved + (*ActionRequest)(nil), // 26: proto.ActionRequest + (*ActionDiagnosticUnitResult)(nil), // 27: proto.ActionDiagnosticUnitResult + (*ActionResponse)(nil), // 28: proto.ActionResponse + (*ConnInfo)(nil), // 29: proto.ConnInfo + nil, // 30: proto.CheckinObservedVersionInfo.MetaEntry + (*structpb.Struct)(nil), // 31: google.protobuf.Struct + (*timestamppb.Timestamp)(nil), // 32: google.protobuf.Timestamp + (*StateObserved)(nil), // 33: proto.StateObserved + (*StateExpected)(nil), // 34: proto.StateExpected } var file_elastic_agent_client_proto_depIdxs = []int32{ - 30, // 0: proto.Package.source:type_name -> google.protobuf.Struct - 30, // 1: proto.Meta.source:type_name -> google.protobuf.Struct - 7, // 2: 
proto.Meta.package:type_name -> proto.Package - 30, // 3: proto.DataStream.source:type_name -> google.protobuf.Struct - 30, // 4: proto.Stream.source:type_name -> google.protobuf.Struct - 9, // 5: proto.Stream.data_stream:type_name -> proto.DataStream - 30, // 6: proto.UnitExpectedConfig.source:type_name -> google.protobuf.Struct - 8, // 7: proto.UnitExpectedConfig.meta:type_name -> proto.Meta - 9, // 8: proto.UnitExpectedConfig.data_stream:type_name -> proto.DataStream - 10, // 9: proto.UnitExpectedConfig.streams:type_name -> proto.Stream - 1, // 10: proto.UnitExpected.type:type_name -> proto.UnitType - 0, // 11: proto.UnitExpected.state:type_name -> proto.State - 11, // 12: proto.UnitExpected.config:type_name -> proto.UnitExpectedConfig - 2, // 13: proto.UnitExpected.log_level:type_name -> proto.UnitLogLevel - 30, // 14: proto.Features.source:type_name -> google.protobuf.Struct - 15, // 15: proto.Features.fqdn:type_name -> proto.FQDNFeature - 16, // 16: proto.ElasticAPM.tls:type_name -> proto.ElasticAPMTLS - 17, // 17: proto.APMConfig.elastic:type_name -> proto.ElasticAPM - 20, // 18: proto.Component.limits:type_name -> proto.ComponentLimits - 18, // 19: proto.Component.apm_config:type_name -> proto.APMConfig - 30, // 20: proto.ComponentLimits.source:type_name -> google.protobuf.Struct - 12, // 21: proto.CheckinExpected.units:type_name -> proto.UnitExpected - 13, // 22: proto.CheckinExpected.agent_info:type_name -> proto.CheckinAgentInfo - 14, // 23: proto.CheckinExpected.features:type_name -> proto.Features - 19, // 24: proto.CheckinExpected.component:type_name -> proto.Component - 1, // 25: proto.UnitObserved.type:type_name -> proto.UnitType - 0, // 26: proto.UnitObserved.state:type_name -> proto.State - 30, // 27: proto.UnitObserved.payload:type_name -> google.protobuf.Struct - 29, // 28: proto.CheckinObservedVersionInfo.meta:type_name -> proto.CheckinObservedVersionInfo.MetaEntry - 22, // 29: proto.CheckinObserved.units:type_name -> proto.UnitObserved - 23, // 30: proto.CheckinObserved.version_info:type_name -> proto.CheckinObservedVersionInfo - 1, // 31: proto.ActionRequest.unit_type:type_name -> proto.UnitType - 4, // 32: proto.ActionRequest.type:type_name -> proto.ActionRequest.Type - 5, // 33: proto.ActionRequest.level:type_name -> proto.ActionRequest.Level - 31, // 34: proto.ActionDiagnosticUnitResult.generated:type_name -> google.protobuf.Timestamp - 6, // 35: proto.ActionResponse.status:type_name -> proto.ActionResponse.Status - 26, // 36: proto.ActionResponse.diagnostic:type_name -> proto.ActionDiagnosticUnitResult - 3, // 37: proto.ConnInfo.services:type_name -> proto.ConnInfoServices - 24, // 38: proto.ElasticAgent.CheckinV2:input_type -> proto.CheckinObserved - 27, // 39: proto.ElasticAgent.Actions:input_type -> proto.ActionResponse - 32, // 40: proto.ElasticAgent.Checkin:input_type -> proto.StateObserved - 21, // 41: proto.ElasticAgent.CheckinV2:output_type -> proto.CheckinExpected - 25, // 42: proto.ElasticAgent.Actions:output_type -> proto.ActionRequest - 33, // 43: proto.ElasticAgent.Checkin:output_type -> proto.StateExpected - 41, // [41:44] is the sub-list for method output_type - 38, // [38:41] is the sub-list for method input_type - 38, // [38:38] is the sub-list for extension type_name - 38, // [38:38] is the sub-list for extension extendee - 0, // [0:38] is the sub-list for field type_name + 31, // 0: proto.Package.source:type_name -> google.protobuf.Struct + 31, // 1: proto.Meta.source:type_name -> google.protobuf.Struct + 8, // 2: proto.Meta.package:type_name 
-> proto.Package + 31, // 3: proto.DataStream.source:type_name -> google.protobuf.Struct + 31, // 4: proto.Stream.source:type_name -> google.protobuf.Struct + 10, // 5: proto.Stream.data_stream:type_name -> proto.DataStream + 31, // 6: proto.UnitExpectedConfig.source:type_name -> google.protobuf.Struct + 9, // 7: proto.UnitExpectedConfig.meta:type_name -> proto.Meta + 10, // 8: proto.UnitExpectedConfig.data_stream:type_name -> proto.DataStream + 11, // 9: proto.UnitExpectedConfig.streams:type_name -> proto.Stream + 2, // 10: proto.UnitExpected.type:type_name -> proto.UnitType + 1, // 11: proto.UnitExpected.state:type_name -> proto.State + 12, // 12: proto.UnitExpected.config:type_name -> proto.UnitExpectedConfig + 3, // 13: proto.UnitExpected.log_level:type_name -> proto.UnitLogLevel + 31, // 14: proto.Features.source:type_name -> google.protobuf.Struct + 16, // 15: proto.Features.fqdn:type_name -> proto.FQDNFeature + 17, // 16: proto.ElasticAPM.tls:type_name -> proto.ElasticAPMTLS + 18, // 17: proto.APMConfig.elastic:type_name -> proto.ElasticAPM + 21, // 18: proto.Component.limits:type_name -> proto.ComponentLimits + 19, // 19: proto.Component.apm_config:type_name -> proto.APMConfig + 31, // 20: proto.ComponentLimits.source:type_name -> google.protobuf.Struct + 13, // 21: proto.CheckinExpected.units:type_name -> proto.UnitExpected + 14, // 22: proto.CheckinExpected.agent_info:type_name -> proto.CheckinAgentInfo + 15, // 23: proto.CheckinExpected.features:type_name -> proto.Features + 20, // 24: proto.CheckinExpected.component:type_name -> proto.Component + 32, // 25: proto.CheckinExpected.units_timestamp:type_name -> google.protobuf.Timestamp + 2, // 26: proto.UnitObserved.type:type_name -> proto.UnitType + 1, // 27: proto.UnitObserved.state:type_name -> proto.State + 31, // 28: proto.UnitObserved.payload:type_name -> google.protobuf.Struct + 30, // 29: proto.CheckinObservedVersionInfo.meta:type_name -> proto.CheckinObservedVersionInfo.MetaEntry + 23, // 30: proto.CheckinObserved.units:type_name -> proto.UnitObserved + 24, // 31: proto.CheckinObserved.version_info:type_name -> proto.CheckinObservedVersionInfo + 32, // 32: proto.CheckinObserved.units_timestamp:type_name -> google.protobuf.Timestamp + 0, // 33: proto.CheckinObserved.supports:type_name -> proto.ConnectionSupports + 2, // 34: proto.ActionRequest.unit_type:type_name -> proto.UnitType + 5, // 35: proto.ActionRequest.type:type_name -> proto.ActionRequest.Type + 6, // 36: proto.ActionRequest.level:type_name -> proto.ActionRequest.Level + 32, // 37: proto.ActionDiagnosticUnitResult.generated:type_name -> google.protobuf.Timestamp + 7, // 38: proto.ActionResponse.status:type_name -> proto.ActionResponse.Status + 27, // 39: proto.ActionResponse.diagnostic:type_name -> proto.ActionDiagnosticUnitResult + 4, // 40: proto.ConnInfo.services:type_name -> proto.ConnInfoServices + 0, // 41: proto.ConnInfo.supports:type_name -> proto.ConnectionSupports + 25, // 42: proto.ElasticAgent.CheckinV2:input_type -> proto.CheckinObserved + 28, // 43: proto.ElasticAgent.Actions:input_type -> proto.ActionResponse + 33, // 44: proto.ElasticAgent.Checkin:input_type -> proto.StateObserved + 22, // 45: proto.ElasticAgent.CheckinV2:output_type -> proto.CheckinExpected + 26, // 46: proto.ElasticAgent.Actions:output_type -> proto.ActionRequest + 34, // 47: proto.ElasticAgent.Checkin:output_type -> proto.StateExpected + 45, // [45:48] is the sub-list for method output_type + 42, // [42:45] is the sub-list for method input_type + 42, // [42:42] is the sub-list 
for extension type_name + 42, // [42:42] is the sub-list for extension extendee + 0, // [0:42] is the sub-list for field type_name } func init() { file_elastic_agent_client_proto_init() } @@ -2814,7 +2939,7 @@ func file_elastic_agent_client_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_elastic_agent_client_proto_rawDesc, - NumEnums: 7, + NumEnums: 8, NumMessages: 23, NumExtensions: 0, NumServices: 1, diff --git a/pkg/proto/elastic-agent-client_grpc.pb.go b/pkg/proto/elastic-agent-client_grpc.pb.go index 618eb7f..0929b7c 100644 --- a/pkg/proto/elastic-agent-client_grpc.pb.go +++ b/pkg/proto/elastic-agent-client_grpc.pb.go @@ -5,7 +5,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.2.0 -// - protoc v4.23.2 +// - protoc v4.23.4 // source: elastic-agent-client.proto package proto @@ -51,7 +51,7 @@ type ElasticAgentClient interface { // // Use of the source field allows the input configurations to evolve without needing to modify // the control protocol itself. In some cases commonly used or important fields are extracted as - // a dedicated message type, but these definitions do not comletely define the contents of the + // a dedicated message type, but these definitions do not completely define the contents of the // source field which is free to contain additional fields. CheckinV2(ctx context.Context, opts ...grpc.CallOption) (ElasticAgent_CheckinV2Client, error) // Called by the client after receiving connection info to allow the Elastic Agent to stream action @@ -203,7 +203,7 @@ type ElasticAgentServer interface { // // Use of the source field allows the input configurations to evolve without needing to modify // the control protocol itself. In some cases commonly used or important fields are extracted as - // a dedicated message type, but these definitions do not comletely define the contents of the + // a dedicated message type, but these definitions do not completely define the contents of the // source field which is free to contain additional fields. CheckinV2(ElasticAgent_CheckinV2Server) error // Called by the client after receiving connection info to allow the Elastic Agent to stream action From bc82b308a579a907889b07de8014d8610b5d7bd9 Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Tue, 5 Dec 2023 17:16:58 -0500 Subject: [PATCH 2/5] Fix current tests. 
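
NewV2 no longer accepts raw grpc.DialOption values; the tests are updated to pass them
through the new WithGRPCDialOptions client option instead. A minimal sketch of the updated
construction pattern (the address and token below are placeholders, real values come from
the connection info handed to the process by the Elastic Agent):

    package main

    import (
        "context"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"

        "github.com/elastic/elastic-agent-client/v7/pkg/client"
    )

    func main() {
        // Placeholder endpoint and token, for illustration only.
        const addr = "localhost:6789"
        const token = "example-token"

        // Dial options are now wrapped in WithGRPCDialOptions instead of being
        // passed directly as trailing arguments to NewV2.
        c := client.NewV2(addr, token, client.VersionInfo{},
            client.WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials())))

        if err := c.Start(context.Background()); err != nil {
            panic(err)
        }
        defer c.Stop()
    }
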
--- pkg/client/artifact_test.go | 2 +- pkg/client/client_v2_test.go | 18 +++++++++--------- pkg/client/log_test.go | 2 +- pkg/client/store_test.go | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pkg/client/artifact_test.go b/pkg/client/artifact_test.go index 3fc109d..33c3301 100644 --- a/pkg/client/artifact_test.go +++ b/pkg/client/artifact_test.go @@ -55,7 +55,7 @@ func TestArtifact(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, client, &errs, &errsMu) var unitsMu sync.Mutex diff --git a/pkg/client/client_v2_test.go b/pkg/client/client_v2_test.go index e4a5649..019989b 100644 --- a/pkg/client/client_v2_test.go +++ b/pkg/client/client_v2_test.go @@ -50,7 +50,7 @@ func TestRPCErrorRetryTimer(t *testing.T) { defer listener.Close() go rejectingListener(listener) - client := NewV2(listener.Addr().String(), mock.NewID(), VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(listener.Addr().String(), mock.NewID(), VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) assert.NoError(t, client.Start(context.Background())) // We expect one error each from checkinRoundTrip and actionRoundTrip. @@ -159,7 +159,7 @@ func TestClientV2_Checkin_Initial(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - invalidClient := NewV2(fmt.Sprintf(":%d", srv.Port), mock.NewID(), VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + invalidClient := NewV2(fmt.Sprintf(":%d", srv.Port), mock.NewID(), VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, invalidClient, &errs, &errsMu) require.NoError(t, invalidClient.Start(ctx)) defer invalidClient.Stop() @@ -186,7 +186,7 @@ func TestClientV2_Checkin_Initial(t *testing.T) { Meta: map[string]string{ "key": "value", }, - }, grpc.WithTransportCredentials(insecure.NewCredentials())) + }, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, validClient, &errs2, &errs2Mu) // receive the units @@ -378,7 +378,7 @@ func TestClientV2_Checkin_UnitState(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())).(*clientV2) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) storeErrors(ctx, client, &errs, &errsMu) // receive the units @@ -509,7 +509,7 @@ func TestClientV2_Actions(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, client, &errs, &errsMu) var unitsMu sync.Mutex @@ -827,7 +827,7 @@ func TestClientV2_Checkin_FeatureFlags(t *testing.T) { 
var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())).(*clientV2) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) client.minCheckTimeout = 100 * time.Millisecond // otherwise the test will run for too long storeErrors(ctx, client, &errs, &errsMu) @@ -987,7 +987,7 @@ func TestClientV2_Checkin_APMConfig(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())).(*clientV2) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) storeErrors(ctx, client, &errs, &errsMu) var uca unitChangesAccumulator @@ -1166,7 +1166,7 @@ func TestClientV2_Checkin_Component(t *testing.T) { defer cancel() serverAddr := fmt.Sprintf(":%d", srv.Port) - client := NewV2(serverAddr, token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())).(*clientV2) + client := NewV2(serverAddr, token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) client.minCheckTimeout = 100 * time.Millisecond // otherwise the test will run for too long storeErrors(ctx, client, &errs, &errsMu) @@ -1255,7 +1255,7 @@ func setupClientForDiagnostics(ctx context.Context, t *testing.T) (*Unit, V2, mo var errsMu sync.Mutex var errs []error - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(context.Background(), client, &errs, &errsMu) var unitsMu sync.Mutex diff --git a/pkg/client/log_test.go b/pkg/client/log_test.go index 26a1a12..aedba23 100644 --- a/pkg/client/log_test.go +++ b/pkg/client/log_test.go @@ -53,7 +53,7 @@ func TestLog(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, client, &errs, &errsMu) var unitsMu sync.Mutex diff --git a/pkg/client/store_test.go b/pkg/client/store_test.go index 690c98d..7176900 100644 --- a/pkg/client/store_test.go +++ b/pkg/client/store_test.go @@ -55,7 +55,7 @@ func TestStore(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, grpc.WithTransportCredentials(insecure.NewCredentials())) + client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))) storeErrors(ctx, client, &errs, &errsMu) var unitsMu sync.Mutex From ac75aaf425c512ce161a005021f2b0536237e4a1 Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Wed, 6 Dec 2023 11:17:21 -0500 Subject: [PATCH 3/5] Refactor chunking add tests. 
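
The chunking logic moves out of client_v2.go into pkg/utils so it can be unit tested and
shared by both directions of the protocol. Instead of recursively halving the unit list,
ChunkedObserved and ChunkedExpected pre-compute the encoded size of each unit, sort the
units smallest first so the first chunk (which also carries the non-unit fields) has the
best chance of fitting, then greedily pack units up to the size limit; the set is
terminated by a chunk with the shared units_timestamp and an empty unit list. A rough
sketch of the sender side, assuming a CheckinV2 client stream:

    package example

    import (
        "github.com/elastic/elastic-agent-client/v7/pkg/proto"
        "github.com/elastic/elastic-agent-client/v7/pkg/utils"
    )

    // sendObserved splits an observed check-in into chunks that each fit within
    // maxSize and streams them in order. If the message already fits (or has at
    // most one unit) ChunkedObserved returns it unchanged as a one-element slice.
    func sendObserved(stream proto.ElasticAgent_CheckinV2Client, observed *proto.CheckinObserved, maxSize int) error {
        msgs, err := utils.ChunkedObserved(observed, maxSize)
        if err != nil {
            return err
        }
        for _, m := range msgs {
            if err := stream.Send(m); err != nil {
                return err
            }
        }
        return nil
    }
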
--- go.mod | 4 +- go.sum | 3 + pkg/client/client_v2.go | 68 +------ pkg/utils/utils.go | 225 ++++++++++++++++++++- pkg/utils/utils_test.go | 433 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 664 insertions(+), 69 deletions(-) create mode 100644 pkg/utils/utils_test.go diff --git a/go.mod b/go.mod index f283ee2..32058d8 100644 --- a/go.mod +++ b/go.mod @@ -1,14 +1,16 @@ module github.com/elastic/elastic-agent-client/v7 -go 1.18 +go 1.20 require ( github.com/gofrs/uuid v4.2.0+incompatible github.com/golang/protobuf v1.5.3 + github.com/google/go-cmp v0.5.9 github.com/google/pprof v0.0.0-20230426061923-93006964c1fc github.com/hashicorp/go-multierror v1.1.1 github.com/magefile/mage v1.13.0 github.com/stretchr/testify v1.7.0 + golang.org/x/exp v0.0.0-20231127185646-65229373498e google.golang.org/grpc v1.56.3 google.golang.org/protobuf v1.30.0 ) diff --git a/go.sum b/go.sum index 5261b12..edd7f70 100644 --- a/go.sum +++ b/go.sum @@ -9,6 +9,7 @@ github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/pprof v0.0.0-20230426061923-93006964c1fc h1:AGDHt781oIcL4EFk7cPnvBUYTwU8BEU6GDTO3ZMn1sE= github.com/google/pprof v0.0.0-20230426061923-93006964c1fc/go.mod h1:79YE0hCXdHag9sBkw2o+N/YnZtTkXi0UT9Nnixa5eYk= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -29,6 +30,8 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No= +golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= golang.org/x/net v0.17.0 h1:pVaXccu2ozPjCXewfr1S7xza/zcXTity9cCdXQYSjIM= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE= diff --git a/pkg/client/client_v2.go b/pkg/client/client_v2.go index 5bf360b..913c712 100644 --- a/pkg/client/client_v2.go +++ b/pkg/client/client_v2.go @@ -10,7 +10,6 @@ import ( "encoding/json" "errors" "fmt" - protobuf "google.golang.org/protobuf/proto" "io" "runtime" "runtime/pprof" @@ -1124,15 +1123,7 @@ func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.C // chunking is disabled return client.Send(msg) } - s := protobuf.Size(msg) - if s <= maxSize { - // fits so no chunking needed - return client.Send(msg) - } - // doesn't fit; chunk the message - // this is done by dividing the units into two; keep dividing each chunk into two until it fits - // a timestamp is needed to ensure all chunks have a timestamp - msgs, err := observedChunked(msg, maxSize, 2) + msgs, err := utils.ChunkedObserved(msg, maxSize) if err != nil { return err } @@ -1143,60 +1134,3 @@ func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.C } return nil } - -func observedChunked(msg *proto.CheckinObserved, maxSize 
int, divider int) ([]*proto.CheckinObserved, error) { - timestamp := time.Now() - chunkSize := len(msg.Units) / divider - if chunkSize < 0 { - return nil, fmt.Errorf("unable to chunk proto.CheckinObserved a single unit is greater than the max %d size", maxSize) - } - msgs := make([]*proto.CheckinObserved, 0, divider+1) - for i := 0; i < divider; i++ { - if i == 0 { - // first message all fields are set; except units is made smaller - m := shallowCopyCheckinObserved(msg) - m.Units = make([]*proto.UnitObserved, chunkSize) - copy(m.Units, msg.Units[0:chunkSize]) - msg.UnitsTimestamp = timestamppb.New(timestamp) - if protobuf.Size(m) > maxSize { - // too large increase divider - return observedChunked(msg, maxSize, divider*2) - } - msgs = append(msgs, m) - continue - } - if i == divider-1 { - // last message; chunk size needs to take into account rounding division where the last chunk - // might need to include an extra unit - chunkSize = chunkSize + len(msg.Units) - (chunkSize * divider) - } - m := &proto.CheckinObserved{} - m.Token = msg.Token - m.Units = make([]*proto.UnitObserved, chunkSize) - copy(m.Units, msg.Units[i*chunkSize:(i*chunkSize)+chunkSize]) - m.UnitsTimestamp = timestamppb.New(timestamp) - if protobuf.Size(m) > maxSize { - // too large increase divider - return observedChunked(msg, maxSize, divider*2) - } - msgs = append(msgs, m) - } - msgs = append(msgs, &proto.CheckinObserved{ - Token: msg.Token, - Units: []*proto.UnitObserved{}, - UnitsTimestamp: timestamppb.New(timestamp), - }) - return msgs, nil -} - -func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { - return &proto.CheckinObserved{ - Token: msg.Token, - Units: msg.Units, - VersionInfo: msg.VersionInfo, - FeaturesIdx: msg.FeaturesIdx, - ComponentIdx: msg.ComponentIdx, - UnitsTimestamp: msg.UnitsTimestamp, - Supports: msg.Supports, - } -} diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go index 3108c3b..c630be5 100644 --- a/pkg/utils/utils.go +++ b/pkg/utils/utils.go @@ -4,7 +4,18 @@ package utils -import "encoding/json" +import ( + "encoding/json" + "time" + + "golang.org/x/exp/slices" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + protobuf "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/elastic/elastic-agent-client/v7/pkg/proto" +) // JSONMustMarshal marshals the input to JSON []byte and panics if it fails. func JSONMustMarshal(input interface{}) []byte { @@ -14,3 +25,215 @@ func JSONMustMarshal(input interface{}) []byte { } return res } + +type chunkedOptions struct { + timestamp time.Time +} + +// ChunkOption is an option for adjusting chunking. +type ChunkOption func(opts *chunkedOptions) + +// WithTimestamp adjusts the timestamp used for the chunking. +// +// Note: Mainly used for testing to ensure a specific timestamp is used. +func WithTimestamp(t time.Time) ChunkOption { + return func(opts *chunkedOptions) { + opts.timestamp = t + } +} + +// ChunkedObserved chunks `proto.CheckinObserved` message into multiple chunks to be sent across the protocol. 
+func ChunkedObserved(msg *proto.CheckinObserved, maxSize int, opts ...ChunkOption) ([]*proto.CheckinObserved, error) { + var options chunkedOptions + options.timestamp = time.Now() // timestamp used for chunk set + for _, opt := range opts { + opt(&options) + } + + s := protobuf.Size(msg) + if s <= maxSize || len(msg.Units) <= 1 { + // fits so no chunking needed or has 0 or 1 units which cannot be chunked + return []*proto.CheckinObserved{msg}, nil + } + + msgs := make([]*proto.CheckinObserved, 0, 3) // start at 3 minimum + + // a single unit is the smallest a chunk can be + // pre-calculate the size and ensure that a single unit is less than the maxSize + bySize := make([]observedBySize, len(msg.Units)) + for i, u := range msg.Units { + bySize[i].unit = u + bySize[i].size = protobuf.Size(u) + // >= is used because even if it's at the maxSize, with overhead + // it will still be too big even if it's at the exact maxSize + if bySize[i].size >= maxSize { + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinObserved the unit %s is larger than max (%d vs. %d)", + u.Id, bySize[i].size, maxSize) + } + } + + // sort the smallest units first, this ensures that the first chunk that includes extra + // fields uses the smallest unit to ensure that it all fits + slices.SortStableFunc(bySize, func(a, b observedBySize) int { + return a.size - b.size + }) + + // first message all fields are set; except units is made smaller + m := shallowCopyCheckinObserved(msg) + m.Units = make([]*proto.UnitObserved, 0, 1) + m.Units = append(m.Units, bySize[0].unit) + m.UnitsTimestamp = timestamppb.New(options.timestamp) + s = protobuf.Size(m) + if s >= maxSize { + // not possible even for the first chunk to fit + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinObserved the first chunk with unit %s is larger than max (%d vs. %d)", + m.Units[0].Id, s, maxSize) + } + + // keep adding units until it doesn't fit + nextUnit := 1 + for s < maxSize && nextUnit < len(bySize) { + us := bySize[nextUnit] + if s+us.size < maxSize { + // unit fits add it + m.Units = append(m.Units, us.unit) + s += us.size + } else { + // doesn't fit, create a new chunk + msgs = append(msgs, m) + m = &proto.CheckinObserved{} + m.Token = msg.Token + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitObserved, 0, 1) + m.Units = append(m.Units, us.unit) + s = protobuf.Size(m) + } + } + + // all chunks created, create the empty chunk + msgs = append(msgs, m) + m = &proto.CheckinObserved{} + m.Token = msg.Token + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitObserved, 0) + msgs = append(msgs, m) + return msgs, nil +} + +func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { + return &proto.CheckinObserved{ + Token: msg.Token, + Units: msg.Units, + VersionInfo: msg.VersionInfo, + FeaturesIdx: msg.FeaturesIdx, + ComponentIdx: msg.ComponentIdx, + UnitsTimestamp: msg.UnitsTimestamp, + Supports: msg.Supports, + } +} + +type observedBySize struct { + unit *proto.UnitObserved + size int +} + +// ChunkedExpected chunks `proto.CheckinExpected` message into multiple chunks to be sent across the protocol. 
+func ChunkedExpected(msg *proto.CheckinExpected, maxSize int, opts ...ChunkOption) ([]*proto.CheckinExpected, error) { + var options chunkedOptions + options.timestamp = time.Now() // timestamp used for chunk set + for _, opt := range opts { + opt(&options) + } + + s := protobuf.Size(msg) + if s <= maxSize || len(msg.Units) <= 1 { + // fits so no chunking needed or has 0 or 1 units which cannot be chunked + return []*proto.CheckinExpected{msg}, nil + } + + msgs := make([]*proto.CheckinExpected, 0, 3) // start at 3 minimum + + // a single unit is the smallest a chunk can be + // pre-calculate the size and ensure that a single unit is less than the maxSize + bySize := make([]expectedBySize, len(msg.Units)) + for i, u := range msg.Units { + bySize[i].unit = u + bySize[i].size = protobuf.Size(u) + // >= is used because even if it's at the maxSize, with overhead + // it will still be too big even if it's at the exact maxSize + if bySize[i].size >= maxSize { + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinExpected the unit %s is larger than max (%d vs. %d)", + u.Id, bySize[i].size, maxSize) + } + } + + // sort the smallest units first, this ensures that the first chunk that includes extra + // fields uses the smallest unit to ensure that it all fits + slices.SortStableFunc(bySize, func(a, b expectedBySize) int { + return a.size - b.size + }) + + // first message all fields are set; except units is made smaller + m := shallowCopyCheckinExpected(msg) + m.Units = make([]*proto.UnitExpected, 0, 1) + m.Units = append(m.Units, bySize[0].unit) + m.UnitsTimestamp = timestamppb.New(options.timestamp) + s = protobuf.Size(m) + if s >= maxSize { + // not possible even for the first chunk to fit + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinExpected the first chunk with unit %s is larger than max (%d vs. %d)", + m.Units[0].Id, s, maxSize) + } + + // keep adding units until it doesn't fit + nextUnit := 1 + for s < maxSize && nextUnit < len(bySize) { + us := bySize[nextUnit] + if s+us.size < maxSize { + // unit fits add it + m.Units = append(m.Units, us.unit) + s += us.size + } else { + // doesn't fit, create a new chunk + msgs = append(msgs, m) + m = &proto.CheckinExpected{} + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitExpected, 0, 1) + m.Units = append(m.Units, us.unit) + s = protobuf.Size(m) + } + } + + // all chunks created, create the empty chunk + msgs = append(msgs, m) + m = &proto.CheckinExpected{} + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitExpected, 0) + msgs = append(msgs, m) + return msgs, nil +} + +func shallowCopyCheckinExpected(msg *proto.CheckinExpected) *proto.CheckinExpected { + return &proto.CheckinExpected{ + AgentInfo: msg.AgentInfo, + Features: msg.Features, + FeaturesIdx: msg.FeaturesIdx, + Component: msg.Component, + ComponentIdx: msg.ComponentIdx, + Units: msg.Units, + UnitsTimestamp: msg.UnitsTimestamp, + } +} + +type expectedBySize struct { + unit *proto.UnitExpected + size int +} diff --git a/pkg/utils/utils_test.go b/pkg/utils/utils_test.go new file mode 100644 index 0000000..aad89d0 --- /dev/null +++ b/pkg/utils/utils_test.go @@ -0,0 +1,433 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package utils + +import ( + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/testing/protocmp" + "google.golang.org/protobuf/types/known/structpb" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/elastic/elastic-agent-client/v7/pkg/proto" +) + +func TestChunkedObserved(t *testing.T) { + timestamp := time.Now() + + scenarios := []struct { + Name string + MaxSize int + Original *proto.CheckinObserved + Expected []*proto.CheckinObserved + Error string + }{ + { + Name: "unit too large to fit", + MaxSize: 50, + Error: "unable to chunk proto.CheckinObserved the unit id-one is larger than max", + Original: &proto.CheckinObserved{ + Token: "token", + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + }, + }, + }, + }, + { + Name: "first chunk too large", + MaxSize: 110, + Error: "unable to chunk proto.CheckinObserved the first chunk with", + Original: &proto.CheckinObserved{ + Token: "token", + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size for first chunk", + }), + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size for first chunk", + }), + }, + }, + }, + }, + { + Name: "chunk", + MaxSize: 100, + Original: &proto.CheckinObserved{ + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + }, + }, + }, + Expected: []*proto.CheckinObserved{ + { + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Token: "token", + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Token: "token", + Units: []*proto.UnitObserved{}, + UnitsTimestamp: timestamppb.New(timestamp), + }, + }, + }, + { + Name: "fits in single message", + MaxSize: 200, + Original: &proto.CheckinObserved{ + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + 
ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + }, + }, + }, + Expected: []*proto.CheckinObserved{ + { + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + }, + }, + }, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.Name, func(t *testing.T) { + observed, err := ChunkedObserved(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) + if scenario.Error != "" { + require.Error(t, err) + assert.True(t, strings.Contains(err.Error(), scenario.Error)) + } else { + require.NoError(t, err) + diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) + assert.Empty(t, diff) + } + }) + } +} + +func TestChunkedExpected(t *testing.T) { + timestamp := time.Now() + + scenarios := []struct { + Name string + MaxSize int + Original *proto.CheckinExpected + Expected []*proto.CheckinExpected + Error string + }{ + { + Name: "unit too large to fit", + MaxSize: 30, + Error: "unable to chunk proto.CheckinExpected the unit id-one is larger than max", + Original: &proto.CheckinExpected{ + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + }, + { + Name: "first chunk too large", + MaxSize: 50, + Error: "unable to chunk proto.CheckinExpected the first chunk with", + Original: &proto.CheckinExpected{ + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing1", + Type: "testing", + Name: "testing1", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing2", + Type: "testing", + Name: "testing2", + }, + }, + }, + }, + }, + { + Name: "chunk", + MaxSize: 50, + Original: &proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + Expected: []*proto.CheckinExpected{ + { + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Units: []*proto.UnitExpected{ 
+ { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Units: []*proto.UnitExpected{}, + UnitsTimestamp: timestamppb.New(timestamp), + }, + }, + }, + { + Name: "fits in single message", + MaxSize: 200, + Original: &proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + Expected: []*proto.CheckinExpected{ + { + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.Name, func(t *testing.T) { + observed, err := ChunkedExpected(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) + if scenario.Error != "" { + require.Error(t, err) + assert.True(t, strings.Contains(err.Error(), scenario.Error)) + } else { + require.NoError(t, err) + diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) + assert.Empty(t, diff) + } + }) + } +} + +func mustStruct(v map[string]interface{}) *structpb.Struct { + s, err := structpb.NewStruct(v) + if err != nil { + panic(err) + } + return s +} From 87b7594d1a40026c446195920205078650abe0a0 Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Wed, 6 Dec 2023 12:01:40 -0500 Subject: [PATCH 4/5] Add receive utils and more tests. 
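
The receive side gets matching helpers in pkg/utils: RecvChunkedExpected is used by the
client check-in loop and RecvChunkedObserved by the mock server. Both keep calling Recv
until a complete message is available: a message without units_timestamp is returned
as-is, chunks with the same units_timestamp are accumulated, a newer timestamp restarts
accumulation, and an empty-units chunk returns the assembled result. A rough sketch of the
client-side read loop, assuming an apply callback for each assembled CheckinExpected:

    package example

    import (
        "github.com/elastic/elastic-agent-client/v7/pkg/proto"
        "github.com/elastic/elastic-agent-client/v7/pkg/utils"
    )

    // readExpected reassembles possibly-chunked CheckinExpected messages from the
    // stream and hands each complete message to apply. It returns the first
    // receive error (io.EOF when the stream closes normally).
    func readExpected(stream proto.ElasticAgent_CheckinV2Client, apply func(*proto.CheckinExpected)) error {
        for {
            expected, err := utils.RecvChunkedExpected(stream)
            if err != nil {
                return err
            }
            apply(expected)
        }
    }
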
--- pkg/client/client_v2.go | 38 +-------------- pkg/client/client_v2_test.go | 5 +- pkg/client/mock/stub_serverV2.go | 3 +- pkg/utils/utils.go | 84 ++++++++++++++++++++++++++++++-- pkg/utils/utils_test.go | 55 +++++++++++++++++++++ 5 files changed, 143 insertions(+), 42 deletions(-) diff --git a/pkg/client/client_v2.go b/pkg/client/client_v2.go index 913c712..06ae489 100644 --- a/pkg/client/client_v2.go +++ b/pkg/client/client_v2.go @@ -453,8 +453,8 @@ func (c *clientV2) checkinRoundTrip() { go func() { defer wg.Done() defer close(readerDone) - expected, err := recvExpectedChunked(checkinClient, c.opts.chunkingAllowed) - for ; err == nil; expected, err = recvExpectedChunked(checkinClient, c.opts.chunkingAllowed) { + expected, err := utils.RecvChunkedExpected(checkinClient) + for ; err == nil; expected, err = utils.RecvChunkedExpected(checkinClient) { c.applyExpected(expected) } if !errors.Is(err, io.EOF) { @@ -1084,40 +1084,6 @@ func inExpected(unit *Unit, expected []*proto.UnitExpected) bool { return false } -func recvExpectedChunked(client proto.ElasticAgent_CheckinV2Client, chunk bool) (*proto.CheckinExpected, error) { - if chunk { - var initialMsg *proto.CheckinExpected - for { - msg, err := client.Recv() - if err != nil { - return nil, err - } - if msg.UnitsTimestamp == nil { - // all included in a single message - return msg, nil - } - if initialMsg == nil { - // first message in batch - initialMsg = msg - } else if initialMsg.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { - // only used if the new timestamp is newer - if initialMsg.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { - // not newer so we ignore the message - continue - } - // different batch; restart - initialMsg = msg - } - if len(msg.Units) == 0 { - // ending match message - return initialMsg, nil - } - initialMsg.Units = append(initialMsg.Units, msg.Units...) 
- } - } - return client.Recv() -} - func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.CheckinObserved, chunk bool, maxSize int) error { if !chunk { // chunking is disabled diff --git a/pkg/client/client_v2_test.go b/pkg/client/client_v2_test.go index 019989b..52281f9 100644 --- a/pkg/client/client_v2_test.go +++ b/pkg/client/client_v2_test.go @@ -378,7 +378,10 @@ func TestClientV2_Checkin_UnitState(t *testing.T) { var errs []error ctx, cancel := context.WithCancel(context.Background()) defer cancel() - client := NewV2(fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) + client := NewV2( + fmt.Sprintf(":%d", srv.Port), token, VersionInfo{}, + WithChunking(true), WithMaxMessageSize(150), + WithGRPCDialOptions(grpc.WithTransportCredentials(insecure.NewCredentials()))).(*clientV2) storeErrors(ctx, client, &errs, &errsMu) // receive the units diff --git a/pkg/client/mock/stub_serverV2.go b/pkg/client/mock/stub_serverV2.go index 0f31339..f2b9da2 100644 --- a/pkg/client/mock/stub_serverV2.go +++ b/pkg/client/mock/stub_serverV2.go @@ -8,6 +8,7 @@ import ( "context" "encoding/json" "fmt" + "github.com/elastic/elastic-agent-client/v7/pkg/utils" "net" "sync" @@ -104,7 +105,7 @@ func (s *StubServerV2) Checkin(server proto.ElasticAgent_CheckinServer) error { // CheckinV2 is the V2 checkin implementation for the mock server func (s *StubServerV2) CheckinV2(server proto.ElasticAgent_CheckinV2Server) error { for { - checkin, err := server.Recv() + checkin, err := utils.RecvChunkedObserved(server) if err != nil { return err } diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go index c630be5..0d4d821 100644 --- a/pkg/utils/utils.go +++ b/pkg/utils/utils.go @@ -95,8 +95,7 @@ func ChunkedObserved(msg *proto.CheckinObserved, maxSize int, opts ...ChunkOptio } // keep adding units until it doesn't fit - nextUnit := 1 - for s < maxSize && nextUnit < len(bySize) { + for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { us := bySize[nextUnit] if s+us.size < maxSize { // unit fits add it @@ -124,6 +123,45 @@ func ChunkedObserved(msg *proto.CheckinObserved, maxSize int, opts ...ChunkOptio return msgs, nil } +// CheckinObservedReceiver provides a Recv interface to receive proto.CheckinObserved messages. +type CheckinObservedReceiver interface { + Recv() (*proto.CheckinObserved, error) +} + +// RecvChunkedObserved handles the receiving of chunked proto.CheckinObserved. +func RecvChunkedObserved(recv CheckinObservedReceiver) (*proto.CheckinObserved, error) { + var first *proto.CheckinObserved + for { + msg, err := recv.Recv() + if err != nil { + return nil, err + } + if msg.UnitsTimestamp == nil { + // all included in a single message + return msg, nil + } + if first == nil { + // first message in batch + first = msg + } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { + // only used if the new timestamp is newer + if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { + // not newer so we ignore the message + continue + } + // different batch; restart + first = msg + } + if len(msg.Units) == 0 { + // ending match message + return first, nil + } + if first != msg { + first.Units = append(first.Units, msg.Units...) 
+ } + } +} + func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { return &proto.CheckinObserved{ Token: msg.Token, @@ -194,8 +232,7 @@ func ChunkedExpected(msg *proto.CheckinExpected, maxSize int, opts ...ChunkOptio } // keep adding units until it doesn't fit - nextUnit := 1 - for s < maxSize && nextUnit < len(bySize) { + for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { us := bySize[nextUnit] if s+us.size < maxSize { // unit fits add it @@ -221,6 +258,45 @@ func ChunkedExpected(msg *proto.CheckinExpected, maxSize int, opts ...ChunkOptio return msgs, nil } +// CheckinExpectedReceiver provides a Recv interface to receive proto.CheckinExpected messages. +type CheckinExpectedReceiver interface { + Recv() (*proto.CheckinExpected, error) +} + +// RecvChunkedExpected handles the receiving of chunked proto.CheckinObjected. +func RecvChunkedExpected(recv CheckinExpectedReceiver) (*proto.CheckinExpected, error) { + var first *proto.CheckinExpected + for { + msg, err := recv.Recv() + if err != nil { + return nil, err + } + if msg.UnitsTimestamp == nil { + // all included in a single message + return msg, nil + } + if first == nil { + // first message in batch + first = msg + } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { + // only used if the new timestamp is newer + if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { + // not newer so we ignore the message + continue + } + // different batch; restart + first = msg + } + if len(msg.Units) == 0 { + // ending match message + return first, nil + } + if first != msg { + first.Units = append(first.Units, msg.Units...) + } + } +} + func shallowCopyCheckinExpected(msg *proto.CheckinExpected) *proto.CheckinExpected { return &proto.CheckinExpected{ AgentInfo: msg.AgentInfo, diff --git a/pkg/utils/utils_test.go b/pkg/utils/utils_test.go index aad89d0..56f8a14 100644 --- a/pkg/utils/utils_test.go +++ b/pkg/utils/utils_test.go @@ -5,6 +5,7 @@ package utils import ( + "golang.org/x/exp/slices" "strings" "testing" "time" @@ -217,6 +218,19 @@ func TestChunkedObserved(t *testing.T) { } else { require.NoError(t, err) diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) + require.Empty(t, diff) + + // re-assemble and it should now match the original + assembled, err := RecvChunkedObserved(&fakeCheckinObservedReceiver{msgs: observed}) + require.NoError(t, err) + + // to compare we need to remove the units timestamp and ensure the units are in the same order + // completely acceptable that they get re-ordered in the chunking process + assembled.UnitsTimestamp = nil + slices.SortStableFunc(assembled.Units, sortObservedUnits) + slices.SortStableFunc(scenario.Original.Units, sortObservedUnits) + + diff = cmp.Diff(scenario.Original, assembled, protocmp.Transform()) assert.Empty(t, diff) } }) @@ -418,6 +432,19 @@ func TestChunkedExpected(t *testing.T) { } else { require.NoError(t, err) diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) + require.Empty(t, diff) + + // re-assemble and it should now match the original + assembled, err := RecvChunkedExpected(&fakeCheckinExpectedReceiver{msgs: observed}) + require.NoError(t, err) + + // to compare we need to remove the units timestamp and ensure the units are in the same order + // completely acceptable that they get re-ordered in the chunking process + assembled.UnitsTimestamp = nil + slices.SortStableFunc(assembled.Units, sortExpectedUnits) + slices.SortStableFunc(scenario.Original.Units, 
sortExpectedUnits) + + diff = cmp.Diff(scenario.Original, assembled, protocmp.Transform()) assert.Empty(t, diff) } }) @@ -431,3 +458,31 @@ func mustStruct(v map[string]interface{}) *structpb.Struct { } return s } + +func sortObservedUnits(a *proto.UnitObserved, b *proto.UnitObserved) int { + return strings.Compare(a.Id, b.Id) +} + +func sortExpectedUnits(a *proto.UnitExpected, b *proto.UnitExpected) int { + return strings.Compare(a.Id, b.Id) +} + +type fakeCheckinObservedReceiver struct { + msgs []*proto.CheckinObserved +} + +func (f *fakeCheckinObservedReceiver) Recv() (*proto.CheckinObserved, error) { + var msg *proto.CheckinObserved + msg, f.msgs = f.msgs[0], f.msgs[1:] + return msg, nil +} + +type fakeCheckinExpectedReceiver struct { + msgs []*proto.CheckinExpected +} + +func (f *fakeCheckinExpectedReceiver) Recv() (*proto.CheckinExpected, error) { + var msg *proto.CheckinExpected + msg, f.msgs = f.msgs[0], f.msgs[1:] + return msg, nil +} From adcba18f166996980c950a6ac5faf18ca469672d Mon Sep 17 00:00:00 2001 From: Blake Rouse Date: Thu, 14 Dec 2023 10:11:01 -0500 Subject: [PATCH 5/5] Code review changes. --- pkg/client/chunk/expected.go | 152 ++++++++ pkg/client/chunk/expected_test.go | 348 ++++++++++++++++++ pkg/client/chunk/observed.go | 154 ++++++++ .../chunk/observed_test.go} | 318 +++++----------- pkg/client/chunk/option.go | 23 ++ pkg/client/client_v2.go | 11 +- pkg/client/mock/stub_serverV2.go | 7 +- pkg/utils/utils.go | 297 --------------- 8 files changed, 784 insertions(+), 526 deletions(-) create mode 100644 pkg/client/chunk/expected.go create mode 100644 pkg/client/chunk/expected_test.go create mode 100644 pkg/client/chunk/observed.go rename pkg/{utils/utils_test.go => client/chunk/observed_test.go} (55%) create mode 100644 pkg/client/chunk/option.go diff --git a/pkg/client/chunk/expected.go b/pkg/client/chunk/expected.go new file mode 100644 index 0000000..8f35471 --- /dev/null +++ b/pkg/client/chunk/expected.go @@ -0,0 +1,152 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package chunk + +import ( + "time" + + "golang.org/x/exp/slices" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + protobuf "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/elastic/elastic-agent-client/v7/pkg/proto" +) + +// Expected chunks `proto.CheckinExpected` message into multiple chunks to be sent across the protocol. 
+func Expected(msg *proto.CheckinExpected, maxSize int, opts ...Option) ([]*proto.CheckinExpected, error) { + var options options + options.timestamp = time.Now() // timestamp used for chunk set + for _, opt := range opts { + opt(&options) + } + + s := protobuf.Size(msg) + if s <= maxSize || len(msg.Units) <= 1 { + // fits so no chunking needed or has 0 or 1 units which cannot be chunked + return []*proto.CheckinExpected{msg}, nil + } + + msgs := make([]*proto.CheckinExpected, 0, 3) // start at 3 minimum + + // a single unit is the smallest a chunk can be + // pre-calculate the size and ensure that a single unit is less than the maxSize + bySize := make([]expectedBySize, len(msg.Units)) + for i, u := range msg.Units { + bySize[i].unit = u + bySize[i].size = protobuf.Size(u) + // >= is used because even if it's at the maxSize, with overhead + // it will still be too big even if it's at the exact maxSize + if bySize[i].size >= maxSize { + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinExpected the unit %s is larger than max (%d vs. %d)", + u.Id, bySize[i].size, maxSize) + } + } + + // sort the smallest units first, this ensures that the first chunk that includes extra + // fields uses the smallest unit to ensure that it all fits + slices.SortStableFunc(bySize, func(a, b expectedBySize) int { + return a.size - b.size + }) + + // first message all fields are set; except units is made smaller + m := shallowCopyCheckinExpected(msg) + m.Units = make([]*proto.UnitExpected, 0, 1) + m.Units = append(m.Units, bySize[0].unit) + m.UnitsTimestamp = timestamppb.New(options.timestamp) + s = protobuf.Size(m) + if s >= maxSize { + // not possible even for the first chunk to fit + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinExpected the first chunk with unit %s is larger than max (%d vs. %d)", + m.Units[0].Id, s, maxSize) + } + + // keep adding units until it doesn't fit + for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { + us := bySize[nextUnit] + if s+us.size < maxSize { + // unit fits add it + m.Units = append(m.Units, us.unit) + s += us.size + } else { + // doesn't fit, create a new chunk + msgs = append(msgs, m) + m = &proto.CheckinExpected{} + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitExpected, 0, 1) + m.Units = append(m.Units, us.unit) + s = protobuf.Size(m) + } + } + msgs = append(msgs, m) + + // all chunks created, create the empty chunk + m = &proto.CheckinExpected{} + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitExpected, 0) + msgs = append(msgs, m) + return msgs, nil +} + +// CheckinExpectedReceiver provides a Recv interface to receive proto.CheckinExpected messages. +type CheckinExpectedReceiver interface { + Recv() (*proto.CheckinExpected, error) +} + +// RecvExpected handles the receiving of chunked proto.CheckinObjected. 
+func RecvExpected(recv CheckinExpectedReceiver) (*proto.CheckinExpected, error) { + var first *proto.CheckinExpected + for { + msg, err := recv.Recv() + if err != nil { + return nil, err + } + if msg.UnitsTimestamp == nil { + // all included in a single message + return msg, nil + } + if first == nil { + // first message in batch + first = msg + } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { + // only used if the new timestamp is newer + if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { + // not newer so we ignore the message + continue + } + // different batch; restart + first = msg + } + if len(msg.Units) == 0 { + // ending match message + return first, nil + } + if first != msg { + first.Units = append(first.Units, msg.Units...) + } + } +} + +func shallowCopyCheckinExpected(msg *proto.CheckinExpected) *proto.CheckinExpected { + return &proto.CheckinExpected{ + AgentInfo: msg.AgentInfo, + Features: msg.Features, + FeaturesIdx: msg.FeaturesIdx, + Component: msg.Component, + ComponentIdx: msg.ComponentIdx, + Units: msg.Units, + UnitsTimestamp: msg.UnitsTimestamp, + } +} + +type expectedBySize struct { + unit *proto.UnitExpected + size int +} diff --git a/pkg/client/chunk/expected_test.go b/pkg/client/chunk/expected_test.go new file mode 100644 index 0000000..29573c5 --- /dev/null +++ b/pkg/client/chunk/expected_test.go @@ -0,0 +1,348 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package chunk + +import ( + "golang.org/x/exp/slices" + "strings" + "testing" + "time" + + "github.com/google/go-cmp/cmp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/testing/protocmp" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/elastic/elastic-agent-client/v7/pkg/proto" +) + +func TestExpected(t *testing.T) { + timestamp := time.Now() + + scenarios := []struct { + Name string + MaxSize int + Original *proto.CheckinExpected + Expected []*proto.CheckinExpected + Error string + }{ + { + Name: "unit too large to fit", + MaxSize: 30, + Error: "unable to chunk proto.CheckinExpected the unit id-one is larger than max", + Original: &proto.CheckinExpected{ + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + }, + { + Name: "first chunk too large", + MaxSize: 50, + Error: "unable to chunk proto.CheckinExpected the first chunk with", + Original: &proto.CheckinExpected{ + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing1", + Type: "testing", + Name: "testing1", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + LogLevel: proto.UnitLogLevel_INFO, + Config: &proto.UnitExpectedConfig{ + Id: "testing2", + Type: "testing", + Name: "testing2", + }, + }, + }, + }, + }, + { + Name: "chunk checkin message", + MaxSize: 70, + Original: 
&proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + { + Id: "id-three", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "little-larger", + }, + }, + }, + }, + Expected: []*proto.CheckinExpected{ + { + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + { + Id: "id-three", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "little-larger", + }, + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + }, + UnitsTimestamp: timestamppb.New(timestamp), + }, + { + Units: []*proto.UnitExpected{}, + UnitsTimestamp: timestamppb.New(timestamp), + }, + }, + }, + { + Name: "fits in single message", + MaxSize: 200, + Original: &proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + Expected: []*proto.CheckinExpected{ + { + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "id-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "id-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + }, + }, + }, + } + + for _, scenario := range scenarios { + t.Run(scenario.Name, func(t *testing.T) { + observed, err := Expected(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) + if scenario.Error != "" { + require.Error(t, err) + assert.True(t, strings.Contains(err.Error(), scenario.Error)) + } else { + require.NoError(t, err) + diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) + require.Empty(t, diff) + + // re-assemble and it should now match the original + assembled, err := RecvExpected(&fakeCheckinExpectedReceiver{msgs: observed}) + require.NoError(t, err) + + // to compare we need to remove the units timestamp and ensure the units are in the same order + // completely acceptable that they get re-ordered in the chunking process + assembled.UnitsTimestamp = nil + slices.SortStableFunc(assembled.Units, sortExpectedUnits) + slices.SortStableFunc(scenario.Original.Units, sortExpectedUnits) + + diff = cmp.Diff(scenario.Original, assembled, protocmp.Transform()) + assert.Empty(t, diff) + } + }) + } +} + +func TestRecvExpected_Timestamp_Restart(t *testing.T) { + firstTimestamp := time.Now() + first := &proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: 
[]*proto.UnitExpected{ + { + Id: "first-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "first-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + } + firstMsgs, err := Expected(first, 50, WithTimestamp(firstTimestamp)) + require.NoError(t, err) + + secondTimestamp := time.Now() + second := &proto.CheckinExpected{ + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitExpected{ + { + Id: "second-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Config: &proto.UnitExpectedConfig{ + Id: "testing", + Type: "testing", + Name: "testing", + }, + }, + { + Id: "second-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + }, + }, + } + secondsMsgs, err := Expected(second, 50, WithTimestamp(secondTimestamp)) + require.NoError(t, err) + + // ensure chunking results in exact length as the order in the test relies on it + require.Len(t, firstMsgs, 3) + require.Len(t, secondsMsgs, 3) + + // re-order the messages + reorderedMsgs := make([]*proto.CheckinExpected, 6) + reorderedMsgs[0] = firstMsgs[0] + reorderedMsgs[1] = secondsMsgs[0] // becomes new set + reorderedMsgs[2] = firstMsgs[1] // ignored + reorderedMsgs[3] = firstMsgs[2] // ignored + reorderedMsgs[4] = secondsMsgs[1] + reorderedMsgs[5] = secondsMsgs[2] + + // re-assemble and it should now match the second + assembled, err := RecvExpected(&fakeCheckinExpectedReceiver{msgs: reorderedMsgs}) + require.NoError(t, err) + + // to compare we need to remove the units timestamp and ensure the units are in the same order + // completely acceptable that they get re-ordered in the chunking process + assembled.UnitsTimestamp = nil + slices.SortStableFunc(assembled.Units, sortExpectedUnits) + slices.SortStableFunc(second.Units, sortExpectedUnits) + + diff := cmp.Diff(second, assembled, protocmp.Transform()) + assert.Empty(t, diff) +} + +func sortExpectedUnits(a *proto.UnitExpected, b *proto.UnitExpected) int { + return strings.Compare(a.Id, b.Id) +} + +type fakeCheckinExpectedReceiver struct { + msgs []*proto.CheckinExpected +} + +func (f *fakeCheckinExpectedReceiver) Recv() (*proto.CheckinExpected, error) { + var msg *proto.CheckinExpected + msg, f.msgs = f.msgs[0], f.msgs[1:] + return msg, nil +} diff --git a/pkg/client/chunk/observed.go b/pkg/client/chunk/observed.go new file mode 100644 index 0000000..4da63e6 --- /dev/null +++ b/pkg/client/chunk/observed.go @@ -0,0 +1,154 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package chunk + +import ( + "time" + + "golang.org/x/exp/slices" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + protobuf "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/elastic/elastic-agent-client/v7/pkg/proto" +) + +// Observed chunks `proto.CheckinObserved` message into multiple chunks to be sent across the protocol. 
+func Observed(msg *proto.CheckinObserved, maxSize int, opts ...Option) ([]*proto.CheckinObserved, error) { + var options options + options.timestamp = time.Now() // timestamp used for chunk set + for _, opt := range opts { + opt(&options) + } + + s := protobuf.Size(msg) + if s <= maxSize || len(msg.Units) <= 1 { + // fits so no chunking needed or has 0 or 1 units which cannot be chunked + return []*proto.CheckinObserved{msg}, nil + } + + msgs := make([]*proto.CheckinObserved, 0, 3) // start at 3 minimum + + // a single unit is the smallest a chunk can be + // pre-calculate the size and ensure that a single unit is less than the maxSize + bySize := make([]observedBySize, len(msg.Units)) + for i, u := range msg.Units { + bySize[i].unit = u + bySize[i].size = protobuf.Size(u) + // >= is used because even if it's at the maxSize, with overhead + // it will still be too big even if it's at the exact maxSize + if bySize[i].size >= maxSize { + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinObserved the unit %s is larger than max (%d vs. %d)", + u.Id, bySize[i].size, maxSize) + } + } + + // sort the smallest units first, this ensures that the first chunk that includes extra + // fields uses the smallest unit to ensure that it all fits + slices.SortStableFunc(bySize, func(a, b observedBySize) int { + return a.size - b.size + }) + + // first message all fields are set; except units is made smaller + m := shallowCopyCheckinObserved(msg) + m.Units = make([]*proto.UnitObserved, 0, 1) + m.Units = append(m.Units, bySize[0].unit) + m.UnitsTimestamp = timestamppb.New(options.timestamp) + s = protobuf.Size(m) + if s >= maxSize { + // not possible even for the first chunk to fit + return nil, status.Errorf( + codes.ResourceExhausted, + "unable to chunk proto.CheckinObserved the first chunk with unit %s is larger than max (%d vs. %d)", + m.Units[0].Id, s, maxSize) + } + + // keep adding units until it doesn't fit + for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { + us := bySize[nextUnit] + if s+us.size < maxSize { + // unit fits add it + m.Units = append(m.Units, us.unit) + s += us.size + } else { + // doesn't fit, create a new chunk + msgs = append(msgs, m) + m = &proto.CheckinObserved{} + m.Token = msg.Token + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitObserved, 0, 1) + m.Units = append(m.Units, us.unit) + s = protobuf.Size(m) + } + } + msgs = append(msgs, m) + + // all chunks created, create the empty chunk + m = &proto.CheckinObserved{} + m.Token = msg.Token + m.UnitsTimestamp = timestamppb.New(options.timestamp) + m.Units = make([]*proto.UnitObserved, 0) + msgs = append(msgs, m) + return msgs, nil +} + +// CheckinObservedReceiver provides a Recv interface to receive proto.CheckinObserved messages. +type CheckinObservedReceiver interface { + Recv() (*proto.CheckinObserved, error) +} + +// RecvObserved handles the receiving of chunked proto.CheckinObserved. 
+func RecvObserved(recv CheckinObservedReceiver) (*proto.CheckinObserved, error) { + var first *proto.CheckinObserved + for { + msg, err := recv.Recv() + if err != nil { + return nil, err + } + if msg.UnitsTimestamp == nil { + // all included in a single message + return msg, nil + } + if first == nil { + // first message in batch + first = msg + } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { + // only used if the new timestamp is newer + if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { + // not newer so we ignore the message + continue + } + // different batch; restart + first = msg + } + if len(msg.Units) == 0 { + // ending match message + return first, nil + } + if first != msg { + first.Units = append(first.Units, msg.Units...) + } + } +} + +func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { + return &proto.CheckinObserved{ + Token: msg.Token, + Units: msg.Units, + VersionInfo: msg.VersionInfo, + FeaturesIdx: msg.FeaturesIdx, + ComponentIdx: msg.ComponentIdx, + UnitsTimestamp: msg.UnitsTimestamp, + Supports: msg.Supports, + } +} + +type observedBySize struct { + unit *proto.UnitObserved + size int +} diff --git a/pkg/utils/utils_test.go b/pkg/client/chunk/observed_test.go similarity index 55% rename from pkg/utils/utils_test.go rename to pkg/client/chunk/observed_test.go index 56f8a14..7b75f31 100644 --- a/pkg/utils/utils_test.go +++ b/pkg/client/chunk/observed_test.go @@ -2,7 +2,7 @@ // or more contributor license agreements. Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. -package utils +package chunk import ( "golang.org/x/exp/slices" @@ -20,7 +20,7 @@ import ( "github.com/elastic/elastic-agent-client/v7/pkg/proto" ) -func TestChunkedObserved(t *testing.T) { +func TestObserved(t *testing.T) { timestamp := time.Now() scenarios := []struct { @@ -88,8 +88,8 @@ func TestChunkedObserved(t *testing.T) { }, }, { - Name: "chunk", - MaxSize: 100, + Name: "chunk checkin message", + MaxSize: 120, Original: &proto.CheckinObserved{ Token: "token", FeaturesIdx: 2, @@ -112,6 +112,16 @@ func TestChunkedObserved(t *testing.T) { State: proto.State_HEALTHY, Message: "Healthy", }, + { + Id: "id-three", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "larger than id-two", + }), + }, }, }, Expected: []*proto.CheckinObserved{ @@ -127,6 +137,16 @@ func TestChunkedObserved(t *testing.T) { State: proto.State_HEALTHY, Message: "Healthy", }, + { + Id: "id-three", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "larger than id-two", + }), + }, }, UnitsTimestamp: timestamppb.New(timestamp), }, @@ -211,7 +231,7 @@ func TestChunkedObserved(t *testing.T) { for _, scenario := range scenarios { t.Run(scenario.Name, func(t *testing.T) { - observed, err := ChunkedObserved(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) + observed, err := Observed(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) if scenario.Error != "" { require.Error(t, err) assert.True(t, strings.Contains(err.Error(), scenario.Error)) @@ -221,7 +241,7 @@ func TestChunkedObserved(t *testing.T) { require.Empty(t, diff) // re-assemble and it should now match the original - assembled, err := RecvChunkedObserved(&fakeCheckinObservedReceiver{msgs: observed}) + 
assembled, err := RecvObserved(&fakeCheckinObservedReceiver{msgs: observed}) require.NoError(t, err) // to compare we need to remove the units timestamp and ensure the units are in the same order @@ -237,218 +257,88 @@ func TestChunkedObserved(t *testing.T) { } } -func TestChunkedExpected(t *testing.T) { - timestamp := time.Now() - - scenarios := []struct { - Name string - MaxSize int - Original *proto.CheckinExpected - Expected []*proto.CheckinExpected - Error string - }{ - { - Name: "unit too large to fit", - MaxSize: 30, - Error: "unable to chunk proto.CheckinExpected the unit id-one is larger than max", - Original: &proto.CheckinExpected{ - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - LogLevel: proto.UnitLogLevel_INFO, - Config: &proto.UnitExpectedConfig{ - Id: "testing", - Type: "testing", - Name: "testing", - }, - }, - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - }, - }, +func TestRecvObserved_Timestamp_Restart(t *testing.T) { + firstTimestamp := time.Now() + first := &proto.CheckinObserved{ + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "first-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), }, - }, - { - Name: "first chunk too large", - MaxSize: 50, - Error: "unable to chunk proto.CheckinExpected the first chunk with", - Original: &proto.CheckinExpected{ - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - LogLevel: proto.UnitLogLevel_INFO, - Config: &proto.UnitExpectedConfig{ - Id: "testing1", - Type: "testing", - Name: "testing1", - }, - }, - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - LogLevel: proto.UnitLogLevel_INFO, - Config: &proto.UnitExpectedConfig{ - Id: "testing2", - Type: "testing", - Name: "testing2", - }, - }, - }, + { + Id: "first-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", }, }, - { - Name: "chunk", - MaxSize: 50, - Original: &proto.CheckinExpected{ - FeaturesIdx: 2, - ComponentIdx: 3, - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - Config: &proto.UnitExpectedConfig{ - Id: "testing", - Type: "testing", - Name: "testing", - }, - }, - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - }, - }, - }, - Expected: []*proto.CheckinExpected{ - { - FeaturesIdx: 2, - ComponentIdx: 3, - Units: []*proto.UnitExpected{ - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - }, - }, - UnitsTimestamp: timestamppb.New(timestamp), - }, - { - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - Config: &proto.UnitExpectedConfig{ - Id: "testing", - Type: "testing", - Name: "testing", - }, - }, - }, - UnitsTimestamp: timestamppb.New(timestamp), - }, - { - Units: []*proto.UnitExpected{}, - UnitsTimestamp: timestamppb.New(timestamp), - }, + } + firstMsgs, err := Observed(first, 100, WithTimestamp(firstTimestamp)) + require.NoError(t, err) + + secondTimestamp 
:= time.Now() + second := &proto.CheckinObserved{ + Token: "token", + FeaturesIdx: 2, + ComponentIdx: 3, + Units: []*proto.UnitObserved{ + { + Id: "second-one", + Type: proto.UnitType_OUTPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", + Payload: mustStruct(map[string]interface{}{ + "large": "this structure places this unit over the maximum size", + }), }, - }, - { - Name: "fits in single message", - MaxSize: 200, - Original: &proto.CheckinExpected{ - FeaturesIdx: 2, - ComponentIdx: 3, - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - Config: &proto.UnitExpectedConfig{ - Id: "testing", - Type: "testing", - Name: "testing", - }, - }, - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - }, - }, - }, - Expected: []*proto.CheckinExpected{ - { - FeaturesIdx: 2, - ComponentIdx: 3, - Units: []*proto.UnitExpected{ - { - Id: "id-one", - Type: proto.UnitType_OUTPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - Config: &proto.UnitExpectedConfig{ - Id: "testing", - Type: "testing", - Name: "testing", - }, - }, - { - Id: "id-two", - Type: proto.UnitType_INPUT, - ConfigStateIdx: 1, - State: proto.State_HEALTHY, - }, - }, - }, + { + Id: "second-two", + Type: proto.UnitType_INPUT, + ConfigStateIdx: 1, + State: proto.State_HEALTHY, + Message: "Healthy", }, }, } + secondsMsgs, err := Observed(second, 100, WithTimestamp(secondTimestamp)) + require.NoError(t, err) - for _, scenario := range scenarios { - t.Run(scenario.Name, func(t *testing.T) { - observed, err := ChunkedExpected(scenario.Original, scenario.MaxSize, WithTimestamp(timestamp)) - if scenario.Error != "" { - require.Error(t, err) - assert.True(t, strings.Contains(err.Error(), scenario.Error)) - } else { - require.NoError(t, err) - diff := cmp.Diff(scenario.Expected, observed, protocmp.Transform()) - require.Empty(t, diff) + // ensure chunking results in exact length as the order in the test relies on it + require.Len(t, firstMsgs, 3) + require.Len(t, secondsMsgs, 3) - // re-assemble and it should now match the original - assembled, err := RecvChunkedExpected(&fakeCheckinExpectedReceiver{msgs: observed}) - require.NoError(t, err) + // re-order the messages + reorderedMsgs := make([]*proto.CheckinObserved, 6) + reorderedMsgs[0] = firstMsgs[0] + reorderedMsgs[1] = secondsMsgs[0] // becomes new set + reorderedMsgs[2] = firstMsgs[1] // ignored + reorderedMsgs[3] = firstMsgs[2] // ignored + reorderedMsgs[4] = secondsMsgs[1] + reorderedMsgs[5] = secondsMsgs[2] - // to compare we need to remove the units timestamp and ensure the units are in the same order - // completely acceptable that they get re-ordered in the chunking process - assembled.UnitsTimestamp = nil - slices.SortStableFunc(assembled.Units, sortExpectedUnits) - slices.SortStableFunc(scenario.Original.Units, sortExpectedUnits) + // re-assemble and it should now match the second + assembled, err := RecvObserved(&fakeCheckinObservedReceiver{msgs: reorderedMsgs}) + require.NoError(t, err) - diff = cmp.Diff(scenario.Original, assembled, protocmp.Transform()) - assert.Empty(t, diff) - } - }) - } + // to compare we need to remove the units timestamp and ensure the units are in the same order + // completely acceptable that they get re-ordered in the chunking process + assembled.UnitsTimestamp = nil + slices.SortStableFunc(assembled.Units, sortObservedUnits) + slices.SortStableFunc(second.Units, sortObservedUnits) + + diff := 
cmp.Diff(second, assembled, protocmp.Transform()) + assert.Empty(t, diff) } func mustStruct(v map[string]interface{}) *structpb.Struct { @@ -463,10 +353,6 @@ func sortObservedUnits(a *proto.UnitObserved, b *proto.UnitObserved) int { return strings.Compare(a.Id, b.Id) } -func sortExpectedUnits(a *proto.UnitExpected, b *proto.UnitExpected) int { - return strings.Compare(a.Id, b.Id) -} - type fakeCheckinObservedReceiver struct { msgs []*proto.CheckinObserved } @@ -476,13 +362,3 @@ func (f *fakeCheckinObservedReceiver) Recv() (*proto.CheckinObserved, error) { msg, f.msgs = f.msgs[0], f.msgs[1:] return msg, nil } - -type fakeCheckinExpectedReceiver struct { - msgs []*proto.CheckinExpected -} - -func (f *fakeCheckinExpectedReceiver) Recv() (*proto.CheckinExpected, error) { - var msg *proto.CheckinExpected - msg, f.msgs = f.msgs[0], f.msgs[1:] - return msg, nil -} diff --git a/pkg/client/chunk/option.go b/pkg/client/chunk/option.go new file mode 100644 index 0000000..288ad89 --- /dev/null +++ b/pkg/client/chunk/option.go @@ -0,0 +1,23 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package chunk + +import "time" + +type options struct { + timestamp time.Time +} + +// Option is an option for adjusting chunking. +type Option func(opts *options) + +// WithTimestamp adjusts the timestamp used for the chunking. +// +// Note: Mainly used for testing to ensure a specific timestamp is used. +func WithTimestamp(t time.Time) Option { + return func(opts *options) { + opts.timestamp = t + } +} diff --git a/pkg/client/client_v2.go b/pkg/client/client_v2.go index 06ae489..74e6b8b 100644 --- a/pkg/client/client_v2.go +++ b/pkg/client/client_v2.go @@ -10,6 +10,7 @@ import ( "encoding/json" "errors" "fmt" + "github.com/elastic/elastic-agent-client/v7/pkg/client/chunk" "io" "runtime" "runtime/pprof" @@ -453,8 +454,8 @@ func (c *clientV2) checkinRoundTrip() { go func() { defer wg.Done() defer close(readerDone) - expected, err := utils.RecvChunkedExpected(checkinClient) - for ; err == nil; expected, err = utils.RecvChunkedExpected(checkinClient) { + expected, err := chunk.RecvExpected(checkinClient) + for ; err == nil; expected, err = chunk.RecvExpected(checkinClient) { c.applyExpected(expected) } if !errors.Is(err, io.EOF) { @@ -1084,12 +1085,12 @@ func inExpected(unit *Unit, expected []*proto.UnitExpected) bool { return false } -func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.CheckinObserved, chunk bool, maxSize int) error { - if !chunk { +func sendObservedChunked(client proto.ElasticAgent_CheckinV2Client, msg *proto.CheckinObserved, chunkingAllowed bool, maxSize int) error { + if !chunkingAllowed { // chunking is disabled return client.Send(msg) } - msgs, err := utils.ChunkedObserved(msg, maxSize) + msgs, err := chunk.Observed(msg, maxSize) if err != nil { return err } diff --git a/pkg/client/mock/stub_serverV2.go b/pkg/client/mock/stub_serverV2.go index f2b9da2..48ac00d 100644 --- a/pkg/client/mock/stub_serverV2.go +++ b/pkg/client/mock/stub_serverV2.go @@ -8,13 +8,14 @@ import ( "context" "encoding/json" "fmt" - "github.com/elastic/elastic-agent-client/v7/pkg/utils" "net" "sync" - "github.com/elastic/elastic-agent-client/v7/pkg/proto" "github.com/gofrs/uuid" "google.golang.org/grpc" + + "github.com/elastic/elastic-agent-client/v7/pkg/client/chunk" + 
"github.com/elastic/elastic-agent-client/v7/pkg/proto" ) // StubServerCheckinV2 is the checkin function for the V2 controller @@ -105,7 +106,7 @@ func (s *StubServerV2) Checkin(server proto.ElasticAgent_CheckinServer) error { // CheckinV2 is the V2 checkin implementation for the mock server func (s *StubServerV2) CheckinV2(server proto.ElasticAgent_CheckinV2Server) error { for { - checkin, err := utils.RecvChunkedObserved(server) + checkin, err := chunk.RecvObserved(server) if err != nil { return err } diff --git a/pkg/utils/utils.go b/pkg/utils/utils.go index 0d4d821..d3ee652 100644 --- a/pkg/utils/utils.go +++ b/pkg/utils/utils.go @@ -6,15 +6,6 @@ package utils import ( "encoding/json" - "time" - - "golang.org/x/exp/slices" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" - protobuf "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" - - "github.com/elastic/elastic-agent-client/v7/pkg/proto" ) // JSONMustMarshal marshals the input to JSON []byte and panics if it fails. @@ -25,291 +16,3 @@ func JSONMustMarshal(input interface{}) []byte { } return res } - -type chunkedOptions struct { - timestamp time.Time -} - -// ChunkOption is an option for adjusting chunking. -type ChunkOption func(opts *chunkedOptions) - -// WithTimestamp adjusts the timestamp used for the chunking. -// -// Note: Mainly used for testing to ensure a specific timestamp is used. -func WithTimestamp(t time.Time) ChunkOption { - return func(opts *chunkedOptions) { - opts.timestamp = t - } -} - -// ChunkedObserved chunks `proto.CheckinObserved` message into multiple chunks to be sent across the protocol. -func ChunkedObserved(msg *proto.CheckinObserved, maxSize int, opts ...ChunkOption) ([]*proto.CheckinObserved, error) { - var options chunkedOptions - options.timestamp = time.Now() // timestamp used for chunk set - for _, opt := range opts { - opt(&options) - } - - s := protobuf.Size(msg) - if s <= maxSize || len(msg.Units) <= 1 { - // fits so no chunking needed or has 0 or 1 units which cannot be chunked - return []*proto.CheckinObserved{msg}, nil - } - - msgs := make([]*proto.CheckinObserved, 0, 3) // start at 3 minimum - - // a single unit is the smallest a chunk can be - // pre-calculate the size and ensure that a single unit is less than the maxSize - bySize := make([]observedBySize, len(msg.Units)) - for i, u := range msg.Units { - bySize[i].unit = u - bySize[i].size = protobuf.Size(u) - // >= is used because even if it's at the maxSize, with overhead - // it will still be too big even if it's at the exact maxSize - if bySize[i].size >= maxSize { - return nil, status.Errorf( - codes.ResourceExhausted, - "unable to chunk proto.CheckinObserved the unit %s is larger than max (%d vs. %d)", - u.Id, bySize[i].size, maxSize) - } - } - - // sort the smallest units first, this ensures that the first chunk that includes extra - // fields uses the smallest unit to ensure that it all fits - slices.SortStableFunc(bySize, func(a, b observedBySize) int { - return a.size - b.size - }) - - // first message all fields are set; except units is made smaller - m := shallowCopyCheckinObserved(msg) - m.Units = make([]*proto.UnitObserved, 0, 1) - m.Units = append(m.Units, bySize[0].unit) - m.UnitsTimestamp = timestamppb.New(options.timestamp) - s = protobuf.Size(m) - if s >= maxSize { - // not possible even for the first chunk to fit - return nil, status.Errorf( - codes.ResourceExhausted, - "unable to chunk proto.CheckinObserved the first chunk with unit %s is larger than max (%d vs. 
%d)", - m.Units[0].Id, s, maxSize) - } - - // keep adding units until it doesn't fit - for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { - us := bySize[nextUnit] - if s+us.size < maxSize { - // unit fits add it - m.Units = append(m.Units, us.unit) - s += us.size - } else { - // doesn't fit, create a new chunk - msgs = append(msgs, m) - m = &proto.CheckinObserved{} - m.Token = msg.Token - m.UnitsTimestamp = timestamppb.New(options.timestamp) - m.Units = make([]*proto.UnitObserved, 0, 1) - m.Units = append(m.Units, us.unit) - s = protobuf.Size(m) - } - } - - // all chunks created, create the empty chunk - msgs = append(msgs, m) - m = &proto.CheckinObserved{} - m.Token = msg.Token - m.UnitsTimestamp = timestamppb.New(options.timestamp) - m.Units = make([]*proto.UnitObserved, 0) - msgs = append(msgs, m) - return msgs, nil -} - -// CheckinObservedReceiver provides a Recv interface to receive proto.CheckinObserved messages. -type CheckinObservedReceiver interface { - Recv() (*proto.CheckinObserved, error) -} - -// RecvChunkedObserved handles the receiving of chunked proto.CheckinObserved. -func RecvChunkedObserved(recv CheckinObservedReceiver) (*proto.CheckinObserved, error) { - var first *proto.CheckinObserved - for { - msg, err := recv.Recv() - if err != nil { - return nil, err - } - if msg.UnitsTimestamp == nil { - // all included in a single message - return msg, nil - } - if first == nil { - // first message in batch - first = msg - } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { - // only used if the new timestamp is newer - if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { - // not newer so we ignore the message - continue - } - // different batch; restart - first = msg - } - if len(msg.Units) == 0 { - // ending match message - return first, nil - } - if first != msg { - first.Units = append(first.Units, msg.Units...) - } - } -} - -func shallowCopyCheckinObserved(msg *proto.CheckinObserved) *proto.CheckinObserved { - return &proto.CheckinObserved{ - Token: msg.Token, - Units: msg.Units, - VersionInfo: msg.VersionInfo, - FeaturesIdx: msg.FeaturesIdx, - ComponentIdx: msg.ComponentIdx, - UnitsTimestamp: msg.UnitsTimestamp, - Supports: msg.Supports, - } -} - -type observedBySize struct { - unit *proto.UnitObserved - size int -} - -// ChunkedExpected chunks `proto.CheckinExpected` message into multiple chunks to be sent across the protocol. -func ChunkedExpected(msg *proto.CheckinExpected, maxSize int, opts ...ChunkOption) ([]*proto.CheckinExpected, error) { - var options chunkedOptions - options.timestamp = time.Now() // timestamp used for chunk set - for _, opt := range opts { - opt(&options) - } - - s := protobuf.Size(msg) - if s <= maxSize || len(msg.Units) <= 1 { - // fits so no chunking needed or has 0 or 1 units which cannot be chunked - return []*proto.CheckinExpected{msg}, nil - } - - msgs := make([]*proto.CheckinExpected, 0, 3) // start at 3 minimum - - // a single unit is the smallest a chunk can be - // pre-calculate the size and ensure that a single unit is less than the maxSize - bySize := make([]expectedBySize, len(msg.Units)) - for i, u := range msg.Units { - bySize[i].unit = u - bySize[i].size = protobuf.Size(u) - // >= is used because even if it's at the maxSize, with overhead - // it will still be too big even if it's at the exact maxSize - if bySize[i].size >= maxSize { - return nil, status.Errorf( - codes.ResourceExhausted, - "unable to chunk proto.CheckinExpected the unit %s is larger than max (%d vs. 
%d)", - u.Id, bySize[i].size, maxSize) - } - } - - // sort the smallest units first, this ensures that the first chunk that includes extra - // fields uses the smallest unit to ensure that it all fits - slices.SortStableFunc(bySize, func(a, b expectedBySize) int { - return a.size - b.size - }) - - // first message all fields are set; except units is made smaller - m := shallowCopyCheckinExpected(msg) - m.Units = make([]*proto.UnitExpected, 0, 1) - m.Units = append(m.Units, bySize[0].unit) - m.UnitsTimestamp = timestamppb.New(options.timestamp) - s = protobuf.Size(m) - if s >= maxSize { - // not possible even for the first chunk to fit - return nil, status.Errorf( - codes.ResourceExhausted, - "unable to chunk proto.CheckinExpected the first chunk with unit %s is larger than max (%d vs. %d)", - m.Units[0].Id, s, maxSize) - } - - // keep adding units until it doesn't fit - for nextUnit := 1; s < maxSize && nextUnit < len(bySize); nextUnit++ { - us := bySize[nextUnit] - if s+us.size < maxSize { - // unit fits add it - m.Units = append(m.Units, us.unit) - s += us.size - } else { - // doesn't fit, create a new chunk - msgs = append(msgs, m) - m = &proto.CheckinExpected{} - m.UnitsTimestamp = timestamppb.New(options.timestamp) - m.Units = make([]*proto.UnitExpected, 0, 1) - m.Units = append(m.Units, us.unit) - s = protobuf.Size(m) - } - } - - // all chunks created, create the empty chunk - msgs = append(msgs, m) - m = &proto.CheckinExpected{} - m.UnitsTimestamp = timestamppb.New(options.timestamp) - m.Units = make([]*proto.UnitExpected, 0) - msgs = append(msgs, m) - return msgs, nil -} - -// CheckinExpectedReceiver provides a Recv interface to receive proto.CheckinExpected messages. -type CheckinExpectedReceiver interface { - Recv() (*proto.CheckinExpected, error) -} - -// RecvChunkedExpected handles the receiving of chunked proto.CheckinObjected. -func RecvChunkedExpected(recv CheckinExpectedReceiver) (*proto.CheckinExpected, error) { - var first *proto.CheckinExpected - for { - msg, err := recv.Recv() - if err != nil { - return nil, err - } - if msg.UnitsTimestamp == nil { - // all included in a single message - return msg, nil - } - if first == nil { - // first message in batch - first = msg - } else if first.UnitsTimestamp.AsTime() != msg.UnitsTimestamp.AsTime() { - // only used if the new timestamp is newer - if first.UnitsTimestamp.AsTime().After(msg.UnitsTimestamp.AsTime()) { - // not newer so we ignore the message - continue - } - // different batch; restart - first = msg - } - if len(msg.Units) == 0 { - // ending match message - return first, nil - } - if first != msg { - first.Units = append(first.Units, msg.Units...) - } - } -} - -func shallowCopyCheckinExpected(msg *proto.CheckinExpected) *proto.CheckinExpected { - return &proto.CheckinExpected{ - AgentInfo: msg.AgentInfo, - Features: msg.Features, - FeaturesIdx: msg.FeaturesIdx, - Component: msg.Component, - ComponentIdx: msg.ComponentIdx, - Units: msg.Units, - UnitsTimestamp: msg.UnitsTimestamp, - } -} - -type expectedBySize struct { - unit *proto.UnitExpected - size int -}