github.com/aristanetworks/goarista@v0.0.0-20240514173732-cca2755bbd44/kafka/encoder.go

// Copyright (c) 2017 Arista Networks, Inc.
// Use of this source code is governed by the Apache License 2.0
// that can be found in the COPYING file.

package kafka

import (
	"expvar"
	"fmt"
	"sync/atomic"
	"time"

	"github.com/aristanetworks/goarista/monitor"

	"github.com/IBM/sarama"
	"github.com/aristanetworks/glog"
	"google.golang.org/protobuf/proto"
)

// MessageEncoder is an encoder interface
// which handles encoding proto.Message to sarama.ProducerMessage
type MessageEncoder interface {
	Encode(proto.Message) ([]*sarama.ProducerMessage, error)
	HandleSuccess(*sarama.ProducerMessage)
	HandleError(*sarama.ProducerError)
}
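
// A hedged sketch of how this interface is typically satisfied (not part of the
// original file): a concrete encoder embeds BaseEncoder to inherit its monitoring
// in HandleSuccess/HandleError and overrides Encode to build the ProducerMessages.
// The protoEncoder type, its topic field, and the single-message Metadata value
// are assumptions made for illustration only.
//
//	type protoEncoder struct {
//		*BaseEncoder
//		topic string
//	}
//
//	func (p *protoEncoder) Encode(m proto.Message) ([]*sarama.ProducerMessage, error) {
//		payload, err := proto.Marshal(m)
//		if err != nil {
//			return nil, err
//		}
//		return []*sarama.ProducerMessage{{
//			Topic:    p.topic,
//			Value:    sarama.ByteEncoder(payload),
//			Metadata: Metadata{StartTime: time.Now(), NumMessages: 1},
//		}}, nil
//	}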

// BaseEncoder implements the MessageEncoder interface
// and mainly handles monitoring
type BaseEncoder struct {
	// Used for monitoring
	numSuccesses monitor.Uint
	numFailures  monitor.Uint
	histogram    *monitor.LatencyHistogram
}

// counter counts the number of encoders we have created, and is used to guarantee
// that we always have a unique name exported to expvar
var counter uint32

// NewBaseEncoder returns a new base MessageEncoder
func NewBaseEncoder(typ string) *BaseEncoder {

	// Set up monitoring structures
	histName := "kafkaProducerHistogram_" + typ
	statsName := "messagesStats"
	if id := atomic.AddUint32(&counter, 1); id > 1 {
		histName = fmt.Sprintf("%s_%d", histName, id)
		statsName = fmt.Sprintf("%s_%d", statsName, id)
	}
	hist := monitor.NewLatencyHistogram(histName, time.Microsecond, 32, 0.3, 1000, 0)
	e := &BaseEncoder{
		histogram: hist,
	}

	statsMap := expvar.NewMap(statsName)
	statsMap.Set("successes", &e.numSuccesses)
	statsMap.Set("failures", &e.numFailures)

	return e
}
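
// A minimal usage sketch (assumption, not in the original source): each call
// creates an independent set of expvar metrics; the first encoder publishes
// "kafkaProducerHistogram_<typ>" and "messagesStats", while later ones get a
// numeric suffix (e.g. "messagesStats_2") thanks to the counter above. The
// "sysdb" type name here is only an example.
//
//	enc := NewBaseEncoder("sysdb")
//	// expvar now exposes kafkaProducerHistogram_sysdb plus a messagesStats map
//	// with "successes" and "failures" entries that the handlers below update.
//	_ = enc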

// Encode encodes the proto message into sarama.ProducerMessages
func (e *BaseEncoder) Encode(message proto.Message) ([]*sarama.ProducerMessage,
	error) {
	// does nothing, but is kept so that BaseEncoder
	// implements the MessageEncoder interface
	return nil, nil
}

// HandleSuccess processes the metadata of messages from the kafka producer Successes channel
func (e *BaseEncoder) HandleSuccess(msg *sarama.ProducerMessage) {
	// TODO: Fix this and provide an interface to get the metadata object
	metadata, ok := msg.Metadata.(Metadata)
	if !ok {
		return
	}
	// TODO: Add a monotonic clock source when one becomes available
	e.histogram.UpdateLatencyValues(time.Since(metadata.StartTime))
	e.numSuccesses.Add(uint64(metadata.NumMessages))
}

// HandleError processes the metadata of messages from the kafka producer Errors channel
func (e *BaseEncoder) HandleError(msg *sarama.ProducerError) {
	// TODO: Fix this and provide an interface to get the metadata object
	metadata, ok := msg.Msg.Metadata.(Metadata)
	if !ok {
		return
	}
	// TODO: Add a monotonic clock source when one becomes available
	e.histogram.UpdateLatencyValues(time.Since(metadata.StartTime))
	glog.Errorf("Kafka Producer error: %s", msg.Error())
	e.numFailures.Add(uint64(metadata.NumMessages))
}
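
// A hedged sketch of how HandleSuccess and HandleError are meant to be driven
// (assumption, not part of this file): a goroutine drains the async producer's
// feedback channels and forwards each message to the encoder. This relies on
// sarama's Producer.Return.Successes and Producer.Return.Errors being enabled
// in the producer config; the drain function name is made up for the example.
//
//	func drain(producer sarama.AsyncProducer, e MessageEncoder) {
//		for {
//			select {
//			case msg := <-producer.Successes():
//				e.HandleSuccess(msg)
//			case err := <-producer.Errors():
//				e.HandleError(err)
//			}
//		}
//	}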