github.com/psiphon-labs/goarista@v0.0.0-20160825065156-d002785f4c67/kafka/producer/producer.go

// Copyright (C) 2016  Arista Networks, Inc.
// Use of this source code is governed by the Apache License 2.0
// that can be found in the COPYING file.

package producer

import (
	"expvar"
	"fmt"
	"sync"
	"sync/atomic"
	"time"

	"github.com/Shopify/sarama"
	"github.com/aristanetworks/glog"
	"github.com/aristanetworks/goarista/kafka"
	"github.com/aristanetworks/goarista/kafka/openconfig"
	"github.com/aristanetworks/goarista/monitor"
	"github.com/golang/protobuf/proto"
)

// counter counts the number of Sysdb clients we have, and is used to guarantee that we
// always export a unique name to expvar.
var counter uint32

// MessageEncoder encodes a topic, key, and proto.Message into a sarama.ProducerMessage.
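//
// A minimal sketch of an encoder satisfying this signature (the JSON encoding, the
// encoding/json import it would need, and the name jsonEncoder are illustrative
// assumptions, not part of this package; the kafka.Metadata attached here is what
// handleSuccesses and handleErrors below expect to find on each message):
//
//	func jsonEncoder(topic string, key sarama.Encoder, m proto.Message) (*sarama.ProducerMessage, error) {
//		payload, err := json.Marshal(m)
//		if err != nil {
//			return nil, err
//		}
//		return &sarama.ProducerMessage{
//			Topic:    topic,
//			Key:      key,
//			Value:    sarama.ByteEncoder(payload),
//			Metadata: kafka.Metadata{StartTime: time.Now(), NumMessages: 1},
//		}, nil
//	}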
type MessageEncoder func(string, sarama.Encoder, proto.Message) (*sarama.ProducerMessage, error)

// Producer forwards messages received on a channel to Kafka.
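//
// Run blocks until the notification channel is closed or Stop is called, so callers
// typically run it in its own goroutine.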
type Producer interface {
	Run()
	Write(proto.Message)
	Stop()
}

type producer struct {
	notifsChan    chan proto.Message
	kafkaProducer sarama.AsyncProducer
	topic         string
	key           sarama.Encoder
	encoder       MessageEncoder
	done          chan struct{}
	wg            sync.WaitGroup

	// Used for monitoring
	histogram    *monitor.Histogram
	numSuccesses monitor.Uint
	numFailures  monitor.Uint
}

// New creates a new Kafka producer.
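//
// Illustrative usage only (the broker address, topic, key, and the someEncoder
// placeholder below are assumptions for this sketch, not values defined by the package):
//
//	client, err := sarama.NewClient([]string{"localhost:9092"}, sarama.NewConfig())
//	if err != nil { /* handle error */ }
//	p, err := New("telemetry", nil, client, sarama.StringEncoder("datacenter-1"), someEncoder)
//	if err != nil { /* handle error */ }
//	go p.Run()
//	p.Write(msg) // msg is a proto.Message
//	p.Stop()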
func New(topic string, notifsChan chan proto.Message, client sarama.Client, key sarama.Encoder,
	encoder MessageEncoder) (Producer, error) {
	if notifsChan == nil {
		notifsChan = make(chan proto.Message)
	}
	kafkaProducer, err := sarama.NewAsyncProducerFromClient(client)
	if err != nil {
		return nil, err
	}

	// Set up monitoring structures
	histName := "kafkaProducerHistogram"
	statsName := "messagesStats"
	if id := atomic.AddUint32(&counter, 1); id > 1 {
		histName = fmt.Sprintf("%s-%d", histName, id)
		statsName = fmt.Sprintf("%s-%d", statsName, id)
	}
	hist := monitor.NewHistogram(histName, 32, 0.3, 1000, 0)
	statsMap := expvar.NewMap(statsName)

	p := &producer{
		notifsChan:    notifsChan,
		kafkaProducer: kafkaProducer,
		topic:         topic,
		key:           key,
		encoder:       encoder,
		done:          make(chan struct{}),
		wg:            sync.WaitGroup{},
		histogram:     hist,
	}

	statsMap.Set("successes", &p.numSuccesses)
	statsMap.Set("failures", &p.numFailures)

	return p, nil
}

func (p *producer) Run() {
	p.wg.Add(2)
	go p.handleSuccesses()
	go p.handleErrors()

	p.wg.Add(1)
	defer p.wg.Done()
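	// Forward each message from the notification channel to Kafka until the
	// channel is closed or Stop closes the done channel.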
	for {
		select {
		case batch, open := <-p.notifsChan:
			if !open {
				return
			}
			err := p.produceNotification(batch)
			if err != nil {
				if _, ok := err.(openconfig.UnhandledSubscribeResponseError); !ok {
					panic(err)
				}
			}
		case <-p.done:
			return
		}
	}
}

func (p *producer) Write(m proto.Message) {
	p.notifsChan <- m
}

func (p *producer) Stop() {
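	// Signal Run to stop, then close the underlying async producer (which flushes
	// pending messages and closes its Successes and Errors channels so the handler
	// goroutines exit) before waiting on the WaitGroup.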
	close(p.done)
	p.kafkaProducer.Close()
	p.wg.Wait()
}

func (p *producer) produceNotification(protoMessage proto.Message) error {
	message, err := p.encoder(p.topic, p.key, protoMessage)
	if err != nil {
		return err
	}
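	// Also select on done so a pending send to the producer's input channel does
	// not block forever once Stop has been called.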
	select {
	case p.kafkaProducer.Input() <- message:
		return nil
	case <-p.done:
		return nil
	}
}

// handleSuccesses reads from the producer's successes channel and collects some
// information for monitoring
func (p *producer) handleSuccesses() {
	defer p.wg.Done()
	for msg := range p.kafkaProducer.Successes() {
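		// Encoders are expected to attach kafka.Metadata to every message; the
		// type assertion panics if a different Metadata type was set.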
		metadata := msg.Metadata.(kafka.Metadata)
		// TODO: Add a monotonic clock source when one becomes available
		p.histogram.UpdateLatencyValues(metadata.StartTime, time.Now())
		p.numSuccesses.Add(uint64(metadata.NumMessages))
	}
}

// handleErrors reads from the producer's errors channel and collects some information
// for monitoring
func (p *producer) handleErrors() {
	defer p.wg.Done()
	for msg := range p.kafkaProducer.Errors() {
		metadata := msg.Msg.Metadata.(kafka.Metadata)
		// TODO: Add a monotonic clock source when one becomes available
		p.histogram.UpdateLatencyValues(metadata.StartTime, time.Now())
		glog.Errorf("Kafka Producer error: %s", msg.Error())
		p.numFailures.Add(uint64(metadata.NumMessages))
	}
}