github.com/weedge/lib@v0.0.0-20230424045628-a36dcc1d90e4/client/mq/kafka/producer/producer.go

package producer

import (
	"fmt"
	"strconv"
	"sync"
	"time"

	"github.com/Shopify/sarama"
	"github.com/weedge/lib/client/mq/kafka/auth"
	"github.com/weedge/lib/log"
	"github.com/weedge/lib/runtimer"
)

// @todo: add metrics written to log; add tracing interceptor

type Producer struct {
	pType         string // "sync" (default) or "async"
	topic         string
	partition     int32
	config        *sarama.Config
	asyncProducer sarama.AsyncProducer
	syncProducer  sarama.SyncProducer
	wg            *sync.WaitGroup
}

var (
	compressions = map[string]sarama.CompressionCodec{
		"":       sarama.CompressionNone,
		"gzip":   sarama.CompressionGZIP,
		"snappy": sarama.CompressionSnappy,
		"lz4":    sarama.CompressionLZ4,
		"zstd":   sarama.CompressionZSTD,
	}
	requiredAcks = map[int]sarama.RequiredAcks{
		0:  sarama.NoResponse,
		1:  sarama.WaitForLocal,
		-1: sarama.WaitForAll,
	}

	partitioning = map[string]sarama.PartitionerConstructor{
		"manual":     sarama.NewManualPartitioner,
		"hash":       sarama.NewHashPartitioner,
		"random":     sarama.NewRandomPartitioner,
		"roundrobin": sarama.NewRoundRobinPartitioner,
		//"referencehash": sarama.NewReferenceHashPartitioner,
	}
)
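
// Note: the partitioning option accepts either one of the named strategies
// above or a numeric partition ID; a numeric value selects the manual
// partitioner and routes every message to that fixed partition (see NewProducer).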

// NewProducer creates a sync (default) or async producer for the given topic,
// configured with auth options and producer options (requiredAcks, retryMaxCn,
// partitioning, compression, TLS, etc.).
func NewProducer(topic string, pType string, authOpts []auth.Option, options ...Option) (p *Producer) {
	p = &Producer{
		topic: topic,
		pType: pType,
		//wg: &sync.WaitGroup{},
	}
	p.config = sarama.NewConfig()

	opts := getProducerOptions(authOpts, options...)
	log.Info(fmt.Sprintf("producer options:%+v", opts))

	var err error
	p.config.Version, err = sarama.ParseKafkaVersion(opts.version)
	if err != nil {
		log.Error("parse kafka version err:", err.Error())
		return
	}

	p.config.ClientID = opts.clientID
	p.config.Producer.RequiredAcks = requiredAcks[opts.requiredAcks]
	p.config.Producer.Retry.Max = opts.retryMaxCn
	p.config.Producer.Compression = compressions[opts.compression]                              // compress messages
	p.config.Producer.Flush.Frequency = time.Duration(opts.flushFrequencyMs) * time.Millisecond // flush batches on this interval
	p.config.Producer.Return.Successes = true

	// A numeric partitioning option selects the manual partitioner and pins
	// messages to that partition; otherwise look up the named partitioner.
	partition, err := strconv.ParseInt(opts.partitioning, 10, 64)
	if err != nil {
		p.config.Producer.Partitioner = partitioning[opts.partitioning]
	} else {
		p.config.Producer.Partitioner = partitioning["manual"]
		p.partition = int32(partition)
	}

	opts.AuthOptions.InitSSL(p.config)
	opts.AuthOptions.InitSASLSCRAM(p.config)

	switch p.pType {
	case "sync":
		err = p.initSyncProducer(opts)
	case "async":
		err = p.initAsyncProducer(opts)
	default:
		err = p.initSyncProducer(opts)
	}
	if err != nil {
		log.Error("init producer err:", err.Error())
	}

	return
}
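
// Illustrative usage sketch (an assumption for demonstration: the defaults
// returned by getProducerOptions resolve to a reachable broker list and a valid
// Kafka version; in practice pass auth.Option/Option values as needed):
//
//	p := NewProducer("demo_topic", "sync", nil)
//	p.Send("hello")
//	p.SendByKey("user-42", "hello with key")
//	p.Close()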

// Send publishes a string message without a key.
func (p *Producer) Send(val string) {
	p.send("", val)
}

// SendByKey publishes a string message with the given string key.
func (p *Producer) SendByKey(key, val string) {
	p.send(key, val)
}

func (p *Producer) send(key string, val string) {
	msg := &sarama.ProducerMessage{
		Topic: p.topic,
		Value: sarama.StringEncoder(val),
	}

	if p.partition > 0 {
		msg.Partition = p.partition
	}

	if len(key) > 0 {
		msg.Key = sarama.StringEncoder(key)
	}

	if p.syncProducer != nil {
		partition, offset, err := p.syncProducer.SendMessage(msg)
		if err != nil {
			log.Error("syncProducer.SendMessage msg err:", err.Error())
			return
		}
		log.Info(fmt.Sprintf("syncProducer.SendMessage success,topic:%s,partition:%d,offset:%d,val:%s", p.topic, partition, offset, msg.Value))
	}

	if p.asyncProducer != nil {
		p.asyncProducer.Input() <- msg
	}
}
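
// Note: with an async producer, send only enqueues the message; delivery
// results are observed by the Errors()/Successes() readers started in
// initAsyncProducer.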

// Close waits for any tracked goroutines, then closes the sync/async producer.
func (p *Producer) Close() {
	if p.wg != nil {
		p.wg.Wait()
	}
	if p.syncProducer != nil {
		if err := p.syncProducer.Close(); err != nil {
			log.Error("Failed to close sync producer cleanly:", err)
			return
		}
		log.Info("Closed sync producer cleanly")
	}
	if p.asyncProducer != nil {
		if err := p.asyncProducer.Close(); err != nil {
			log.Error("Failed to close async producer cleanly:", err)
			return
		}
		log.Info("Closed async producer cleanly")
	}
}

// On the broker side, you may want to change the following settings to get
// stronger consistency guarantees:
// - For your broker, set `unclean.leader.election.enable` to false
// - For the topic, you could increase `min.insync.replicas`.
func (p *Producer) initSyncProducer(opts *ProducerOptions) (err error) {
	p.syncProducer, err = sarama.NewSyncProducer(opts.brokerList, p.config)
	if err != nil {
		log.Error("Failed to start Sarama sync producer:", err)
		return
	}

	return
}
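
// For illustration only (values are assumptions, not defaults of this package),
// the broker/topic settings mentioned above could look like:
//
//	kafka-topics.sh --create --bootstrap-server <broker> --topic demo_topic \
//	  --partitions 3 --replication-factor 3 --config min.insync.replicas=2
//
// combined with unclean.leader.election.enable=false in the broker config, so
// that requiredAcks=-1 (sarama.WaitForAll) gives stronger durability guarantees.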

func (p *Producer) initAsyncProducer(opts *ProducerOptions) (err error) {
	p.asyncProducer, err = sarama.NewAsyncProducer(opts.brokerList, p.config)
	if err != nil {
		log.Error("Failed to start Sarama async producer:", err)
		return
	}

	// We just log if we're not able to produce messages.
	// Note: messages will only be returned here after all retry attempts are exhausted.
	runtimer.GoSafely(p.wg, false, func() {
		for err := range p.asyncProducer.Errors() {
			log.Error("Failed to async produce msg error:", err.Error())
		}
	}, nil, nil)

	runtimer.GoSafely(p.wg, false, func() {
		for msg := range p.asyncProducer.Successes() {
			log.Info(fmt.Sprintf("asyncProducer.SendMessage success,topic:%s,partition:%d,offset:%d,val:%s", msg.Topic, msg.Partition, msg.Offset, msg.Value))
		}
	}, nil, nil)

	return
}