github.com/wfusion/gofusion@v1.1.14/mq/kafka.go (about)

     1  package mq
     2  
     3  import (
     4  	"context"
     5  	"strings"
     6  	"time"
     7  
     8  	"github.com/IBM/sarama"
     9  	"github.com/pkg/errors"
    10  
    11  	"github.com/wfusion/gofusion/common/infra/watermill"
    12  	"github.com/wfusion/gofusion/common/infra/watermill/pubsub/kafka"
    13  	"github.com/wfusion/gofusion/common/utils"
    14  	"github.com/wfusion/gofusion/config"
    15  )
    16  
    17  func newKafka(ctx context.Context, appName, name string, conf *Conf, logger watermill.LoggerAdapter) (
    18  	pub Publisher, sub Subscriber) {
    19  	if conf.Producer {
    20  		pub = newKafkaPublisher(ctx, appName, name, conf, logger)
    21  	}
    22  
    23  	if conf.Consumer {
    24  		sub = newKafkaSubscriber(ctx, appName, name, conf, logger)
    25  	}
    26  
    27  	return
    28  }
    29  
// kafkaPublisher adapts a watermill kafka.Publisher to this package's
// Publisher interface via the shared abstractMQ base.
type kafkaPublisher struct {
	*abstractMQ
	publisher *kafka.Publisher // underlying watermill kafka publisher, closed by close()
}
    34  
    35  func newKafkaPublisher(ctx context.Context, appName, name string,
    36  	conf *Conf, logger watermill.LoggerAdapter) Publisher {
    37  	cfg := kafka.PublisherConfig{
    38  		Brokers:               conf.Endpoint.Addresses,
    39  		Marshaler:             kafka.DefaultMarshaler{AppID: config.Use(appName).AppName()},
    40  		OverwriteSaramaConfig: parseKafkaConf(appName, conf),
    41  	}
    42  
    43  	pub, err := kafka.NewPublisher(cfg, logger)
    44  	if err != nil {
    45  		panic(errors.Wrapf(err, "initialize mq component kafka publisher failed: %s", err))
    46  	}
    47  
    48  	return &kafkaPublisher{
    49  		abstractMQ: newPub(ctx, pub, appName, name, conf, logger),
    50  		publisher:  pub,
    51  	}
    52  }
    53  
    54  func (k *kafkaPublisher) close() (err error) {
    55  	return k.publisher.Close()
    56  }
    57  
// kafkaSubscriber adapts a watermill kafka.Subscriber to this package's
// Subscriber interface via the shared abstractMQ base.
type kafkaSubscriber struct {
	*abstractMQ
	subscriber *kafka.Subscriber // underlying watermill kafka subscriber, closed by close()
}
    62  
    63  func newKafkaSubscriber(ctx context.Context, appName, name string,
    64  	conf *Conf, logger watermill.LoggerAdapter) Subscriber {
    65  	cfg := kafka.SubscriberConfig{
    66  		Brokers:               conf.Endpoint.Addresses,
    67  		Unmarshaler:           kafka.DefaultMarshaler{AppID: config.Use(appName).AppName()},
    68  		OverwriteSaramaConfig: parseKafkaConf(appName, conf),
    69  		ConsumerGroup:         conf.ConsumerGroup,
    70  		NackResendSleep:       100 * time.Millisecond,
    71  		ReconnectRetrySleep:   time.Second,
    72  		InitializeTopicDetails: &sarama.TopicDetail{
    73  			NumPartitions:     -1,
    74  			ReplicationFactor: -1,
    75  			ReplicaAssignment: nil,
    76  			ConfigEntries:     nil,
    77  		},
    78  	}
    79  
    80  	sub, err := kafka.NewSubscriber(cfg, logger)
    81  	if err != nil {
    82  		panic(errors.Wrapf(err, "initialize mq component kafka subscriber failed: %s", err))
    83  	}
    84  
    85  	if err = sub.SubscribeInitialize(conf.Topic); err != nil {
    86  		panic(errors.Wrapf(err, "initialize mq component kafka subscriber intialize: %s", err))
    87  	}
    88  
    89  	return &kafkaSubscriber{
    90  		abstractMQ: newSub(ctx, sub, appName, name, conf, logger),
    91  		subscriber: sub,
    92  	}
    93  }
    94  
    95  func (k *kafkaSubscriber) close() (err error) {
    96  	return k.subscriber.Close()
    97  }
    98  
// parseKafkaConf builds a sarama.Config shared by both publisher and
// subscriber: acks on local write, larger consumer fetch size, auto-commit
// offsets, app name as client ID, and optional SASL auth derived from the
// endpoint settings.
func parseKafkaConf(appName string, conf *Conf) (saramaCfg *sarama.Config) {
	saramaCfg = sarama.NewConfig()
	saramaCfg.Producer.Return.Errors = true
	saramaCfg.Producer.Return.Successes = true
	saramaCfg.Producer.RequiredAcks = sarama.WaitForLocal
	saramaCfg.Producer.Retry.Max = 10
	saramaCfg.Consumer.Fetch.Default = 16 * 1024 * 1024 // 16mb, default is 1mb
	saramaCfg.Consumer.Offsets.Initial = sarama.OffsetNewest
	saramaCfg.Consumer.Offsets.AutoCommit.Enable = true
	saramaCfg.Consumer.Offsets.AutoCommit.Interval = time.Second // only effective while auto commit is enabled (it is, above)
	saramaCfg.Consumer.Return.Errors = true
	saramaCfg.Metadata.Retry.Backoff = time.Second * 2
	saramaCfg.ClientID = config.Use(appName).AppName()
	// Version is optional; utils.Must panics on an unparsable version string.
	if utils.IsStrNotBlank(conf.Endpoint.Version) {
		saramaCfg.Version = utils.Must(sarama.ParseKafkaVersion(conf.Endpoint.Version))
	}
	// SASL is enabled whenever a user is configured; mechanism defaults to
	// PLAIN and is overridden by AuthType (case-insensitive match).
	if utils.IsStrNotBlank(conf.Endpoint.User) {
		saramaCfg.Net.SASL.Enable = true
		saramaCfg.Net.SASL.User = conf.Endpoint.User
		saramaCfg.Net.SASL.Password = conf.Endpoint.Password
		saramaCfg.Net.SASL.Mechanism = sarama.SASLTypePlaintext
		switch {
		case strings.EqualFold(conf.Endpoint.AuthType, sarama.SASLTypeSCRAMSHA256):
			saramaCfg.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA256
		case strings.EqualFold(conf.Endpoint.AuthType, sarama.SASLTypeSCRAMSHA512):
			saramaCfg.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
		case strings.EqualFold(conf.Endpoint.AuthType, sarama.SASLTypeOAuth):
			saramaCfg.Net.SASL.Mechanism = sarama.SASLTypeOAuth
			// The configured SASL password doubles as the static OAuth token.
			saramaCfg.Net.SASL.TokenProvider = &kafkaOAuthProvider{token: saramaCfg.Net.SASL.Password}
		}
	}
	return
}
   132  
// kafkaOAuthProvider is a sarama access-token provider that always returns a
// fixed token (populated from the configured SASL password in parseKafkaConf).
type kafkaOAuthProvider struct {
	token string // static OAuth bearer token
}
   136  
   137  func (k *kafkaOAuthProvider) Token() (*sarama.AccessToken, error) {
   138  	return &sarama.AccessToken{Token: k.token}, nil
   139  }