github.com/klaytn/klaytn@v1.10.2/datasync/chaindatafetcher/kafka/kafka.go

// Copyright 2020 The klaytn Authors
// This file is part of the klaytn library.
//
// The klaytn library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The klaytn library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the klaytn library. If not, see <http://www.gnu.org/licenses/>.

package kafka

import (
	"encoding/json"

	"github.com/Shopify/sarama"
	"github.com/klaytn/klaytn/common"
	"github.com/klaytn/klaytn/log"
)

var logger = log.NewModuleLogger(log.ChainDataFetcher)

const (
	// item indices of the message record headers
	MsgHeaderTotalSegments = iota
	MsgHeaderSegmentIdx
	MsgHeaderVersion
	MsgHeaderProducerId
	MsgHeaderLength
)

// LegacyMsgHeaderLength is the number of record headers carried by legacy
// (pre-v1.0) messages: totalSegments and segmentIdx only.
const LegacyMsgHeaderLength = 2

// keys of the message record headers
const (
	KeyTotalSegments = "totalSegments"
	KeySegmentIdx    = "segmentIdx"
	KeyVersion       = "version"
	KeyProducerId    = "producerId"
)

const (
	MsgVersion1_0 = "1.0"
)

// IKey is implemented by event payloads that provide their own Kafka message key.
type IKey interface {
	Key() string
}
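
// A payload type can control how its segments are keyed by implementing IKey;
// Publish falls back to an empty key otherwise. The sketch below is
// illustrative only and the payload type is hypothetical, not part of this
// package:
//
//	// hypothetical payload keyed by its block number
//	type blockGroupPayload struct {
//		BlockNumber uint64 `json:"blockNumber"`
//	}
//
//	func (p blockGroupPayload) Key() string {
//		return strconv.FormatUint(p.BlockNumber, 10)
//	}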

// Kafka connects to the brokers in an existing kafka cluster.
type Kafka struct {
	config   *KafkaConfig
	producer sarama.SyncProducer
	admin    sarama.ClusterAdmin
}

// NewKafka creates a synchronous producer and a cluster admin for the given
// brokers, and makes sure the block-group and trace-group topics exist.
func NewKafka(conf *KafkaConfig) (*Kafka, error) {
	producer, err := sarama.NewSyncProducer(conf.Brokers, conf.SaramaConfig)
	if err != nil {
		logger.Error("Failed to create a new producer", "brokers", conf.Brokers, "err", err)
		return nil, err
	}

	admin, err := sarama.NewClusterAdmin(conf.Brokers, conf.SaramaConfig)
	if err != nil {
		logger.Error("Failed to create a new cluster admin", "brokers", conf.Brokers, "err", err)
		return nil, err
	}

	kafka := &Kafka{
		config:   conf,
		producer: producer,
		admin:    admin,
	}

	blockGroupTopic := conf.GetTopicName(EventBlockGroup)
	if err := kafka.setupTopic(blockGroupTopic); err != nil {
		return nil, err
	}

	traceGroupTopic := conf.GetTopicName(EventTraceGroup)
	if err := kafka.setupTopic(traceGroupTopic); err != nil {
		return nil, err
	}
	return kafka, nil
}
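
// Usage sketch (illustrative only, not part of the original file): wiring up a
// producer-side Kafka instance. The broker address is an assumption, and
// GetDefaultKafkaConfig is assumed to be the config constructor provided
// elsewhere in this package; this file itself only relies on the Brokers and
// SaramaConfig fields and the GetTopicName method of KafkaConfig.
//
//	conf := GetDefaultKafkaConfig()
//	conf.Brokers = []string{"localhost:9092"} // assumed broker address
//	kafka, err := NewKafka(conf)              // also creates the two topics if missing
//	if err != nil {
//		return err
//	}
//	defer kafka.Close()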

// setupTopic checks whether the topic already exists and creates it if it does not.
func (k *Kafka) setupTopic(topicName string) error {
	topics, err := k.ListTopics()
	if err != nil {
		logger.Error("Failed to list topics", "topicName", topicName, "err", err)
		return err
	}

	if detail, exist := topics[topicName]; exist {
		logger.Info("topic configuration", "topicName", topicName, "partition", detail.NumPartitions, "replicas", detail.ReplicationFactor)
		return nil
	}

	if err := k.CreateTopic(topicName); err != nil {
		if kerr, ok := err.(*sarama.TopicError); !ok || kerr.Err != sarama.ErrTopicAlreadyExists {
			logger.Error("Failed to create the topic", "topicName", topicName, "err", err)
			return err
		}
		logger.Warn("Skipped topic creation since the topic already exists", "topicName", topicName)
	}

	return nil
}

// Close shuts down the underlying producer and cluster admin.
func (k *Kafka) Close() {
	k.producer.Close()
	k.admin.Close()
}

func (k *Kafka) getTopicName(event string) string {
	return k.config.GetTopicName(event)
}

// CreateTopic creates the given topic with the configured numbers of partitions and replicas.
func (k *Kafka) CreateTopic(topic string) error {
	return k.admin.CreateTopic(topic, &sarama.TopicDetail{
		NumPartitions:     k.config.Partitions,
		ReplicationFactor: k.config.Replicas,
	}, false)
}

// DeleteTopic deletes the given topic.
func (k *Kafka) DeleteTopic(topic string) error {
	return k.admin.DeleteTopic(topic)
}

// ListTopics returns the topics known to the cluster admin along with their details.
func (k *Kafka) ListTopics() (map[string]sarama.TopicDetail, error) {
	return k.admin.ListTopics()
}
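
// Sketch of the topic-admin helpers above (illustrative only): checking for the
// block-group topic and creating it when absent, mirroring what setupTopic does.
// The kafka variable is assumed to come from NewKafka.
//
//	topic := kafka.getTopicName(EventBlockGroup)
//	topics, err := kafka.ListTopics()
//	if err != nil {
//		return err
//	}
//	if detail, ok := topics[topic]; ok {
//		logger.Info("topic exists", "topic", topic, "partitions", detail.NumPartitions, "replicas", detail.ReplicationFactor)
//	} else if err := kafka.CreateTopic(topic); err != nil {
//		return err
//	}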

// split divides data into segments of at most SegmentSizeBytes and returns the
// segments together with their count.
func (k *Kafka) split(data []byte) ([][]byte, int) {
	size := k.config.SegmentSizeBytes
	var segments [][]byte
	for len(data) > size {
		segments = append(segments, data[:size])
		data = data[size:]
	}
	segments = append(segments, data)
	return segments, len(segments)
}
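
// For example, with SegmentSizeBytes set to 1000, a 2500-byte JSON payload is
// split into three segments of 1000, 1000, and 500 bytes (totalSegments = 3).
// A payload no larger than SegmentSizeBytes always yields exactly one segment,
// even when it is empty.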

// makeProducerMessage builds a producer message for one segment. Every message
// carries the totalSegments and segmentIdx headers; v1.0 messages additionally
// carry the version and producerId headers.
func (k *Kafka) makeProducerMessage(topic, key string, segment []byte, segmentIdx, totalSegments uint64) *sarama.ProducerMessage {
	msg := &sarama.ProducerMessage{
		Topic: topic,
		Key:   sarama.StringEncoder(key),
		Headers: []sarama.RecordHeader{
			{
				Key:   []byte(KeyTotalSegments),
				Value: common.Int64ToByteBigEndian(totalSegments),
			},
			{
				Key:   []byte(KeySegmentIdx),
				Value: common.Int64ToByteBigEndian(segmentIdx),
			},
		},
		Value: sarama.ByteEncoder(segment),
	}

	if k.config.MsgVersion == MsgVersion1_0 {
		extraHeaders := []sarama.RecordHeader{
			{
				Key:   []byte(KeyVersion),
				Value: []byte(k.config.MsgVersion),
			},
			{
				Key:   []byte(KeyProducerId),
				Value: []byte(k.config.ProducerId),
			},
		}
		msg.Headers = append(msg.Headers, extraHeaders...)
	}
	return msg
}
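
// The resulting header layout matches the MsgHeader* index constants declared at
// the top of this file: a v1.0 message carries MsgHeaderLength (4) headers in the
// order totalSegments, segmentIdx, version, producerId, while a legacy message
// carries only the first LegacyMsgHeaderLength (2). A consumer-side sketch
// (illustrative only; it assumes Int64ToByteBigEndian writes an 8-byte big-endian
// value and uses encoding/binary to decode it):
//
//	headers := consumedMsg.Headers // []*sarama.RecordHeader of a consumed message
//	totalSegments := binary.BigEndian.Uint64(headers[MsgHeaderTotalSegments].Value)
//	segmentIdx := binary.BigEndian.Uint64(headers[MsgHeaderSegmentIdx].Value)
//	if len(headers) > LegacyMsgHeaderLength {
//		version := string(headers[MsgHeaderVersion].Value)
//		producerId := string(headers[MsgHeaderProducerId].Value)
//		_, _ = version, producerId
//	}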

// Publish JSON-encodes data, splits it into segments, and sends the segments to
// the given topic one by one. If data implements IKey, its Key() result is used
// as the Kafka message key, so all segments of one payload share the same key.
func (k *Kafka) Publish(topic string, data interface{}) error {
	dataBytes, err := json.Marshal(data)
	if err != nil {
		return err
	}
	key := ""
	if v, ok := data.(IKey); ok {
		key = v.Key()
	}
	segments, totalSegments := k.split(dataBytes)
	for idx, segment := range segments {
		msg := k.makeProducerMessage(topic, key, segment, uint64(idx), uint64(totalSegments))
		_, _, err = k.producer.SendMessage(msg)
		if err != nil {
			logger.Error("Failed to send the kafka message", "err", err, "segmentIdx", idx, "key", key)
			return err
		}
	}

	return err
}
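
// End-to-end usage sketch (illustrative only): publishing a block-group payload.
// The payload value here is an assumption for illustration; any JSON-marshalable
// value works, and implementing IKey is optional.
//
//	topic := kafka.getTopicName(EventBlockGroup)
//	payload := map[string]interface{}{"blockNumber": 1}
//	if err := kafka.Publish(topic, payload); err != nil {
//		logger.Error("Failed to publish the payload", "err", err)
//	}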