github.com/klaytn/klaytn@v1.10.2/datasync/chaindatafetcher/kafka/config.go (about)

     1  // Copyright 2020 The klaytn Authors
     2  // This file is part of the klaytn library.
     3  //
     4  // The klaytn library is free software: you can redistribute it and/or modify
     5  // it under the terms of the GNU Lesser General Public License as published by
     6  // the Free Software Foundation, either version 3 of the License, or
     7  // (at your option) any later version.
     8  //
     9  // The klaytn library is distributed in the hope that it will be useful,
    10  // but WITHOUT ANY WARRANTY; without even the implied warranty of
    11  // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    12  // GNU Lesser General Public License for more details.
    13  //
    14  // You should have received a copy of the GNU Lesser General Public License
    15  // along with the klaytn library. If not, see <http://www.gnu.org/licenses/>.
    16  
    17  package kafka
    18  
    19  import (
    20  	"fmt"
    21  	"time"
    22  
    23  	"github.com/Shopify/sarama"
    24  	"github.com/klaytn/klaytn/common"
    25  	"github.com/klaytn/klaytn/common/hexutil"
    26  )
    27  
const (
	// EventBlockGroup and EventTraceGroup are the event categories handled by
	// the chaindatafetcher; they are embedded into Kafka topic names by
	// KafkaConfig.GetTopicName.
	EventBlockGroup = "blockgroup"
	EventTraceGroup = "tracegroup"
)
    32  
const (
	// Fixed components of the Kafka topic name, combined as
	// <env>.<project>.<service>.<resource>.<event>.<version> in GetTopicName.
	topicProjectName = "klaytn"
	topicServiceName = "chaindatafetcher"
	topicVersion     = "v1"
)
    38  
const (
	// Default values applied by GetDefaultKafkaConfig.
	DefaultReplicas             = 1
	DefaultPartitions           = 1
	DefaultTopicEnvironmentName = "local"
	DefaultTopicResourceName    = "en-0"
	DefaultMaxMessageBytes      = 1000000 // sarama producer max message size (~1 MB)
	DefaultRequiredAcks         = 1       // wait for the leader broker's ack only
	DefaultSegmentSizeBytes     = 1000000 // 1 MB
	DefaultMaxMessageNumber     = 100     // max number of messages in buffer
	DefaultKafkaMessageVersion  = MsgVersion1_0
	DefaultProducerIdPrefix     = "producer-"
	DefaultExpirationTime       = time.Duration(0) // zero means no expiration
)
    52  
var (
	// DefaultSetup and DefaultCleanup are no-op consumer-group lifecycle hooks
	// used when the caller does not supply its own Setup/Cleanup callbacks.
	DefaultSetup   = func(s sarama.ConsumerGroupSession) error { return nil }
	DefaultCleanup = func(s sarama.ConsumerGroupSession) error { return nil }
)
    57  
// KafkaConfig holds every setting needed to publish and consume
// chaindatafetcher events through Kafka.
type KafkaConfig struct {
	SaramaConfig         *sarama.Config `json:"-"` // kafka client configurations.
	MsgVersion           string         // MsgVersion is the version of Kafka message.
	ProducerId           string         // ProducerId is for the identification of the message publisher.
	Brokers              []string       // Brokers is a list of broker URLs.
	TopicEnvironmentName string         // TopicEnvironmentName is the first component of generated topic names (e.g. "local").
	TopicResourceName    string         // TopicResourceName identifies the node/resource in generated topic names (e.g. "en-0").
	Partitions           int32 // Partitions is the number of partitions of a topic.
	Replicas             int16 // Replicas is a replication factor of kafka settings. This is the number of the replicated partitions in the kafka cluster.
	SegmentSizeBytes     int   // SegmentSizeBytes is the size of kafka message segment
	// (number of partitions) * (average size of segments) * buffer size should not be greater than memory size.
	// default max number of messages is 100
	MaxMessageNumber int // MaxMessageNumber is the maximum number of consumer messages.

	ExpirationTime time.Duration // ExpirationTime bounds message validity; zero disables expiration.
	ErrCallback    func(string) error // ErrCallback is invoked with an error description; may be nil (see GetDefaultKafkaConfig).
	Setup          func(s sarama.ConsumerGroupSession) error // Setup runs at the start of a consumer-group session.
	Cleanup        func(s sarama.ConsumerGroupSession) error // Cleanup runs at the end of a consumer-group session.
}
    77  
    78  func GetDefaultKafkaConfig() *KafkaConfig {
    79  	// TODO-ChainDataFetcher add more configuration if necessary
    80  	config := sarama.NewConfig()
    81  	// The following configurations should be true
    82  	config.Producer.Return.Errors = true
    83  	config.Producer.Return.Successes = true
    84  	config.Version = sarama.V2_4_0_0
    85  	config.Producer.MaxMessageBytes = DefaultMaxMessageBytes
    86  	config.Producer.RequiredAcks = sarama.RequiredAcks(DefaultRequiredAcks)
    87  	return &KafkaConfig{
    88  		SaramaConfig:         config,
    89  		TopicEnvironmentName: DefaultTopicEnvironmentName,
    90  		TopicResourceName:    DefaultTopicResourceName,
    91  		Partitions:           DefaultPartitions,
    92  		Replicas:             DefaultReplicas,
    93  		SegmentSizeBytes:     DefaultSegmentSizeBytes,
    94  		MaxMessageNumber:     DefaultMaxMessageNumber,
    95  		MsgVersion:           DefaultKafkaMessageVersion,
    96  		ProducerId:           GetDefaultProducerId(),
    97  		ExpirationTime:       DefaultExpirationTime,
    98  		Setup:                DefaultSetup,
    99  		Cleanup:              DefaultCleanup,
   100  		ErrCallback:          nil,
   101  	}
   102  }
   103  
   104  func GetDefaultProducerId() string {
   105  	rb := common.MakeRandomBytes(8)
   106  	randomString := hexutil.Encode(rb)
   107  	return DefaultProducerIdPrefix + randomString[2:]
   108  }
   109  
   110  func (c *KafkaConfig) GetTopicName(event string) string {
   111  	return fmt.Sprintf("%v.%v.%v.%v.%v.%v", c.TopicEnvironmentName, topicProjectName, topicServiceName, c.TopicResourceName, event, topicVersion)
   112  }
   113  
   114  func (c *KafkaConfig) String() string {
   115  	return fmt.Sprintf("brokers: %v, topicEnvironment: %v, topicResourceName: %v, partitions: %v, replicas: %v, maxMessageBytes: %v, requiredAcks: %v, segmentSize: %v, msgVersion: %v, producerId: %v",
   116  		c.Brokers, c.TopicEnvironmentName, c.TopicResourceName, c.Partitions, c.Replicas, c.SaramaConfig.Producer.MaxMessageBytes, c.SaramaConfig.Producer.RequiredAcks, c.SegmentSizeBytes, c.MsgVersion, c.ProducerId)
   117  }