storj.io/minio@v0.0.0-20230509071714-0cbc90f649b1/pkg/event/target/kafka.go

/*
 * MinIO Cloud Storage, (C) 2018 MinIO, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package target

import (
	"context"
	"crypto/tls"
	"crypto/x509"
	"encoding/json"
	"errors"
	"net"
	"net/url"
	"os"
	"path/filepath"

	"storj.io/minio/pkg/event"
	xnet "storj.io/minio/pkg/net"

	sarama "github.com/Shopify/sarama"
	saramatls "github.com/Shopify/sarama/tools/tls"
)

// Kafka input constants
const (
	KafkaBrokers       = "brokers"
	KafkaTopic         = "topic"
	KafkaQueueDir      = "queue_dir"
	KafkaQueueLimit    = "queue_limit"
	KafkaTLS           = "tls"
	KafkaTLSSkipVerify = "tls_skip_verify"
	KafkaTLSClientAuth = "tls_client_auth"
	KafkaSASL          = "sasl"
	KafkaSASLUsername  = "sasl_username"
	KafkaSASLPassword  = "sasl_password"
	KafkaSASLMechanism = "sasl_mechanism"
	KafkaClientTLSCert = "client_tls_cert"
	KafkaClientTLSKey  = "client_tls_key"
	KafkaVersion       = "version"

	EnvKafkaEnable        = "MINIO_NOTIFY_KAFKA_ENABLE"
	EnvKafkaBrokers       = "MINIO_NOTIFY_KAFKA_BROKERS"
	EnvKafkaTopic         = "MINIO_NOTIFY_KAFKA_TOPIC"
	EnvKafkaQueueDir      = "MINIO_NOTIFY_KAFKA_QUEUE_DIR"
	EnvKafkaQueueLimit    = "MINIO_NOTIFY_KAFKA_QUEUE_LIMIT"
	EnvKafkaTLS           = "MINIO_NOTIFY_KAFKA_TLS"
	EnvKafkaTLSSkipVerify = "MINIO_NOTIFY_KAFKA_TLS_SKIP_VERIFY"
	EnvKafkaTLSClientAuth = "MINIO_NOTIFY_KAFKA_TLS_CLIENT_AUTH"
	EnvKafkaSASLEnable    = "MINIO_NOTIFY_KAFKA_SASL"
	EnvKafkaSASLUsername  = "MINIO_NOTIFY_KAFKA_SASL_USERNAME"
	EnvKafkaSASLPassword  = "MINIO_NOTIFY_KAFKA_SASL_PASSWORD"
	EnvKafkaSASLMechanism = "MINIO_NOTIFY_KAFKA_SASL_MECHANISM"
	EnvKafkaClientTLSCert = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_CERT"
	EnvKafkaClientTLSKey  = "MINIO_NOTIFY_KAFKA_CLIENT_TLS_KEY"
	EnvKafkaVersion       = "MINIO_NOTIFY_KAFKA_VERSION"
)
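
// As an illustration (the values here are hypothetical), a Kafka target can
// be enabled and configured entirely through the environment variables above:
//
//	MINIO_NOTIFY_KAFKA_ENABLE=on
//	MINIO_NOTIFY_KAFKA_BROKERS=localhost:9092
//	MINIO_NOTIFY_KAFKA_TOPIC=bucketevents
//	MINIO_NOTIFY_KAFKA_QUEUE_DIR=/var/minio/events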

// KafkaArgs - Kafka target arguments.
type KafkaArgs struct {
	Enable     bool        `json:"enable"`
	Brokers    []xnet.Host `json:"brokers"`
	Topic      string      `json:"topic"`
	QueueDir   string      `json:"queueDir"`
	QueueLimit uint64      `json:"queueLimit"`
	Version    string      `json:"version"`
	TLS        struct {
		Enable        bool               `json:"enable"`
		RootCAs       *x509.CertPool     `json:"-"`
		SkipVerify    bool               `json:"skipVerify"`
		ClientAuth    tls.ClientAuthType `json:"clientAuth"`
		ClientTLSCert string             `json:"clientTLSCert"`
		ClientTLSKey  string             `json:"clientTLSKey"`
	} `json:"tls"`
	SASL struct {
		Enable    bool   `json:"enable"`
		User      string `json:"username"`
		Password  string `json:"password"`
		Mechanism string `json:"mechanism"`
	} `json:"sasl"`
}
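
// Per the json tags above, a KafkaArgs value unmarshals from JSON shaped
// roughly as follows (a minimal sketch; the broker, topic, and path values
// are hypothetical, and brokers are assumed to encode as "host:port" strings):
//
//	{
//	  "enable": true,
//	  "brokers": ["localhost:9092"],
//	  "topic": "bucketevents",
//	  "queueDir": "/var/minio/events",
//	  "queueLimit": 10000,
//	  "tls": {"enable": false, "skipVerify": false},
//	  "sasl": {"enable": false, "username": "", "password": "", "mechanism": "plain"}
//	}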

// Validate KafkaArgs fields
func (k KafkaArgs) Validate() error {
	if !k.Enable {
		return nil
	}
	if len(k.Brokers) == 0 {
		return errors.New("no broker address found")
	}
	for _, b := range k.Brokers {
		if _, err := xnet.ParseHost(b.String()); err != nil {
			return err
		}
	}
	if k.QueueDir != "" {
		if !filepath.IsAbs(k.QueueDir) {
			return errors.New("queueDir path should be absolute")
		}
	}
	if k.Version != "" {
		if _, err := sarama.ParseKafkaVersion(k.Version); err != nil {
			return err
		}
	}
	return nil
}
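
// For example (a hypothetical misconfiguration), an enabled target without
// any brokers fails validation:
//
//	args := KafkaArgs{Enable: true, Topic: "bucketevents"}
//	err := args.Validate() // returns "no broker address found"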

// KafkaTarget - Kafka target.
type KafkaTarget struct {
	id         event.TargetID
	args       KafkaArgs
	producer   sarama.SyncProducer
	config     *sarama.Config
	store      Store
	loggerOnce func(ctx context.Context, err error, id interface{}, errKind ...interface{})
}

// ID - returns target ID.
func (target *KafkaTarget) ID() event.TargetID {
	return target.id
}

// HasQueueStore - checks if a queueStore has been configured for the target.
func (target *KafkaTarget) HasQueueStore() bool {
	return target.store != nil
}

// IsActive - returns true if the target is up and active.
func (target *KafkaTarget) IsActive() (bool, error) {
	if !target.args.pingBrokers() {
		return false, errNotConnected
	}
	return true, nil
}

// Save - saves the events to the store, which will be replayed when the Kafka connection is active.
func (target *KafkaTarget) Save(eventData event.Event) error {
	if target.store != nil {
		return target.store.Put(eventData)
	}
	_, err := target.IsActive()
	if err != nil {
		return err
	}
	return target.send(eventData)
}

// send - sends an event to Kafka.
func (target *KafkaTarget) send(eventData event.Event) error {
	if target.producer == nil {
		return errNotConnected
	}
	objectName, err := url.QueryUnescape(eventData.S3.Object.Key)
	if err != nil {
		return err
	}
	key := eventData.S3.Bucket.Name + "/" + objectName

	data, err := json.Marshal(event.Log{EventName: eventData.EventName, Key: key, Records: []event.Event{eventData}})
	if err != nil {
		return err
	}

	msg := sarama.ProducerMessage{
		Topic: target.args.Topic,
		Key:   sarama.StringEncoder(key),
		Value: sarama.ByteEncoder(data),
	}

	_, _, err = target.producer.SendMessage(&msg)

	return err
}
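
// For instance, an s3:ObjectCreated:Put event for object "photo.jpg" in
// bucket "images" (names here are hypothetical) is produced with the key
// "images/photo.jpg" and a value holding the event.Log JSON. Assuming
// event.Log marshals its fields under their Go names, the value looks like:
//
//	{"EventName":"s3:ObjectCreated:Put","Key":"images/photo.jpg","Records":[...]}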

// Send - reads an event from the store and sends it to Kafka.
func (target *KafkaTarget) Send(eventKey string) error {
	var err error
	_, err = target.IsActive()
	if err != nil {
		return err
	}

	if target.producer == nil {
		brokers := []string{}
		for _, broker := range target.args.Brokers {
			brokers = append(brokers, broker.String())
		}
		target.producer, err = sarama.NewSyncProducer(brokers, target.config)
		if err != nil {
			if err != sarama.ErrOutOfBrokers {
				return err
			}
			return errNotConnected
		}
	}

	eventData, eErr := target.store.Get(eventKey)
	if eErr != nil {
		// The last event key in a successful batch is sent on the channel at most once by replayEvents().
		// Such an event will no longer exist in the store, as it would have already been sent successfully.
		if os.IsNotExist(eErr) {
			return nil
		}
		return eErr
	}

	err = target.send(eventData)
	if err != nil {
		// Sarama opens the circuit breaker after 3 consecutive connection failures.
		if err == sarama.ErrLeaderNotAvailable || err.Error() == "circuit breaker is open" {
			return errNotConnected
		}
		return err
	}

	// Delete the event from the store.
	return target.store.Del(eventKey)
}

// Close - closes the underlying Kafka connection.
func (target *KafkaTarget) Close() error {
	if target.producer != nil {
		return target.producer.Close()
	}
	return nil
}

// pingBrokers checks whether at least one broker in the cluster is reachable.
// Note that this only verifies TCP connectivity, not Kafka protocol health.
func (k KafkaArgs) pingBrokers() bool {
	for _, broker := range k.Brokers {
		conn, dErr := net.Dial("tcp", broker.String())
		if dErr == nil {
			// Close the probe connection to avoid leaking it.
			conn.Close()
			return true
		}
	}
	return false
}

// NewKafkaTarget - creates a new Kafka target with auth credentials.
func NewKafkaTarget(id string, args KafkaArgs, doneCh <-chan struct{}, loggerOnce func(ctx context.Context, err error, id interface{}, kind ...interface{}), test bool) (*KafkaTarget, error) {
	config := sarama.NewConfig()

	target := &KafkaTarget{
		id:         event.TargetID{ID: id, Name: "kafka"},
		args:       args,
		loggerOnce: loggerOnce,
	}

	if args.Version != "" {
		kafkaVersion, err := sarama.ParseKafkaVersion(args.Version)
		if err != nil {
			target.loggerOnce(context.Background(), err, target.ID())
			return target, err
		}
		config.Version = kafkaVersion
	}

	config.Net.SASL.User = args.SASL.User
	config.Net.SASL.Password = args.SASL.Password
	if args.SASL.Mechanism == "sha512" {
		config.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: KafkaSHA512} }
		config.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA512)
	} else if args.SASL.Mechanism == "sha256" {
		config.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: KafkaSHA256} }
		config.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA256)
	} else {
		// Default to PLAIN.
		config.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypePlaintext)
	}
	config.Net.SASL.Enable = args.SASL.Enable

	tlsConfig, err := saramatls.NewConfig(args.TLS.ClientTLSCert, args.TLS.ClientTLSKey)
	if err != nil {
		target.loggerOnce(context.Background(), err, target.ID())
		return target, err
	}

	config.Net.TLS.Enable = args.TLS.Enable
	config.Net.TLS.Config = tlsConfig
	config.Net.TLS.Config.InsecureSkipVerify = args.TLS.SkipVerify
	config.Net.TLS.Config.ClientAuth = args.TLS.ClientAuth
	config.Net.TLS.Config.RootCAs = args.TLS.RootCAs

	config.Producer.RequiredAcks = sarama.WaitForAll
	config.Producer.Retry.Max = 10
	config.Producer.Return.Successes = true

	target.config = config

	brokers := []string{}
	for _, broker := range args.Brokers {
		brokers = append(brokers, broker.String())
	}

	var store Store

	if args.QueueDir != "" {
		queueDir := filepath.Join(args.QueueDir, storePrefix+"-kafka-"+id)
		store = NewQueueStore(queueDir, args.QueueLimit)
		if oErr := store.Open(); oErr != nil {
			target.loggerOnce(context.Background(), oErr, target.ID())
			return target, oErr
		}
		target.store = store
	}

	producer, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		if store == nil || err != sarama.ErrOutOfBrokers {
			target.loggerOnce(context.Background(), err, target.ID())
			return target, err
		}
	}
	target.producer = producer

	if target.store != nil && !test {
		// Replay events persisted in the store.
		eventKeyCh := replayEvents(target.store, doneCh, target.loggerOnce, target.ID())
		// Start a goroutine that sends the replayed events to Kafka.
		go sendEvents(target, eventKeyCh, doneCh, target.loggerOnce)
	}

	return target, nil
}
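
// A minimal construction sketch (the broker address, target ID, and logger
// below are hypothetical, error handling is elided, and xnet.ParseHost is
// assumed to return a *xnet.Host):
//
//	host, _ := xnet.ParseHost("localhost:9092")
//	args := KafkaArgs{
//		Enable:  true,
//		Brokers: []xnet.Host{*host},
//		Topic:   "bucketevents",
//	}
//	doneCh := make(chan struct{})
//	target, _ := NewKafkaTarget("1", args, doneCh, loggerOnce, false)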