github.com/argoproj/argo-events@v1.9.1/eventbus/kafka/sensor/kafka_sensor.go

package kafka

import (
	"context"
	"encoding/json"
	"fmt"
	"strings"
	"sync"
	"time"

	"github.com/IBM/sarama"
	"github.com/Knetic/govaluate"
	eventbuscommon "github.com/argoproj/argo-events/eventbus/common"
	"github.com/argoproj/argo-events/eventbus/kafka/base"
	eventbusv1alpha1 "github.com/argoproj/argo-events/pkg/apis/eventbus/v1alpha1"
	sensorv1alpha1 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1"
	cloudevents "github.com/cloudevents/sdk-go/v2"
	"go.uber.org/zap"
)

type KafkaSensor struct {
	*base.Kafka
	*sync.Mutex
	sensor *sensorv1alpha1.Sensor

	// kafka details
	topics    *Topics
	client    sarama.Client
	consumer  sarama.ConsumerGroup
	hostname  string
	groupName string

	// trigger handlers
	// holds the state of all sensor triggers
	triggers Triggers

	// kafka handler
	// handles consuming from kafka, offsets, and transactions
	kafkaHandler *KafkaHandler
	connected    bool
}

func NewKafkaSensor(kafkaConfig *eventbusv1alpha1.KafkaBus, sensor *sensorv1alpha1.Sensor, hostname string, logger *zap.SugaredLogger) *KafkaSensor {
	topics := &Topics{
		event:   kafkaConfig.Topic,
		trigger: fmt.Sprintf("%s-%s-%s", kafkaConfig.Topic, sensor.Name, "trigger"),
		action:  fmt.Sprintf("%s-%s-%s", kafkaConfig.Topic, sensor.Name, "action"),
	}

	var groupName string
	if kafkaConfig.ConsumerGroup == nil || kafkaConfig.ConsumerGroup.GroupName == "" {
		groupName = fmt.Sprintf("%s-%s", sensor.Namespace, sensor.Name)
	} else {
		groupName = kafkaConfig.ConsumerGroup.GroupName
	}

	return &KafkaSensor{
		Kafka:     base.NewKafka(kafkaConfig, logger),
		Mutex:     &sync.Mutex{},
		sensor:    sensor,
		topics:    topics,
		hostname:  hostname,
		groupName: groupName,
		triggers:  Triggers{},
	}
}

type Topics struct {
	event   string
	trigger string
	action  string
}

func (t *Topics) List() []string {
	return []string{t.event, t.trigger, t.action}
}

type Triggers map[string]KafkaTriggerHandler

type TriggerWithDepName struct {
	KafkaTriggerHandler
	depName string
}

func (t Triggers) List(event *cloudevents.Event) []*TriggerWithDepName {
	triggers := []*TriggerWithDepName{}

	for _, trigger := range t {
		if depName, ok := trigger.DependsOn(event); ok {
			triggers = append(triggers, &TriggerWithDepName{trigger, depName})
		}
	}

	return triggers
}

func (t Triggers) Ready() bool {
	for _, trigger := range t {
		if !trigger.Ready() {
			return false
		}
	}
	return true
}
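
// How the three topics cooperate, as implemented by the Event, Trigger, and
// Action handlers below (a sketch; the topic names come from the Topics
// struct above):
//
//	event   -> Event():   fan each event out to the triggers that depend on it
//	trigger -> Trigger(): accumulate events per trigger until its dependency
//	                      expression is satisfied
//	action  -> Action():  invoke the trigger with the batch of events
//
// A trigger that depends on a single event ("one and done") skips the
// trigger topic and is produced straight to the action topic.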

func (s *KafkaSensor) Initialize() error {
	config, err := s.Config()
	if err != nil {
		return err
	}

	// sensor-specific config
	config.Producer.Transaction.ID = s.hostname

	client, err := sarama.NewClient(s.Brokers(), config)
	if err != nil {
		return err
	}

	consumer, err := sarama.NewConsumerGroupFromClient(s.groupName, client)
	if err != nil {
		return err
	}

	producer, err := sarama.NewAsyncProducerFromClient(client)
	if err != nil {
		return err
	}

	offsetManager, err := sarama.NewOffsetManagerFromClient(s.groupName, client)
	if err != nil {
		return err
	}

	// the producer is at risk of deadlocking if the Errors channel
	// isn't read, so drain it for the lifetime of the producer
	go func() {
		for err := range producer.Errors() {
			s.Logger.Errorw("Kafka producer error", zap.Error(err))
		}
	}()

	s.client = client
	s.consumer = consumer
	s.kafkaHandler = &KafkaHandler{
		Mutex:         &sync.Mutex{},
		Logger:        s.Logger,
		GroupName:     s.groupName,
		Producer:      producer,
		OffsetManager: offsetManager,
		TriggerTopic:  s.topics.trigger,
		Reset:         s.Reset,
		Handlers: map[string]func(*sarama.ConsumerMessage) ([]*sarama.ProducerMessage, int64, func()){
			s.topics.event:   s.Event,
			s.topics.trigger: s.Trigger,
			s.topics.action:  s.Action,
		},
	}

	return nil
}
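
// Connect returns the TriggerConnection for the given trigger, creating it on
// first use and starting the consume loop if it is not already running. A
// minimal usage sketch (the variable names and dependency expression are
// hypothetical, not part of this file):
//
//	s := NewKafkaSensor(kafkaBus, sensor, hostname, logger)
//	if err := s.Initialize(); err != nil {
//		return err
//	}
//	conn, err := s.Connect(ctx, "my-trigger", "dep-a && dep-b", deps, false)
//
// The hyphen escaping below exists because dependency names may contain "-",
// which govaluate would otherwise parse as subtraction.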
func (s *KafkaSensor) Connect(ctx context.Context, triggerName string, depExpression string, dependencies []eventbuscommon.Dependency, atLeastOnce bool) (eventbuscommon.TriggerConnection, error) {
	s.Lock()
	defer s.Unlock()

	// connect only if disconnected; if the connection is ever lost,
	// the connected boolean flips and the sensor listener will
	// attempt to reconnect by invoking this function again
	if !s.connected {
		go s.Listen(ctx)
		s.connected = true
	}

	if _, ok := s.triggers[triggerName]; !ok {
		expr, err := govaluate.NewEvaluableExpression(strings.ReplaceAll(depExpression, "-", "\\-"))
		if err != nil {
			return nil, err
		}

		depMap := map[string]eventbuscommon.Dependency{}
		for _, dep := range dependencies {
			depMap[base.EventKey(dep.EventSourceName, dep.EventName)] = dep
		}

		s.triggers[triggerName] = &KafkaTriggerConnection{
			KafkaConnection: base.NewKafkaConnection(s.Logger),
			sensorName:      s.sensor.Name,
			triggerName:     triggerName,
			depExpression:   expr,
			dependencies:    depMap,
			atLeastOnce:     atLeastOnce,
			close:           s.Close,
			isClosed:        s.IsClosed,
		}
	}

	return s.triggers[triggerName], nil
}

func (s *KafkaSensor) Listen(ctx context.Context) {
	defer s.Disconnect()

	for {
		if len(s.triggers) != len(s.sensor.Spec.Triggers) || !s.triggers.Ready() {
			s.Logger.Info("Not ready to consume, waiting...")
			time.Sleep(3 * time.Second)
			continue
		}

		s.Logger.Infow("Consuming", zap.Strings("topics", s.topics.List()), zap.String("group", s.groupName))

		if err := s.consumer.Consume(ctx, s.topics.List(), s.kafkaHandler); err != nil {
			// fail fast if topics do not exist
			if err == sarama.ErrUnknownTopicOrPartition {
				s.Logger.Fatalf(
					"Topics do not exist. Please ensure the topics '%s' have been created, or the kafka setting '%s' is set to true.",
					s.topics.List(),
					"auto.create.topics.enable",
				)
			}

			s.Logger.Errorw("Failed to consume", zap.Error(err))
			return
		}

		if err := ctx.Err(); err != nil {
			s.Logger.Errorw("Kafka error", zap.Error(err))
			return
		}
	}
}

func (s *KafkaSensor) Disconnect() {
	s.Lock()
	defer s.Unlock()

	s.connected = false
}

func (s *KafkaSensor) Close() error {
	s.Lock()
	defer s.Unlock()

	// protect against being called multiple times
	if s.IsClosed() {
		return nil
	}

	if err := s.consumer.Close(); err != nil {
		return err
	}

	if err := s.kafkaHandler.Close(); err != nil {
		return err
	}

	return s.client.Close()
}

func (s *KafkaSensor) IsClosed() bool {
	return !s.connected || s.client.Closed()
}

func (s *KafkaSensor) Event(msg *sarama.ConsumerMessage) ([]*sarama.ProducerMessage, int64, func()) {
	var event *cloudevents.Event
	if err := json.Unmarshal(msg.Value, &event); err != nil {
		s.Logger.Errorw("Failed to deserialize cloudevent, skipping", zap.Error(err))
		return nil, msg.Offset + 1, nil
	}

	messages := []*sarama.ProducerMessage{}
	for _, trigger := range s.triggers.List(event) {
		event, err := trigger.Transform(trigger.depName, event)
		if err != nil {
			s.Logger.Errorw("Failed to transform cloudevent, skipping", zap.Error(err))
			continue
		}

		if !trigger.Filter(trigger.depName, event) {
			s.Logger.Debug("Filter condition not satisfied, skipping")
			continue
		}

		// if the trigger only requires one message to be invoked we
		// can skip ahead to the action topic, otherwise produce to
		// the trigger topic
		var data any
		var topic string
		if trigger.OneAndDone() {
			data = []*cloudevents.Event{event}
			topic = s.topics.action
		} else {
			data = event
			topic = s.topics.trigger
		}

		value, err := json.Marshal(data)
		if err != nil {
			s.Logger.Errorw("Failed to serialize cloudevent, skipping", zap.Error(err))
			continue
		}

		messages = append(messages, &sarama.ProducerMessage{
			Topic: topic,
			Key:   sarama.StringEncoder(trigger.Name()),
			Value: sarama.ByteEncoder(value),
		})
	}

	return messages, msg.Offset + 1, nil
}
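
// The int64 each handler returns is the offset to mark for the partition,
// i.e. the next message to consume. Event and Action always advance past the
// current message (msg.Offset + 1), while Trigger may hold the offset back to
// the earliest event still pending in any trigger so that unfired triggers
// are replayed after a restart. A worked example, assuming trigger.Offset
// returns the smaller of its pending offset and the offset passed in:
//
//	trigger A holds an event at offset 10 on this partition
//	trigger B holds an event at offset 12
//	the current message is at offset 15
//
// Trigger returns 10 rather than 16, so offsets 10-15 are re-consumed if the
// sensor restarts before trigger A fires.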
func (s *KafkaSensor) Trigger(msg *sarama.ConsumerMessage) ([]*sarama.ProducerMessage, int64, func()) {
	var event *cloudevents.Event
	if err := json.Unmarshal(msg.Value, &event); err != nil {
		// do not return here, as we still need to call trigger.Offset
		// below to determine the current offset
		s.Logger.Errorw("Failed to deserialize cloudevent, skipping", zap.Error(err))
	}

	messages := []*sarama.ProducerMessage{}
	offset := msg.Offset + 1

	// update the trigger with the new event and add any resulting
	// action to the transaction messages
	if trigger, ok := s.triggers[string(msg.Key)]; ok && event != nil {
		func() {
			events, err := trigger.Update(event, msg.Partition, msg.Offset, msg.Timestamp)
			if err != nil {
				s.Logger.Errorw("Failed to update trigger, skipping", zap.Error(err))
				return
			}

			// no events, trigger not yet satisfied
			if events == nil {
				return
			}

			value, err := json.Marshal(events)
			if err != nil {
				s.Logger.Errorw("Failed to serialize cloudevents, skipping", zap.Error(err))
				return
			}

			messages = append(messages, &sarama.ProducerMessage{
				Topic: s.topics.action,
				Key:   sarama.StringEncoder(trigger.Name()),
				Value: sarama.ByteEncoder(value),
			})
		}()
	}

	// need to determine the smallest possible offset across all
	// triggers, as other triggers may have messages that land on
	// the same partition
	for _, trigger := range s.triggers {
		offset = trigger.Offset(msg.Partition, offset)
	}

	return messages, offset, nil
}

func (s *KafkaSensor) Action(msg *sarama.ConsumerMessage) ([]*sarama.ProducerMessage, int64, func()) {
	var events []*cloudevents.Event
	if err := json.Unmarshal(msg.Value, &events); err != nil {
		s.Logger.Errorw("Failed to deserialize cloudevents, skipping", zap.Error(err))
		return nil, msg.Offset + 1, nil
	}

	var f func()
	if trigger, ok := s.triggers[string(msg.Key)]; ok {
		f = trigger.Action(events)
	}

	return nil, msg.Offset + 1, f
}

func (s *KafkaSensor) Reset() error {
	for _, trigger := range s.triggers {
		trigger.Reset()
	}

	return nil
}
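
// Each handler above returns (messages, offset, fn): messages to produce, the
// offset to commit, and an optional callback to run. Presumably the
// KafkaHandler wired up in Initialize produces the messages and commits the
// offset within a single Kafka transaction (Initialize sets
// config.Producer.Transaction.ID, which enables sarama's transactional
// producer), and runs the callback returned by Action to actually fire the
// trigger; see kafka_handler.go for the details.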