github.com/argoproj/argo-events@v1.9.1/sensors/triggers/kafka/kafka.go (about) 1 /* 2 Copyright 2020 BlackRock, Inc. 3 4 Licensed under the Apache License, Version 2.0 (the "License"); 5 you may not use this file except in compliance with the License. 6 You may obtain a copy of the License at 7 8 http://www.apache.org/licenses/LICENSE-2.0 9 10 Unless required by applicable law or agreed to in writing, software 11 distributed under the License is distributed on an "AS IS" BASIS, 12 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 See the License for the specific language governing permissions and 14 limitations under the License. 15 */ 16 package kafka 17 18 import ( 19 "context" 20 "encoding/binary" 21 "encoding/json" 22 "fmt" 23 "strings" 24 "time" 25 26 "github.com/hamba/avro" 27 "github.com/riferrei/srclient" 28 29 "github.com/IBM/sarama" 30 "go.uber.org/zap" 31 32 "github.com/argoproj/argo-events/common" 33 "github.com/argoproj/argo-events/common/logging" 34 apicommon "github.com/argoproj/argo-events/pkg/apis/common" 35 "github.com/argoproj/argo-events/pkg/apis/sensor/v1alpha1" 36 "github.com/argoproj/argo-events/sensors/triggers" 37 ) 38 39 // KafkaTrigger describes the trigger to place messages on Kafka topic using a producer 40 type KafkaTrigger struct { 41 // Sensor object 42 Sensor *v1alpha1.Sensor 43 // Trigger reference 44 Trigger *v1alpha1.Trigger 45 // Kafka async producer 46 Producer sarama.AsyncProducer 47 // Logger to log stuff 48 Logger *zap.SugaredLogger 49 // Avro schema of message 50 schema *srclient.Schema 51 } 52 53 // NewKafkaTrigger returns a new kafka trigger context. 54 func NewKafkaTrigger(sensor *v1alpha1.Sensor, trigger *v1alpha1.Trigger, kafkaProducers common.StringKeyedMap[sarama.AsyncProducer], logger *zap.SugaredLogger) (*KafkaTrigger, error) { 55 kafkatrigger := trigger.Template.Kafka 56 triggerLogger := logger.With(logging.LabelTriggerType, apicommon.KafkaTrigger) 57 58 producer, ok := kafkaProducers.Load(trigger.Template.Name) 59 var schema *srclient.Schema 60 61 if !ok { 62 var err error 63 config := sarama.NewConfig() 64 65 if kafkatrigger.Version == "" { 66 config.Version = sarama.V1_0_0_0 67 } else { 68 version, err := sarama.ParseKafkaVersion(kafkatrigger.Version) 69 if err != nil { 70 return nil, fmt.Errorf("failed to parse Kafka version, %w", err) 71 } 72 config.Version = version 73 } 74 75 if kafkatrigger.SASL != nil { 76 config.Net.SASL.Enable = true 77 config.Net.SASL.Mechanism = sarama.SASLMechanism(kafkatrigger.SASL.GetMechanism()) 78 if config.Net.SASL.Mechanism == "SCRAM-SHA-512" { 79 config.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &common.XDGSCRAMClient{HashGeneratorFcn: common.SHA512New} } 80 } else if config.Net.SASL.Mechanism == "SCRAM-SHA-256" { 81 config.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &common.XDGSCRAMClient{HashGeneratorFcn: common.SHA256New} } 82 } 83 84 user, err := common.GetSecretFromVolume(kafkatrigger.SASL.UserSecret) 85 if err != nil { 86 return nil, fmt.Errorf("error getting user value from secret, %w", err) 87 } 88 config.Net.SASL.User = user 89 90 password, err := common.GetSecretFromVolume(kafkatrigger.SASL.PasswordSecret) 91 if err != nil { 92 return nil, fmt.Errorf("error getting password value from secret, %w", err) 93 } 94 config.Net.SASL.Password = password 95 } 96 97 if kafkatrigger.TLS != nil { 98 tlsConfig, err := common.GetTLSConfig(kafkatrigger.TLS) 99 if err != nil { 100 return nil, fmt.Errorf("failed to get the tls configuration, %w", err) 101 } 102 tlsConfig.InsecureSkipVerify = true 103 config.Net.TLS.Config = tlsConfig 104 config.Net.TLS.Enable = true 105 } 106 107 if kafkatrigger.Compress { 108 config.Producer.Compression = sarama.CompressionSnappy 109 } 110 111 ff := 500 112 if kafkatrigger.FlushFrequency != 0 { 113 ff = int(kafkatrigger.FlushFrequency) 114 } 115 config.Producer.Flush.Frequency = time.Duration(ff) 116 117 ra := sarama.WaitForAll 118 if kafkatrigger.RequiredAcks != 0 { 119 ra = sarama.RequiredAcks(kafkatrigger.RequiredAcks) 120 } 121 config.Producer.RequiredAcks = ra 122 123 urls := strings.Split(kafkatrigger.URL, ",") 124 producer, err = sarama.NewAsyncProducer(urls, config) 125 if err != nil { 126 return nil, err 127 } 128 129 // must read from the Errors() channel or the async producer will deadlock. 130 go func() { 131 for err := range producer.Errors() { 132 triggerLogger.Errorf("Error happened in kafka producer", err) 133 } 134 }() 135 136 kafkaProducers.Store(trigger.Template.Name, producer) 137 } 138 139 if kafkatrigger.SchemaRegistry != nil { 140 var err error 141 schema, err = getSchemaFromRegistry(kafkatrigger.SchemaRegistry) 142 if err != nil { 143 return nil, err 144 } 145 } 146 147 return &KafkaTrigger{ 148 Sensor: sensor, 149 Trigger: trigger, 150 Producer: producer, 151 Logger: triggerLogger, 152 schema: schema, 153 }, nil 154 } 155 156 // GetTriggerType returns the type of the trigger 157 func (t *KafkaTrigger) GetTriggerType() apicommon.TriggerType { 158 return apicommon.KafkaTrigger 159 } 160 161 // FetchResource fetches the trigger. As the Kafka trigger is simply a Kafka producer, there 162 // is no need to fetch any resource from external source 163 func (t *KafkaTrigger) FetchResource(ctx context.Context) (interface{}, error) { 164 return t.Trigger.Template.Kafka, nil 165 } 166 167 // ApplyResourceParameters applies parameters to the trigger resource 168 func (t *KafkaTrigger) ApplyResourceParameters(events map[string]*v1alpha1.Event, resource interface{}) (interface{}, error) { 169 fetchedResource, ok := resource.(*v1alpha1.KafkaTrigger) 170 if !ok { 171 return nil, fmt.Errorf("failed to interpret the fetched trigger resource") 172 } 173 174 resourceBytes, err := json.Marshal(fetchedResource) 175 if err != nil { 176 return nil, fmt.Errorf("failed to marshal the kafka trigger resource, %w", err) 177 } 178 parameters := fetchedResource.Parameters 179 if parameters != nil { 180 updatedResourceBytes, err := triggers.ApplyParams(resourceBytes, parameters, events) 181 if err != nil { 182 return nil, err 183 } 184 var ht *v1alpha1.KafkaTrigger 185 if err := json.Unmarshal(updatedResourceBytes, &ht); err != nil { 186 return nil, fmt.Errorf("failed to unmarshal the updated kafka trigger resource after applying resource parameters. %w", err) 187 } 188 return ht, nil 189 } 190 return resource, nil 191 } 192 193 // Execute executes the trigger 194 func (t *KafkaTrigger) Execute(ctx context.Context, events map[string]*v1alpha1.Event, resource interface{}) (interface{}, error) { 195 trigger, ok := resource.(*v1alpha1.KafkaTrigger) 196 if !ok { 197 return nil, fmt.Errorf("failed to interpret the trigger resource") 198 } 199 200 if trigger.Payload == nil { 201 return nil, fmt.Errorf("payload parameters are not specified") 202 } 203 204 payload, err := triggers.ConstructPayload(events, trigger.Payload) 205 if err != nil { 206 return nil, err 207 } 208 209 // Producer with avro schema 210 if t.schema != nil { 211 payload, err = avroParser(t.schema.Schema(), t.schema.ID(), payload) 212 if err != nil { 213 return nil, err 214 } 215 } 216 217 msg := &sarama.ProducerMessage{ 218 Topic: trigger.Topic, 219 Value: sarama.ByteEncoder(payload), 220 Timestamp: time.Now().UTC(), 221 } 222 223 if trigger.PartitioningKey != nil { 224 msg.Key = sarama.StringEncoder(*trigger.PartitioningKey) 225 } 226 227 t.Producer.Input() <- msg 228 229 t.Logger.Infow("successfully produced a message", zap.Any("topic", trigger.Topic)) 230 231 return nil, nil 232 } 233 234 // ApplyPolicy applies policy on the trigger 235 func (t *KafkaTrigger) ApplyPolicy(ctx context.Context, resource interface{}) error { 236 return nil 237 } 238 239 func avroParser(schema string, schemaID int, payload []byte) ([]byte, error) { 240 var recordValue []byte 241 var payloadNative map[string]interface{} 242 243 schemaAvro, err := avro.Parse(schema) 244 if err != nil { 245 return nil, err 246 } 247 248 err = json.Unmarshal(payload, &payloadNative) 249 if err != nil { 250 return nil, err 251 } 252 avroNative, err := avro.Marshal(schemaAvro, payloadNative) 253 if err != nil { 254 return nil, err 255 } 256 257 schemaIDBytes := make([]byte, 4) 258 binary.BigEndian.PutUint32(schemaIDBytes, uint32(schemaID)) 259 recordValue = append(recordValue, byte(0)) 260 recordValue = append(recordValue, schemaIDBytes...) 261 recordValue = append(recordValue, avroNative...) 262 263 return recordValue, nil 264 } 265 266 // getSchemaFromRegistry returns a schema from registry. 267 func getSchemaFromRegistry(sr *apicommon.SchemaRegistryConfig) (*srclient.Schema, error) { 268 schemaRegistryClient := srclient.CreateSchemaRegistryClient(sr.URL) 269 if sr.Auth.Username != nil && sr.Auth.Password != nil { 270 user, _ := common.GetSecretFromVolume(sr.Auth.Username) 271 password, _ := common.GetSecretFromVolume(sr.Auth.Password) 272 schemaRegistryClient.SetCredentials(user, password) 273 } 274 schema, err := schemaRegistryClient.GetSchema(int(sr.SchemaID)) 275 if err != nil { 276 return nil, fmt.Errorf("error getting the schema with id '%d' %s", sr.SchemaID, err) 277 } 278 return schema, nil 279 }