github.com/mdaxf/iac@v0.0.0-20240519030858-58a061660378/integration/kafka/kafkaconsumer.go

package kafka

import (
	"database/sql"
	"encoding/json"
	"fmt"
	"os"
	"os/signal"
	"syscall"
	"time"

	"github.com/IBM/sarama"
	// cluster "github.com/bsm/sarama-cluster"
	"github.com/google/uuid"
	"github.com/mdaxf/iac/com"
	"github.com/mdaxf/iac/documents"
	"github.com/mdaxf/iac/framework/queue"
	"github.com/mdaxf/iac/logger"
	"github.com/mdaxf/iac-signalr/signalr"
)

// KafkasConfig is the top-level configuration: a list of Kafka servers and
// the API key used when calling back into the application server.
type KafkasConfig struct {
	Kafkas []KafkaConfig `json:"kafkas"`
	ApiKey string        `json:"apikey"`
}

// KafkaConfig describes a single Kafka server and the topics to consume from it.
type KafkaConfig struct {
	Server string       `json:"server"`
	Topics []KafkaTopic `json:"topics"`
}

// KafkaTopic maps a topic to the handler (trancode) that processes its
// messages. Type "local" pushes messages onto the internal queue; any other
// value forwards them to the application server's web service.
type KafkaTopic struct {
	Topic   string `json:"topic"`
	Handler string `json:"handler"`
	Mode    string `json:"mode"`
	Type    string `json:"type"`
}

type KafkaConsumer struct {
	Config        KafkaConfig
	Queue         *queue.MessageQueue
	iLog          logger.Log
	Consumer      sarama.Consumer
	DocDBconn     *documents.DocDB
	DB            *sql.DB
	SignalRClient signalr.Client
	AppServer     string
	ApiKey        string
}

// NewKafkaConsumer creates a consumer with its own message queue and
// immediately connects to the configured Kafka server.
func NewKafkaConsumer(config KafkaConfig) *KafkaConsumer {
	iLog := logger.Log{ModuleName: logger.Framework, User: "System", ControllerName: "KafkaConsumer"}

	iLog.Debug(fmt.Sprintf("Create Kafkaconsumer with configuration: %s", logger.ConvertJson(config)))

	uuid := uuid.New().String()
	q := queue.NewMessageQueue(uuid, "Kafkaconsumer")

	Kafkaconsumer := &KafkaConsumer{
		Config: config,
		Queue:  q,
		iLog:   iLog,
	}

	iLog.Debug(fmt.Sprintf("Create Kafkaconsumer: %s", logger.ConvertJson(Kafkaconsumer)))
	Kafkaconsumer.BuildKafkaConsumer()
	return Kafkaconsumer
}

// NewKafkaConsumerExternal creates a consumer that shares externally managed
// document-database, SQL, and SignalR connections. Unlike NewKafkaConsumer, it
// does not connect to Kafka; the caller is expected to invoke BuildKafkaConsumer.
func NewKafkaConsumerExternal(config KafkaConfig, docDBconn *documents.DocDB, db *sql.DB, signalRClient signalr.Client) *KafkaConsumer {
	iLog := logger.Log{ModuleName: logger.Framework, User: "System", ControllerName: "KafkaConsumer"}

	iLog.Debug(fmt.Sprintf("Create Kafkaconsumer with configuration: %s", logger.ConvertJson(config)))
	uuid := uuid.New().String()
	q := queue.NewMessageQueue(uuid, "Kafkaconsumer")

	Kafkaconsumer := &KafkaConsumer{
		Config: config,
		Queue:  q,
		iLog:   iLog,
	}

	Kafkaconsumer.Queue.DocDBconn = docDBconn
	Kafkaconsumer.Queue.DB = db
	Kafkaconsumer.Queue.SignalRClient = signalRClient

	iLog.Debug(fmt.Sprintf("Create Kafkaconsumer: %s", logger.ConvertJson(Kafkaconsumer)))
	// Kafkaconsumer.BuildKafkaConsumer()
	return Kafkaconsumer
}

// BuildKafkaConsumer connects to the configured Kafka server with
// auto-committing offsets and starts consuming the configured topics.
func (KafkaConsumer *KafkaConsumer) BuildKafkaConsumer() {

	config := sarama.NewConfig()
	config.Consumer.Return.Errors = true
	config.Consumer.Offsets.AutoCommit.Enable = true
	config.Consumer.Offsets.AutoCommit.Interval = 1 * time.Second

	consumer, err := sarama.NewConsumer([]string{KafkaConsumer.Config.Server}, config)
	if err != nil {
		KafkaConsumer.iLog.Error(fmt.Sprintf("Error creating consumer: %v", err))
		return
	}

	KafkaConsumer.Consumer = consumer

	KafkaConsumer.PartitionTopics()
}
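// A minimal wiring sketch (illustrative only; the file name "kafkas.json" and
// the calling code are assumptions, while KafkasConfig and NewKafkaConsumer
// come from this package). A configuration file matching the struct tags above
// might look like:
//
//	{
//	    "kafkas": [
//	        {
//	            "server": "localhost:9092",
//	            "topics": [
//	                {"topic": "iac-events", "handler": "ProcessEvent", "mode": "", "type": "local"}
//	            ]
//	        }
//	    ],
//	    "apikey": "..."
//	}
//
// From a caller's package it could be loaded and started as below; since
// NewKafkaConsumer blocks waiting for a termination signal, each server gets
// its own goroutine:
//
//	raw, err := os.ReadFile("kafkas.json")
//	if err != nil {
//		log.Fatal(err)
//	}
//	var cfg kafka.KafkasConfig
//	if err := json.Unmarshal(raw, &cfg); err != nil {
//		log.Fatal(err)
//	}
//	for _, kc := range cfg.Kafkas {
//		go kafka.NewKafkaConsumer(kc) // one consumer per configured server
//	}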
/*
// ClusterGroup is an unused consumer-group variant based on the archived
// github.com/bsm/sarama-cluster client (see the commented import above).
func (KafkaConsumer *KafkaConsumer) ClusterGroup() {

	// cluster.Config embeds sarama.Config, so the same consumer options apply.
	config := cluster.NewConfig()
	config.Consumer.Return.Errors = true
	config.Consumer.Offsets.AutoCommit.Enable = true
	config.Consumer.Offsets.AutoCommit.Interval = 1 * time.Second

	group := uuid.New().String()
	topics := []string{}
	for _, data := range KafkaConsumer.Config.Topics {
		topics = append(topics, data.Topic)
	}

	consumerGroup, err := cluster.NewConsumer(
		[]string{KafkaConsumer.Config.Server},
		group,
		topics,
		config,
	)
	if err != nil {
		KafkaConsumer.iLog.Error(fmt.Sprintf("Failed to create consumer group: %v", err))
		return
	}
	defer consumerGroup.Close()

	signals := make(chan os.Signal, 1)
	signal.Notify(signals, syscall.SIGINT)

	q := KafkaConsumer.Queue
	go func() {
		for message := range consumerGroup.Messages() {
			KafkaConsumer.iLog.Debug(fmt.Sprintf("Received message: %s", message.Value))
			// Route the message to the handler configured for its topic.
			for _, data := range KafkaConsumer.Config.Topics {
				if message.Topic == data.Topic {
					handler := data.Handler
					if handler != "" {
						ID := uuid.New().String()
						msg := queue.Message{
							Id:        ID,
							UUID:      ID,
							Retry:     3,
							Execute:   0,
							Topic:     message.Topic,
							PayLoad:   message.Value,
							Handler:   handler,
							CreatedOn: time.Now(),
						}
						KafkaConsumer.iLog.Debug(fmt.Sprintf("Push message %v to queue: %s", msg, q.QueueID))
						q.Push(msg)
					}
					break
				}
			}

			consumerGroup.MarkMessage(message, "") // Mark the message as processed
		}
	}()
	KafkaConsumer.waitForTerminationSignal()
}
*/

// PartitionTopics starts one partition consumer per configured topic, then
// blocks until a termination signal so the consumer goroutines keep running.
func (KafkaConsumer *KafkaConsumer) PartitionTopics() {

	for _, data := range KafkaConsumer.Config.Topics {
		KafkaConsumer.initKafkaConsumerbyTopic(data.Topic, data.Handler, data)
	}
	KafkaConsumer.waitForTerminationSignal()
}

// initKafkaConsumerbyTopic consumes partition 0 of the given topic on a
// background goroutine. Messages for "local" topics are pushed onto the
// internal queue; all others are forwarded to the application server.
func (KafkaConsumer *KafkaConsumer) initKafkaConsumerbyTopic(topic string, handler string, data KafkaTopic) {

	consumer := KafkaConsumer.Consumer
	iLog := KafkaConsumer.iLog
	q := KafkaConsumer.Queue

	partitionConsumer, err := consumer.ConsumePartition(topic, 0, sarama.OffsetOldest)
	if err != nil {
		iLog.Error(fmt.Sprintf("Error creating partition consumer: %v", err))
		return
	}

	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)

	go func() {
		// Close the partition consumer when the loop exits; deferring inside
		// the goroutine keeps it open while messages are being consumed.
		defer partitionConsumer.Close()
	ConsumerLoop:
		for {
			select {
			case <-signals:
				break ConsumerLoop
			case err := <-partitionConsumer.Errors():
				iLog.Error(fmt.Sprintf("Error consuming message: %v", err))

			case message := <-partitionConsumer.Messages():
				iLog.Info(fmt.Sprintf("Consumed message offset %d: %s", message.Offset, string(message.Value)))
				if data.Type == "local" {
					ID := uuid.New().String()
					msg := queue.Message{
						Id:        ID,
						UUID:      ID,
						Retry:     3,
						Execute:   0,
						Topic:     topic,
						PayLoad:   message.Value,
						Handler:   handler,
						CreatedOn: time.Now(),
					}
					iLog.Debug(fmt.Sprintf("Push message %v to queue: %s", msg, q.QueueID))
					q.Push(msg)
				} else {
					iLog.Debug(fmt.Sprintf("Call IAC Endpoint to handle the message %s with: %s", message.Value, handler))
					KafkaConsumer.CallWebService(message, topic, handler)
				}
			}
		}
	}()
}
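// initKafkaConsumerbyTopic above reads only partition 0. A sketch of how
// multi-partition topics could be covered with the same sarama.Consumer
// (illustrative only; this package does not currently do this, and
// consumeLoop is a hypothetical helper running the same select loop as above):
//
//	partitions, err := consumer.Partitions(topic)
//	if err != nil {
//		iLog.Error(fmt.Sprintf("Error listing partitions: %v", err))
//		return
//	}
//	for _, p := range partitions {
//		pc, err := consumer.ConsumePartition(topic, p, sarama.OffsetOldest)
//		if err != nil {
//			iLog.Error(fmt.Sprintf("Error creating partition consumer: %v", err))
//			continue
//		}
//		go consumeLoop(pc) // one consumer goroutine per partition
//	}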
// CallWebService forwards a consumed Kafka message to the application
// server's /trancode/execute endpoint, authenticated with the API key.
func (KafkaConsumer *KafkaConsumer) CallWebService(msg *sarama.ConsumerMessage, topic string, handler string) {

	method := "POST"
	url := KafkaConsumer.AppServer + "/trancode/execute"

	var result map[string]interface{}
	err := json.Unmarshal(msg.Value, &result)
	if err != nil {
		KafkaConsumer.iLog.Error(fmt.Sprintf("Error: %v", err))
		return
	}

	inputs := make(map[string]interface{})
	inputs["Payload"] = result
	inputs["Topic"] = topic

	data := make(map[string]interface{})
	data["TranCode"] = handler
	data["Inputs"] = inputs

	headers := make(map[string]string)
	headers["Content-Type"] = "application/json"
	headers["Authorization"] = "apikey " + KafkaConsumer.ApiKey

	result, err = com.CallWebService(url, method, data, headers)
	if err != nil {
		KafkaConsumer.iLog.Error(fmt.Sprintf("Error in WebServiceCallFunc.Execute: %s", err))
		return
	}
	/*
		// Earlier direct net/http implementation, kept for reference (would
		// additionally require the bytes, net/http, and io/ioutil imports):
		client := &http.Client{}

		type MSGData struct {
			TranCode string                 `json:"code"`
			Inputs   map[string]interface{} `json:"inputs"`
		}

		var result map[string]interface{}
		err := json.Unmarshal(msg.Value, &result)
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error: %v", err))
			return
		}

		inputs := make(map[string]interface{})
		inputs["Payload"] = result
		inputs["Topic"] = topic

		msgdata := &MSGData{
			TranCode: handler,
			Inputs:   inputs,
		}

		bytesdata, err := json.Marshal(msgdata)
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error: %v", err))
			return
		}

		req, err := http.NewRequest(method, url, bytes.NewBuffer(bytesdata))
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error in WebServiceCallFunc.Execute: %s", err))
			return
		}
		req.Header.Set("Content-Type", "application/json")
		req.Header.Set("Authorization", "apikey "+KafkaConsumer.ApiKey)

		resp, err := client.Do(req)
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error in WebServiceCallFunc.Execute: %s", err))
			return
		}
		defer resp.Body.Close()

		respBody, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error: %v", err))
			return
		}
		err = json.Unmarshal(respBody, &result)
		if err != nil {
			KafkaConsumer.iLog.Error(fmt.Sprintf("Error: %v", err))
			return
		}
	*/
	KafkaConsumer.iLog.Debug(fmt.Sprintf("Response data: %v", result))
}

// waitForTerminationSignal blocks until SIGINT or SIGTERM, closes the Kafka
// consumer, and exits the process after a short grace period.
func (KafkaConsumer *KafkaConsumer) waitForTerminationSignal() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, os.Interrupt, syscall.SIGTERM)
	<-c
	fmt.Println("\nShutting down...")

	KafkaConsumer.Consumer.Close()

	time.Sleep(2 * time.Second) // Add any cleanup or graceful shutdown logic here
	os.Exit(0)
}
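// For reference, the request body assembled in CallWebService has the shape
// below. The field names come from the code above; the concrete values are
// illustrative only:
//
//	{
//	    "TranCode": "<handler trancode name>",
//	    "Inputs": {
//	        "Topic":   "<kafka topic>",
//	        "Payload": { "...": "decoded JSON of the Kafka message value" }
//	    }
//	}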