// github.com/yankunsam/loki/v2@v2.6.3-0.20220817130409-389df5235c27
// clients/pkg/promtail/targets/kafka/target_syncer.go

package kafka

import (
	"context"
	"errors"
	"fmt"
	"sync"
	"time"

	"github.com/Shopify/sarama"
	"github.com/go-kit/log"
	"github.com/go-kit/log/level"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/labels"

	"github.com/grafana/loki/clients/pkg/logentry/stages"
	"github.com/grafana/loki/clients/pkg/promtail/api"
	"github.com/grafana/loki/clients/pkg/promtail/scrapeconfig"
	"github.com/grafana/loki/clients/pkg/promtail/targets/target"

	"github.com/grafana/loki/pkg/util"
)

// TopicPollInterval is how often the syncer polls Kafka for topic changes.
var TopicPollInterval = 30 * time.Second

// TopicManager lists the topics that should currently be consumed.
type TopicManager interface {
	Topics() ([]string, error)
}

// TargetSyncer watches Kafka for topic changes and (re)starts a consumer
// group session for the matching topics.
type TargetSyncer struct {
	logger   log.Logger
	cfg      scrapeconfig.Config
	pipeline *stages.Pipeline
	reg      prometheus.Registerer
	client   api.EntryHandler

	topicManager TopicManager
	consumer
	close func() error

	ctx            context.Context
	cancel         context.CancelFunc
	wg             sync.WaitGroup
	previousTopics []string
}

// NewSyncer creates a TargetSyncer: it validates the configuration, builds
// the sarama client and consumer group, wires up the pipeline stages, and
// starts the topic discovery loop.
func NewSyncer(
	reg prometheus.Registerer,
	logger log.Logger,
	cfg scrapeconfig.Config,
	pushClient api.EntryHandler,
) (*TargetSyncer, error) {
	if err := validateConfig(&cfg); err != nil {
		return nil, err
	}
	version, err := sarama.ParseKafkaVersion(cfg.KafkaConfig.Version)
	if err != nil {
		return nil, err
	}
	config := sarama.NewConfig()
	config.Version = version
	config.Consumer.Offsets.Initial = sarama.OffsetOldest

	switch cfg.KafkaConfig.Assignor {
	case sarama.StickyBalanceStrategyName:
		config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
	case sarama.RoundRobinBalanceStrategyName:
		config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
	case sarama.RangeBalanceStrategyName, "":
		config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
	default:
		return nil, fmt.Errorf("unrecognized consumer group partition assignor: %s", cfg.KafkaConfig.Assignor)
	}
	config, err = withAuthentication(*config, cfg.KafkaConfig.Authentication)
	if err != nil {
		return nil, fmt.Errorf("error setting up kafka authentication: %w", err)
	}
	client, err := sarama.NewClient(cfg.KafkaConfig.Brokers, config)
	if err != nil {
		return nil, fmt.Errorf("error creating kafka client: %w", err)
	}
	group, err := sarama.NewConsumerGroup(cfg.KafkaConfig.Brokers, cfg.KafkaConfig.GroupID, config)
	if err != nil {
		return nil, fmt.Errorf("error creating consumer group client: %w", err)
	}
	topicManager, err := newTopicManager(client, cfg.KafkaConfig.Topics)
	if err != nil {
		return nil, fmt.Errorf("error creating topic manager: %w", err)
	}
	pipeline, err := stages.NewPipeline(log.With(logger, "component", "kafka_pipeline"), cfg.PipelineStages, &cfg.JobName, reg)
	if err != nil {
		return nil, fmt.Errorf("error creating pipeline: %w", err)
	}
	ctx, cancel := context.WithCancel(context.Background())
	t := &TargetSyncer{
		logger:       logger,
		ctx:          ctx,
		cancel:       cancel,
		topicManager: topicManager,
		cfg:          cfg,
		reg:          reg,
		client:       pushClient,
		pipeline:     pipeline,
		close: func() error {
			if err := group.Close(); err != nil {
				level.Warn(logger).Log("msg", "error while closing consumer group", "err", err)
			}
			return client.Close()
		},
		consumer: consumer{
			ctx:           context.Background(),
			cancel:        func() {},
			ConsumerGroup: group,
			logger:        logger,
		},
	}
	t.discoverer = t
	t.loop()
	return t, nil
}
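// newSyncerExample is a hedged usage sketch, not part of the original file.
// It shows one plausible way to wire NewSyncer with a no-op logger, a fresh
// registry, and a caller-supplied push client. The KafkaTargetConfig field
// names and the broker/topic/group values are illustrative assumptions, not
// promtail defaults.
func newSyncerExample(pushClient api.EntryHandler) (*TargetSyncer, error) {
	cfg := scrapeconfig.Config{
		JobName: "kafka-example", // hypothetical job name
		KafkaConfig: &scrapeconfig.KafkaTargetConfig{
			Brokers: []string{"localhost:9092"}, // placeholder broker
			Topics:  []string{"logs"},           // placeholder topic
			GroupID: "promtail",
			Version: "2.1.1",
		},
	}
	// validateConfig (called by NewSyncer) would fill in the Version and
	// GroupID defaults if they were left empty.
	return NewSyncer(prometheus.NewRegistry(), log.NewNopLogger(), cfg, pushClient)
}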
func withAuthentication(cfg sarama.Config, authCfg scrapeconfig.KafkaAuthentication) (*sarama.Config, error) {
	if len(authCfg.Type) == 0 || authCfg.Type == scrapeconfig.KafkaAuthenticationTypeNone {
		return &cfg, nil
	}

	switch authCfg.Type {
	case scrapeconfig.KafkaAuthenticationTypeSSL:
		return withSSLAuthentication(cfg, authCfg)
	case scrapeconfig.KafkaAuthenticationTypeSASL:
		return withSASLAuthentication(cfg, authCfg)
	default:
		return nil, fmt.Errorf("unsupported authentication type %s", authCfg.Type)
	}
}

func withSSLAuthentication(cfg sarama.Config, authCfg scrapeconfig.KafkaAuthentication) (*sarama.Config, error) {
	cfg.Net.TLS.Enable = true
	tc, err := createTLSConfig(authCfg.TLSConfig)
	if err != nil {
		return nil, err
	}
	cfg.Net.TLS.Config = tc
	return &cfg, nil
}

func withSASLAuthentication(cfg sarama.Config, authCfg scrapeconfig.KafkaAuthentication) (*sarama.Config, error) {
	cfg.Net.SASL.Enable = true
	cfg.Net.SASL.User = authCfg.SASLConfig.User
	cfg.Net.SASL.Password = authCfg.SASLConfig.Password.String()
	cfg.Net.SASL.Mechanism = authCfg.SASLConfig.Mechanism
	if cfg.Net.SASL.Mechanism == "" {
		cfg.Net.SASL.Mechanism = sarama.SASLTypePlaintext
	}

	supportedMechanism := []string{
		sarama.SASLTypeSCRAMSHA512,
		sarama.SASLTypeSCRAMSHA256,
		sarama.SASLTypePlaintext,
	}
	// Validate the defaulted mechanism rather than the raw config value, so
	// an empty mechanism is accepted and falls back to plaintext as intended.
	if !util.StringsContain(supportedMechanism, string(cfg.Net.SASL.Mechanism)) {
		return nil, fmt.Errorf("unsupported SASL mechanism: %s", authCfg.SASLConfig.Mechanism)
	}

	if cfg.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
		cfg.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
			return &XDGSCRAMClient{
				HashGeneratorFcn: SHA512,
			}
		}
	}
	if cfg.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
		cfg.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
			return &XDGSCRAMClient{
				HashGeneratorFcn: SHA256,
			}
		}
	}
	if authCfg.SASLConfig.UseTLS {
		tc, err := createTLSConfig(authCfg.SASLConfig.TLSConfig)
		if err != nil {
			return nil, err
		}
		cfg.Net.TLS.Config = tc
		cfg.Net.TLS.Enable = true
	}
	return &cfg, nil
}
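// saslConfigExample is a hedged sketch, not part of the original file. It
// demonstrates the shape of the KafkaAuthentication value that
// withSASLAuthentication consumes for SCRAM-SHA-256; the user name is a
// placeholder and the password is left at its zero value. The KafkaSASLConfig
// field names mirror the accesses above and are otherwise assumptions.
func saslConfigExample() (*sarama.Config, error) {
	auth := scrapeconfig.KafkaAuthentication{
		Type: scrapeconfig.KafkaAuthenticationTypeSASL,
		SASLConfig: scrapeconfig.KafkaSASLConfig{
			Mechanism: sarama.SASLTypeSCRAMSHA256,
			User:      "promtail", // placeholder
		},
	}
	// A SCRAM mechanism makes withSASLAuthentication install the matching
	// XDGSCRAMClient generator on the returned config.
	return withSASLAuthentication(*sarama.NewConfig(), auth)
}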
// loop runs two goroutines: one polls the topic manager for topic changes at
// TopicPollInterval, the other restarts the consumer whenever the topic set
// changes.
func (ts *TargetSyncer) loop() {
	topicChanged := make(chan []string)
	ts.wg.Add(2)
	go func() {
		defer ts.wg.Done()
		for {
			select {
			case <-ts.ctx.Done():
				return
			case topics := <-topicChanged:
				level.Info(ts.logger).Log("msg", "new topics received", "topics", fmt.Sprintf("%+v", topics))
				ts.stop()
				if len(topics) > 0 { // no topics, nothing to start.
					ts.start(ts.ctx, topics)
				}
			}
		}
	}()
	go func() {
		defer ts.wg.Done()
		ticker := time.NewTicker(TopicPollInterval)
		defer ticker.Stop()

		tick := func() {
			select {
			case <-ts.ctx.Done():
			case <-ticker.C:
			}
		}
		for ; true; tick() { // instant first tick.
			if ts.ctx.Err() != nil {
				ts.stop()
				close(topicChanged)
				return
			}
			newTopics, ok, err := ts.fetchTopics()
			if err != nil {
				level.Warn(ts.logger).Log("msg", "failed to fetch topics", "err", err)
				continue
			}
			if ok {
				topicChanged <- newTopics
			}
		}
	}()
}

// fetchTopics fetches and returns the current topics; if they differ from the
// previously found topics, it returns true as the second return value.
func (ts *TargetSyncer) fetchTopics() ([]string, bool, error) {
	newTopics, err := ts.topicManager.Topics()
	if err != nil {
		return nil, false, err
	}
	if len(ts.previousTopics) != len(newTopics) {
		ts.previousTopics = newTopics
		return newTopics, true, nil
	}
	for i, v := range ts.previousTopics {
		if v != newTopics[i] {
			ts.previousTopics = newTopics
			return newTopics, true, nil
		}
	}
	return nil, false, nil
}

// Stop shuts down the syncer: it cancels the discovery loop, waits for it to
// finish, then closes the consumer group and the Kafka client.
func (ts *TargetSyncer) Stop() error {
	ts.cancel()
	ts.wg.Wait()
	return ts.close()
}

// NewTarget creates a new target based on the current Kafka claim and group session.
func (ts *TargetSyncer) NewTarget(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) (RunnableTarget, error) {
	discoveredLabels := model.LabelSet{
		"__meta_kafka_topic":     model.LabelValue(claim.Topic()),
		"__meta_kafka_partition": model.LabelValue(fmt.Sprintf("%d", claim.Partition())),
		"__meta_kafka_member_id": model.LabelValue(session.MemberID()),
		"__meta_kafka_group_id":  model.LabelValue(ts.cfg.KafkaConfig.GroupID),
	}
	details := newDetails(session, claim)
	labelMap := make(map[string]string)
	for k, v := range discoveredLabels.Clone().Merge(ts.cfg.KafkaConfig.Labels) {
		labelMap[string(k)] = string(v)
	}
	labelOut := format(labels.FromMap(labelMap), ts.cfg.RelabelConfigs)
	if len(labelOut) == 0 {
		level.Warn(ts.logger).Log("msg", "dropping target", "reason", "no labels", "details", details, "discovered_labels", discoveredLabels.String())
		return &runnableDroppedTarget{
			Target: target.NewDroppedTarget("dropping target, no labels", discoveredLabels),
			runFn: func() {
				// Drain the claim so the consumer group can make progress.
				for range claim.Messages() {
				}
			},
		}, nil
	}
	t := NewTarget(
		session,
		claim,
		discoveredLabels,
		labelOut,
		ts.cfg.RelabelConfigs,
		ts.pipeline.Wrap(ts.client),
		ts.cfg.KafkaConfig.UseIncomingTimestamp,
	)

	return t, nil
}

// validateConfig checks the Kafka scrape configuration and applies defaults
// for the version and group ID.
func validateConfig(cfg *scrapeconfig.Config) error {
	if cfg.KafkaConfig == nil {
		return errors.New("Kafka configuration is empty")
	}
	if cfg.KafkaConfig.Version == "" {
		cfg.KafkaConfig.Version = "2.1.1"
	}
	if len(cfg.KafkaConfig.Brokers) == 0 {
		return errors.New("no Kafka bootstrap brokers defined")
	}

	if len(cfg.KafkaConfig.Topics) == 0 {
		return errors.New("no topics given to be consumed")
	}

	if cfg.KafkaConfig.GroupID == "" {
		cfg.KafkaConfig.GroupID = "promtail"
	}
	return nil
}
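// staticTopicManager is a hedged sketch, not part of the original file: a
// minimal TopicManager implementation that returns a fixed topic list, handy
// for exercising fetchTopics and the discovery loop without a live Kafka
// cluster.
type staticTopicManager struct {
	topics []string
}

// Topics implements TopicManager by returning the fixed list.
func (s staticTopicManager) Topics() ([]string, error) {
	return s.topics, nil
}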