github.com/yankunsam/loki/v2@v2.6.3-0.20220817130409-389df5235c27/clients/pkg/promtail/targets/kafka/target.go

package kafka

import (
	"fmt"
	"time"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/relabel"

	"github.com/Shopify/sarama"
	"github.com/prometheus/common/model"

	"github.com/grafana/loki/clients/pkg/promtail/api"
	"github.com/grafana/loki/clients/pkg/promtail/targets/target"

	"github.com/grafana/loki/pkg/logproto"
)

// runnableDroppedTarget couples a dropped target with the function used to run it.
type runnableDroppedTarget struct {
	target.Target
	runFn func()
}

func (d *runnableDroppedTarget) run() {
	d.runFn()
}

// Target reads messages from a single Kafka partition claim and forwards them
// as Loki entries to the configured client.
type Target struct {
	discoveredLabels     model.LabelSet
	lbs                  model.LabelSet
	details              ConsumerDetails
	claim                sarama.ConsumerGroupClaim
	session              sarama.ConsumerGroupSession
	client               api.EntryHandler
	relabelConfig        []*relabel.Config
	useIncomingTimestamp bool
}

// NewTarget builds a Target bound to the given consumer group session and partition claim.
func NewTarget(
	session sarama.ConsumerGroupSession,
	claim sarama.ConsumerGroupClaim,
	discoveredLabels, lbs model.LabelSet,
	relabelConfig []*relabel.Config,
	client api.EntryHandler,
	useIncomingTimestamp bool,
) *Target {
	return &Target{
		discoveredLabels:     discoveredLabels,
		lbs:                  lbs,
		details:              newDetails(session, claim),
		claim:                claim,
		session:              session,
		client:               client,
		relabelConfig:        relabelConfig,
		useIncomingTimestamp: useIncomingTimestamp,
	}
}

const (
	defaultKafkaMessageKey  = "none"
	labelKeyKafkaMessageKey = "__meta_kafka_message_key"
)

// run consumes the claim's message channel until it is closed, relabels the
// Kafka message key, forwards each message as an entry, and marks it as
// processed on the session.
func (t *Target) run() {
	defer t.client.Stop()
	for message := range t.claim.Messages() {
		mk := string(message.Key)
		if len(mk) == 0 {
			mk = defaultKafkaMessageKey
		}

		// TODO: Possibly need to format after merging with discovered labels because we can specify multiple labels in source labels
		// https://github.com/grafana/loki/pull/4745#discussion_r750022234
		lbs := format([]labels.Label{{
			Name:  labelKeyKafkaMessageKey,
			Value: mk,
		}}, t.relabelConfig)

		out := t.lbs.Clone()
		if len(lbs) > 0 {
			out = out.Merge(lbs)
		}
		t.client.Chan() <- api.Entry{
			Entry: logproto.Entry{
				Line:      string(message.Value),
				Timestamp: timestamp(t.useIncomingTimestamp, message.Timestamp),
			},
			Labels: out,
		}
		t.session.MarkMessage(message, "")
	}
}

// timestamp returns the Kafka message timestamp when useIncoming is set,
// otherwise the current time.
func timestamp(useIncoming bool, incoming time.Time) time.Time {
	if useIncoming {
		return incoming
	}
	return time.Now()
}

func (t *Target) Type() target.TargetType {
	return target.KafkaTargetType
}

func (t *Target) Ready() bool {
	return true
}

func (t *Target) DiscoveredLabels() model.LabelSet {
	return t.discoveredLabels
}

func (t *Target) Labels() model.LabelSet {
	return t.lbs
}

// Details returns target-specific details.
func (t *Target) Details() interface{} {
	return t.details
}

// ConsumerDetails exposes session and claim metadata for a Kafka target.
type ConsumerDetails struct {
	// MemberID is the cluster member ID.
	MemberID string

	// GenerationID is the current generation ID.
	GenerationID int32

	Topic         string
	Partition     int32
	InitialOffset int64
}

func (c ConsumerDetails) String() string {
	return fmt.Sprintf("member_id=%s generation_id=%d topic=%s partition=%d initial_offset=%d", c.MemberID, c.GenerationID, c.Topic, c.Partition, c.InitialOffset)
}

func newDetails(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) ConsumerDetails {
	return ConsumerDetails{
		MemberID:      session.MemberID(),
		GenerationID:  session.GenerationID(),
		Topic:         claim.Topic(),
		Partition:     claim.Partition(),
		InitialOffset: claim.InitialOffset(),
	}
}
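
The file above only defines the per-claim target; nothing here wires it into a Sarama consumer group. As an illustrative sketch only, not code from this repository, a sarama.ConsumerGroupHandler in the same kafka package could build one Target per claimed partition and drive it until the claim's message channel closes on a rebalance. The exampleHandler type and its fields are hypothetical.

// Illustrative sketch, not part of target.go: a hypothetical handler that
// runs one Target per claimed partition. It assumes it lives in the same
// kafka package so it can call the unexported run method.
package kafka

import (
	"github.com/Shopify/sarama"
	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/relabel"

	"github.com/grafana/loki/clients/pkg/promtail/api"
)

// exampleHandler (hypothetical) implements sarama.ConsumerGroupHandler.
type exampleHandler struct {
	client               api.EntryHandler
	lbs                  model.LabelSet
	relabelConfig        []*relabel.Config
	useIncomingTimestamp bool
}

func (h *exampleHandler) Setup(sarama.ConsumerGroupSession) error   { return nil }
func (h *exampleHandler) Cleanup(sarama.ConsumerGroupSession) error { return nil }

// ConsumeClaim is invoked once per claimed topic/partition. Target.run blocks
// until the claim's Messages() channel is closed (e.g. on a rebalance), at
// which point this method returns and the group can rebalance cleanly.
func (h *exampleHandler) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
	t := NewTarget(session, claim, nil, h.lbs, h.relabelConfig, h.client, h.useIncomingTimestamp)
	t.run()
	return nil
}

Note that run stops its EntryHandler when it returns, so a real syncer would hand each target its own handler rather than sharing one across claims; the sketch glosses over that detail.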