github.com/yankunsam/loki/v2@v2.6.3-0.20220817130409-389df5235c27/clients/pkg/promtail/targets/kafka/target_syncer_test.go (about)

     1  package kafka
     2  
     3  import (
     4  	"context"
     5  	"fmt"
     6  	"reflect"
     7  	"testing"
     8  	"time"
     9  
    10  	"github.com/grafana/loki/clients/pkg/logentry/stages"
    11  
    12  	"github.com/grafana/dskit/flagext"
    13  	"github.com/prometheus/common/config"
    14  
    15  	"github.com/Shopify/sarama"
    16  	"github.com/go-kit/log"
    17  	"github.com/prometheus/client_golang/prometheus"
    18  	"github.com/prometheus/common/model"
    19  	"github.com/prometheus/prometheus/model/relabel"
    20  	"github.com/stretchr/testify/assert"
    21  	"github.com/stretchr/testify/require"
    22  
    23  	"github.com/grafana/loki/clients/pkg/promtail/client/fake"
    24  	"github.com/grafana/loki/clients/pkg/promtail/scrapeconfig"
    25  )
    26  
// Test_TopicDiscovery exercises the TargetSyncer consume loop's topic
// discovery: the syncer is configured for topic1 and topic2, but must only
// consume the topics the (mock) Kafka client currently reports, and must
// pick up topic2 once the client starts reporting it.
func Test_TopicDiscovery(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	group := &testConsumerGroupHandler{}
	// Poll aggressively so require.Eventually observes topic changes well
	// within its 200ms window.
	TopicPollInterval = time.Microsecond
	var closed bool
	// The client initially only knows about topic1.
	client := &mockKafkaClient{
		topics: []string{"topic1"},
	}
	ts := &TargetSyncer{
		ctx:          ctx,
		cancel:       cancel,
		logger:       log.NewNopLogger(),
		reg:          prometheus.DefaultRegisterer,
		topicManager: mustNewTopicsManager(client, []string{"topic1", "topic2"}),
		// close is invoked by Stop(); record that it ran.
		close: func() error {
			closed = true
			return nil
		},
		consumer: consumer{
			ctx:           context.Background(),
			cancel:        func() {},
			ConsumerGroup: group,
			logger:        log.NewNopLogger(),
			// Targets are irrelevant here; only topic assignment is checked.
			discoverer: DiscovererFn(func(s sarama.ConsumerGroupSession, c sarama.ConsumerGroupClaim) (RunnableTarget, error) {
				return nil, nil
			}),
		},
		cfg: scrapeconfig.Config{
			JobName:        "foo",
			RelabelConfigs: []*relabel.Config{},
			KafkaConfig: &scrapeconfig.KafkaTargetConfig{
				UseIncomingTimestamp: true,
				Topics:               []string{"topic1", "topic2"},
			},
		},
	}

	// Start the discovery/consume loop in the background.
	ts.loop()
	// NOTE(review): the trailing message args evaluate group.topics eagerly
	// (before the wait), so a failure message may show a stale value.
	require.Eventually(t, func() bool {
		if !group.consuming.Load() {
			return false
		}
		return reflect.DeepEqual([]string{"topic1"}, group.topics)
	}, 200*time.Millisecond, time.Millisecond, "expected topics: %v, got: %v", []string{"topic1"}, group.topics)

	// NOTE(review): this write can be concurrent with the poll loop reading
	// the mock's topics — assumes mockKafkaClient guards access; verify
	// under -race.
	client.topics = []string{"topic1", "topic2"} // introduce new topics

	require.Eventually(t, func() bool {
		if !group.consuming.Load() {
			return false
		}
		return reflect.DeepEqual([]string{"topic1", "topic2"}, group.topics)
	}, 200*time.Millisecond, time.Millisecond, "expected topics: %v, got: %v", []string{"topic1", "topic2"}, group.topics)

	// Stop must shut the loop down and call the injected close func.
	require.NoError(t, ts.Stop())
	require.True(t, closed)
}
    84  
    85  func Test_NewTarget(t *testing.T) {
    86  	ts := &TargetSyncer{
    87  		logger: log.NewNopLogger(),
    88  		reg:    prometheus.DefaultRegisterer,
    89  		client: fake.New(func() {}),
    90  		cfg: scrapeconfig.Config{
    91  			JobName: "foo",
    92  			RelabelConfigs: []*relabel.Config{
    93  				{
    94  					SourceLabels: model.LabelNames{"__meta_kafka_topic"},
    95  					TargetLabel:  "topic",
    96  					Replacement:  "$1",
    97  					Action:       relabel.Replace,
    98  					Regex:        relabel.MustNewRegexp("(.*)"),
    99  				},
   100  			},
   101  			KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   102  				UseIncomingTimestamp: true,
   103  				GroupID:              "group_1",
   104  				Topics:               []string{"topic1", "topic2"},
   105  				Labels:               model.LabelSet{"static": "static1"},
   106  			},
   107  		},
   108  	}
   109  	pipeline, err := stages.NewPipeline(ts.logger, ts.cfg.PipelineStages, &ts.cfg.JobName, ts.reg)
   110  	require.NoError(t, err)
   111  	ts.pipeline = pipeline
   112  	tg, err := ts.NewTarget(&testSession{}, newTestClaim("foo", 10, 1))
   113  
   114  	require.NoError(t, err)
   115  	require.Equal(t, ConsumerDetails{
   116  		MemberID:      "foo",
   117  		GenerationID:  10,
   118  		Topic:         "foo",
   119  		Partition:     10,
   120  		InitialOffset: 1,
   121  	}, tg.Details())
   122  	require.Equal(t, model.LabelSet{"static": "static1", "topic": "foo"}, tg.Labels())
   123  	require.Equal(t, model.LabelSet{"__meta_kafka_member_id": "foo", "__meta_kafka_partition": "10", "__meta_kafka_topic": "foo", "__meta_kafka_group_id": "group_1"}, tg.DiscoveredLabels())
   124  }
   125  
   126  func Test_NewDroppedTarget(t *testing.T) {
   127  	ts := &TargetSyncer{
   128  		logger: log.NewNopLogger(),
   129  		reg:    prometheus.DefaultRegisterer,
   130  		cfg: scrapeconfig.Config{
   131  			JobName: "foo",
   132  			KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   133  				UseIncomingTimestamp: true,
   134  				GroupID:              "group1",
   135  				Topics:               []string{"topic1", "topic2"},
   136  			},
   137  		},
   138  	}
   139  	tg, err := ts.NewTarget(&testSession{}, newTestClaim("foo", 10, 1))
   140  
   141  	require.NoError(t, err)
   142  	require.Equal(t, "dropping target, no labels", tg.Details())
   143  	require.Equal(t, model.LabelSet(nil), tg.Labels())
   144  	require.Equal(t, model.LabelSet{"__meta_kafka_member_id": "foo", "__meta_kafka_partition": "10", "__meta_kafka_topic": "foo", "__meta_kafka_group_id": "group1"}, tg.DiscoveredLabels())
   145  }
   146  
   147  func Test_validateConfig(t *testing.T) {
   148  	tests := []struct {
   149  		cfg      *scrapeconfig.Config
   150  		wantErr  bool
   151  		expected *scrapeconfig.Config
   152  	}{
   153  		{
   154  			&scrapeconfig.Config{
   155  				KafkaConfig: nil,
   156  			},
   157  			true,
   158  			nil,
   159  		},
   160  		{
   161  			&scrapeconfig.Config{
   162  				KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   163  					GroupID: "foo",
   164  					Topics:  []string{"bar"},
   165  				},
   166  			},
   167  			true,
   168  			nil,
   169  		},
   170  		{
   171  			&scrapeconfig.Config{
   172  				KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   173  					Brokers: []string{"foo"},
   174  					GroupID: "bar",
   175  				},
   176  			},
   177  			true,
   178  			nil,
   179  		},
   180  		{
   181  			&scrapeconfig.Config{
   182  				KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   183  					Brokers: []string{"foo"},
   184  				},
   185  			},
   186  			true,
   187  			nil,
   188  		},
   189  		{
   190  			&scrapeconfig.Config{
   191  				KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   192  					Brokers: []string{"foo"},
   193  					Topics:  []string{"bar"},
   194  				},
   195  			},
   196  			false,
   197  			&scrapeconfig.Config{
   198  				KafkaConfig: &scrapeconfig.KafkaTargetConfig{
   199  					Brokers: []string{"foo"},
   200  					Topics:  []string{"bar"},
   201  					GroupID: "promtail",
   202  					Version: "2.1.1",
   203  				},
   204  			},
   205  		},
   206  	}
   207  
   208  	for i, tt := range tests {
   209  		tt := tt
   210  		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
   211  			err := validateConfig(tt.cfg)
   212  			if (err != nil) != tt.wantErr {
   213  				t.Errorf("validateConfig() error = %v, wantErr %v", err, tt.wantErr)
   214  			}
   215  			if err == nil {
   216  				require.Equal(t, tt.expected, tt.cfg)
   217  			}
   218  		})
   219  	}
   220  }
   221  
   222  func Test_withAuthentication(t *testing.T) {
   223  	var (
   224  		tlsConf = config.TLSConfig{
   225  			CAFile:             "testdata/example.com.ca.pem",
   226  			CertFile:           "testdata/example.com.pem",
   227  			KeyFile:            "testdata/example.com-key.pem",
   228  			ServerName:         "example.com",
   229  			InsecureSkipVerify: true,
   230  		}
   231  		expectedTLSConf, _ = createTLSConfig(config.TLSConfig{
   232  			CAFile:             "testdata/example.com.ca.pem",
   233  			CertFile:           "testdata/example.com.pem",
   234  			KeyFile:            "testdata/example.com-key.pem",
   235  			ServerName:         "example.com",
   236  			InsecureSkipVerify: true,
   237  		})
   238  		cfg = sarama.NewConfig()
   239  	)
   240  
   241  	// no authentication
   242  	noAuthCfg, err := withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   243  		Type: scrapeconfig.KafkaAuthenticationTypeNone,
   244  	})
   245  	assert.Nil(t, err)
   246  	assert.Equal(t, false, noAuthCfg.Net.TLS.Enable)
   247  	assert.Equal(t, false, noAuthCfg.Net.SASL.Enable)
   248  	assert.NoError(t, noAuthCfg.Validate())
   249  
   250  	// specify unsupported auth type
   251  	illegalAuthTypeCfg, err := withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   252  		Type: "illegal",
   253  	})
   254  	assert.NotNil(t, err)
   255  	assert.Nil(t, illegalAuthTypeCfg)
   256  
   257  	// mTLS authentication
   258  	mTLSCfg, err := withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   259  		Type:      scrapeconfig.KafkaAuthenticationTypeSSL,
   260  		TLSConfig: tlsConf,
   261  	})
   262  	assert.Nil(t, err)
   263  	assert.Equal(t, true, mTLSCfg.Net.TLS.Enable)
   264  	assert.NotNil(t, mTLSCfg.Net.TLS.Config)
   265  	assert.Equal(t, "example.com", mTLSCfg.Net.TLS.Config.ServerName)
   266  	assert.Equal(t, true, mTLSCfg.Net.TLS.Config.InsecureSkipVerify)
   267  	assert.Equal(t, expectedTLSConf.Certificates, mTLSCfg.Net.TLS.Config.Certificates)
   268  	assert.NotNil(t, mTLSCfg.Net.TLS.Config.RootCAs)
   269  	assert.NoError(t, mTLSCfg.Validate())
   270  
   271  	// mTLS authentication expect ignore sasl
   272  	mTLSCfg, err = withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   273  		Type:      scrapeconfig.KafkaAuthenticationTypeSSL,
   274  		TLSConfig: tlsConf,
   275  		SASLConfig: scrapeconfig.KafkaSASLConfig{
   276  			Mechanism: sarama.SASLTypeSCRAMSHA256,
   277  			User:      "user",
   278  			Password:  flagext.SecretWithValue("pass"),
   279  			UseTLS:    false,
   280  		},
   281  	})
   282  	assert.Nil(t, err)
   283  	assert.Equal(t, false, mTLSCfg.Net.SASL.Enable)
   284  
   285  	// SASL/PLAIN
   286  	saslCfg, err := withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   287  		Type: scrapeconfig.KafkaAuthenticationTypeSASL,
   288  		SASLConfig: scrapeconfig.KafkaSASLConfig{
   289  			Mechanism: sarama.SASLTypePlaintext,
   290  			User:      "user",
   291  			Password:  flagext.SecretWithValue("pass"),
   292  		},
   293  	})
   294  	assert.Nil(t, err)
   295  	assert.Equal(t, false, saslCfg.Net.TLS.Enable)
   296  	assert.Equal(t, true, saslCfg.Net.SASL.Enable)
   297  	assert.Equal(t, "user", saslCfg.Net.SASL.User)
   298  	assert.Equal(t, "pass", saslCfg.Net.SASL.Password)
   299  	assert.Equal(t, sarama.SASLTypePlaintext, string(saslCfg.Net.SASL.Mechanism))
   300  	assert.NoError(t, saslCfg.Validate())
   301  
   302  	// SASL/SCRAM
   303  	saslCfg, err = withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   304  		Type: scrapeconfig.KafkaAuthenticationTypeSASL,
   305  		SASLConfig: scrapeconfig.KafkaSASLConfig{
   306  			Mechanism: sarama.SASLTypeSCRAMSHA512,
   307  			User:      "user",
   308  			Password:  flagext.SecretWithValue("pass"),
   309  		},
   310  	})
   311  	assert.Nil(t, err)
   312  	assert.Equal(t, false, saslCfg.Net.TLS.Enable)
   313  	assert.Equal(t, true, saslCfg.Net.SASL.Enable)
   314  	assert.Equal(t, "user", saslCfg.Net.SASL.User)
   315  	assert.Equal(t, "pass", saslCfg.Net.SASL.Password)
   316  	assert.Equal(t, sarama.SASLTypeSCRAMSHA512, string(saslCfg.Net.SASL.Mechanism))
   317  	assert.NoError(t, saslCfg.Validate())
   318  
   319  	// SASL unsupported mechanism
   320  	_, err = withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   321  		Type: scrapeconfig.KafkaAuthenticationTypeSASL,
   322  		SASLConfig: scrapeconfig.KafkaSASLConfig{
   323  			Mechanism: sarama.SASLTypeGSSAPI,
   324  			User:      "user",
   325  			Password:  flagext.SecretWithValue("pass"),
   326  		},
   327  	})
   328  	assert.Error(t, err)
   329  	assert.Equal(t, err.Error(), "error unsupported sasl mechanism: GSSAPI")
   330  
   331  	// SASL over TLS
   332  	saslCfg, err = withAuthentication(*cfg, scrapeconfig.KafkaAuthentication{
   333  		Type: scrapeconfig.KafkaAuthenticationTypeSASL,
   334  		SASLConfig: scrapeconfig.KafkaSASLConfig{
   335  			Mechanism: sarama.SASLTypeSCRAMSHA512,
   336  			User:      "user",
   337  			Password:  flagext.SecretWithValue("pass"),
   338  			UseTLS:    true,
   339  			TLSConfig: tlsConf,
   340  		},
   341  	})
   342  	assert.Nil(t, err)
   343  	assert.Equal(t, true, saslCfg.Net.TLS.Enable)
   344  	assert.Equal(t, true, saslCfg.Net.SASL.Enable)
   345  	assert.NotNil(t, saslCfg.Net.TLS.Config)
   346  	assert.Equal(t, "example.com", saslCfg.Net.TLS.Config.ServerName)
   347  	assert.Equal(t, true, saslCfg.Net.TLS.Config.InsecureSkipVerify)
   348  	assert.Equal(t, expectedTLSConf.Certificates, saslCfg.Net.TLS.Config.Certificates)
   349  	assert.NotNil(t, saslCfg.Net.TLS.Config.RootCAs)
   350  	assert.NoError(t, saslCfg.Validate())
   351  }