github.com/rudderlabs/rudder-go-kit@v0.30.0/testhelper/docker/resource/kafka/kafka_test.go

package kafka

import (
	"bytes"
	"context"
	"crypto/tls"
	"encoding/binary"
	"encoding/json"
	"fmt"
	"net"
	"os"
	"path/filepath"
	"testing"
	"time"

	confluent "github.com/confluentinc/confluent-kafka-go/v2/kafka"
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry"
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde"
	"github.com/confluentinc/confluent-kafka-go/v2/schemaregistry/serde/avro"
	"github.com/linkedin/goavro/v2"
	"github.com/ory/dockertest/v3"
	dc "github.com/ory/dockertest/v3/docker"
	"github.com/segmentio/kafka-go"
	"github.com/segmentio/kafka-go/sasl"
	"github.com/segmentio/kafka-go/sasl/plain"
	"github.com/segmentio/kafka-go/sasl/scram"
	"github.com/stretchr/testify/require"
	"golang.org/x/crypto/ssh"

	"github.com/rudderlabs/rudder-go-kit/testhelper/docker/resource/sshserver"
)

const (
	defaultTestTimeout = 60 * time.Second
)

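// TestResource starts a three-broker Kafka cluster via Setup and verifies
// that messages can be written through each of the advertised host ports.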
func TestResource(t *testing.T) {
	pool, err := dockertest.NewPool("")
	require.NoError(t, err)

	res, err := Setup(pool, t,
		WithBrokers(3),
	)
	require.NoError(t, err)

	var (
		ctx     = context.Background()
		topic   = "my-topic"
		brokers = []string{
			"localhost:" + res.Ports[0],
			"localhost:" + res.Ports[1],
			"localhost:" + res.Ports[2],
		}
	)

	w := &kafka.Writer{
		Addr:                   kafka.TCP(brokers...),
		Balancer:               &kafka.LeastBytes{},
		AllowAutoTopicCreation: true,
	}
	t.Cleanup(func() { _ = w.Close() })

	require.Eventually(t, func() bool {
		err := w.WriteMessages(ctx,
			kafka.Message{Topic: topic, Key: []byte("one"), Value: []byte("one!")},
			kafka.Message{Topic: topic, Key: []byte("two"), Value: []byte("two!")},
			kafka.Message{Topic: topic, Key: []byte("three"), Value: []byte("three!")},
		)
		if err != nil {
			t.Logf("failed to write messages: %s", err)
		}
		return err == nil
	}, defaultTestTimeout, 500*time.Millisecond)
}

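// TestWithSASL exercises PLAIN, SCRAM-SHA-256, and SCRAM-SHA-512
// authentication against a single-broker cluster over TLS.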
func TestWithSASL(t *testing.T) {
	pool, err := dockertest.NewPool("")
	require.NoError(t, err)

	path, err := os.Getwd()
	require.NoError(t, err)

	saslConfiguration := SASLConfig{
		BrokerUser: User{Username: "kafka1", Password: "password"},
		Users: []User{
			{Username: "client1", Password: "password"},
		},
		CertificatePassword: "password",
		KeyStorePath:        filepath.Join(path, "testdata", "keystore", "kafka.keystore.jks"),
		TrustStorePath:      filepath.Join(path, "testdata", "truststore", "kafka.truststore.jks"),
	}

	hashTypes := []string{"scramPlainText", "scramSHA256", "scramSHA512"}
	for _, hashType := range hashTypes {
		t.Run(hashType, func(t *testing.T) {
			var mechanism sasl.Mechanism
			containerOptions := []Option{WithBrokers(1)}

			switch hashType {
			case "scramPlainText":
				mechanism = plain.Mechanism{
					Username: saslConfiguration.Users[0].Username,
					Password: saslConfiguration.Users[0].Password,
				}
				containerOptions = append(containerOptions, WithSASLPlain(&saslConfiguration))
			case "scramSHA256":
				mechanism, err = scram.Mechanism(
					scram.SHA256, saslConfiguration.Users[0].Username, saslConfiguration.Users[0].Password,
				)
				require.NoError(t, err)
				containerOptions = append(containerOptions, WithSASLScramSHA256(&saslConfiguration))
			case "scramSHA512":
				mechanism, err = scram.Mechanism(
					scram.SHA512, saslConfiguration.Users[0].Username, saslConfiguration.Users[0].Password,
				)
				require.NoError(t, err)
				containerOptions = append(containerOptions, WithSASLScramSHA512(&saslConfiguration))
			}
			container, err := Setup(pool, t, containerOptions...)
			require.NoError(t, err)

			w := kafka.Writer{
				Addr:     kafka.TCP("localhost:" + container.Ports[0]),
				Balancer: &kafka.Hash{},
				Transport: &kafka.Transport{
					SASL: mechanism,
					TLS: &tls.Config{ // skipcq: GSC-G402
						MinVersion:         tls.VersionTLS11,
						MaxVersion:         tls.VersionTLS12,
						InsecureSkipVerify: true,
					},
				},
				AllowAutoTopicCreation: true,
			}
			t.Cleanup(func() { _ = w.Close() })

			require.Eventually(t, func() bool {
				err := w.WriteMessages(context.Background(),
					kafka.Message{Topic: "my-topic", Key: []byte("one"), Value: []byte("one!")},
					kafka.Message{Topic: "my-topic", Key: []byte("two"), Value: []byte("two!")},
					kafka.Message{Topic: "my-topic", Key: []byte("three"), Value: []byte("three!")},
				)
				if err != nil {
					t.Logf("failed to write messages: %s", err)
				}
				return err == nil
			}, defaultTestTimeout, 500*time.Millisecond)
		})
	}
}

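// TestAvroSchemaRegistry starts a single broker alongside a Schema Registry,
// registers two Avro schemas, produces a message serialized in the Confluent
// wire format, and consumes it back through a schema-aware deserializer.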
func TestAvroSchemaRegistry(t *testing.T) {
	pool, err := dockertest.NewPool("")
	require.NoError(t, err)

	container, err := Setup(pool, t, WithBrokers(1), WithSchemaRegistry())
	require.NoError(t, err)

	c, err := confluent.NewConsumer(&confluent.ConfigMap{
		"bootstrap.servers":  fmt.Sprintf("localhost:%s", container.Ports[0]),
		"group.id":           "group-1",
		"session.timeout.ms": 6000,
		"auto.offset.reset":  "earliest",
	})
	require.NoError(t, err)
	t.Cleanup(func() { _ = c.Close() })

	topic := "my-topic"
	err = c.SubscribeTopics([]string{topic}, nil)
	require.NoError(t, err)

	type User struct {
		FirstName string `json:"first_name"`
		LastName  string `json:"last_name"`
	}

	consumeUserMsg := func(t *testing.T, deser *avro.GenericDeserializer) {
		timeout := time.After(10 * time.Second)
		for {
			select {
			case <-timeout:
				t.Fatal("Timed out waiting for expected message")
			default:
				ev := c.Poll(100)
				if ev == nil {
					continue
				}

				switch e := ev.(type) {
				case *confluent.Message:
					value := User{}
					err = deser.DeserializeInto(*e.TopicPartition.Topic, e.Value, &value)
					require.NoErrorf(t, err, "Failed to deserialize payload: %s", err)
					require.Equal(t, User{FirstName: "John", LastName: "Doe"}, value)
					return
				case confluent.Error:
					t.Logf("Kafka Confluent Error: %v: %v", e.Code(), e)
				default:
					t.Logf("Ignoring consumer entry: %+v", e)
				}
			}
		}
	}

	// Register the schemas and set up the writer
	schemaRegistryClient, err := schemaregistry.NewClient(schemaregistry.NewConfig(container.SchemaRegistryURL))
	require.NoError(t, err)

	cwd, err := os.Getwd()
	require.NoError(t, err)
	path := func(file string) string { return filepath.Join(cwd, "testdata", "avro", file) }
	_, schemaID1 := registerSchema(t, "user1", path("user1.avsc"), schemaRegistryClient)
	userSchema2, schemaID2 := registerSchema(t, "user2", path("user2.avsc"), schemaRegistryClient)
	t.Logf("Schema IDs: %d, %d", schemaID1, schemaID2)

	rawMessage := json.RawMessage(`{
		"first_name": "John",
		"last_name": "Doe"
	}`)
	avroMessage := serializeAvroMessage(t, schemaID2, userSchema2, rawMessage)

	w := &kafka.Writer{
		Addr:                   kafka.TCP("localhost:" + container.Ports[0]),
		Balancer:               &kafka.LeastBytes{},
		AllowAutoTopicCreation: true,
	}
	t.Cleanup(func() { _ = w.Close() })

	t.Log("Writing message")
	require.Eventually(t, func() bool {
		err := w.WriteMessages(context.Background(),
			kafka.Message{Topic: topic, Key: []byte("123"), Value: avroMessage},
		)
		if err != nil {
			t.Logf("failed to write messages: %s", err)
		}
		return err == nil
	}, defaultTestTimeout, 500*time.Millisecond)

	// Start consuming
	t.Log("Consuming message")
	deser, err := avro.NewGenericDeserializer(schemaRegistryClient, serde.ValueSerde, avro.NewDeserializerConfig())
	require.NoError(t, err)
	consumeUserMsg(t, deser)
}

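// TestSSH verifies that a broker listening only on the Docker network can be
// reached by tunnelling the writer's connections through an SSH server.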
func TestSSH(t *testing.T) {
	pool, err := dockertest.NewPool("")
	require.NoError(t, err)

	// Start shared Docker network
	network, err := pool.Client.CreateNetwork(dc.CreateNetworkOptions{Name: "kafka_network"})
	require.NoError(t, err)
	t.Cleanup(func() {
		if err := pool.Client.RemoveNetwork(network.ID); err != nil {
			t.Logf("Error while removing Docker network: %v", err)
		}
	})

	// Start a single-broker Kafka cluster (with ZooKeeper), reachable only from within the Docker network
	_, err = Setup(pool, t,
		WithBrokers(1),
		WithNetwork(network),
		WithoutDockerHostListeners(),
	)
	require.NoError(t, err)

	// Set up the SSH server
	publicKeyPath, err := filepath.Abs("./testdata/ssh/test_key.pub")
	require.NoError(t, err)
	sshServer, err := sshserver.Setup(pool, t,
		sshserver.WithPublicKeyPath(publicKeyPath),
		sshserver.WithCredentials("linuxserver.io", ""),
		sshserver.WithDockerNetwork(network),
	)
	require.NoError(t, err)
	sshServerHost := fmt.Sprintf("localhost:%d", sshServer.Port)
	t.Logf("SSH server is listening on %s", sshServerHost)

	// Prepare SSH configuration
	privateKey, err := os.ReadFile("./testdata/ssh/test_key")
	require.NoError(t, err)

	signer, err := ssh.ParsePrivateKey(privateKey)
	require.NoError(t, err)

	sshConfig := &ssh.ClientConfig{
		User:            "linuxserver.io",
		Auth:            []ssh.AuthMethod{ssh.PublicKeys(signer)},
		Timeout:         10 * time.Second,
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // skipcq: GSC-G106
	}
	transport := &kafka.Transport{
		DialTimeout: 10 * time.Second,
		Dial: func(ctx context.Context, network, address string) (net.Conn, error) {
			sshClient, err := ssh.Dial("tcp", sshServerHost, sshConfig)
			if err != nil {
				return nil, fmt.Errorf("cannot dial SSH host %q: %w", sshServerHost, err)
			}

			conn, err := sshClient.Dial(network, address)
			if err != nil {
				return nil, fmt.Errorf(
					"cannot dial address %q over SSH (host %q): %w", address, sshServerHost, err,
				)
			}
			return conn, nil
		},
	}

	// Set up the writer
	w := &kafka.Writer{
		Addr:                   kafka.TCP("kafka1:9092"),
		Balancer:               &kafka.LeastBytes{},
		AllowAutoTopicCreation: true,
		Transport:              transport,
	}
	t.Cleanup(func() { _ = w.Close() })

	require.Eventually(t, func() bool {
		err := w.WriteMessages(context.Background(),
			kafka.Message{Topic: "my-topic", Key: []byte("foo"), Value: []byte("bar!")},
		)
		if err != nil {
			t.Logf("failed to write messages: %s", err)
		}
		return err == nil
	}, defaultTestTimeout, 500*time.Millisecond)
}

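// registerSchema reads an Avro schema from disk and registers it with the
// Schema Registry, retrying until the registry becomes reachable. It returns
// the schema text and the ID assigned by the registry.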
func registerSchema(
	t *testing.T, schemaName, schemaPath string, c schemaregistry.Client,
) (schema string, schemaID int) {
	t.Helper()

	buf, err := os.ReadFile(schemaPath)
	require.NoError(t, err)

	si := schemaregistry.SchemaInfo{Schema: string(buf)}
	require.Eventuallyf(t, func() bool {
		schemaID, err = c.Register(schemaName, si, true)
		return err == nil
	}, defaultTestTimeout, time.Second, "failed to register schema %s: %v", schemaName, err)

	schema = string(buf)
	return
}

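// serializeAvroMessage encodes a JSON payload to Avro binary using the given
// schema and prepends the Confluent wire-format header.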
func serializeAvroMessage(t *testing.T, schemaID int, schema string, value []byte) []byte {
	t.Helper()

	codec, err := goavro.NewCodec(schema)
	require.NoError(t, err)

	native, _, err := codec.NativeFromTextual(value)
	require.NoError(t, err)

	bin, err := codec.BinaryFromNative(nil, native)
	require.NoError(t, err)

	return addAvroSchemaIDHeader(t, schemaID, bin)
}

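// addAvroSchemaIDHeader prepends the Confluent wire-format header to an Avro
// payload: a zero magic byte followed by the schema ID as a 4-byte big-endian
// integer, so that schema-registry-aware consumers can resolve the schema.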
func addAvroSchemaIDHeader(t *testing.T, schemaID int, msgBytes []byte) []byte {
	t.Helper()

	var buf bytes.Buffer
	require.NoErrorf(t, buf.WriteByte(byte(0x0)), "avro header: unable to write magic byte")

	idBytes := make([]byte, 4)
	binary.BigEndian.PutUint32(idBytes, uint32(schemaID))
	_, err := buf.Write(idBytes)
	require.NoError(t, err)

	_, err = buf.Write(msgBytes)
	require.NoError(t, err)

	return buf.Bytes()
}