github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/cdc/sink/dmlsink/mq/mq_dml_sink_test.go

// Copyright 2022 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package mq

import (
	"context"
	"fmt"
	"net/url"
	"testing"
	"time"

	"github.com/pingcap/tiflow/cdc/entry"
	"github.com/pingcap/tiflow/cdc/model"
	"github.com/pingcap/tiflow/cdc/sink/dmlsink"
	"github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer"
	"github.com/pingcap/tiflow/cdc/sink/tablesink/state"
	"github.com/pingcap/tiflow/pkg/config"
	"github.com/pingcap/tiflow/pkg/sink/kafka"
	"github.com/stretchr/testify/require"
)

func TestNewKafkaDMLSinkFailed(t *testing.T) {
	t.Parallel()

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	uriTemplate := "kafka://%s/%s?kafka-version=0.9.0.0&max-batch-size=1" +
		"&max-message-bytes=1048576&partition-num=1" +
		"&kafka-client-id=unit-test&auto-create-topic=false&compression=gzip&protocol=avro"
	uri := fmt.Sprintf(uriTemplate, "127.0.0.1:9092", kafka.DefaultMockTopicName)

	sinkURI, err := url.Parse(uri)
	require.NoError(t, err)
	replicaConfig := config.GetDefaultReplicaConfig()
	require.NoError(t, replicaConfig.ValidateAndAdjust(sinkURI))

	ctx = context.WithValue(ctx, "testing.T", t)
	changefeedID := model.DefaultChangeFeedID("test")

	errCh := make(chan error, 1)
	s, err := NewKafkaDMLSink(ctx, changefeedID, sinkURI, replicaConfig, errCh,
		kafka.NewMockFactory, dmlproducer.NewDMLMockProducer)
	require.ErrorContains(t, err, "Avro protocol requires parameter \"schema-registry\"",
		"should report error when protocol is avro but schema-registry is not set")
	require.Nil(t, s)
}

func TestWriteEvents(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	uriTemplate := "kafka://%s/%s?kafka-version=0.9.0.0&max-batch-size=1" +
		"&max-message-bytes=1048576&partition-num=1" +
		"&kafka-client-id=unit-test&auto-create-topic=false&compression=gzip&protocol=open-protocol"
	uri := fmt.Sprintf(uriTemplate, "127.0.0.1:9092", kafka.DefaultMockTopicName)

	sinkURI, err := url.Parse(uri)
	require.NoError(t, err)
	replicaConfig := config.GetDefaultReplicaConfig()
	require.NoError(t, replicaConfig.ValidateAndAdjust(sinkURI))
	errCh := make(chan error, 1)

	ctx = context.WithValue(ctx, "testing.T", t)
	changefeedID := model.DefaultChangeFeedID("test")
	s, err := NewKafkaDMLSink(ctx, changefeedID, sinkURI, replicaConfig, errCh,
		kafka.NewMockFactory, dmlproducer.NewDMLMockProducer)
	require.NoError(t, err)
	require.NotNil(t, s)
	defer s.Close()

	helper := entry.NewSchemaTestHelper(t)
	defer helper.Close()

	sql := `create table test.t(a varchar(255) primary key)`
	job := helper.DDL2Job(sql)
	tableInfo := model.WrapTableInfo(0, "test", 1, job.BinlogInfo.TableInfo)

	tableStatus := state.TableSinkSinking
	row := &model.RowChangedEvent{
		CommitTs:  1,
		TableInfo: tableInfo,
		Columns:   model.Columns2ColumnDatas([]*model.Column{{Name: "a", Value: "aa"}}, tableInfo),
	}

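	// Build 3000 callback-able single-row transactions, all referencing the same
	// row, so the test can verify below that every event reaches the mock producer.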
	events := make([]*dmlsink.CallbackableEvent[*model.SingleTableTxn], 0, 3000)
	for i := 0; i < 3000; i++ {
		events = append(events, &dmlsink.TxnCallbackableEvent{
			Event: &model.SingleTableTxn{
				Rows: []*model.RowChangedEvent{row},
			},
			Callback:  func() {},
			SinkState: &tableStatus,
		})
	}

	err = s.WriteEvents(events...)
	// Wait for the events to be received by the worker.
	time.Sleep(time.Second)
	require.NoError(t, err)
	require.Len(t, errCh, 0)
	require.Len(t, s.alive.worker.producer.(*dmlproducer.MockDMLProducer).GetAllEvents(), 3000)
}