github.com/pingcap/tiflow@v0.0.0-20240520035814-5bf52d54e205/tests/integration_tests/run_group.sh

#!/bin/bash

set -eo pipefail

CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)

sink_type=$1
group=$2
group_num=${group#G}
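
# Usage sketch (arguments assumed from the CI pipelines referenced below):
#   ./run_group.sh mysql G05    # sink_type="mysql", group="G05", group_num="05"
#   ./run_group.sh kafka others # fails if any case is not assigned to a group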

# Other tests that only support mysql: batch_update_to_no_batch ddl_reentrant
# changefeed_fast_fail changefeed_resume_with_checkpoint_ts sequence
# multi_cdc_cluster capture_suicide_while_balance_table
mysql_only="bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart"
mysql_only_http="http_api http_api_tls api_v2 http_api_tls_with_user_auth cli_tls_with_auth"
mysql_only_consistent_replicate="consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table"

kafka_only="kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium"
kafka_only_protocol="kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check"
kafka_only_v2="kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2"

storage_only="lossy_ddl storage_csv_update"
storage_only_csv="storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table"
storage_only_canal_json="canal_json_storage_basic canal_json_storage_partition_table"

# Define groups
# Note: if a new group is added, its name must also be added to the CI configs:
# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy
# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tiflow/latest/pull_cdc_integration_test.groovy
# Each group is packed with as many tests as possible so that runtime is balanced
# across groups, reducing CI waiting time: several light tests share a group,
# while heavy tests get a group of their own.
groups=(
	# Note: only the tests in the first three groups run in the storage sink pipeline.
	# G00
	"$mysql_only $kafka_only $storage_only"
	# G01
	"$mysql_only_http $kafka_only_protocol $storage_only_canal_json multi_tables_ddl"
	# G02
	"$mysql_only_consistent_replicate $kafka_only_v2 $storage_only_csv"
	# G03
	'row_format drop_many_tables processor_stop_delay partition_table'
	# G04
	'foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop'
	# G05
	'charset_gbk ddl_manager multi_source'
	# G06
	'sink_retry changefeed_error ddl_sequence resourcecontrol'
	# G07 pulsar oauth2 authentication enabled
	'kv_client_stream_reconnect cdc split_region'
	# G08
	'processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo'
	# G09
	'gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status'
	# G10
	'default_value simple cdc_server_tips event_filter sql_mode'
	# G11
	'resolve_lock move_table autorandom generate_column'
	# G12
	'many_pk_or_uk capture_session_done_during_task ddl_attributes'
	# G13 pulsar mtls authentication enabled
	'tiflash region_merge common_1'
	# G14
	'changefeed_finish force_replicate_table'
	# G15
	'new_ci_collation batch_add_table multi_rocks'
	# G16, currently not run in the kafka pipeline
	'owner_resign processor_etcd_worker_delay sink_hang'
	# G17
	'clustered_index processor_resolved_ts_fallback'
	# The following tests run only in the mysql pipeline
	# G18
	'availability http_proxies sequence'
	# G19
	'changefeed_fast_fail batch_update_to_no_batch changefeed_resume_with_checkpoint_ts'
	# G20
	'tidb_mysql_test ddl_reentrant multi_cdc_cluster'
	# G21
	'bank kill_owner_with_ddl owner_remove_table_error'
)
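
# The "# Gxx" comments above mirror the array indices: groups[0] is G00,
# groups[5] is G05, and so on; the dispatch below indexes the array by group_num.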

# Collect any cases that are not assigned to a group, to avoid missing cases
others=()
for script in "$CUR"/*/run.sh; do
	test_name="$(basename "$(dirname "$script")")"
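	# The surrounding spaces make this a whole-word match against the flattened
	# group list, so e.g. "cdc" does not match inside "multi_cdc_cluster".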
	# shellcheck disable=SC2076
	if [[ ! " ${groups[*]} " =~ " ${test_name} " ]]; then
		others+=("${test_name}")
	fi
done

if [[ "$group" == "others" ]]; then
	if [[ ${#others[@]} -eq 0 ]]; then
		echo "All CDC integration test cases are added to groups"
		exit 0
	fi
	echo "Error: the following cases are not assigned to any group in tests/integration_tests/run_group.sh: ${others[*]}"
	exit 1
elif [[ $group_num =~ ^[0-9]+$ ]] && [[ -n ${groups[10#${group_num}]} ]]; then
	# force the array index to be interpreted as a decimal number
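	# (e.g. group_num="08": without the 10# prefix, bash arithmetic would treat
	# the leading zero as an octal marker and reject "08" as an invalid number)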
	test_names="${groups[10#${group_num}]}"
	# Run test cases
	echo "Run cases: ${test_names}"
	"${CUR}"/run.sh "${sink_type}" "${test_names}"
else
	echo "Error: invalid group name: ${group}"
	exit 1
fi