diff --git a/tests/integration_tests/big_txn/conf/diff_config.toml b/tests/integration_tests/big_txn/conf/diff_config.toml
deleted file mode 100644
index 367c21817c8..00000000000
--- a/tests/integration_tests/big_txn/conf/diff_config.toml
+++ /dev/null
@@ -1,29 +0,0 @@
-# diff Configuration.
-
-check-thread-count = 4
-
-export-fix-sql = true
-
-check-struct-only = false
-
-[task]
-    output-dir = "/tmp/tidb_cdc_test/big_txn/sync_diff/output"
-
-    source-instances = ["tidb"]
-
-    target-instance = "mysql"
-
-    target-check-tables = ["big_txn.*"]
-
-[data-sources]
-[data-sources.tidb]
-    host = "127.0.0.1"
-    port = 4000
-    user = "root"
-    password = ""
-
-[data-sources.mysql]
-    host = "127.0.0.1"
-    port = 3306
-    user = "root"
-    password = ""
diff --git a/tests/integration_tests/big_txn/conf/workload b/tests/integration_tests/big_txn/conf/workload
deleted file mode 100644
index d15d4a81bfd..00000000000
--- a/tests/integration_tests/big_txn/conf/workload
+++ /dev/null
@@ -1,14 +0,0 @@
-threadcount=1
-recordcount=5000
-operationcount=0
-workload=core
-fieldcount=100
-
-readallfields=true
-
-readproportion=0
-updateproportion=0
-scanproportion=0
-insertproportion=0
-
-requestdistribution=uniform
diff --git a/tests/integration_tests/big_txn/run.sh b/tests/integration_tests/big_txn/run.sh
deleted file mode 100755
index f13932ba42f..00000000000
--- a/tests/integration_tests/big_txn/run.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
-source $CUR/../_utils/test_prepare
-WORK_DIR=$OUT_DIR/$TEST_NAME
-CDC_BINARY=cdc.test
-SINK_TYPE=$1
-
-CDC_COUNT=3
-DB_COUNT=4
-
-function run() {
-	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
-
-	start_tidb_cluster --workdir $WORK_DIR
-
-	cd $WORK_DIR
-
-	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
-	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
-
-	TOPIC_NAME="ticdc-big-txn-test-$RANDOM"
-	case $SINK_TYPE in
-	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
-	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
-	pulsar)
-		run_pulsar_cluster $WORK_DIR normal
-		SINK_URI="pulsar://127.0.0.1:6650/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true"
-		;;
-	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306?transaction-atomicity=none" ;;
-	esac
-	run_cdc_cli changefeed create --sink-uri="$SINK_URI"
-
-	run_sql "CREATE DATABASE big_txn;"
-	go-ycsb load mysql -P $CUR/conf/workload -p mysql.host=${UP_TIDB_HOST} -p mysql.port=${UP_TIDB_PORT} -p mysql.user=root -p mysql.db=big_txn
-
-	case $SINK_TYPE in
-	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
-	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
-	pulsar) run_pulsar_consumer --upstream-uri $SINK_URI ;;
-	esac
-
-	check_table_exists "big_txn.usertable" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
-	run_sql "CREATE TABLE big_txn.usertable1 LIKE big_txn.usertable" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
-	run_sql "INSERT INTO big_txn.usertable1 SELECT * FROM big_txn.usertable" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
-	sleep 60
-	check_table_exists "big_txn.usertable1" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
-
-	run_sql "CREATE TABLE big_txn.finish_mark_1 (a int primary key);"
-	sleep 120
-	check_table_exists "big_txn.finish_mark_1" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 60
-
-	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
-
-}
-
-trap stop_tidb_cluster EXIT
-run $*
-check_logs $WORK_DIR
-echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
diff --git a/tests/integration_tests/kafka_big_txn_v2/conf/changefeed.toml b/tests/integration_tests/kafka_big_txn_v2/conf/changefeed.toml
deleted file mode 100644
index 75f14fb9006..00000000000
--- a/tests/integration_tests/kafka_big_txn_v2/conf/changefeed.toml
+++ /dev/null
@@ -1,3 +0,0 @@
-[sink]
-# Enable the kafka sink v2, the kafka-go client implementation
-enable-kafka-sink-v2 = true
diff --git a/tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml b/tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml
deleted file mode 100644
index 71521d8b006..00000000000
--- a/tests/integration_tests/kafka_big_txn_v2/conf/diff_config.toml
+++ /dev/null
@@ -1,29 +0,0 @@
-# diff Configuration.
-
-check-thread-count = 4
-
-export-fix-sql = true
-
-check-struct-only = false
-
-[task]
-    output-dir = "/tmp/tidb_cdc_test/kafka_big_txn_v2/sync_diff/output"
-
-    source-instances = ["tidb"]
-
-    target-instance = "mysql"
-
-    target-check-tables = ["big_txn.*"]
-
-[data-sources]
-[data-sources.tidb]
-    host = "127.0.0.1"
-    port = 4000
-    user = "root"
-    password = ""
-
-[data-sources.mysql]
-    host = "127.0.0.1"
-    port = 3306
-    user = "root"
-    password = ""
diff --git a/tests/integration_tests/kafka_big_txn_v2/conf/workload b/tests/integration_tests/kafka_big_txn_v2/conf/workload
deleted file mode 100644
index d15d4a81bfd..00000000000
--- a/tests/integration_tests/kafka_big_txn_v2/conf/workload
+++ /dev/null
@@ -1,14 +0,0 @@
-threadcount=1
-recordcount=5000
-operationcount=0
-workload=core
-fieldcount=100
-
-readallfields=true
-
-readproportion=0
-updateproportion=0
-scanproportion=0
-insertproportion=0
-
-requestdistribution=uniform
diff --git a/tests/integration_tests/kafka_big_txn_v2/run.sh b/tests/integration_tests/kafka_big_txn_v2/run.sh
deleted file mode 100755
index 5190a932167..00000000000
--- a/tests/integration_tests/kafka_big_txn_v2/run.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
-source $CUR/../_utils/test_prepare
-WORK_DIR=$OUT_DIR/$TEST_NAME
-CDC_BINARY=cdc.test
-SINK_TYPE=$1
-
-CDC_COUNT=3
-DB_COUNT=4
-
-function run() {
-	# test kafka sink only in this case
-	if [ "$SINK_TYPE" != "kafka" ]; then
-		return
-	fi
-	rm -rf $WORK_DIR && mkdir -p $WORK_DIR
-
-	start_tidb_cluster --workdir $WORK_DIR
-
-	cd $WORK_DIR
-
-	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
-	start_ts=$(run_cdc_cli_tso_query ${UP_PD_HOST_1} ${UP_PD_PORT_1})
-	run_cdc_server --workdir $WORK_DIR --binary $CDC_BINARY
-
-	TOPIC_NAME="ticdc-big-txn-test-$RANDOM"
-	case $SINK_TYPE in
-	kafka) SINK_URI="kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&kafka-version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
-	storage) SINK_URI="file://$WORK_DIR/storage_test/$TOPIC_NAME?protocol=canal-json&enable-tidb-extension=true" ;;
-	*) SINK_URI="mysql://normal:123456@127.0.0.1:3306?transaction-atomicity=none" ;;
-	esac
-	run_cdc_cli changefeed create --start-ts=$start_ts --sink-uri="$SINK_URI" --config="$CUR/conf/changefeed.toml"
-
-	run_sql "CREATE DATABASE big_txn;"
-	go-ycsb load mysql -P $CUR/conf/workload -p mysql.host=${UP_TIDB_HOST} -p mysql.port=${UP_TIDB_PORT} -p mysql.user=root -p mysql.db=big_txn
-
-	case $SINK_TYPE in
-	kafka) run_kafka_consumer $WORK_DIR "kafka://127.0.0.1:9092/$TOPIC_NAME?protocol=open-protocol&partition-num=4&version=${KAFKA_VERSION}&max-message-bytes=10485760" ;;
-	storage) run_storage_consumer $WORK_DIR $SINK_URI "" "" ;;
-	esac
-
-	check_table_exists "big_txn.usertable" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT}
-	run_sql "CREATE TABLE big_txn.usertable1 LIKE big_txn.usertable" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
-	run_sql "INSERT INTO big_txn.usertable1 SELECT * FROM big_txn.usertable" ${UP_TIDB_HOST} ${UP_TIDB_PORT}
-	run_sql "CREATE TABLE big_txn.finish_mark (a int primary key);"
-	sleep 120
-	check_table_exists "big_txn.finish_mark" ${DOWN_TIDB_HOST} ${DOWN_TIDB_PORT} 120
-
-	check_sync_diff $WORK_DIR $CUR/conf/diff_config.toml
-
-	cleanup_process $CDC_BINARY
-}
-
-trap stop_tidb_cluster EXIT
-run $*
-check_logs $WORK_DIR
-echo "[$(date)] <<<<<< run test case $TEST_NAME success! >>>>>>"
diff --git a/tests/integration_tests/run_group.sh b/tests/integration_tests/run_group.sh
index 2025a358409..18e5e8ade8a 100755
--- a/tests/integration_tests/run_group.sh
+++ b/tests/integration_tests/run_group.sh
@@ -17,7 +17,7 @@ mysql_only_consistent_replicate="consistent_replicate_ddl consistent_replicate_g
 
 kafka_only="kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium"
 kafka_only_protocol="kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check"
-kafka_only_v2="kafka_big_txn_v2 kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2"
+kafka_only_v2="kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2"
 
 storage_only="lossy_ddl storage_csv_update"
 storage_only_csv="storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table"
@@ -60,7 +60,7 @@ groups=(
 	# G13 pulsar mtls authentication enabled
 	'tiflash region_merge common_1'
 	# G14
-	'big_txn changefeed_finish force_replicate_table'
+	'changefeed_finish force_replicate_table'
 	# G15
 	'new_ci_collation batch_add_table multi_rocks'
 	# G16, currently G16 is not running in kafka pipeline