br: rebase auto random id if the table is common handle (#52256) #52292

Merged
This is an automated cherry-pick of #52256
Signed-off-by: ti-chi-bot <ti-community-prow-bot@tidb.io>
Leavrth authored and ti-chi-bot committed Apr 1, 2024
commit 98d2cb81511137ab67f2486022077b21baad5a46
2 changes: 1 addition & 1 deletion br/pkg/backup/client.go
@@ -633,7 +633,7 @@ func BuildBackupRangeAndSchema(
// Treat cached table as normal table.
tableInfo.TableCacheStatusType = model.TableCacheStatusDisable

- if tableInfo.PKIsHandle && tableInfo.ContainsAutoRandomBits() {
+ if tableInfo.ContainsAutoRandomBits() {
// this table has an auto_random id; we need to back it up and rebase it during restoration
var globalAutoRandID int64
globalAutoRandID, err = randAlloc.NextGlobalAutoID()
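The dropped `PKIsHandle` condition is the backup-side half of the fix: a clustered table whose primary key is composite or non-integer (a common-handle table) reports `PKIsHandle == false` even when it carries AUTO_RANDOM bits, so its auto_random allocator was never recorded in the backup. Below is a minimal, self-contained sketch of the guard change, using a hypothetical `tableMeta` struct in place of TiDB's `model.TableInfo` (the field names mirror the real ones, but this is not the BR code):

```go
package main

import "fmt"

// tableMeta is a hypothetical stand-in for the model.TableInfo fields that
// matter here; it is not TiDB's real type.
type tableMeta struct {
	PKIsHandle     bool   // a single integer primary-key column is the row handle
	IsCommonHandle bool   // clustered index on a composite or non-integer primary key
	AutoRandomBits uint64 // number of shard bits; > 0 means AUTO_RANDOM is enabled
}

func containsAutoRandomBits(t tableMeta) bool { return t.AutoRandomBits > 0 }

// needRebase reports whether backup should record the table's auto_random
// allocator so that restore can rebase it.
func needRebase(t tableMeta) bool {
	// Old guard: t.PKIsHandle && containsAutoRandomBits(t)
	// A common-handle table such as
	//   CREATE TABLE t (a BIGINT AUTO_RANDOM(1), b VARCHAR(255), PRIMARY KEY (a, b))
	// has PKIsHandle == false, so the old guard skipped it.
	return containsAutoRandomBits(t)
}

func main() {
	commonHandle := tableMeta{IsCommonHandle: true, AutoRandomBits: 1}
	intHandle := tableMeta{PKIsHandle: true, AutoRandomBits: 1}
	fmt.Println(needRebase(commonHandle), needRebase(intHandle)) // true true (previously: false true)
}
```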
2 changes: 1 addition & 1 deletion br/pkg/restore/db.go
@@ -254,7 +254,7 @@ func (db *DB) CreateTablePostRestore(ctx context.Context, table *metautil.Table,
utils.EncloseName(table.DB.Name.O),
utils.EncloseName(table.Info.Name.O),
table.Info.AutoIncID)
- } else if table.Info.PKIsHandle && table.Info.ContainsAutoRandomBits() {
+ } else if table.Info.ContainsAutoRandomBits() {
restoreMetaSQL = fmt.Sprintf(
"alter table %s.%s auto_random_base = %d",
utils.EncloseName(table.DB.Name.O),
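The restore-side half relaxes the same guard so that the `auto_random_base` rebase statement is also emitted for common-handle tables. Here is a small sketch of the statement this branch formats, using hypothetical example values and hand-written backticks in place of the `utils.EncloseName` helper:

```go
package main

import "fmt"

func main() {
	// Hypothetical example values; the real code reads them from the backup's
	// table metadata (table.DB.Name.O, table.Info.Name.O) and the backed-up
	// global auto-random ID.
	db, table := "test", "common"
	autoRandBase := int64(30001)

	// Mirrors the fmt.Sprintf pattern in CreateTablePostRestore; the literal
	// backticks stand in for the utils.EncloseName helper.
	restoreMetaSQL := fmt.Sprintf(
		"alter table `%s`.`%s` auto_random_base = %d",
		db, table, autoRandBase)
	fmt.Println(restoreMetaSQL)
	// Output: alter table `test`.`common` auto_random_base = 30001
}
```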
71 changes: 71 additions & 0 deletions br/tests/br_autorandom/run.sh
@@ -0,0 +1,71 @@
#!/bin/bash
#
# Copyright 2024 PingCAP, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -eu
. run_services
CUR=$(cd `dirname $0`; pwd)

# const value
PREFIX="autorandom" # NOTICE: don't start with 'br' because `restart services` would remove file/directory br*.
res_file="$TEST_DIR/sql_res.$TEST_NAME.txt"

# start a new cluster
echo "restart a services"
restart_services

# prepare the data
echo "prepare the data"
run_sql "CREATE TABLE test.common (a BIGINT UNSIGNED AUTO_RANDOM(1), b VARCHAR(255), uid INT, c VARCHAR(255) DEFAULT 'c', PRIMARY KEY (a, b), UNIQUE INDEX (uid));"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 1, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 2, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 3, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 4, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 5, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 6, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 7, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 8, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 9, 'a');"
run_sql "INSERT INTO test.common (b, uid, c) values ('a', 10, 'a');"

run_sql "CREATE TABLE test.pk (a BIGINT UNSIGNED AUTO_RANDOM(1), uid INT, c VARCHAR(255) DEFAULT 'c', PRIMARY KEY (a), UNIQUE INDEX (uid));"
run_sql "INSERT INTO test.pk (uid, c) values (1, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (2, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (3, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (4, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (5, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (6, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (7, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (8, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (9, 'a');"
run_sql "INSERT INTO test.pk (uid, c) values (10, 'a');"

# backup & restore
run_br --pd $PD_ADDR backup full -s "local://$TEST_DIR/$PREFIX/full"
echo "restart a services"
restart_services
run_br --pd $PD_ADDR restore full -s "local://$TEST_DIR/$PREFIX/full"

# new workload
for i in `seq 1 9`; do
run_sql "INSERT INTO test.common (b, uid) values ('a', 10) on duplicate key update c = 'b';"
run_sql "INSERT INTO test.pk (uid) values (10) on duplicate key update c = 'b';"
done

# check consistency
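# The restored tables should have their auto_random base rebased past every backed-up
# row, so each insert above can only conflict on the unique index (uid = 10).
# If the base were not rebased, a freshly generated id could collide with a restored
# primary key, and ON DUPLICATE KEY UPDATE would flip some row with uid < 10 to c = 'b'.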
run_sql "SELECT COUNT(*) AS RESCNT FROM test.common WHERE uid < 10 AND c = 'b';"
check_contains "RESCNT: 0"
run_sql "SELECT COUNT(*) AS RESCNT FROM test.pk WHERE uid < 10 AND c = 'b';"
check_contains "RESCNT: 0"
70 changes: 70 additions & 0 deletions br/tests/run_group_br_tests.sh
@@ -0,0 +1,70 @@
#!/usr/bin/env bash

# This script splits the integration tests into 9 groups to support parallel test execution.
# All the integration tests are located in the br/tests directory; only the directories
# containing a run.sh are considered valid br integration tests.
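# Example usage (from the br/tests directory):
#   ./run_group_br_tests.sh G08     # run a single group; G08 now includes br_autorandom
#   ./run_group_br_tests.sh others  # fail if any br_* case is missing from the groups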

set -eo pipefail

# Step 1
CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
group=$1
export COV_DIR="/tmp/group_cover"
rm -rf $COV_DIR
mkdir -p $COV_DIR

# Define groups
# Note: if a new group is added, its name must also be added to the CI configuration:
# * https://github.com/PingCAP-QE/ci/blob/main/pipelines/pingcap/tidb/latest/pull_br_integration_test.groovy
# Groups are balanced so that each one takes roughly the same amount of time, which reduces CI waiting time:
# multiple light tests are grouped together, while heavy tests get a group of their own.
declare -A groups
groups=(
["G00"]="br_300_small_tables br_backup_empty br_backup_version br_cache_table br_case_sensitive br_charset_gbk br_check_new_collocation_enable"
["G01"]="br_autoid br_crypter2 br_db br_db_online br_db_online_newkv br_db_skip br_debug_meta br_ebs br_foreign_key br_full"
["G02"]="br_full_cluster_restore br_full_ddl br_full_index br_gcs br_history"
["G03"]='br_incompatible_tidb_config br_incremental br_incremental_ddl br_incremental_index br_pitr'
["G04"]='br_incremental_only_ddl br_incremental_same_table br_insert_after_restore br_key_locked br_log_test br_move_backup br_mv_index br_other br_partition_add_index'
["G05"]='br_range br_rawkv br_replica_read br_restore_TDE_enable br_restore_log_task_enable br_s3 br_shuffle_leader br_shuffle_region br_single_table'
["G06"]='br_skip_checksum br_small_batch_size br_split_region_fail br_systables br_table_filter br_txn br_stats'
["G07"]='br_clustered_index br_crypter br_table_partition br_tidb_placement_policy br_tiflash br_tikv_outage'
["G08"]='br_tikv_outage2 br_ttl br_views_and_sequences br_z_gc_safepoint br_autorandom'
)

# Get other cases not in groups, to avoid missing any case
others=()
for script in "$CUR"/*/run.sh; do
test_name="$(basename "$(dirname "$script")")"
if [[ $test_name != br* ]]; then
continue
fi
# shellcheck disable=SC2076
if [[ ! " ${groups[*]} " =~ " ${test_name} " ]]; then
others+=("${test_name}")
fi
done

if [[ "$group" == "others" ]]; then
if [[ ${#others[@]} -eq 0 ]]; then
echo "All br integration test cases have been added to groups"
exit 0
fi
echo "Error: "$others" is not added to any group in br/tests/run_group_br_tests.sh"
exit 1
elif [[ " ${!groups[*]} " =~ " ${group} " ]]; then
test_names="${groups[${group}]}"
# Run test cases
if [[ -n $test_names ]]; then
echo ""
echo "Run cases: ${test_names}"
for case_name in $test_names; do
echo "Run cases: ${case_name}"
rm -rf /tmp/backup_restore_test
mkdir -p /tmp/backup_restore_test
TEST_NAME=${case_name} ${CUR}/run.sh
done
fi
else
echo "Error: invalid group name: ${group}"
exit 1
fi