Merge branch 'master' into issue39593
hawkingrei authored Jan 11, 2023
2 parents 0d0e580 + eff7462 commit 7ce97bb
Showing 9 changed files with 158 additions and 23 deletions.
25 changes: 25 additions & 0 deletions ci.md
@@ -0,0 +1,25 @@
# Commands to trigger ci pipeline

## Guide

The CI pipeline runs when a comment on your pull request matches one of the commands below. Some tasks are not triggered automatically and must be triggered manually with these commands; for example, commenting `/run-unit-test` on a pull request triggers the `tidb_ghpr_unit_test` pipeline.

## Commands

| ci pipeline | Commands |
| ---------------------------------------- |-----------------------------------------------------------------|
| tidb_ghpr_coverage | /run-coverage |
| tidb_ghpr_build_arm64 | /run-build-arm64 comment=true |
| tidb_ghpr_common_test | /run-common-test<br />/run-integration-tests |
| tidb_ghpr_integration_br_test | /run-integration-br-test<br />/run-integration-tests |
| tidb_ghpr_integration_campatibility_test | /run-integration-compatibility-test<br />/run-integration-tests |
| tidb_ghpr_integration_common_test | /run-integration-common-test<br />/run-integration-tests |
| tidb_ghpr_integration_copr_test | /run-integration-copr-test<br />/run-integration-tests |
| tidb_ghpr_integration_ddl_test | /run-integration-ddl-test<br />/run-integration-tests |
| tidb_ghpr_monitor_test | /run-monitor-test |
| tidb_ghpr_mybatis | /run-mybatis-test<br />/run-integration-tests |
| tidb_ghpr_sqllogic_test_1 | /run-sqllogic-test<br />/run-integration-tests |
| tidb_ghpr_sqllogic_test_2 | /run-sqllogic-test<br />/run-integration-tests |
| tidb_ghpr_tics_test | /run-tics-test<br />/run-integration-tests |
| tidb_ghpr_unit_test | /run-unit-test<br />/run-all-tests<br />/merge |

16 changes: 13 additions & 3 deletions ddl/index_merge_tmp.go
@@ -58,6 +58,15 @@ func (w *mergeIndexWorker) batchCheckTemporaryUniqueKey(txn kv.Transaction, idxR
}
if !idxRecords[i].delete {
idxRecords[i].skip = true
} else {
// Prevent deleting an unexpected index KV.
hdInVal, err := tablecodec.DecodeHandleInUniqueIndexValue(val, w.table.Meta().IsCommonHandle)
if err != nil {
return errors.Trace(err)
}
if !idxRecords[i].handle.Equal(hdInVal) {
idxRecords[i].skip = true
}
}
} else if idxRecords[i].distinct {
// The keys in w.batchCheckKeys also maybe duplicate,
@@ -75,6 +84,7 @@ type temporaryIndexRecord struct {
delete bool
unique bool
distinct bool
handle kv.Handle
rowKey kv.Key
}

@@ -136,7 +146,8 @@ func (w *mergeIndexWorker) BackfillDataInTxn(taskRange reorgBackfillTask) (taskC

// Lock the corresponding row keys so that it doesn't modify the index KVs
// that are changing by a pessimistic transaction.
err := txn.LockKeys(context.Background(), new(kv.LockCtx), idxRecord.rowKey)
rowKey := tablecodec.EncodeRecordKey(w.table.RecordPrefix(), idxRecord.handle)
err := txn.LockKeys(context.Background(), new(kv.LockCtx), rowKey)
if err != nil {
return errors.Trace(err)
}
@@ -228,14 +239,13 @@ func (w *mergeIndexWorker) fetchTempIndexVals(txn kv.Transaction, taskRange reor
return false, err
}
}
rowKey := tablecodec.EncodeRecordKey(w.table.RecordPrefix(), handle)

originIdxKey := make([]byte, len(indexKey))
copy(originIdxKey, indexKey)
tablecodec.TempIndexKey2IndexKey(w.index.Meta().ID, originIdxKey)

idxRecord := &temporaryIndexRecord{
rowKey: rowKey,
handle: handle,
delete: isDelete,
unique: unique,
skip: false,
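The new branch in batchCheckTemporaryUniqueKey only lets a temp-index delete through when the handle decoded from the existing unique-index value matches the handle of the record being merged; otherwise the existing index KV belongs to a different row and the delete is skipped. Below is a minimal standalone sketch of that guard, assuming plain int64 handles in place of kv.Handle and using the illustrative names tempIndexRecord and markSkips rather than the real types:

```go
package main

import "fmt"

// tempIndexRecord is a pared-down stand-in for temporaryIndexRecord: it keeps
// the row handle so the merge worker can compare handles before applying a
// delete and re-derive the row key later.
type tempIndexRecord struct {
	handle int64
	delete bool
	skip   bool
}

// markSkips mimics the new branch for keys that already exist in the
// destination unique index: non-delete records are skipped as before, and a
// delete is skipped too when the handle stored in the existing index value
// points at a different row, preventing deletion of an unexpected index KV.
func markSkips(records []tempIndexRecord, handleInIndexValue int64) {
	for i := range records {
		if !records[i].delete {
			records[i].skip = true
			continue
		}
		if records[i].handle != handleInIndexValue {
			records[i].skip = true
		}
	}
}

func main() {
	recs := []tempIndexRecord{
		{handle: 1, delete: true},  // same row as the existing index KV: delete proceeds
		{handle: 2, delete: true},  // different row: delete is skipped
		{handle: 3, delete: false}, // not a delete: skipped as before
	}
	markSkips(recs, 1)
	for _, r := range recs {
		fmt.Printf("handle=%d delete=%v skip=%v\n", r.handle, r.delete, r.skip)
	}
}
```

Storing the handle on temporaryIndexRecord (instead of a pre-built rowKey) also lets BackfillDataInTxn re-derive the row key with tablecodec.EncodeRecordKey right before locking it, which is the other half of this diff.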
37 changes: 37 additions & 0 deletions ddl/index_merge_tmp_test.go
@@ -368,6 +368,43 @@ func TestAddIndexMergeIndexUpdateOnDeleteOnly(t *testing.T) {
tk.MustExec("admin check table t;")
}

func TestAddIndexMergeDeleteUniqueOnWriteOnly(t *testing.T) {
store, dom := testkit.CreateMockStoreAndDomain(t)

tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")
tk.MustExec("create table t(a int default 0, b int default 0);")
tk.MustExec("insert into t values (1, 1), (2, 2), (3, 3), (4, 4);")

tk1 := testkit.NewTestKit(t, store)
tk1.MustExec("use test")

d := dom.DDL()
originalCallback := d.GetHook()
defer d.SetHook(originalCallback)
callback := &ddl.TestDDLCallback{}
onJobUpdatedExportedFunc := func(job *model.Job) {
if t.Failed() {
return
}
var err error
switch job.SchemaState {
case model.StateDeleteOnly:
_, err = tk1.Exec("insert into t values (5, 5);")
assert.NoError(t, err)
case model.StateWriteOnly:
_, err = tk1.Exec("insert into t values (5, 7);")
assert.NoError(t, err)
_, err = tk1.Exec("delete from t where b = 7;")
assert.NoError(t, err)
}
}
callback.OnJobUpdatedExported.Store(&onJobUpdatedExportedFunc)
d.SetHook(callback)
tk.MustExec("alter table t add unique index idx(a);")
tk.MustExec("admin check table t;")
}

func TestAddIndexMergeConflictWithPessimistic(t *testing.T) {
store, dom := testkit.CreateMockStoreAndDomain(t)
tk := testkit.NewTestKit(t, store)
2 changes: 1 addition & 1 deletion errno/errname.go
@@ -1102,7 +1102,7 @@ var MySQLErrName = map[uint16]*mysql.ErrMessage{

ErrColumnInChange: mysql.Message("column %s id %d does not exist, this column may have been updated by other DDL ran in parallel", nil),
// TiKV/PD errors.
ErrPDServerTimeout: mysql.Message("PD server timeout", nil),
ErrPDServerTimeout: mysql.Message("PD server timeout: %s", nil),
ErrTiKVServerTimeout: mysql.Message("TiKV server timeout", nil),
ErrTiKVServerBusy: mysql.Message("TiKV server is busy", nil),
ErrTiFlashServerTimeout: mysql.Message("TiFlash server timeout", nil),
2 changes: 1 addition & 1 deletion errors.toml
@@ -2698,7 +2698,7 @@ TTL manager has timed out, pessimistic locks may expire, please commit or rollba

["tikv:9001"]
error = '''
PD server timeout
PD server timeout: %s
'''

["tikv:9002"]
3 changes: 0 additions & 3 deletions store/driver/error/error.go
@@ -102,9 +102,6 @@ func ToTiDBErr(err error) error {

var pdServerTimeout *tikverr.ErrPDServerTimeout
if stderrs.As(err, &pdServerTimeout) {
if len(pdServerTimeout.Error()) == 0 {
return ErrPDServerTimeout
}
return ErrPDServerTimeout.GenWithStackByArgs(pdServerTimeout.Error())
}

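Taken together, the errname.go, errors.toml, and error.go changes make the PD timeout detail part of the error message itself: the template gains a `%s` placeholder, so ToTiDBErr can always pass the wrapped PD error text through and the old special case for an empty message goes away. A minimal sketch of the resulting formatting, with fmt.Errorf standing in for ErrPDServerTimeout.GenWithStackByArgs:

```go
package main

import (
	"errors"
	"fmt"
)

// errPDServerTimeout mirrors the updated template in errno/errname.go and
// errors.toml: the detail from the underlying PD error is interpolated.
const errPDServerTimeout = "PD server timeout: %s"

// toTiDBErr is a simplified stand-in for ToTiDBErr in store/driver/error:
// with the %s placeholder, even an empty detail string formats cleanly, so
// the length check removed in this diff is no longer needed.
func toTiDBErr(pdErr error) error {
	return fmt.Errorf(errPDServerTimeout, pdErr.Error())
}

func main() {
	fmt.Println(toTiDBErr(errors.New("rpc error: context deadline exceeded")))
	// PD server timeout: rpc error: context deadline exceeded
}
```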
2 changes: 1 addition & 1 deletion ttl/sqlbuilder/sql.go
@@ -43,7 +43,7 @@ func writeDatum(restoreCtx *format.RestoreCtx, d types.Datum, ft *types.FieldTyp
switch ft.GetType() {
case mysql.TypeBit, mysql.TypeBlob, mysql.TypeLongBlob, mysql.TypeTinyBlob:
return writeHex(restoreCtx.In, d)
case mysql.TypeString, mysql.TypeVarString, mysql.TypeVarchar:
case mysql.TypeString, mysql.TypeVarString, mysql.TypeVarchar, mysql.TypeEnum, mysql.TypeSet:
if mysql.HasBinaryFlag(ft.GetFlag()) {
return writeHex(restoreCtx.In, d)
}
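The writeDatum change adds mysql.TypeEnum and mysql.TypeSet to the branch already used for string types, so enum and set values in a TTL table's primary key are hex-encoded when the column has the binary flag and otherwise emitted as quoted string literals. A rough standalone sketch of that decision, assuming simple quote doubling for escaping (the real code writes through the restore context and its writeHex helper; formatStringLike is an illustrative name):

```go
package main

import (
	"fmt"
	"strings"
)

// formatStringLike sketches the branch taken for string, varchar, enum and set
// values when building TTL delete SQL: binary columns are hex-encoded, everything
// else becomes a single-quoted literal with embedded quotes doubled.
func formatStringLike(val string, binaryFlag bool) string {
	if binaryFlag {
		return fmt.Sprintf("x'%x'", []byte(val))
	}
	return "'" + strings.ReplaceAll(val, "'", "''") + "'"
}

func main() {
	fmt.Println(formatStringLike("e3'", false))    // 'e3'''
	fmt.Println(formatStringLike(";你好👋", false)) // ';你好👋'
	fmt.Println(formatStringLike("e1", true))      // x'6531'
}
```

The new enum and set cases in TestFormatSQLDatum below exercise exactly these kinds of values, including embedded quotes and multi-byte characters.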
93 changes: 79 additions & 14 deletions ttl/sqlbuilder/sql_test.go
@@ -159,11 +159,50 @@ func TestEscape(t *testing.T) {
}

func TestFormatSQLDatum(t *testing.T) {
// invalid pk types contains the types that should not exist in primary keys of a TTL table.
// We do not need to check sqlbuilder.FormatSQLDatum for these types
invalidPKTypes := []struct {
types []string
errMsg string
}{
{
types: []string{"json"},
errMsg: "[ddl:3152]JSON column 'pk0' cannot be used in key specification.",
},
{
types: []string{"blob"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk0' used in key specification without a key length",
},
{
types: []string{"blob(8)"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk0' used in key specification without a key length",
},
{
types: []string{"text"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk0' used in key specification without a key length",
},
{
types: []string{"text(8)"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk0' used in key specification without a key length",
},
{
types: []string{"int", "json"},
errMsg: "[ddl:3152]JSON column 'pk1' cannot be used in key specification.",
},
{
types: []string{"int", "blob"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk1' used in key specification without a key length",
},
{
types: []string{"int", "text"},
errMsg: "[ddl:1170]BLOB/TEXT column 'pk1' used in key specification without a key length",
},
}

cases := []struct {
ft string
values []interface{}
hex bool
notSupport bool
ft string
values []interface{}
hex bool
}{
{
ft: "int",
@@ -240,21 +279,52 @@
ft: "datetime",
values: []interface{}{"2022-01-02 12:11:11", "2022-01-02"},
},
{
ft: "datetime(6)",
values: []interface{}{"2022-01-02 12:11:11.123456"},
},
{
ft: "timestamp",
values: []interface{}{"2022-01-02 12:11:11", "2022-01-02"},
},
{
ft: "json",
values: []interface{}{"{}"},
notSupport: true,
ft: "timestamp(6)",
values: []interface{}{"2022-01-02 12:11:11.123456"},
},
{
ft: "enum('e1', 'e2', \"e3'\", 'e4\"', ';你好👋')",
values: []interface{}{"e1", "e2", "e3'", "e4\"", ";你好👋"},
},
{
ft: "set('e1', 'e2', \"e3'\", 'e4\"', ';你好👋')",
values: []interface{}{"", "e1", "e2", "e3'", "e4\"", ";你好👋"},
},
}

store, do := testkit.CreateMockStoreAndDomain(t)
tk := testkit.NewTestKit(t, store)
tk.MustExec("use test")

for _, c := range invalidPKTypes {
var sb strings.Builder
sb.WriteString("create table t(")
cols := make([]string, 0, len(invalidPKTypes))
for i, tp := range c.types {
colName := fmt.Sprintf("pk%d", i)
cols = append(cols, colName)
sb.WriteString(colName)
sb.WriteString(" ")
sb.WriteString(tp)
sb.WriteString(", ")
}
sb.WriteString("t timestamp, ")
sb.WriteString("primary key (")
sb.WriteString(strings.Join(cols, ", "))
sb.WriteString(")) TTL=`t` + INTERVAL 1 DAY")
err := tk.ExecToErr(sb.String())
require.Equal(t, c.errMsg, err.Error(), sb.String())
}

// create a table with n columns
var sb strings.Builder
sb.WriteString("CREATE TABLE t (id varchar(32) primary key")
@@ -290,13 +360,8 @@
col := tbl.Meta().FindPublicColumnByName(colName)
d := rows[0].GetDatum(0, &col.FieldType)
s, err := sqlbuilder.FormatSQLDatum(d, &col.FieldType)
if c.notSupport {
require.Error(t, err)
} else {
require.NoError(t, err)
//fmt.Printf("%s: %s\n", c.ft, s)
tk.MustQuery("select id from t where " + colName + "=" + s).Check(testkit.Rows(rowID))
}
require.NoError(t, err)
tk.MustQuery("select id from t where " + colName + "=" + s).Check(testkit.Rows(rowID))
if c.hex {
require.True(t, strings.HasPrefix(s, "x'"), "ft: %s, got: %s", c.ft, s)
}
1 change: 1 addition & 0 deletions ttl/ttlworker/del.go
@@ -111,6 +111,7 @@ func (t *ttlDeleteTask) doDelete(ctx context.Context, rawSe session.Session) (re
zap.Error(err),
zap.String("table", t.tbl.Schema.O+"."+t.tbl.Name.O),
)
return
}

tracer.EnterPhase(metrics.PhaseWaitToken)
