From 9805317074e95222dd1a6adcb75bfe9439b33717 Mon Sep 17 00:00:00 2001 From: Chunzhu Li Date: Mon, 9 Nov 2020 01:30:06 -0600 Subject: [PATCH] support both filesize and rows arguments (#177) * support both filesize and rows arguments * fix bash * add unit test for the situation that both filesize and rows are enabled * address comment * address comment --- dumpling/tests/consistency/run.sh | 4 +- dumpling/tests/naughty_strings/run.sh | 4 +- dumpling/tests/null_unique_index/run.sh | 2 +- dumpling/tests/primary_key/run.sh | 4 +- ... quote-database.quote-table.000000000.sql} | 0 dumpling/tests/quote/run.sh | 4 +- .../{rows.t.0.sql => rows.t.000000000.sql} | 0 dumpling/tests/rows/run.sh | 45 +++++++++++-- dumpling/tests/s3/run.sh | 4 +- dumpling/tests/tls/run.sh | 2 +- dumpling/v4/export/config.go | 2 +- dumpling/v4/export/dump.go | 4 +- dumpling/v4/export/prepare.go | 4 -- dumpling/v4/export/prepare_test.go | 4 +- dumpling/v4/export/writer.go | 53 +++++++++------ dumpling/v4/export/writer_test.go | 67 +++++++++++++++++-- 16 files changed, 153 insertions(+), 50 deletions(-) rename dumpling/tests/quote/data/{quote-database.quote-table.0.sql => quote-database.quote-table.000000000.sql} (100%) rename dumpling/tests/rows/data/{rows.t.0.sql => rows.t.000000000.sql} (100%) diff --git a/dumpling/tests/consistency/run.sh b/dumpling/tests/consistency/run.sh index fe5379c6..a12419a4 100644 --- a/dumpling/tests/consistency/run.sh +++ b/dumpling/tests/consistency/run.sh @@ -34,7 +34,7 @@ run_sql "insert into $DB_NAME.$TABLE_NAME values $(seq -s, 100 | sed 's/,*$//g' wait # check data record count -cnt=`grep -o "(1)" ${DUMPLING_OUTPUT_DIR}/${DB_NAME}.${TABLE_NAME}.0.sql|wc -l` +cnt=`grep -o "(1)" ${DUMPLING_OUTPUT_DIR}/${DB_NAME}.${TABLE_NAME}.000000000.sql|wc -l` echo "1st records count is ${cnt}" [ $cnt = 100 ] @@ -55,6 +55,6 @@ fi # test dumpling normally export GO_FAILPOINTS="" run_dumpling -cnt=`grep -o "(1)" ${DUMPLING_OUTPUT_DIR}/${DB_NAME}.${TABLE_NAME}.0.sql|wc -l` +cnt=`grep -o "(1)" ${DUMPLING_OUTPUT_DIR}/${DB_NAME}.${TABLE_NAME}.000000000.sql|wc -l` echo "2nd records count is ${cnt}" [ $cnt = 200 ] diff --git a/dumpling/tests/naughty_strings/run.sh b/dumpling/tests/naughty_strings/run.sh index 105839ef..238bc694 100755 --- a/dumpling/tests/naughty_strings/run.sh +++ b/dumpling/tests/naughty_strings/run.sh @@ -9,9 +9,9 @@ run_sql_file "$DUMPLING_BASE_NAME/data/naughty_strings.t-schema.sql" run_sql_file "$DUMPLING_BASE_NAME/data/naughty_strings.t.sql" run_dumpling --escape-backslash=false # FIXME should compare the schemas too, but they differ too much among MySQL versions. -diff "$DUMPLING_BASE_NAME/expect/naughty_strings.t.sql" "$DUMPLING_OUTPUT_DIR/naughty_strings.t.0.sql" +diff "$DUMPLING_BASE_NAME/expect/naughty_strings.t.sql" "$DUMPLING_OUTPUT_DIR/naughty_strings.t.000000000.sql" run_sql_file "$DUMPLING_BASE_NAME/data/naughty_strings.escape-schema.sql" run_sql_file "$DUMPLING_BASE_NAME/data/naughty_strings.escape.sql" run_dumpling --escape-backslash=true # FIXME should compare the schemas too, but they differ too much among MySQL versions. 
-diff "$DUMPLING_BASE_NAME/expect/naughty_strings.escape.sql" "$DUMPLING_OUTPUT_DIR/naughty_strings.escape.0.sql" +diff "$DUMPLING_BASE_NAME/expect/naughty_strings.escape.sql" "$DUMPLING_OUTPUT_DIR/naughty_strings.escape.000000000.sql" diff --git a/dumpling/tests/null_unique_index/run.sh b/dumpling/tests/null_unique_index/run.sh index 6b67baac..e007c1ae 100644 --- a/dumpling/tests/null_unique_index/run.sh +++ b/dumpling/tests/null_unique_index/run.sh @@ -19,6 +19,6 @@ export DUMPLING_TEST_DATABASE=$DB_NAME run_dumpling -r 1 data="NULL" -cnt=$(sed "s/$data/$data\n/g" $DUMPLING_OUTPUT_DIR/$DB_NAME.t.1.sql | grep -c "$data") || true +cnt=$(sed "s/$data/$data\n/g" $DUMPLING_OUTPUT_DIR/$DB_NAME.t.000000001.sql | grep -c "$data") || true [ $cnt = 1 ] diff --git a/dumpling/tests/primary_key/run.sh b/dumpling/tests/primary_key/run.sh index 04cb9f2a..90e11f7a 100644 --- a/dumpling/tests/primary_key/run.sh +++ b/dumpling/tests/primary_key/run.sh @@ -16,6 +16,6 @@ for file_path in "$DUMPLING_BASE_NAME"/data/*; do base_name=$(basename "$file_path") table_name="${base_name%.sql}" file_should_exist "$DUMPLING_BASE_NAME/result/$table_name.sql" - file_should_exist "$DUMPLING_OUTPUT_DIR/primary_key.$table_name.0.sql" - diff "$DUMPLING_BASE_NAME/result/$table_name.sql" "$DUMPLING_OUTPUT_DIR/primary_key.$table_name.0.sql" + file_should_exist "$DUMPLING_OUTPUT_DIR/primary_key.$table_name.000000000.sql" + diff "$DUMPLING_BASE_NAME/result/$table_name.sql" "$DUMPLING_OUTPUT_DIR/primary_key.$table_name.000000000.sql" done diff --git a/dumpling/tests/quote/data/quote-database.quote-table.0.sql b/dumpling/tests/quote/data/quote-database.quote-table.000000000.sql similarity index 100% rename from dumpling/tests/quote/data/quote-database.quote-table.0.sql rename to dumpling/tests/quote/data/quote-database.quote-table.000000000.sql diff --git a/dumpling/tests/quote/run.sh b/dumpling/tests/quote/run.sh index 123e4126..6ad521a3 100644 --- a/dumpling/tests/quote/run.sh +++ b/dumpling/tests/quote/run.sh @@ -3,7 +3,7 @@ set -eu mkdir -p "$DUMPLING_OUTPUT_DIR"/data -cp "$DUMPLING_BASE_NAME/data/quote-database.quote-table.0.sql" "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable.0.sql" +cp "$DUMPLING_BASE_NAME/data/quote-database.quote-table.000000000.sql" "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable.000000000.sql" cp "$DUMPLING_BASE_NAME/data/quote-database.quote-table-schema.sql" "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable-schema.sql" cp "$DUMPLING_BASE_NAME/data/quote-database-schema-create.sql" "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase-schema-create.sql" @@ -13,7 +13,7 @@ run_sql_file "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase-schema-create.sql" export DUMPLING_TEST_DATABASE=$db run_sql_file "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable-schema.sql" -run_sql_file "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable.0.sql" +run_sql_file "$DUMPLING_OUTPUT_DIR/data/quo\`te%2Fdatabase.quo\`te%2Ftable.000000000.sql" run_dumpling diff --git a/dumpling/tests/rows/data/rows.t.0.sql b/dumpling/tests/rows/data/rows.t.000000000.sql similarity index 100% rename from dumpling/tests/rows/data/rows.t.0.sql rename to dumpling/tests/rows/data/rows.t.000000000.sql diff --git a/dumpling/tests/rows/run.sh b/dumpling/tests/rows/run.sh index 4d905be3..ebda8d3f 100644 --- a/dumpling/tests/rows/run.sh +++ b/dumpling/tests/rows/run.sh @@ -19,7 +19,10 @@ run_sql "create database $DB_NAME;" run_sql "create table $DB_NAME.$TABLE_NAME (id int not null auto_increment primary 
key, a varchar(24));" # insert 100 records -run_sql_file "$cur/data/rows.t.0.sql" +run_sql_file "$cur/data/rows.t.000000000.sql" + +# make sure the estimated count is accurate +run_sql "analyze table $DB_NAME.$TABLE_NAME" # dumping export DUMPLING_TEST_DATABASE=$DB_NAME @@ -27,11 +30,10 @@ run_dumpling --rows 10 --loglevel debug # the dumping result is expected to be: # 10 files for insertion -# FIXME the result of EXPLAIN SELECT `id` FROM `rows`.`t` randomly equal to 1 or 100, this could affect on file num. -# file_num=$(find "$DUMPLING_OUTPUT_DIR" -maxdepth 1 -iname "$DB_NAME.$TABLE_NAME.*.sql" | wc -l) -# if [ "$file_num" -ne 10 ]; then -# echo "obtain file number: $file_num, but expect: 10" && exit 1 -# fi +file_num=$(find "$DUMPLING_OUTPUT_DIR" -maxdepth 1 -iname "$DB_NAME.$TABLE_NAME.*.sql" | wc -l) +if [ "$file_num" -ne 10 ]; then + echo "obtain file number: $file_num, but expect: 10" && exit 1 +fi cat "$cur/conf/lightning.toml" # use lightning import data to tidb @@ -40,4 +42,35 @@ run_lightning $cur/conf/lightning.toml # check mysql and tidb data check_sync_diff $cur/conf/diff_config.toml +# test dumpling with both rows and filesize +rm -rf "$DUMPLING_OUTPUT_DIR" +run_dumpling --rows 10 --filesize 100B --loglevel debug +# the dumping result is expected to be: +# 50 files for insertion +file_num=$(find "$DUMPLING_OUTPUT_DIR" -maxdepth 1 -iname "$DB_NAME.$TABLE_NAME.*.sql" | wc -l) +if [ "$file_num" -ne 50 ]; then + echo "obtain file number: $file_num, but expect: 50" && exit 1 +fi + +for i in `seq 1 10` +do + r=$(printf "%02d" $i) + for j in `seq 0 4` + do + file_name="$DUMPLING_OUTPUT_DIR/$DB_NAME.$TABLE_NAME.0000000${r}000${j}.sql" + if [ ! -f "$file_name" ]; then + echo "file $file_name doesn't exist, which is not expected" && exit 1 + fi + done +done + +# drop database on tidb +export DUMPLING_TEST_PORT=4000 +run_sql "drop database if exists \`$DB_NAME\`;" +cat "$cur/conf/lightning.toml" +# use lightning import data to tidb +run_lightning $cur/conf/lightning.toml + +# check mysql and tidb data +check_sync_diff $cur/conf/diff_config.toml diff --git a/dumpling/tests/s3/run.sh b/dumpling/tests/s3/run.sh index 83d519de..c154a1cb 100755 --- a/dumpling/tests/s3/run.sh +++ b/dumpling/tests/s3/run.sh @@ -45,8 +45,8 @@ ls "${HOST_DIR}" curl -o "${HOST_DIR}/s3-schema-create.sql" http://localhost:5000/mybucket/dump/s3-schema-create.sql curl -o "${HOST_DIR}/s3.t-schema.sql" http://localhost:5000/mybucket/dump/s3.t-schema.sql -curl -o "${HOST_DIR}/s3.t.0.sql" http://localhost:5000/mybucket/dump/s3.t.0.sql +curl -o "${HOST_DIR}/s3.t.000000000.sql" http://localhost:5000/mybucket/dump/s3.t.000000000.sql file_should_exist "$HOST_DIR/s3-schema-create.sql" file_should_exist "$HOST_DIR/s3.t-schema.sql" -file_should_exist "$HOST_DIR/s3.t.0.sql" +file_should_exist "$HOST_DIR/s3.t.000000000.sql" diff --git a/dumpling/tests/tls/run.sh b/dumpling/tests/tls/run.sh index ae1e1cc7..05ab096d 100755 --- a/dumpling/tests/tls/run.sh +++ b/dumpling/tests/tls/run.sh @@ -24,4 +24,4 @@ run_dumpling --ca "$DUMPLING_TEST_DIR/ca.pem" --cert "$DUMPLING_TEST_DIR/dumplin file_should_exist "$DUMPLING_OUTPUT_DIR/tls-schema-create.sql" file_should_exist "$DUMPLING_OUTPUT_DIR/tls.t-schema.sql" -file_should_exist "$DUMPLING_OUTPUT_DIR/tls.t.0.sql" +file_should_exist "$DUMPLING_OUTPUT_DIR/tls.t.000000000.sql" diff --git a/dumpling/v4/export/config.go b/dumpling/v4/export/config.go index 2d97e192..48f95a35 100644 --- a/dumpling/v4/export/config.go +++ b/dumpling/v4/export/config.go @@ -95,7 +95,7 @@ func DefaultConfig() 
*Config { NoViews: true, Rows: UnspecifiedSize, Where: "", - FileType: "SQL", + FileType: "sql", NoHeader: false, NoSchemas: false, NoData: false, diff --git a/dumpling/v4/export/dump.go b/dumpling/v4/export/dump.go index dbf59220..d41410af 100755 --- a/dumpling/v4/export/dump.go +++ b/dumpling/v4/export/dump.go @@ -3,7 +3,6 @@ package export import ( "context" "database/sql" - "errors" "strconv" "strings" "time" @@ -12,6 +11,7 @@ import ( _ "github.com/go-sql-driver/mysql" "github.com/pingcap/br/pkg/utils" + "github.com/pingcap/errors" "github.com/pingcap/failpoint" pd "github.com/tikv/pd/client" "go.uber.org/zap" @@ -218,6 +218,8 @@ func Dump(pCtx context.Context, conf *Config) (err error) { writer = SQLWriter{SimpleWriter: simpleWriter} case "csv": writer = CSVWriter{SimpleWriter: simpleWriter} + default: + return errors.Errorf("unsupported filetype %s", conf.FileType) } if conf.Sql == "" { diff --git a/dumpling/v4/export/prepare.go b/dumpling/v4/export/prepare.go index 06c5e6be..9273314b 100644 --- a/dumpling/v4/export/prepare.go +++ b/dumpling/v4/export/prepare.go @@ -99,10 +99,6 @@ func adjustConfig(ctx context.Context, conf *Config) error { } } - if conf.Rows != UnspecifiedSize { - // Disable filesize if rows was set - conf.FileSize = UnspecifiedSize - } if conf.SessionParams == nil { conf.SessionParams = make(map[string]interface{}) } diff --git a/dumpling/v4/export/prepare_test.go b/dumpling/v4/export/prepare_test.go index ff82d364..95c78b1e 100644 --- a/dumpling/v4/export/prepare_test.go +++ b/dumpling/v4/export/prepare_test.go @@ -119,7 +119,7 @@ func (s *testPrepareSuite) TestAdjustConfig(c *C) { c.Assert(adjustConfig(nil, conf), IsNil) conf.Sql = "" conf.Rows = 5000 - conf.FileSize = uint64(5000) + conf.FileSize = 5000 c.Assert(adjustConfig(nil, conf), IsNil) - c.Assert(conf.FileSize, Equals, uint64(UnspecifiedSize)) + c.Assert(conf.FileSize, Equals, uint64(5000)) } diff --git a/dumpling/v4/export/writer.go b/dumpling/v4/export/writer.go index 7209cacc..4977cde2 100644 --- a/dumpling/v4/export/writer.go +++ b/dumpling/v4/export/writer.go @@ -3,6 +3,8 @@ package export import ( "bytes" "context" + "fmt" + "strings" "text/template" "github.com/pingcap/br/pkg/storage" @@ -72,12 +74,12 @@ func (f SQLWriter) WriteTableData(ctx context.Context, ir TableDataIR) error { fileName = fmt.Sprintf("%s.%s.%d.sql", ir.DatabaseName(), ir.TableName(), 0) } }*/ - namer := newOutputFileNamer(ir) - fileName, err := namer.NextName(f.cfg.OutputFileTemplate) + namer := newOutputFileNamer(ir, f.cfg.Rows != UnspecifiedSize, f.cfg.FileSize != UnspecifiedSize) + fileType := strings.ToLower(f.cfg.FileType) + fileName, err := namer.NextName(f.cfg.OutputFileTemplate, fileType) if err != nil { return err } - fileName += ".sql" chunksIter := ir defer chunksIter.Rows().Close() @@ -96,11 +98,10 @@ func (f SQLWriter) WriteTableData(ctx context.Context, ir TableDataIR) error { if f.cfg.FileSize == UnspecifiedSize { break } - fileName, err = namer.NextName(f.cfg.OutputFileTemplate) + fileName, err = namer.NextName(f.cfg.OutputFileTemplate, fileType) if err != nil { return err } - fileName += ".sql" } log.Debug("dumping table successfully", zap.String("table", ir.TableName())) @@ -126,9 +127,11 @@ func writeMetaToFile(ctx context.Context, target, metaSQL string, s storage.Exte type CSVWriter struct{ SimpleWriter } type outputFileNamer struct { - Index int - DB string - Table string + ChunkIndex int + FileIndex int + DB string + Table string + format string } type csvOption struct { @@ -137,12 +140,21 @@ type 
csvOption struct { delimiter []byte } -func newOutputFileNamer(ir TableDataIR) *outputFileNamer { - return &outputFileNamer{ - Index: ir.ChunkIndex(), +func newOutputFileNamer(ir TableDataIR, rows, fileSize bool) *outputFileNamer { + o := &outputFileNamer{ DB: ir.DatabaseName(), Table: ir.TableName(), } + o.ChunkIndex = ir.ChunkIndex() + o.FileIndex = 0 + if rows && fileSize { + o.format = "%09d%04d" + } else if fileSize { + o.format = "%09[2]d" + } else { + o.format = "%09[1]d" + } + return o } func (namer *outputFileNamer) render(tmpl *template.Template, subName string) (string, error) { @@ -153,21 +165,25 @@ func (namer *outputFileNamer) render(tmpl *template.Template, subName string) (s return bf.String(), nil } -func (namer *outputFileNamer) NextName(tmpl *template.Template) (string, error) { +func (namer *outputFileNamer) Index() string { + return fmt.Sprintf(namer.format, namer.ChunkIndex, namer.FileIndex) +} + +func (namer *outputFileNamer) NextName(tmpl *template.Template, fileType string) (string, error) { res, err := namer.render(tmpl, outputFileTemplateData) - namer.Index++ - return res, err + namer.FileIndex++ + return res + "." + fileType, err } func (f CSVWriter) WriteTableData(ctx context.Context, ir TableDataIR) error { log.Debug("start dumping table in csv format...", zap.String("table", ir.TableName())) - namer := newOutputFileNamer(ir) - fileName, err := namer.NextName(f.cfg.OutputFileTemplate) + namer := newOutputFileNamer(ir, f.cfg.Rows != UnspecifiedSize, f.cfg.FileSize != UnspecifiedSize) + fileType := strings.ToLower(f.cfg.FileType) + fileName, err := namer.NextName(f.cfg.OutputFileTemplate, fileType) if err != nil { return err } - fileName += ".csv" chunksIter := ir defer chunksIter.Rows().Close() @@ -192,11 +208,10 @@ func (f CSVWriter) WriteTableData(ctx context.Context, ir TableDataIR) error { if f.cfg.FileSize == UnspecifiedSize { break } - fileName, err = namer.NextName(f.cfg.OutputFileTemplate) + fileName, err = namer.NextName(f.cfg.OutputFileTemplate, fileType) if err != nil { return err } - fileName += ".csv" } log.Debug("dumping table in csv format successfully", zap.String("table", ir.TableName())) diff --git a/dumpling/v4/export/writer_test.go b/dumpling/v4/export/writer_test.go index 79fd9e18..6451b0ac 100644 --- a/dumpling/v4/export/writer_test.go +++ b/dumpling/v4/export/writer_test.go @@ -117,7 +117,7 @@ func (s *testDumpSuite) TestWriteTableData(c *C) { err = writer.WriteTableData(ctx, tableIR) c.Assert(err, IsNil) - p := path.Join(dir, "test.employee.0.sql") + p := path.Join(dir, "test.employee.000000000.sql") _, err = os.Stat(p) c.Assert(err, IsNil) bytes, err := ioutil.ReadFile(p) @@ -167,12 +167,69 @@ func (s *testDumpSuite) TestWriteTableDataWithFileSize(c *C) { c.Assert(err, IsNil) cases := map[string]string{ - "test.employee.0.sql": "/*!40101 SET NAMES binary*/;\n" + + "test.employee.000000000.sql": "/*!40101 SET NAMES binary*/;\n" + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + "INSERT INTO `employee` VALUES\n" + "(1,'male','bob@mail.com','020-1234',NULL),\n" + "(2,'female','sarah@mail.com','020-1253','healthy');\n", - "test.employee.1.sql": "/*!40101 SET NAMES binary*/;\n" + + "test.employee.000000001.sql": "/*!40101 SET NAMES binary*/;\n" + + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + + "INSERT INTO `employee` VALUES\n" + + "(3,'male','john@mail.com','020-1256','healthy'),\n" + + "(4,'female','sarah@mail.com','020-1235','healthy');\n", + } + + for p, expected := range cases { + p := path.Join(dir, p) + _, err = os.Stat(p) + c.Assert(err, 
IsNil) + bytes, err := ioutil.ReadFile(p) + c.Assert(err, IsNil) + c.Assert(string(bytes), Equals, expected) + } +} + +func (s *testDumpSuite) TestWriteTableDataWithFileSizeAndRows(c *C) { + dir := c.MkDir() + + ctx := context.Background() + + config := DefaultConfig() + config.OutputDirPath = dir + config.FileSize = 50 + config.Rows = 4 + err := adjustConfig(ctx, config) + c.Assert(err, IsNil) + specCmts := []string{ + "/*!40101 SET NAMES binary*/;", + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;", + } + config.FileSize += uint64(len(specCmts[0]) + 1) + config.FileSize += uint64(len(specCmts[1]) + 1) + config.FileSize += uint64(len("INSERT INTO `employees` VALUES\n")) + + simpleWriter, err := NewSimpleWriter(config) + c.Assert(err, IsNil) + writer := SQLWriter{SimpleWriter: simpleWriter} + + data := [][]driver.Value{ + {"1", "male", "bob@mail.com", "020-1234", nil}, + {"2", "female", "sarah@mail.com", "020-1253", "healthy"}, + {"3", "male", "john@mail.com", "020-1256", "healthy"}, + {"4", "female", "sarah@mail.com", "020-1235", "healthy"}, + } + colTypes := []string{"INT", "SET", "VARCHAR", "VARCHAR", "TEXT"} + tableIR := newMockTableIR("test", "employee", data, specCmts, colTypes) + err = writer.WriteTableData(ctx, tableIR) + c.Assert(err, IsNil) + + cases := map[string]string{ + "test.employee.0000000000000.sql": "/*!40101 SET NAMES binary*/;\n" + + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + + "INSERT INTO `employee` VALUES\n" + + "(1,'male','bob@mail.com','020-1234',NULL),\n" + + "(2,'female','sarah@mail.com','020-1253','healthy');\n", + "test.employee.0000000000001.sql": "/*!40101 SET NAMES binary*/;\n" + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + "INSERT INTO `employee` VALUES\n" + "(3,'male','john@mail.com','020-1256','healthy'),\n" + @@ -260,14 +317,14 @@ func (s *testDumpSuite) TestWriteTableDataWithStatementSize(c *C) { config.ExternalStorage = newStorage cases = map[string]string{ - "0-employee-te%25%2Fst.sql": "/*!40101 SET NAMES binary*/;\n" + + "000000000-employee-te%25%2Fst.sql": "/*!40101 SET NAMES binary*/;\n" + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + "INSERT INTO `employee` VALUES\n" + "(1,'male','bob@mail.com','020-1234',NULL),\n" + "(2,'female','sarah@mail.com','020-1253','healthy');\n" + "INSERT INTO `employee` VALUES\n" + "(3,'male','john@mail.com','020-1256','healthy');\n", - "1-employee-te%25%2Fst.sql": "/*!40101 SET NAMES binary*/;\n" + + "000000001-employee-te%25%2Fst.sql": "/*!40101 SET NAMES binary*/;\n" + "/*!40014 SET FOREIGN_KEY_CHECKS=0*/;\n" + "INSERT INTO `employee` VALUES\n" + "(4,'female','sarah@mail.com','020-1235','healthy');\n",
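A note on the naming scheme this patch introduces in writer.go: `outputFileNamer` now tracks a per-table `ChunkIndex` (set once from `ir.ChunkIndex()`) and a per-chunk `FileIndex` (bumped on every `NextName` call), and picks one of three zero-padded formats depending on which limits are active. This only works because the old `adjustConfig` behavior of resetting `FileSize` whenever `Rows` was set is removed in prepare.go, which is what the updated prepare_test.go now pins down (`FileSize` survives as 5000). Below is a minimal self-contained sketch of the namer; the format strings and index handling are taken from the diff, while the `db.table` prefix is a stand-in for the rendered `OutputFileTemplate`, which this patch references but does not show.

```go
package main

import "fmt"

// Simplified rendition of outputFileNamer from writer.go. The explicit
// argument indexes ([1]/[2]) are load-bearing: fmt only appends a
// %!(EXTRA ...) marker for unused arguments when no explicit index is
// present, so the single-index formats can safely be handed both values.
type outputFileNamer struct {
	ChunkIndex, FileIndex int
	format                string
}

func newFormat(rows, fileSize bool) string {
	switch {
	case rows && fileSize:
		return "%09d%04d" // 9-digit chunk index + 4-digit file index
	case fileSize:
		return "%09[2]d" // 9-digit file index only
	default:
		return "%09[1]d" // 9-digit chunk index only
	}
}

func (n *outputFileNamer) Index() string {
	return fmt.Sprintf(n.format, n.ChunkIndex, n.FileIndex)
}

// NextName appends the file type (lower-cased by its callers in
// writer.go), so SQLWriter and CSVWriter no longer bolt ".sql"/".csv"
// onto the rendered name themselves.
func (n *outputFileNamer) NextName(fileType string) string {
	name := "db.table." + n.Index() // stand-in for the template render
	n.FileIndex++
	return name + "." + fileType
}

func main() {
	n := &outputFileNamer{format: newFormat(true, true)}
	fmt.Println(n.NextName("sql")) // db.table.0000000000000.sql
	fmt.Println(n.NextName("sql")) // db.table.0000000000001.sql

	n = &outputFileNamer{ChunkIndex: 2, format: newFormat(false, false)}
	fmt.Println(n.NextName("sql")) // db.table.000000002.sql
}
```

This is also why the fixtures and assertions move from single-digit suffixes to nine digits (`rows.t.0.sql` becomes `rows.t.000000000.sql`), and to thirteen digits in `TestWriteTableDataWithFileSizeAndRows`, where both limits are active.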
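On the new branch of tests/rows/run.sh: the table holds 100 rows, so `--rows 10` yields 10 chunks, and `--filesize 100B` further splits each chunk into 5 files, hence the 50-file assertion. A small sketch enumerating exactly the names the bash loop checks (the 1-based chunk indexes come straight from the loop bounds in the test):

```go
package main

import "fmt"

// Enumerates the 50 data files tests/rows/run.sh expects after
// `run_dumpling --rows 10 --filesize 100B` on a 100-row table:
// 10 chunks x 5 files per chunk, each suffix being a 9-digit chunk
// index followed by a 4-digit file index.
func main() {
	for chunk := 1; chunk <= 10; chunk++ {
		for file := 0; file <= 4; file++ {
			fmt.Printf("rows.t.%09d%04d.sql\n", chunk, file)
		}
	}
}
```

The `analyze table` added before dumping is what makes the previously commented-out file-count assertion safe to re-enable: chunking by `--rows` depends on the estimated row count, so stale statistics could change the number of chunks and therefore the number of files.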
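Finally, the config.go/dump.go pair: the default `FileType` becomes lower-case `"sql"` to match the switch in dump.go, and that switch gains a `default` branch so an unrecognized type fails fast instead of silently leaving the writer unset. A minimal sketch of the shape of that check; `pickWriter` here is a hypothetical stand-in for illustration, not dumpling's API:

```go
package main

import (
	"fmt"
	"os"
)

// Hypothetical stand-in mirroring the new default branch in dump.go's
// writer switch: unknown file types now surface an error.
func pickWriter(fileType string) (string, error) {
	switch fileType {
	case "sql":
		return "SQLWriter", nil
	case "csv":
		return "CSVWriter", nil
	default:
		return "", fmt.Errorf("unsupported filetype %s", fileType)
	}
}

func main() {
	if _, err := pickWriter("tsv"); err != nil {
		fmt.Fprintln(os.Stderr, err) // unsupported filetype tsv
		os.Exit(2)
	}
}
```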