Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

types: merge json and types pkg #37578

Merged
merged 2 commits into from
Sep 5, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions .github/workflows/integration-test-dumpling-common.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,13 @@ jobs:
steps:
- name: 'checkout repository'
uses: actions/checkout@v3
with:
# FIXME: the version of dumpling is obtained from git;
# if the checkout doesn't contain a meaningful git history,
# the release version will be empty and the program will panic.
#
# See the `DUMPLING_LDFLAGS` in `Makefile.common` for more information
fetch-depth: 0
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@hawkingrei PTAL, I let it download the full history of TiDB to get the version through git describe --tags --dirty='-dev'. I think it's necessary to pass the dumpling integration test 🤦‍♂️

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@lance6716 PTAL

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I created an issue just a few minutes ago. #37628

LGTM

- name: 'set up golang'
uses: actions/setup-go@v3
with:
Expand Down
12 changes: 6 additions & 6 deletions build/nogo_config.json
Original file line number Diff line number Diff line change
Expand Up @@ -200,8 +200,8 @@
"br/pkg/lightning/mydump/": "br/pkg/lightning/mydump/",
"br/pkg/lightning/restore/opts": "br/pkg/lightning/restore/opts",
"executor/aggregate.go": "executor/aggregate.go",
"types/json/binary_functions.go": "types/json/binary_functions.go",
Copy link
Member

@hawkingrei hawkingrei Sep 5, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If you have time, it would be better to enable nogo for the whole types folder.

"types/json/binary_test.go": "types/json/binary_test.go",
"types/json_binary_functions.go": "types/json_binary_functions.go",
"types/json_binary_test.go": "types/json_binary_test.go",
"ddl/backfilling.go": "ddl/backfilling.go",
"ddl/column.go": "ddl/column.go",
"ddl/index.go": "ddl/index.go",
Expand Down Expand Up @@ -355,8 +355,8 @@
"br/pkg/lightning/mydump/": "br/pkg/lightning/mydump/",
"br/pkg/lightning/restore/opts": "br/pkg/lightning/restore/opts",
"executor/aggregate.go": "executor/aggregate.go",
"types/json/binary_functions.go": "types/json/binary_functions.go",
"types/json/binary_test.go": "types/json/binary_test.go",
"types/json_binary_functions.go": "types/json_binary_functions.go",
"types/json_binary_test.go": "types/json_binary_test.go",
"ddl/backfilling.go": "ddl/backfilling.go",
"ddl/column.go": "ddl/column.go",
"ddl/index.go": "ddl/index.go",
Expand Down Expand Up @@ -718,8 +718,8 @@
"br/pkg/lightning/mydump/": "br/pkg/lightning/mydump/",
"br/pkg/lightning/restore/opts": "br/pkg/lightning/restore/opts",
"executor/aggregate.go": "executor/aggregate.go",
"types/json/binary_functions.go": "types/json/binary_functions.go",
"types/json/binary_test.go": "types/json/binary_test.go",
"types/json_binary_functions.go": "types/json_binary_functions.go",
"types/json_binary_test.go": "types/json_binary_test.go",
"ddl/": "enable to ddl",
"expression/builtin_cast.go": "enable expression/builtin_cast.go",
"planner/core/plan.go": "planner/core/plan.go",
Expand Down
1 change: 0 additions & 1 deletion config/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ go_library(
deps = [
"//br/pkg/streamhelper/config",
"//parser/terror",
"//types/json",
"//util/logutil",
"//util/tikvutil",
"//util/versioninfo",
Expand Down
66 changes: 29 additions & 37 deletions config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ import (
zaplog "github.com/pingcap/log"
logbackupconf "github.com/pingcap/tidb/br/pkg/streamhelper/config"
"github.com/pingcap/tidb/parser/terror"
typejson "github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/logutil"
"github.com/pingcap/tidb/util/tikvutil"
"github.com/pingcap/tidb/util/versioninfo"
Expand Down Expand Up @@ -1350,12 +1349,6 @@ var hideConfig = []string{
"performance.index-usage-sync-lease",
}

// jsonifyPath converts a dotted config item name into a JSON path
// expression (e.g. "a.b" -> `$."a"."b"`) so the item can be extracted.
func jsonifyPath(str string) string {
	parts := strings.Split(str, ".")
	// Quote every path segment individually, then join them back together.
	for i, part := range parts {
		parts[i] = "\"" + part + "\""
	}
	return fmt.Sprintf("$.%s", strings.Join(parts, "."))
}

// GetJSONConfig returns the config as JSON with hidden items removed
// It replaces the earlier HideConfig() which used strings.Split() in
// a way that didn't work for similarly named items (like enable).
Expand All @@ -1364,46 +1357,45 @@ func GetJSONConfig() (string, error) {
if err != nil {
return "", err
}
jsonValue, err := typejson.ParseBinaryFromString(string(j))

jsonValue := make(map[string]interface{})
err = json.Unmarshal(j, &jsonValue)
if err != nil {
return "", err
}
// Approximately length of removed items + hidden items.
pathExprs := make([]typejson.PathExpression, 0, len(removedConfig)+len(hideConfig))
var pathExpr typejson.PathExpression

// Patch out removed items.
removedPaths := make([]string, 0, len(removedConfig)+len(hideConfig))
for removedItem := range removedConfig {
s := jsonifyPath(removedItem)
pathExpr, err = typejson.ParseJSONPathExpr(s)
if err != nil {
// Should not be reachable, but not worth bailing for.
// It just means we can't patch out this line.
continue
}
pathExprs = append(pathExprs, pathExpr)
}
// Patch out hidden items
for _, hiddenItem := range hideConfig {
s := jsonifyPath(hiddenItem)
pathExpr, err = typejson.ParseJSONPathExpr(s)
if err != nil {
// Should not be reachable, but not worth bailing for.
// It just means we can't patch out this line.
continue
}
pathExprs = append(pathExprs, pathExpr)
removedPaths = append(removedPaths, removedItem)
}
newJSONValue, err := jsonValue.Remove(pathExprs)
if err != nil {
return "", err
removedPaths = append(removedPaths, hideConfig...)

for _, path := range removedPaths {
s := strings.Split(path, ".")
curValue := jsonValue
for i, key := range s {
if i == len(s)-1 {
delete(curValue, key)
}

if curValue[key] != nil {
mapValue, ok := curValue[key].(map[string]interface{})
if !ok {
break
}

curValue = mapValue
} else {
break
}
}
}
// Convert back to GoJSON so it can be pretty formatted.
// This is expected for compatibility with previous versions.
buf, err := newJSONValue.MarshalJSON()

buf, err := json.Marshal(jsonValue)
if err != nil {
return "", err
}

var resBuf bytes.Buffer
if err = json.Indent(&resBuf, buf, "", "\t"); err != nil {
return "", err
Expand Down
2 changes: 0 additions & 2 deletions executor/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,6 @@ go_library(
"//telemetry",
"//tidb-binlog/node",
"//types",
"//types/json",
"//util",
"//util/admin",
"//util/bitmap",
Expand Down Expand Up @@ -385,7 +384,6 @@ go_test(
"//testkit/testsetup",
"//testkit/testutil",
"//types",
"//types/json",
"//util",
"//util/benchdaily",
"//util/chunk",
Expand Down
2 changes: 0 additions & 2 deletions executor/aggfuncs/BUILD.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ go_library(
"//sessionctx",
"//sessionctx/variable",
"//types",
"//types/json",
"//util/chunk",
"//util/codec",
"//util/collate",
Expand Down Expand Up @@ -102,7 +101,6 @@ go_test(
"//testkit",
"//testkit/testsetup",
"//types",
"//types/json",
"//util/chunk",
"//util/codec",
"//util/collate",
Expand Down
3 changes: 1 addition & 2 deletions executor/aggfuncs/aggfunc_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,6 @@ import (
"github.com/pingcap/tidb/planner/util"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
"github.com/pingcap/tidb/util/codec"
"github.com/pingcap/tidb/util/collate"
Expand Down Expand Up @@ -493,7 +492,7 @@ func getDataGenFunc(ft *types.FieldType) func(i int) types.Datum {
case mysql.TypeDuration:
return func(i int) types.Datum { return types.NewDurationDatum(types.Duration{Duration: time.Duration(i)}) }
case mysql.TypeJSON:
return func(i int) types.Datum { return types.NewDatum(json.CreateBinary(int64(i))) }
return func(i int) types.Datum { return types.NewDatum(types.CreateBinaryJSON(int64(i))) }
case mysql.TypeEnum:
elems := []string{"e", "d", "c", "b", "a"}
return func(i int) types.Datum {
Expand Down
5 changes: 2 additions & 3 deletions executor/aggfuncs/func_count_distinct.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import (
"github.com/pingcap/tidb/expression"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
"github.com/pingcap/tidb/util/codec"
"github.com/pingcap/tidb/util/collate"
Expand Down Expand Up @@ -398,7 +397,7 @@ func evalAndEncode(
}
encodedBytes = appendDuration(encodedBytes, buf, val)
case types.ETJson:
var val json.BinaryJSON
var val types.BinaryJSON
val, isNull, err = arg.EvalJSON(sctx, row)
if err != nil || isNull {
break
Expand Down Expand Up @@ -466,7 +465,7 @@ func appendDuration(encodedBytes, buf []byte, val types.Duration) []byte {
return encodedBytes
}

func appendJSON(encodedBytes, _ []byte, val json.BinaryJSON) []byte {
func appendJSON(encodedBytes, _ []byte, val types.BinaryJSON) []byte {
encodedBytes = append(encodedBytes, val.TypeCode)
encodedBytes = append(encodedBytes, val.Value...)
return encodedBytes
Expand Down
3 changes: 1 addition & 2 deletions executor/aggfuncs/func_first_row.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ import (
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
"github.com/pingcap/tidb/util/stringutil"
)
Expand Down Expand Up @@ -103,7 +102,7 @@ type partialResult4FirstRowDuration struct {
type partialResult4FirstRowJSON struct {
basePartialResult4FirstRow

val json.BinaryJSON
val types.BinaryJSON
}

type partialResult4FirstRowEnum struct {
Expand Down
3 changes: 1 addition & 2 deletions executor/aggfuncs/func_first_row_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import (
"github.com/pingcap/tidb/parser/ast"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
)

Expand All @@ -42,7 +41,7 @@ func TestMergePartialResult4FirstRow(t *testing.T) {
buildAggTester(ast.AggFuncFirstRow, mysql.TypeString, 5, "0", "2", "0"),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeDate, 5, types.TimeFromDays(365), types.TimeFromDays(367), types.TimeFromDays(365)),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeDuration, 5, types.Duration{Duration: time.Duration(0)}, types.Duration{Duration: time.Duration(2)}, types.Duration{Duration: time.Duration(0)}),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeJSON, 5, json.CreateBinary(int64(0)), json.CreateBinary(int64(2)), json.CreateBinary(int64(0))),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeJSON, 5, types.CreateBinaryJSON(int64(0)), types.CreateBinaryJSON(int64(2)), types.CreateBinaryJSON(int64(0))),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeEnum, 5, enumE, enumC, enumE),
buildAggTester(ast.AggFuncFirstRow, mysql.TypeSet, 5, setE, setED, setE),
}
Expand Down
8 changes: 4 additions & 4 deletions executor/aggfuncs/func_json_arrayagg.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import (

"github.com/pingcap/errors"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/chunk"
)

Expand Down Expand Up @@ -54,7 +54,7 @@ func (e *jsonArrayagg) AppendFinalResult2Chunk(sctx sessionctx.Context, pr Parti
return nil
}

chk.AppendJSON(e.ordinal, json.CreateBinary(p.entries))
chk.AppendJSON(e.ordinal, types.CreateBinaryJSON(p.entries))
return nil
}

Expand All @@ -72,11 +72,11 @@ func (e *jsonArrayagg) UpdatePartialResult(sctx sessionctx.Context, rowsInGroup
}

switch x := realItem.(type) {
case nil, bool, int64, uint64, float64, string, json.BinaryJSON, json.Opaque:
case nil, bool, int64, uint64, float64, string, types.BinaryJSON, types.Opaque:
p.entries = append(p.entries, realItem)
memDelta += getValMemDelta(realItem)
default:
return 0, json.ErrUnsupportedSecondArgumentType.GenWithStackByArgs(x)
return 0, types.ErrUnsupportedSecondArgumentType.GenWithStackByArgs(x)
}
}
return memDelta, nil
Expand Down
7 changes: 3 additions & 4 deletions executor/aggfuncs/func_json_arrayagg_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import (
"github.com/pingcap/tidb/parser/ast"
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
"github.com/pingcap/tidb/util/chunk"
)

Expand Down Expand Up @@ -56,7 +55,7 @@ func TestMergePartialResult4JsonArrayagg(t *testing.T) {
entries3 = append(entries3, entries1...)
entries3 = append(entries3, entries2...)

tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, json.CreateBinary(entries1), json.CreateBinary(entries2), json.CreateBinary(entries3)))
tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, types.CreateBinaryJSON(entries1), types.CreateBinaryJSON(entries2), types.CreateBinaryJSON(entries3)))
}

for _, test := range tests {
Expand All @@ -83,7 +82,7 @@ func TestJsonArrayagg(t *testing.T) {
// to adapt the `genSrcChk` Chunk format
entries = append(entries, nil)

tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, nil, json.CreateBinary(entries)))
tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, nil, types.CreateBinaryJSON(entries)))
}

for _, test := range tests {
Expand Down Expand Up @@ -114,7 +113,7 @@ func jsonArrayaggMemDeltaGens(srcChk *chunk.Chunk, dataType *types.FieldType) (m
memDelta += int64(len(val))
case mysql.TypeJSON:
val := row.GetJSON(0)
// +1 for the memory usage of the TypeCode of json
// +1 for the memory usage of the JSONTypeCode of json
memDelta += int64(len(val.Value) + 1)
case mysql.TypeDuration:
val := row.GetDuration(0, dataType.GetDecimal())
Expand Down
Loading