From 853c41e3fbda3b60ca689dd34afce2e187edb000 Mon Sep 17 00:00:00 2001 From: Arthur Yang Date: Wed, 4 Aug 2021 16:15:07 +0800 Subject: [PATCH] executor: add builtin aggregate function `json_arrayagg` (#19957) --- executor/aggfuncs/aggfunc_test.go | 60 ++++++++ executor/aggfuncs/aggfuncs.go | 3 + executor/aggfuncs/builder.go | 16 +++ executor/aggfuncs/func_json_arrayagg.go | 103 ++++++++++++++ executor/aggfuncs/func_json_arrayagg_test.go | 139 +++++++++++++++++++ expression/aggregation/agg_to_pb.go | 2 + expression/aggregation/base_func.go | 3 + expression/aggregation/descriptor.go | 2 +- expression/integration_test.go | 40 ++++++ planner/core/rule_aggregation_push_down.go | 4 +- types/json/constants.go | 4 +- 11 files changed, 371 insertions(+), 5 deletions(-) create mode 100644 executor/aggfuncs/func_json_arrayagg.go create mode 100644 executor/aggfuncs/func_json_arrayagg_test.go diff --git a/executor/aggfuncs/aggfunc_test.go b/executor/aggfuncs/aggfunc_test.go index 96bc093a48214..997f27a343444 100644 --- a/executor/aggfuncs/aggfunc_test.go +++ b/executor/aggfuncs/aggfunc_test.go @@ -389,6 +389,9 @@ func (s *testSuite) testMergePartialResult(c *C, p aggTest) { if p.funcName == ast.AggFuncApproxCountDistinct { resultChk = chunk.NewChunkWithCapacity([]*types.FieldType{types.NewFieldType(mysql.TypeString)}, 1) } + if p.funcName == ast.AggFuncJsonArrayagg { + resultChk = chunk.NewChunkWithCapacity([]*types.FieldType{types.NewFieldType(mysql.TypeJSON)}, 1) + } // update partial result. for row := iter.Begin(); row != iter.End(); row = iter.Next() { @@ -402,6 +405,9 @@ func (s *testSuite) testMergePartialResult(c *C, p aggTest) { if p.funcName == ast.AggFuncApproxCountDistinct { dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeString)) } + if p.funcName == ast.AggFuncJsonArrayagg { + dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeJSON)) + } result, err := dt.CompareDatum(s.ctx.GetSessionVars().StmtCtx, &p.results[0]) c.Assert(err, IsNil) c.Assert(result, Equals, 0, Commentf("%v != %v", dt.String(), p.results[0])) @@ -426,6 +432,9 @@ func (s *testSuite) testMergePartialResult(c *C, p aggTest) { if p.funcName == ast.AggFuncApproxCountDistinct { dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeString)) } + if p.funcName == ast.AggFuncJsonArrayagg { + dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeJSON)) + } result, err = dt.CompareDatum(s.ctx.GetSessionVars().StmtCtx, &p.results[1]) c.Assert(err, IsNil) c.Assert(result, Equals, 0, Commentf("%v != %v", dt.String(), p.results[1])) @@ -435,6 +444,9 @@ func (s *testSuite) testMergePartialResult(c *C, p aggTest) { if p.funcName == ast.AggFuncApproxCountDistinct { resultChk = chunk.NewChunkWithCapacity([]*types.FieldType{types.NewFieldType(mysql.TypeLonglong)}, 1) } + if p.funcName == ast.AggFuncJsonArrayagg { + resultChk = chunk.NewChunkWithCapacity([]*types.FieldType{types.NewFieldType(mysql.TypeJSON)}, 1) + } resultChk.Reset() err = finalFunc.AppendFinalResult2Chunk(s.ctx, finalPr, resultChk) c.Assert(err, IsNil) @@ -443,6 +455,9 @@ func (s *testSuite) testMergePartialResult(c *C, p aggTest) { if p.funcName == ast.AggFuncApproxCountDistinct { dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeLonglong)) } + if p.funcName == ast.AggFuncJsonArrayagg { + dt = resultChk.GetRow(0).GetDatum(0, types.NewFieldType(mysql.TypeJSON)) + } result, err = dt.CompareDatum(s.ctx.GetSessionVars().StmtCtx, &p.results[2]) c.Assert(err, IsNil) c.Assert(result, Equals, 0, Commentf("%v 
!= %v", dt.String(), p.results[2])) @@ -687,6 +702,51 @@ func (s *testSuite) testAggFunc(c *C, p aggTest) { c.Assert(result, Equals, 0, Commentf("%v != %v", dt.String(), p.results[0])) } +func (s *testSuite) testAggFuncWithoutDistinct(c *C, p aggTest) { + srcChk := p.genSrcChk() + + args := []expression.Expression{&expression.Column{RetType: p.dataType, Index: 0}} + if p.funcName == ast.AggFuncGroupConcat { + args = append(args, &expression.Constant{Value: types.NewStringDatum(separator), RetType: types.NewFieldType(mysql.TypeString)}) + } + if p.funcName == ast.AggFuncApproxPercentile { + args = append(args, &expression.Constant{Value: types.NewIntDatum(50), RetType: types.NewFieldType(mysql.TypeLong)}) + } + desc, err := aggregation.NewAggFuncDesc(s.ctx, p.funcName, args, false) + c.Assert(err, IsNil) + if p.orderBy { + desc.OrderByItems = []*util.ByItems{ + {Expr: args[0], Desc: true}, + } + } + finalFunc := aggfuncs.Build(s.ctx, desc, 0) + finalPr, _ := finalFunc.AllocPartialResult() + resultChk := chunk.NewChunkWithCapacity([]*types.FieldType{desc.RetTp}, 1) + + iter := chunk.NewIterator4Chunk(srcChk) + for row := iter.Begin(); row != iter.End(); row = iter.Next() { + _, err = finalFunc.UpdatePartialResult(s.ctx, []chunk.Row{row}, finalPr) + c.Assert(err, IsNil) + } + p.messUpChunk(srcChk) + err = finalFunc.AppendFinalResult2Chunk(s.ctx, finalPr, resultChk) + c.Assert(err, IsNil) + dt := resultChk.GetRow(0).GetDatum(0, desc.RetTp) + result, err := dt.CompareDatum(s.ctx.GetSessionVars().StmtCtx, &p.results[1]) + c.Assert(err, IsNil) + c.Assert(result, Equals, 0, Commentf("%v != %v", dt.String(), p.results[1])) + + // test the empty input + resultChk.Reset() + finalFunc.ResetPartialResult(finalPr) + err = finalFunc.AppendFinalResult2Chunk(s.ctx, finalPr, resultChk) + c.Assert(err, IsNil) + dt = resultChk.GetRow(0).GetDatum(0, desc.RetTp) + result, err = dt.CompareDatum(s.ctx.GetSessionVars().StmtCtx, &p.results[0]) + c.Assert(err, IsNil) + c.Assert(result, Equals, 0, Commentf("%v != %v", dt.String(), p.results[0])) +} + func (s *testSuite) testAggMemFunc(c *C, p aggMemTest) { srcChk := p.aggTest.genSrcChk() diff --git a/executor/aggfuncs/aggfuncs.go b/executor/aggfuncs/aggfuncs.go index 6dd807c2ea5e1..e608d910ba821 100644 --- a/executor/aggfuncs/aggfuncs.go +++ b/executor/aggfuncs/aggfuncs.go @@ -103,6 +103,9 @@ var ( // All the AggFunc implementations for "BIT_AND" are listed here. _ AggFunc = (*bitAndUint64)(nil) + // All the AggFunc implementations for "JSON_ARRAYAGG" are listed here + _ AggFunc = (*jsonArrayagg)(nil) + // All the AggFunc implementations for "JSON_OBJECTAGG" are listed here _ AggFunc = (*jsonObjectAgg)(nil) ) diff --git a/executor/aggfuncs/builder.go b/executor/aggfuncs/builder.go index c914ea4838f2d..4627d0ae2f12e 100644 --- a/executor/aggfuncs/builder.go +++ b/executor/aggfuncs/builder.go @@ -58,6 +58,8 @@ func Build(ctx sessionctx.Context, aggFuncDesc *aggregation.AggFuncDesc, ordinal return buildVarPop(aggFuncDesc, ordinal) case ast.AggFuncStddevPop: return buildStdDevPop(aggFuncDesc, ordinal) + case ast.AggFuncJsonArrayagg: + return buildJSONArrayagg(aggFuncDesc, ordinal) case ast.AggFuncJsonObjectAgg: return buildJSONObjectAgg(aggFuncDesc, ordinal) case ast.AggFuncApproxCountDistinct: @@ -615,6 +617,20 @@ func buildStddevSamp(aggFuncDesc *aggregation.AggFuncDesc, ordinal int) AggFunc } } +// buildJSONArrayagg builds the AggFunc implementation for function "json_arrayagg". 
+func buildJSONArrayagg(aggFuncDesc *aggregation.AggFuncDesc, ordinal int) AggFunc { + base := baseAggFunc{ + args: aggFuncDesc.Args, + ordinal: ordinal, + } + switch aggFuncDesc.Mode { + case aggregation.DedupMode: + return nil + default: + return &jsonArrayagg{base} + } +} + // buildJSONObjectAgg builds the AggFunc implementation for function "json_objectagg". func buildJSONObjectAgg(aggFuncDesc *aggregation.AggFuncDesc, ordinal int) AggFunc { base := baseAggFunc{ diff --git a/executor/aggfuncs/func_json_arrayagg.go b/executor/aggfuncs/func_json_arrayagg.go new file mode 100644 index 0000000000000..61500afc04ad9 --- /dev/null +++ b/executor/aggfuncs/func_json_arrayagg.go @@ -0,0 +1,103 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package aggfuncs + +import ( + "unsafe" + + "github.com/pingcap/errors" + "github.com/pingcap/tidb/sessionctx" + "github.com/pingcap/tidb/types" + "github.com/pingcap/tidb/types/json" + "github.com/pingcap/tidb/util/chunk" +) + +const ( + // DefPartialResult4JsonArrayagg is the size of partialResult4JsonArrayagg + DefPartialResult4JsonArrayagg = int64(unsafe.Sizeof(partialResult4JsonArrayagg{})) +) + +type jsonArrayagg struct { + baseAggFunc +} + +type partialResult4JsonArrayagg struct { + entries []interface{} +} + +func (e *jsonArrayagg) AllocPartialResult() (pr PartialResult, memDelta int64) { + p := partialResult4JsonArrayagg{} + p.entries = make([]interface{}, 0) + return PartialResult(&p), DefPartialResult4JsonArrayagg + DefSliceSize +} + +func (e *jsonArrayagg) ResetPartialResult(pr PartialResult) { + p := (*partialResult4JsonArrayagg)(pr) + p.entries = p.entries[:0] +} + +func (e *jsonArrayagg) AppendFinalResult2Chunk(sctx sessionctx.Context, pr PartialResult, chk *chunk.Chunk) error { + p := (*partialResult4JsonArrayagg)(pr) + if len(p.entries) == 0 { + chk.AppendNull(e.ordinal) + return nil + } + + // appendBinary does not support some types such as uint8 and types.Time, so a conversion is needed here + for idx, val := range p.entries { + switch x := val.(type) { + case *types.MyDecimal: + float64Val, err := x.ToFloat64() + if err != nil { + return errors.Trace(err) + } + p.entries[idx] = float64Val + case []uint8, types.Time, types.Duration: + strVal, err := types.ToString(x) + if err != nil { + return errors.Trace(err) + } + p.entries[idx] = strVal + } + } + + chk.AppendJSON(e.ordinal, json.CreateBinary(p.entries)) + return nil +} + +func (e *jsonArrayagg) UpdatePartialResult(sctx sessionctx.Context, rowsInGroup []chunk.Row, pr PartialResult) (memDelta int64, err error) { + p := (*partialResult4JsonArrayagg)(pr) + for _, row := range rowsInGroup { + item, err := e.args[0].Eval(row) + if err != nil { + return 0, errors.Trace(err) + } + + realItem := item.Clone().GetValue() + switch x := realItem.(type) { + case nil, bool, int64, uint64, float64, string, json.BinaryJSON, *types.MyDecimal, []uint8, types.Time, types.Duration: + p.entries = append(p.entries, realItem) + memDelta += getValMemDelta(realItem) + default: + return 0, json.ErrUnsupportedSecondArgumentType.GenWithStackByArgs(x) + }
+ } + return memDelta, nil +} + +func (e *jsonArrayagg) MergePartialResult(sctx sessionctx.Context, src, dst PartialResult) (memDelta int64, err error) { + p1, p2 := (*partialResult4JsonArrayagg)(src), (*partialResult4JsonArrayagg)(dst) + p2.entries = append(p2.entries, p1.entries...) + return 0, nil +} diff --git a/executor/aggfuncs/func_json_arrayagg_test.go b/executor/aggfuncs/func_json_arrayagg_test.go new file mode 100644 index 0000000000000..aa88e0f636499 --- /dev/null +++ b/executor/aggfuncs/func_json_arrayagg_test.go @@ -0,0 +1,139 @@ +// Copyright 2020 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package aggfuncs_test + +import ( + . "github.com/pingcap/check" + "github.com/pingcap/errors" + "github.com/pingcap/parser/ast" + "github.com/pingcap/parser/mysql" + "github.com/pingcap/tidb/executor/aggfuncs" + "github.com/pingcap/tidb/types" + "github.com/pingcap/tidb/types/json" + "github.com/pingcap/tidb/util/chunk" +) + +func (s *testSuite) TestMergePartialResult4JsonArrayagg(c *C) { + typeList := []byte{mysql.TypeLonglong, mysql.TypeDouble, mysql.TypeString, mysql.TypeJSON} + + var tests []aggTest + numRows := 5 + for _, argType := range typeList { + entries1 := make([]interface{}, 0) + entries2 := make([]interface{}, 0) + entries3 := make([]interface{}, 0) + + genFunc := getDataGenFunc(types.NewFieldType(argType)) + + for m := 0; m < numRows; m++ { + arg := genFunc(m) + entries1 = append(entries1, arg.GetValue()) + } + // to adapt the `genSrcChk` Chunk format + entries1 = append(entries1, nil) + + for m := 2; m < numRows; m++ { + arg := genFunc(m) + entries2 = append(entries2, arg.GetValue()) + } + // to adapt the `genSrcChk` Chunk format + entries2 = append(entries2, nil) + + entries3 = append(entries3, entries1...) + entries3 = append(entries3, entries2...) 
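+ // entries3 holds entries1 followed by entries2: the expected array after the two partial results are merged.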
+ + tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, json.CreateBinary(entries1), json.CreateBinary(entries2), json.CreateBinary(entries3))) + } + + for _, test := range tests { + s.testMergePartialResult(c, test) + } +} + +func (s *testSuite) TestJsonArrayagg(c *C) { + typeList := []byte{mysql.TypeLonglong, mysql.TypeDouble, mysql.TypeString, mysql.TypeJSON} + + var tests []aggTest + numRows := 5 + + for _, argType := range typeList { + entries := make([]interface{}, 0) + + genFunc := getDataGenFunc(types.NewFieldType(argType)) + + for m := 0; m < numRows; m++ { + arg := genFunc(m) + entries = append(entries, arg.GetValue()) + } + // to adapt the `genSrcChk` Chunk format + entries = append(entries, nil) + + tests = append(tests, buildAggTester(ast.AggFuncJsonArrayagg, argType, numRows, nil, json.CreateBinary(entries))) + } + + for _, test := range tests { + s.testAggFuncWithoutDistinct(c, test) + } +} + +func jsonArrayaggMemDeltaGens(srcChk *chunk.Chunk, dataType *types.FieldType) (memDeltas []int64, err error) { + memDeltas = make([]int64, 0) + for i := 0; i < srcChk.NumRows(); i++ { + row := srcChk.GetRow(i) + if row.IsNull(0) { + memDeltas = append(memDeltas, aggfuncs.DefInterfaceSize) + continue + } + + memDelta := int64(0) + memDelta += aggfuncs.DefInterfaceSize + switch dataType.Tp { + case mysql.TypeLonglong: + memDelta += aggfuncs.DefUint64Size + case mysql.TypeDouble: + memDelta += aggfuncs.DefFloat64Size + case mysql.TypeString: + val := row.GetString(0) + memDelta += int64(len(val)) + case mysql.TypeJSON: + val := row.GetJSON(0) + // +1 for the memory usage of the TypeCode of json + memDelta += int64(len(val.Value) + 1) + case mysql.TypeDuration: + memDelta += aggfuncs.DefDurationSize + case mysql.TypeDate: + memDelta += aggfuncs.DefTimeSize + case mysql.TypeNewDecimal: + memDelta += aggfuncs.DefMyDecimalSize + default: + return memDeltas, errors.Errorf("unsupported type - %v", dataType.Tp) + } + memDeltas = append(memDeltas, memDelta) + } + return memDeltas, nil +} + +func (s *testSuite) TestMemJsonArrayagg(c *C) { + typeList := []byte{mysql.TypeLonglong, mysql.TypeDouble, mysql.TypeString, mysql.TypeJSON} + + var tests []aggMemTest + numRows := 5 + for _, argType := range typeList { + tests = append(tests, buildAggMemTester(ast.AggFuncJsonArrayagg, argType, numRows, aggfuncs.DefPartialResult4JsonArrayagg+aggfuncs.DefSliceSize, jsonArrayaggMemDeltaGens, false)) + } + + for _, test := range tests { + s.testAggMemFunc(c, test) + } +} diff --git a/expression/aggregation/agg_to_pb.go b/expression/aggregation/agg_to_pb.go index e07c598bcf005..d3c9ef9b7e3c2 100644 --- a/expression/aggregation/agg_to_pb.go +++ b/expression/aggregation/agg_to_pb.go @@ -59,6 +59,8 @@ func AggFuncToPBExpr(sc *stmtctx.StatementContext, client kv.Client, aggFunc *Ag tp = tipb.ExprType_Agg_BitAnd case ast.AggFuncVarPop: tp = tipb.ExprType_VarPop + case ast.AggFuncJsonArrayagg: + tp = tipb.ExprType_JsonArrayAgg case ast.AggFuncJsonObjectAgg: tp = tipb.ExprType_JsonObjectAgg case ast.AggFuncStddevPop: diff --git a/expression/aggregation/base_func.go b/expression/aggregation/base_func.go index 9a5ef95d49bb2..eef2c144361ea 100644 --- a/expression/aggregation/base_func.go +++ b/expression/aggregation/base_func.go @@ -115,6 +115,8 @@ func (a *baseFuncDesc) typeInfer(ctx sessionctx.Context) error { a.typeInfer4LeadLag(ctx) case ast.AggFuncVarPop, ast.AggFuncStddevPop, ast.AggFuncVarSamp, ast.AggFuncStddevSamp: a.typeInfer4PopOrSamp(ctx) + case ast.AggFuncJsonArrayagg: + 
a.typeInfer4JsonFuncs(ctx) case ast.AggFuncJsonObjectAgg: a.typeInfer4JsonFuncs(ctx) default: @@ -362,6 +364,7 @@ var noNeedCastAggFuncs = map[string]struct{}{ ast.AggFuncMin: {}, ast.AggFuncFirstRow: {}, ast.WindowFuncNtile: {}, + ast.AggFuncJsonArrayagg: {}, ast.AggFuncJsonObjectAgg: {}, } diff --git a/expression/aggregation/descriptor.go b/expression/aggregation/descriptor.go index 4415b0688ce09..10408a374cbc7 100644 --- a/expression/aggregation/descriptor.go +++ b/expression/aggregation/descriptor.go @@ -282,7 +282,7 @@ func (a *AggFuncDesc) UpdateNotNullFlag4RetType(hasGroupBy, allAggsFirstRow bool ast.AggFuncBitAnd, ast.AggFuncBitOr, ast.AggFuncBitXor, ast.WindowFuncFirstValue, ast.WindowFuncLastValue, ast.WindowFuncNthValue, ast.WindowFuncRowNumber, ast.WindowFuncRank, ast.WindowFuncDenseRank, ast.WindowFuncCumeDist, ast.WindowFuncNtile, ast.WindowFuncPercentRank, - ast.WindowFuncLead, ast.WindowFuncLag, ast.AggFuncJsonObjectAgg, + ast.WindowFuncLead, ast.WindowFuncLag, ast.AggFuncJsonObjectAgg, ast.AggFuncJsonArrayagg, ast.AggFuncVarSamp, ast.AggFuncVarPop, ast.AggFuncStddevPop, ast.AggFuncStddevSamp: removeNotNull = false case ast.AggFuncSum, ast.AggFuncAvg, ast.AggFuncGroupConcat: diff --git a/expression/integration_test.go b/expression/integration_test.go index ef39bda7a0cb8..72dda661201d2 100644 --- a/expression/integration_test.go +++ b/expression/integration_test.go @@ -4106,6 +4106,46 @@ func (s *testIntegrationSuite) TestAggregationBuiltinGroupConcat(c *C) { tk.MustQuery("select * from d").Check(testkit.Rows("hello,h")) } +func (s *testIntegrationSuite) TestAggregationBuiltinJSONArrayagg(c *C) { + defer s.cleanEnv(c) + tk := testkit.NewTestKit(c, s.store) + tk.MustExec("use test") + + tk.MustExec("drop table if exists t;") + tk.MustExec(`CREATE TABLE t ( + a int(11), + b varchar(100), + c decimal(3,2), + d json, + e date, + f time, + g datetime DEFAULT '2012-01-01', + h timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + i char(36), + j text(50));`) + + tk.MustExec(`insert into t values(1, 'ab', 5.5, '{"id": 1}', '2020-01-10', '11:12:13', '2020-01-11', '2020-10-18 00:00:00', 'first', 'json_arrayagg_test');`) + + result := tk.MustQuery("select a, json_arrayagg(b) from t group by a order by a;") + result.Check(testkit.Rows(`1 ["ab"]`)) + result = tk.MustQuery("select b, json_arrayagg(c) from t group by b order by b;") + result.Check(testkit.Rows(`ab [5.5]`)) + result = tk.MustQuery("select e, json_arrayagg(f) from t group by e order by e;") + result.Check(testkit.Rows(`2020-01-10 ["11:12:13"]`)) + result = tk.MustQuery("select f, json_arrayagg(g) from t group by f order by f;") + result.Check(testkit.Rows(`11:12:13 ["2020-01-11 00:00:00"]`)) + result = tk.MustQuery("select g, json_arrayagg(h) from t group by g order by g;") + result.Check(testkit.Rows(`2020-01-11 00:00:00 ["2020-10-18 00:00:00"]`)) + result = tk.MustQuery("select h, json_arrayagg(i) from t group by h order by h;") + result.Check(testkit.Rows(`2020-10-18 00:00:00 ["first"]`)) + result = tk.MustQuery("select i, json_arrayagg(j) from t group by i order by i;") + result.Check(testkit.Rows(`first ["json_arrayagg_test"]`)) + result = tk.MustQuery("select json_arrayagg(23) from t group by a order by a;") + result.Check(testkit.Rows(`[23]`)) + result = tk.MustQuery("select json_arrayagg(null) from t group by a order by a;") + result.Check(testkit.Rows(`[null]`)) +} + func (s *testIntegrationSuite) TestAggregationBuiltinJSONObjectAgg(c *C) { defer s.cleanEnv(c) tk := testkit.NewTestKit(c, s.store) diff --git 
a/planner/core/rule_aggregation_push_down.go b/planner/core/rule_aggregation_push_down.go index b3dd0e4b95295..e02583355910e 100644 --- a/planner/core/rule_aggregation_push_down.go +++ b/planner/core/rule_aggregation_push_down.go @@ -38,7 +38,7 @@ func (a *aggregationPushDownSolver) isDecomposableWithJoin(fun *aggregation.AggF return false } switch fun.Name { - case ast.AggFuncAvg, ast.AggFuncGroupConcat, ast.AggFuncVarPop, ast.AggFuncJsonObjectAgg, ast.AggFuncStddevPop, ast.AggFuncVarSamp, ast.AggFuncStddevSamp, ast.AggFuncApproxPercentile: + case ast.AggFuncAvg, ast.AggFuncGroupConcat, ast.AggFuncVarPop, ast.AggFuncJsonArrayagg, ast.AggFuncJsonObjectAgg, ast.AggFuncStddevPop, ast.AggFuncVarSamp, ast.AggFuncApproxPercentile, ast.AggFuncStddevSamp: // TODO: Support avg push down. return false case ast.AggFuncMax, ast.AggFuncMin, ast.AggFuncFirstRow: @@ -55,7 +55,7 @@ func (a *aggregationPushDownSolver) isDecomposableWithUnion(fun *aggregation.Agg return false } switch fun.Name { - case ast.AggFuncGroupConcat, ast.AggFuncVarPop, ast.AggFuncJsonObjectAgg, ast.AggFuncApproxPercentile: + case ast.AggFuncGroupConcat, ast.AggFuncVarPop, ast.AggFuncJsonArrayagg, ast.AggFuncApproxPercentile, ast.AggFuncJsonObjectAgg: return false case ast.AggFuncMax, ast.AggFuncMin, ast.AggFuncFirstRow: return true diff --git a/types/json/constants.go b/types/json/constants.go index e0184b7301bbe..5a9d7cec19109 100644 --- a/types/json/constants.go +++ b/types/json/constants.go @@ -217,12 +217,12 @@ var ( ErrInvalidJSONContainsPathType = dbterror.ClassJSON.NewStd(mysql.ErrInvalidJSONContainsPathType) // ErrJSONDocumentNULLKey means that json's key is null ErrJSONDocumentNULLKey = dbterror.ClassJSON.NewStd(mysql.ErrJSONDocumentNULLKey) + // ErrJSONObjectKeyTooLong means JSON object with key length >= 65536 which is not yet supported. + ErrJSONObjectKeyTooLong = dbterror.ClassTypes.NewStdErr(mysql.ErrJSONObjectKeyTooLong, mysql.MySQLErrName[mysql.ErrJSONObjectKeyTooLong]) // ErrInvalidJSONPathArrayCell means invalid JSON path for an array cell. ErrInvalidJSONPathArrayCell = dbterror.ClassJSON.NewStd(mysql.ErrInvalidJSONPathArrayCell) // ErrUnsupportedSecondArgumentType means unsupported second argument type in json_objectagg ErrUnsupportedSecondArgumentType = dbterror.ClassJSON.NewStd(mysql.ErrUnsupportedSecondArgumentType) - // ErrJSONObjectKeyTooLong means JSON object with key length >= 65536 which is not yet supported. - ErrJSONObjectKeyTooLong = dbterror.ClassTypes.NewStdErr(mysql.ErrJSONObjectKeyTooLong, mysql.MySQLErrName[mysql.ErrJSONObjectKeyTooLong]) ) // json_contains_path function type choices
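For reference, a minimal usage sketch of the new function (hypothetical table t1, not part of this patch; the order of elements inside the aggregated array is not guaranteed):
create table t1 (id int, name varchar(20));
insert into t1 values (1, 'a'), (1, 'b'), (2, 'c');
select id, json_arrayagg(name) from t1 group by id;
-- expected: 1 -> ["a", "b"] (element order may vary), 2 -> ["c"]
select json_arrayagg(name) from t1 where id > 100;
-- expected: NULL, since an empty group appends SQL NULL (see AppendFinalResult2Chunk above)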