diff --git a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHSaltNullParquetSuite.scala b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHSaltNullParquetSuite.scala
index c0f37b08616e..c77c41fb5e39 100644
--- a/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHSaltNullParquetSuite.scala
+++ b/backends-clickhouse/src/test/scala/org/apache/gluten/execution/GlutenClickHouseTPCHSaltNullParquetSuite.scala
@@ -2572,6 +2572,19 @@ class GlutenClickHouseTPCHSaltNullParquetSuite extends GlutenClickHouseTPCHAbstr
     runQueryAndCompare(sql2)({ _ => })
   }
 
+  test("aggregate function percentile") {
+    // single percentage
+    val sql1 = "select l_linenumber % 10, percentile(l_extendedprice, 0.5) " +
+      "from lineitem group by l_linenumber % 10"
+    runQueryAndCompare(sql1)({ _ => })
+
+    // multiple percentages
+    val sql2 =
+      "select l_linenumber % 10, percentile(l_extendedprice, array(0.1, 0.2, 0.3)) " +
+        "from lineitem group by l_linenumber % 10"
+    runQueryAndCompare(sql2)({ _ => })
+  }
+
   test("GLUTEN-5096: Bug fix regexp_extract diff") {
     val tbl_create_sql = "create table test_tbl_5096(id bigint, data string) using parquet"
     val tbl_insert_sql = "insert into test_tbl_5096 values(1, 'abc'), (2, 'abc\n')"
diff --git a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
index 0238508d9699..50bec8d870ea 100644
--- a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
+++ b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxBackend.scala
@@ -28,7 +28,7 @@ import org.apache.gluten.substrait.rel.LocalFilesNode.ReadFileFormat.{DwrfReadFo
 
 import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.expressions.{Alias, CumeDist, DenseRank, Descending, Expression, Lag, Lead, Literal, MakeYMInterval, NamedExpression, NthValue, NTile, PercentRank, Rand, RangeFrame, Rank, RowNumber, SortOrder, SparkPartitionID, SpecialFrameBoundary, SpecifiedWindowFrame, Uuid}
-import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, ApproximatePercentile, Count, Sum}
+import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, ApproximatePercentile, Count, Percentile, Sum}
 import org.apache.spark.sql.catalyst.plans.{JoinType, LeftOuter, RightOuter}
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.execution.{ProjectExec, SparkPlan}
@@ -374,7 +374,8 @@ object VeloxBackendSettings extends BackendSettingsApi {
             case _: RowNumber | _: Rank | _: CumeDist | _: DenseRank | _: PercentRank |
                 _: NthValue | _: NTile | _: Lag | _: Lead =>
             case aggrExpr: AggregateExpression
-                if !aggrExpr.aggregateFunction.isInstanceOf[ApproximatePercentile] =>
+                if !aggrExpr.aggregateFunction.isInstanceOf[ApproximatePercentile] &&
+                  !aggrExpr.aggregateFunction.isInstanceOf[Percentile] =>
             case _ => allSupported = false
           }