Skip to content

Commit

Permalink
Add unit tests for the percentile aggregate function
Browse files Browse the repository at this point in the history
  • Loading branch information
taiyang-li committed Jul 11, 2024
1 parent d7e74bb commit bd0cfe8
Show file tree
Hide file tree
Showing 2 changed files with 15 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -2572,6 +2572,19 @@ class GlutenClickHouseTPCHSaltNullParquetSuite extends GlutenClickHouseTPCHAbstr
runQueryAndCompare(sql2)({ _ => })
}

test("aggregate function percentile") {
  // Exercise the percentile aggregate in both supported forms:
  // a single percentage literal and an array of percentage literals,
  // comparing Gluten results against vanilla Spark.
  val singlePercentageQuery =
    "select l_linenumber % 10, percentile(l_extendedprice, 0.5) " +
      "from lineitem group by l_linenumber % 10"
  val multiplePercentagesQuery =
    "select l_linenumber % 10, percentile(l_extendedprice, array(0.1, 0.2, 0.3)) " +
      "from lineitem group by l_linenumber % 10"

  // Run each query the same way; no extra per-query assertions beyond
  // the built-in result comparison.
  Seq(singlePercentageQuery, multiplePercentagesQuery).foreach {
    query => runQueryAndCompare(query)({ _ => })
  }
}

test("GLUTEN-5096: Bug fix regexp_extract diff") {
val tbl_create_sql = "create table test_tbl_5096(id bigint, data string) using parquet"
val tbl_insert_sql = "insert into test_tbl_5096 values(1, 'abc'), (2, 'abc\n')"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.gluten.substrait.rel.LocalFilesNode.ReadFileFormat.{DwrfReadFo

import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions.{Alias, CumeDist, DenseRank, Descending, Expression, Lag, Lead, Literal, MakeYMInterval, NamedExpression, NthValue, NTile, PercentRank, Rand, RangeFrame, Rank, RowNumber, SortOrder, SparkPartitionID, SpecialFrameBoundary, SpecifiedWindowFrame, Uuid}
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, ApproximatePercentile, Count, Sum}
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, ApproximatePercentile, Percentile, Count, Sum}
import org.apache.spark.sql.catalyst.plans.{JoinType, LeftOuter, RightOuter}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.execution.{ProjectExec, SparkPlan}
Expand Down Expand Up @@ -374,7 +374,7 @@ object VeloxBackendSettings extends BackendSettingsApi {
case _: RowNumber | _: Rank | _: CumeDist | _: DenseRank | _: PercentRank |
_: NthValue | _: NTile | _: Lag | _: Lead =>
case aggrExpr: AggregateExpression
if !aggrExpr.aggregateFunction.isInstanceOf[ApproximatePercentile] =>
if !aggrExpr.aggregateFunction.isInstanceOf[ApproximatePercentile] && !aggrExpr.aggregateFunction.isInstanceOf[Percentile] =>
case _ =>
allSupported = false
}
Expand Down

0 comments on commit bd0cfe8

Please sign in to comment.