2 files changed: 15 additions, 1 deletion

Changed paths:
  catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer
  core/src/test/scala/org/apache/spark/sql

@@ -142,7 +142,6 @@ abstract class Optimizer(catalogManager: CatalogManager)
       RewriteNonCorrelatedExists,
       ComputeCurrentTime,
       GetCurrentDatabaseAndCatalog(catalogManager),
-      RewriteDistinctAggregates,
       ReplaceDeduplicateWithAggregate) ::
     //////////////////////////////////////////////////////////////////////////////////////////
     // Optimizer rules start here
@@ -196,6 +195,8 @@ abstract class Optimizer(catalogManager: CatalogManager)
       EliminateSorts) :+
     Batch("Decimal Optimizations", fixedPoint,
       DecimalAggregates) :+
+    Batch("Distinct Aggregate Rewrite", Once,
+      RewriteDistinctAggregates) :+
     Batch("Object Expressions Optimization", fixedPoint,
       EliminateMapObjects,
       CombineTypedFilters,
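The hunks above remove RewriteDistinctAggregates from the earlier rule batch and re-add it as its own Once batch that runs after the "Decimal Optimizations" batch. For context, below is a minimal self-contained sketch of the query shape this ordering affects — multiple distinct aggregates over a DECIMAL column, mirroring the regression test in the second file. The object name and local master setting are illustrative only, not part of this change.

import org.apache.spark.sql.SparkSession

// Sketch only: reproduces the query pattern exercised by the SPARK-32816 test below.
object DistinctDecimalAggregatesSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("distinct-decimal-aggregates-sketch")
      .master("local[*]")
      .getOrCreate()

    // A small table with a low-precision DECIMAL column derived from `id`.
    spark.range(0, 100, 1, 1)
      .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
      .createOrReplaceTempView("test_table")

    // Two distinct aggregates over the same DECIMAL column.
    // Expected result: avg = 49.5, sum = 4950 (as asserted by the new test).
    val result = spark.sql(
      "select avg(distinct decimal_col), sum(distinct decimal_col) from test_table")
    result.show()

    spark.stop()
  }
}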
@@ -2555,6 +2555,19 @@ class DataFrameSuite extends QueryTest
     val df = Seq(0.0 -> -0.0).toDF("pos", "neg")
     checkAnswer(df.select($"pos" > $"neg"), Row(false))
   }
+
+  test("SPARK-32816: aggregating multiple distinct DECIMAL columns") {
+    withTempPath { path =>
+      spark.range(0, 100, 1, 1)
+        .selectExpr("id", "cast(id as decimal(9, 0)) as decimal_col")
+        .write.mode("overwrite")
+        .parquet(path.getAbsolutePath)
+      spark.read.parquet(path.getAbsolutePath).createOrReplaceTempView("test_table")
+      checkAnswer(
+        sql("select avg(distinct decimal_col), sum(distinct decimal_col) from test_table"),
+        Row(49.5, 4950))
+    }
+  }
 }
 
 case class GroupByKey(a: Int, b: Int)