Skip to content

Commit fbafedf

Browse files
committed
address PR comments
1 parent b877de7 commit fbafedf

File tree

1 file changed

+5
-5
lines changed

1 file changed

+5
-5
lines changed

sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -943,7 +943,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
943943
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {
944944
withTempView("df1", "df2") {
945945
spark.range(10).selectExpr("id AS key", "0").repartition($"key").createTempView("df1")
-      spark.range(10).selectExpr("id AS key", "0").repartition($"key").createTempView("df2")
+      spark.range(20).selectExpr("id AS key", "0").repartition($"key").createTempView("df2")
947947
val planned = sql(
948948
"""
949949
|SELECT * FROM
@@ -953,7 +953,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
953953
|ON t1.k = t2.k
954954
""".stripMargin).queryExecution.executedPlan
955955
val exchanges = planned.collect { case s: ShuffleExchangeExec => s }
-        assert(exchanges.size == 1)
+        assert(exchanges.size == 2)
957957
}
958958
}
959959
}
@@ -962,7 +962,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
962962
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {
963963
withTempView("df1", "df2") {
964964
spark.range(10).selectExpr("id AS key", "0").repartition($"key").createTempView("df1")
-      spark.range(10).selectExpr("id AS key", "0").repartition($"key").createTempView("df2")
+      spark.range(20).selectExpr("id AS key", "0").repartition($"key").createTempView("df2")
966966
val planned = sql(
967967
"""
968968
|SELECT * FROM
@@ -987,7 +987,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
987987
test("aliases in the aggregate expressions should not introduce extra shuffle") {
988988
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {
989989
val t1 = spark.range(10).selectExpr("floor(id/4) as k1")
-      val t2 = spark.range(10).selectExpr("floor(id/4) as k2")
+      val t2 = spark.range(20).selectExpr("floor(id/4) as k2")
991991

992992
val agg1 = t1.groupBy("k1").agg(count(lit("1")).as("cnt1"))
993993
val agg2 = t2.groupBy("k2").agg(count(lit("1")).as("cnt2")).withColumnRenamed("k2", "k3")
@@ -1006,7 +1006,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
10061006
Seq(true, false).foreach { useObjectHashAgg =>
10071007
withSQLConf(SQLConf.USE_OBJECT_HASH_AGG.key -> useObjectHashAgg.toString) {
10081008
val t1 = spark.range(10).selectExpr("floor(id/4) as k1")
-        val t2 = spark.range(10).selectExpr("floor(id/4) as k2")
+        val t2 = spark.range(20).selectExpr("floor(id/4) as k2")
10101010

10111011
val agg1 = t1.groupBy("k1").agg(collect_list("k1"))
10121012
val agg2 = t2.groupBy("k2").agg(collect_list("k2")).withColumnRenamed("k2", "k3")

0 commit comments

Comments (0)