
Commit 18360ff

Print the identical range parameters of SparkContext and SQL in EXPLAIN
1 parent 86d251c

4 files changed, +16 -3 lines changed

sql/core/src/main/scala/org/apache/spark/sql/execution/basicPhysicalOperators.scala

Lines changed: 2 additions & 1 deletion
@@ -332,6 +332,7 @@ case class RangeExec(range: org.apache.spark.sql.catalyst.plans.logical.Range)
   extends LeafExecNode with CodegenSupport {
 
   def start: Long = range.start
+  def end: Long = range.end
   def step: Long = range.step
   def numSlices: Int = range.numSlices.getOrElse(sparkContext.defaultParallelism)
   def numElements: BigInt = range.numElements
@@ -540,7 +541,7 @@ case class RangeExec(range: org.apache.spark.sql.catalyst.plans.logical.Range)
     }
   }
 
-  override def simpleString: String = range.simpleString
+  override def simpleString: String = s"Range ($start, $end, step=$step, splits=$numSlices)"
 }
 
 /**
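With end exposed on RangeExec and simpleString built from the physical node's own fields, EXPLAIN renders the same Range line for the SparkContext-style API and for SQL. For illustration only, a minimal spark-shell sketch of the intended effect; the splits value is an assumption tied to the session's default parallelism (2 in the SQL test harness, judging by the golden-file updates below):

scala> spark.range(0, 2).explain()
== Physical Plan ==
*Range (0, 2, step=1, splits=2)

scala> spark.sql("SELECT * FROM range(2)").explain()
== Physical Plan ==
*Range (0, 2, step=1, splits=2)

Previously the SQL variant was rendered through the logical plan's range.simpleString and printed "splits=None", as the .sql.out diffs below show.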

sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ Project [coalesce(cast(id#xL as string), x) AS ifnull(`id`, 'x')#x, id#xL AS nul
 
 == Physical Plan ==
 *Project [coalesce(cast(id#xL as string), x) AS ifnull(`id`, 'x')#x, id#xL AS nullif(`id`, 'x')#xL, coalesce(cast(id#xL as string), x) AS nvl(`id`, 'x')#x, x AS nvl2(`id`, 'x', 'y')#x]
-+- *Range (0, 2, step=1, splits=None)
++- *Range (0, 2, step=1, splits=2)
 
 
 -- !query 9

sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out

Lines changed: 1 addition & 1 deletion
@@ -102,4 +102,4 @@ EXPLAIN select * from RaNgE(2)
 struct<plan:string>
 -- !query 8 output
 == Physical Plan ==
-*Range (0, 2, step=1, splits=None)
+*Range (0, 2, step=1, splits=2)
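Both golden-file updates fall out of the new simpleString: RangeExec now prints its resolved numSlices (range.numSlices.getOrElse(sparkContext.defaultParallelism)) instead of the logical plan's unresolved Option. A tiny self-contained Scala sketch of that resolution; the default parallelism of 2 is inferred from the splits=2 output, not stated in the commit:

// SQL's range(2) carries no explicit slice count; the old output showed this as "splits=None".
val numSlicesFromSql: Option[Int] = None
// Inferred assumption: the SQL golden-file tests run with a default parallelism of 2.
val defaultParallelism = 2
// The same getOrElse fallback used by RangeExec.numSlices applies, hence "splits=2" in the new output.
assert(numSlicesFromSql.getOrElse(defaultParallelism) == 2)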

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 12 additions & 0 deletions
@@ -26,6 +26,7 @@ import org.apache.spark.{AccumulatorSuite, SparkException}
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
 import org.apache.spark.sql.catalyst.util.StringUtils
 import org.apache.spark.sql.execution.aggregate
+import org.apache.spark.sql.execution.command.ExplainCommand
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, CartesianProductExec, SortMergeJoinExec}
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.internal.SQLConf
@@ -2606,4 +2607,15 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       case ae: AnalysisException => assert(ae.plan == null && ae.getMessage == ae.getSimpleMessage)
     }
   }
+
+  test("SPARK-20281 Print the identical range parameters of SparkContext and SQL in EXPLAIN") {
+    def explainStr(df: DataFrame): String = {
+      val explain = ExplainCommand(df.queryExecution.logical, extended = false)
+      val sparkPlan = spark.sessionState.executePlan(explain).executedPlan
+      sparkPlan.executeCollect().map(_.getString(0).trim).headOption.getOrElse("")
+    }
+    val scRange = sqlContext.range(10)
+    val sqlRange = sqlContext.sql("SELECT * FROM range(10)")
+    assert(explainStr(scRange) === explainStr(sqlRange))
+  }
 }
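The new test builds an ExplainCommand over each query's logical plan, collects the single plan-string row, and asserts that the SparkContext-style range and the SQL range produce identical EXPLAIN output. To run just this test, something along these lines should work with Spark's sbt wrapper (the -z substring filter is standard ScalaTest, not part of this commit):

build/sbt "sql/testOnly *SQLQuerySuite -- -z SPARK-20281"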
