Skip to content

Commit d5dbf05

Browse files
committed
Revert "[SPARK-27439][SQL] Use analyzed plan when explaining Dataset"
This reverts commit ad60c6d.
1 parent 8b86326 commit d5dbf05

File tree

2 files changed

+2
-22
lines changed

2 files changed

+2
-22
lines changed

sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -498,10 +498,7 @@ class Dataset[T] private[sql](
    * @since 1.6.0
    */
   def explain(extended: Boolean): Unit = {
-    // Because views are possibly resolved in the analyzed plan of this dataset. We use analyzed
-    // plan in `ExplainCommand`, for consistency. Otherwise, the plans shown by explain command
-    // might be inconsistent with the evaluated data of this dataset.
-    val explain = ExplainCommand(queryExecution.analyzed, extended = extended)
+    val explain = ExplainCommand(queryExecution.logical, extended = extended)
     sparkSession.sessionState.executePlan(explain).executedPlan.executeCollect().foreach {
       // scalastyle:off println
       r => println(r.getString(0))

sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala

Lines changed: 1 addition & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@

 package org.apache.spark.sql

-import java.io.{ByteArrayOutputStream, File}
+import java.io.File
 import java.nio.charset.StandardCharsets
 import java.sql.{Date, Timestamp}
 import java.util.UUID
@@ -2133,21 +2133,4 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
     checkAnswer(res, Row("1-1", 6, 6))
   }
   }
-
-  test("SPARK-27439: Explain result should match collected result after view change") {
-    withTempView("test", "test2", "tmp") {
-      spark.range(10).createOrReplaceTempView("test")
-      spark.range(5).createOrReplaceTempView("test2")
-      spark.sql("select * from test").createOrReplaceTempView("tmp")
-      val df = spark.sql("select * from tmp")
-      spark.sql("select * from test2").createOrReplaceTempView("tmp")
-
-      val captured = new ByteArrayOutputStream()
-      Console.withOut(captured) {
-        df.explain()
-      }
-      checkAnswer(df, spark.range(10).toDF)
-      assert(captured.toString().contains("Range (0, 10, step=1, splits=2)"))
-    }
-  }
 }

0 commit comments

Comments
 (0)