/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution

import scala.language.implicitConversions
import scala.reflect.runtime.universe.TypeTag
import scala.util.control.NonFatal

import org.apache.spark.SparkFunSuite

import org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute
import org.apache.spark.sql.catalyst.expressions.BoundReference
import org.apache.spark.sql.catalyst.util._

import org.apache.spark.sql.test.TestSQLContext
import org.apache.spark.sql.{DataFrameHolder, Row, DataFrame}

/**
 * Base class for writing tests for individual physical operators. For an example of how this
 * class's test helper methods can be used, see [[SortSuite]].
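 *
 * A rough usage sketch (illustrative only: `SomePhysicalOperator` and the column names below are
 * hypothetical and not defined in this file):
 *
 * {{{
 *   val input = Seq(("hello", 4), ("world", 2)).toDF("a", "b")
 *   checkAnswer(
 *     input,
 *     (child: SparkPlan) => SomePhysicalOperator(child),
 *     Seq(Row("hello", 4), Row("world", 2)))
 * }}}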
 */
class SparkPlanTest extends SparkFunSuite {

  /**
   * Creates a DataFrame from a local Seq of Product.
   */
  implicit def localSeqToDataFrameHolder[A <: Product : TypeTag](data: Seq[A]): DataFrameHolder = {
    TestSQLContext.implicits.localSeqToDataFrameHolder(data)
  }

  /**
   * Runs the plan and makes sure the answer matches the expected result.
   * @param input the input data to be used.
   * @param planFunction a function which accepts the input SparkPlan and uses it to instantiate
   *                     the physical operator that's being tested.
   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
   */
  protected def checkAnswer(
      input: DataFrame,
      planFunction: SparkPlan => SparkPlan,
      expectedAnswer: Seq[Row]): Unit = {
    SparkPlanTest.checkAnswer(input, planFunction, expectedAnswer) match {
      case Some(errorMessage) => fail(errorMessage)
      case None =>
    }
  }

  /**
   * Runs the plan and makes sure the answer matches the expected result.
   * @param input the input data to be used.
   * @param planFunction a function which accepts the input SparkPlan and uses it to instantiate
   *                     the physical operator that's being tested.
   * @param expectedAnswer the expected result in a [[Seq]] of [[Product]]s.
   */
  protected def checkAnswer[A <: Product : TypeTag](
      input: DataFrame,
      planFunction: SparkPlan => SparkPlan,
      expectedAnswer: Seq[A]): Unit = {
    val expectedRows = expectedAnswer.map(Row.fromTuple)
    SparkPlanTest.checkAnswer(input, planFunction, expectedRows) match {
      case Some(errorMessage) => fail(errorMessage)
      case None =>
    }
  }
}

/**
 * Helper methods for writing tests of individual physical operators.
 */
object SparkPlanTest {

  /**
   * Runs the plan and makes sure the answer matches the expected result.
   * @param input the input data to be used.
   * @param planFunction a function which accepts the input SparkPlan and uses it to instantiate
   *                     the physical operator that's being tested.
   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
   */
  def checkAnswer(
      input: DataFrame,
      planFunction: SparkPlan => SparkPlan,
      expectedAnswer: Seq[Row]): Option[String] = {

    val outputPlan = planFunction(input.queryExecution.sparkPlan)

    // A very simple resolver to make writing tests easier. In contrast to the real resolver
    // this is always case sensitive and does not try to handle scoping or complex type resolution.
    val resolvedPlan = outputPlan transform {
      case plan: SparkPlan =>
        val inputMap = plan.children.flatMap(_.output).zipWithIndex.map {
          case (a, i) =>
            (a.name, BoundReference(i, a.dataType, a.nullable))
        }.toMap

        plan.transformExpressions {
          case UnresolvedAttribute(Seq(u)) =>
            inputMap.getOrElse(u,
              sys.error(s"Invalid Test: Cannot resolve $u given input $inputMap"))
        }
    }
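
    // For example, if the child's output is [a: Int, b: String], an UnresolvedAttribute named "b"
    // anywhere in the operator under test is rewritten by the resolver above to
    // BoundReference(1, StringType, nullable = true). (Illustrative values; the actual type and
    // nullability come from the child's schema.)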
| 116 | + |
| 117 | + def prepareAnswer(answer: Seq[Row]): Seq[Row] = { |
| 118 | + // Converts data to types that we can do equality comparison using Scala collections. |
| 119 | + // For BigDecimal type, the Scala type has a better definition of equality test (similar to |
| 120 | + // Java's java.math.BigDecimal.compareTo). |
| 121 | + // For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for |
| 122 | + // equality test. |
| 123 | + // This function is copied from Catalyst's QueryTest |
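      // For example, Row(new java.math.BigDecimal("1.0"), Array[Byte](1, 2)) becomes
      // Row(BigDecimal("1.0"), Seq(1, 2)), so that `==` on the resulting Rows compares contents
      // rather than relying on array identity or BigDecimal scale.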
      val converted: Seq[Row] = answer.map { s =>
        Row.fromSeq(s.toSeq.map {
          case d: java.math.BigDecimal => BigDecimal(d)
          case b: Array[Byte] => b.toSeq
          case o => o
        })
      }
      converted.sortBy(_.toString())
    }

    val sparkAnswer: Seq[Row] = try {
      resolvedPlan.executeCollect().toSeq
    } catch {
      case NonFatal(e) =>
        val errorMessage =
          s"""
             | Exception thrown while executing Spark plan:
             | $outputPlan
             | == Exception ==
             | $e
             | ${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
          """.stripMargin
        return Some(errorMessage)
    }

    if (prepareAnswer(expectedAnswer) != prepareAnswer(sparkAnswer)) {
      val errorMessage =
        s"""
           | Results do not match for Spark plan:
           | $outputPlan
           | == Results ==
           | ${sideBySide(
               s"== Correct Answer - ${expectedAnswer.size} ==" +:
                 prepareAnswer(expectedAnswer).map(_.toString()),
               s"== Spark Answer - ${sparkAnswer.size} ==" +:
                 prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n")}
        """.stripMargin
      return Some(errorMessage)
    }

    None
  }
}