Skip to content

Commit 287d3bb

Browse files
committed
apply type check interface to more expressions
1 parent a458efc commit 287d3bb

File tree

17 files changed: +296 additions, −270 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import java.sql.{Date, Timestamp}
2222
import java.text.{DateFormat, SimpleDateFormat}
2323

2424
import org.apache.spark.Logging
25-
import org.apache.spark.sql.catalyst
25+
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
2626
import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, GeneratedExpressionCode}
2727
import org.apache.spark.sql.catalyst.util.DateTimeUtils
2828
import org.apache.spark.sql.types._
@@ -31,7 +31,14 @@ import org.apache.spark.unsafe.types.UTF8String
3131
/** Cast the child expression to the target data type. */
3232
case class Cast(child: Expression, dataType: DataType) extends UnaryExpression with Logging {
3333

34-
override lazy val resolved = childrenResolved && resolve(child.dataType, dataType)
34+
override def checkInputDataTypes(): TypeCheckResult = {
35+
if (resolve(child.dataType, dataType)) {
36+
TypeCheckResult.TypeCheckSuccess
37+
} else {
38+
TypeCheckResult.TypeCheckFailure(
39+
s"cannot cast ${child.dataType} to $dataType")
40+
}
41+
}
3542

3643
override def foldable: Boolean = child.foldable
3744

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -162,9 +162,7 @@ abstract class Expression extends TreeNode[Expression] {
162162
/**
163163
* Checks the input data types, returns `TypeCheckResult.success` if it's valid,
164164
* or returns a `TypeCheckResult` with an error message if invalid.
165-
* Note: it's not valid to call this method until `childrenResolved == true`
166-
* TODO: we should remove the default implementation and implement it for all
167-
* expressions with proper error message.
165+
* Note: it's not valid to call this method until `childrenResolved == true`.
168166
*/
169167
def checkInputDataTypes(): TypeCheckResult = TypeCheckResult.TypeCheckSuccess
170168
}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExtractValue.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,11 @@ object ExtractValue {
9696
}
9797
}
9898

99+
/**
100+
* A common interface of all kinds of extract value expressions.
101+
* Note: concrete extract value expressions are created only by `ExtractValue.apply`,
102+
* we don't need to do type check for them.
103+
*/
99104
trait ExtractValue extends UnaryExpression {
100105
self: Product =>
101106
}
@@ -179,9 +184,6 @@ case class GetArrayItem(child: Expression, ordinal: Expression)
179184

180185
override def dataType: DataType = child.dataType.asInstanceOf[ArrayType].elementType
181186

182-
override lazy val resolved = childrenResolved &&
183-
child.dataType.isInstanceOf[ArrayType] && ordinal.dataType.isInstanceOf[IntegralType]
184-
185187
protected def evalNotNull(value: Any, ordinal: Any) = {
186188
// TODO: consider using Array[_] for ArrayType child to avoid
187189
// boxing of primitives
@@ -203,8 +205,6 @@ case class GetMapValue(child: Expression, ordinal: Expression)
203205

204206
override def dataType: DataType = child.dataType.asInstanceOf[MapType].valueType
205207

206-
override lazy val resolved = childrenResolved && child.dataType.isInstanceOf[MapType]
207-
208208
protected def evalNotNull(value: Any, ordinal: Any) = {
209209
val baseValue = value.asInstanceOf[Map[Any, _]]
210210
baseValue.get(ordinal).orNull

0 commit comments

Comments (0)