Skip to content

Commit a7fb330

Browse files
maropucloud-fan
authored and committed
[SPARK-31468][SQL] Null types should be implicitly casted to Decimal types
### What changes were proposed in this pull request? This PR intends to fix a bug that occurs when comparing null types to decimal types in master/branch-3.0; ``` scala> Seq(BigDecimal(10)).toDF("v1").selectExpr("v1 = NULL").explain(true) org.apache.spark.sql.AnalysisException: cannot resolve '(`v1` = NULL)' due to data type mismatch: differing types in '(`v1` = NULL)' (decimal(38,18) and null).; line 1 pos 0; 'Project [(v1#5 = null) AS (v1 = NULL)#7] +- Project [value#2 AS v1#5] +- LocalRelation [value#2] ... ``` The query above passed in v2.4.5. ### Why are the changes needed? bugfix ### Does this PR introduce any user-facing change? No. ### How was this patch tested? Added tests. Closes #28241 from maropu/SPARK-31468. Authored-by: Takeshi Yamamuro <yamamuro@apache.org> Signed-off-by: Wenchen Fan <wenchen@databricks.com>
1 parent 697083c commit a7fb330

File tree

2 files changed

+33
-4
lines changed

2 files changed

+33
-4
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -842,15 +842,26 @@ object TypeCoercion {
842842
* Casts types according to the expected input types for [[Expression]]s.
843843
*/
844844
object ImplicitTypeCasts extends TypeCoercionRule {
845+
846+
private def canHandleTypeCoercion(leftType: DataType, rightType: DataType): Boolean = {
847+
(leftType, rightType) match {
848+
case (_: DecimalType, NullType) => true
849+
case (NullType, _: DecimalType) => true
850+
case _ =>
851+
// If DecimalType operands are involved except for the two cases above,
852+
// DecimalPrecision will handle it.
853+
!leftType.isInstanceOf[DecimalType] && !rightType.isInstanceOf[DecimalType] &&
854+
leftType != rightType
855+
}
856+
}
857+
845858
override protected def coerceTypes(
846859
plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
847860
// Skip nodes who's children have not been resolved yet.
848861
case e if !e.childrenResolved => e
849862

850-
// If DecimalType operands are involved, DecimalPrecision will handle it
851-
case b @ BinaryOperator(left, right) if !left.dataType.isInstanceOf[DecimalType] &&
852-
!right.dataType.isInstanceOf[DecimalType] &&
853-
left.dataType != right.dataType =>
863+
case b @ BinaryOperator(left, right)
864+
if canHandleTypeCoercion(left.dataType, right.dataType) =>
854865
findTightestCommonType(left.dataType, right.dataType).map { commonType =>
855866
if (b.inputType.acceptsType(commonType)) {
856867
// If the expression accepts the tightest common type, cast to that.

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1541,6 +1541,24 @@ class TypeCoercionSuite extends AnalysisTest {
15411541
Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
15421542
Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)))
15431543
}
1544+
1545+
test("SPARK-31468: null types should be casted to decimal types in ImplicitTypeCasts") {
1546+
Seq(AnyTypeBinaryOperator(_, _), NumericTypeBinaryOperator(_, _)).foreach { binaryOp =>
1547+
// binaryOp(decimal, null) case
1548+
ruleTest(TypeCoercion.ImplicitTypeCasts,
1549+
binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
1550+
Literal.create(null, NullType)),
1551+
binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
1552+
Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT)))
1553+
1554+
// binaryOp(null, decimal) case
1555+
ruleTest(TypeCoercion.ImplicitTypeCasts,
1556+
binaryOp(Literal.create(null, NullType),
1557+
Literal.create(null, DecimalType.SYSTEM_DEFAULT)),
1558+
binaryOp(Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT),
1559+
Literal.create(null, DecimalType.SYSTEM_DEFAULT)))
1560+
}
1561+
}
15441562
}
15451563

15461564

0 commit comments

Comments
 (0)