Skip to content

[SPARK-29837][SQL] PostgreSQL dialect: cast to boolean #26463

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 17 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -19,27 +19,27 @@ package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastStringToBoolean
import org.apache.spark.sql.catalyst.expressions.postgreSQL.PostgreCastToBoolean
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{BooleanType, StringType}

object PostgreSQLDialect {
val postgreSQLDialectRules: List[Rule[LogicalPlan]] =
CastStringToBoolean ::
CastToBoolean ::
Nil

object CastStringToBoolean extends Rule[LogicalPlan] with Logging {
object CastToBoolean extends Rule[LogicalPlan] with Logging {
override def apply(plan: LogicalPlan): LogicalPlan = {
// The SQL configuration `spark.sql.dialect` can be changed in runtime.
// To make sure the configuration is effective, we have to check it during rule execution.
val conf = SQLConf.get
if (conf.usePostgreSQLDialect) {
plan.transformExpressions {
case Cast(child, dataType, _)
if dataType == BooleanType && child.dataType == StringType =>
PostgreCastStringToBoolean(child)
case Cast(child, dataType, timeZoneId)
if child.dataType != BooleanType && dataType == BooleanType =>
PostgreCastToBoolean(child, timeZoneId)
}
} else {
plan
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -273,7 +273,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
private[this] def needsTimeZone: Boolean = Cast.needsTimeZone(child.dataType, dataType)

// [[func]] assumes the input is no longer null because eval already does the null check.
@inline private[this] def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])
@inline protected def buildCast[T](a: Any, func: T => Any): Any = func(a.asInstanceOf[T])

private lazy val dateFormatter = DateFormatter(zoneId)
private lazy val timestampFormatter = TimestampFormatter.getFractionFormatter(zoneId)
Expand Down Expand Up @@ -376,7 +376,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
}

// UDFToBoolean
private[this] def castToBoolean(from: DataType): Any => Any = from match {
protected[this] def castToBoolean(from: DataType): Any => Any = from match {
case StringType =>
buildCast[UTF8String](_, s => {
if (StringUtils.isTrueString(s)) {
Expand Down Expand Up @@ -781,7 +781,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
}
}

override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val eval = child.genCode(ctx)
val nullSafeCast = nullSafeCastFunction(child.dataType, dataType, ctx)

Expand All @@ -791,7 +791,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit

// The function arguments are: `input`, `result` and `resultIsNull`. We don't need `inputIsNull`
// in parameter list, because the returned code will be put in null safe evaluation region.
private[this] type CastFunction = (ExprValue, ExprValue, ExprValue) => Block
protected type CastFunction = (ExprValue, ExprValue, ExprValue) => Block

private[this] def nullSafeCastFunction(
from: DataType,
Expand Down Expand Up @@ -1233,7 +1233,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
private[this] def timestampToDoubleCode(ts: ExprValue): Block =
code"$ts / (double)$MICROS_PER_SECOND"

private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
protected[this] def castToBooleanCode(from: DataType): CastFunction = from match {
case StringType =>
val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
(c, evPrim, evNull) =>
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.postgreSQL

import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.{CastBase, Expression, TimeZoneAwareExpression}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.postgreSQL.StringUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String

/**
 * Cast-to-boolean expression implementing PostgreSQL semantics: accepted string
 * literals (case-insensitive, trimmed) evaluate to true/false, and any other
 * string raises an error instead of returning null as Spark's default cast does.
 *
 * @param child      the input expression; must be of string, integer or null type
 *                   (enforced by [[checkInputDataTypes]]).
 * @param timeZoneId session time zone, required by [[CastBase]]'s
 *                   [[TimeZoneAwareExpression]] contract (unused for boolean casts).
 */
case class PostgreCastToBoolean(child: Expression, timeZoneId: Option[String])
  extends CastBase {

  // ANSI mode is not applicable to the PostgreSQL dialect cast; reading this
  // flag here would be a programming error, so fail loudly.
  // NOTE(review): any CastBase code path that consults ansiEnabled will throw —
  // confirm no shared path reads it for boolean casts.
  override protected def ansiEnabled =
    throw new UnsupportedOperationException("PostgreSQL dialect doesn't support ansi mode")

  /** Returns a copy of this cast bound to the given session time zone. */
  override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
    copy(timeZoneId = Option(timeZoneId))

  // PostgreSQL only permits casting string, integer and null values to boolean.
  override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
    case StringType | IntegerType | NullType =>
      TypeCheckResult.TypeCheckSuccess
    case _ =>
      TypeCheckResult.TypeCheckFailure(s"cannot cast type ${child.dataType} to boolean")
  }

  // Interpreted evaluation path. For strings: trim + lowercase, then check the
  // PostgreSQL-accepted literals; anything else is an error (not null).
  // Integer input falls back to Spark's standard numeric-to-boolean cast.
  override def castToBoolean(from: DataType): Any => Any = from match {
    case StringType =>
      buildCast[UTF8String](_, str => {
        val s = str.trim().toLowerCase()
        if (StringUtils.isTrueString(s)) {
          true
        } else if (StringUtils.isFalseString(s)) {
          false
        } else {
          throw new IllegalArgumentException(s"invalid input syntax for type boolean: $s")
        }
      })
    case IntegerType =>
      super.castToBoolean(from)
  }

  // Codegen path mirroring castToBoolean above.
  override def castToBooleanCode(from: DataType): CastFunction = from match {
    case StringType =>
      val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
      (c, evPrim, evNull) =>
        // Bug fix: the input value must be concatenated into the Java error
        // message ("..." + $c). Writing $c inside the generated string literal
        // would splice the generated *variable name* (e.g. value_1) into the
        // message text rather than the offending value.
        code"""
          if ($stringUtils.isTrueString($c.trim().toLowerCase())) {
            $evPrim = true;
          } else if ($stringUtils.isFalseString($c.trim().toLowerCase())) {
            $evPrim = false;
          } else {
            throw new IllegalArgumentException("invalid input syntax for type boolean: " + $c);
          }
        """

    case IntegerType =>
      super.castToBooleanCode(from)
  }

  override def dataType: DataType = BooleanType

  // Null input yields null output; nullability follows the child.
  override def nullable: Boolean = child.nullable

  override def toString: String = s"PostgreCastToBoolean($child as ${dataType.simpleString})"

  override def sql: String = s"CAST(${child.sql} AS ${dataType.sql})"
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,44 +16,58 @@
*/
package org.apache.spark.sql.catalyst.expressions.postgreSQL

import java.sql.{Date, Timestamp}

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.{ExpressionEvalHelper, Literal}

/**
 * Tests for [[PostgreCastToBoolean]]: accepted/rejected string literals and
 * the restricted set of input data types allowed by the PostgreSQL dialect.
 */
class CastSuite extends SparkFunSuite with ExpressionEvalHelper {

  // Evaluates a PostgreSQL-dialect cast of `v` to boolean (no time zone needed
  // for boolean casts) and checks the result against `expected`.
  private def checkPostgreCastToBoolean(v: Any, expected: Any): Unit = {
    checkEvaluation(PostgreCastToBoolean(Literal(v), None), expected)
  }

  test("cast string to boolean") {
    // Accepted "true" spellings: prefixes of true/yes, "1", "on";
    // matching is case-insensitive and ignores surrounding whitespace.
    checkPostgreCastToBoolean("true", true)
    checkPostgreCastToBoolean("tru", true)
    checkPostgreCastToBoolean("tr", true)
    checkPostgreCastToBoolean("t", true)
    checkPostgreCastToBoolean("tRUe", true)
    checkPostgreCastToBoolean(" tRue ", true)
    checkPostgreCastToBoolean(" tRu ", true)
    checkPostgreCastToBoolean("yes", true)
    checkPostgreCastToBoolean("ye", true)
    checkPostgreCastToBoolean("y", true)
    checkPostgreCastToBoolean("1", true)
    checkPostgreCastToBoolean("on", true)

    // Accepted "false" spellings: prefixes of false/no, "0", "off".
    checkPostgreCastToBoolean("false", false)
    checkPostgreCastToBoolean("fals", false)
    checkPostgreCastToBoolean("fal", false)
    checkPostgreCastToBoolean("fa", false)
    checkPostgreCastToBoolean("f", false)
    checkPostgreCastToBoolean(" fAlse ", false)
    checkPostgreCastToBoolean(" fAls ", false)
    checkPostgreCastToBoolean(" FAlsE ", false)
    checkPostgreCastToBoolean("no", false)
    checkPostgreCastToBoolean("n", false)
    checkPostgreCastToBoolean("0", false)
    checkPostgreCastToBoolean("off", false)
    checkPostgreCastToBoolean("of", false)

    // Unlike Spark's default cast (which yields null), invalid input raises.
    intercept[IllegalArgumentException](PostgreCastToBoolean(Literal("o"), None).eval())
    intercept[IllegalArgumentException](PostgreCastToBoolean(Literal("abc"), None).eval())
    intercept[IllegalArgumentException](PostgreCastToBoolean(Literal(""), None).eval())
  }

  test("unsupported data types to cast to boolean") {
    // Only string, integer and null inputs pass type checking.
    assert(PostgreCastToBoolean(Literal(new Timestamp(1)), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(new Date(1)), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(1.toLong), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(1.toShort), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(1.toByte), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(BigDecimal(1.0)), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(1.toDouble), None).checkInputDataTypes().isFailure)
    assert(PostgreCastToBoolean(Literal(1.toFloat), None).checkInputDataTypes().isFailure)
  }
}
Loading