Skip to content

[SPARK-24834][CORE] use java comparison for float and double #21794

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
28 changes: 0 additions & 28 deletions core/src/main/scala/org/apache/spark/util/Utils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1703,34 +1703,6 @@ private[spark] object Utils extends Logging {
hashAbs
}

/**
* NaN-safe version of `java.lang.Double.compare()` which allows NaN values to be compared
* according to semantics where NaN == NaN and NaN is greater than any non-NaN double.
*/
/**
 * NaN-safe version of `java.lang.Double.compare()` which allows NaN values to be compared
 * according to semantics where NaN == NaN and NaN is greater than any non-NaN double.
 */
def nanSafeCompareDoubles(x: Double, y: Double): Int = {
  val xIsNaN = java.lang.Double.isNaN(x)
  val yIsNaN = java.lang.Double.isNaN(y)
  (xIsNaN, yIsNaN) match {
    case (true, true)   => 0  // NaN == NaN under these semantics
    case (true, false)  => 1  // NaN sorts above every non-NaN value
    case (false, true)  => -1
    case (false, false) => if (x == y) 0 else if (x > y) 1 else -1
  }
}

/**
* NaN-safe version of `java.lang.Float.compare()` which allows NaN values to be compared
* according to semantics where NaN == NaN and NaN is greater than any non-NaN float.
*/
/**
 * NaN-safe version of `java.lang.Float.compare()` which allows NaN values to be compared
 * according to semantics where NaN == NaN and NaN is greater than any non-NaN float.
 */
def nanSafeCompareFloats(x: Float, y: Float): Int = {
  val xIsNaN = java.lang.Float.isNaN(x)
  val yIsNaN = java.lang.Float.isNaN(y)
  (xIsNaN, yIsNaN) match {
    case (true, true)   => 0  // NaN == NaN under these semantics
    case (true, false)  => 1  // NaN sorts above every non-NaN value
    case (false, true)  => -1
    case (false, false) => if (x == y) 0 else if (x > y) 1 else -1
  }
}

/**
* Returns the system properties map that is thread-safe to iterator over. It gets the
* properties which have been set explicitly, as well as those for which only a default value
Expand Down
30 changes: 0 additions & 30 deletions core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -806,36 +806,6 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
assert(buffer.toString === "st circular test circular")
}

test("nanSafeCompareDoubles") {
  // For non-NaN inputs the ordering must agree with java.lang.Double.compare in both directions.
  def checkAgreesWithJava(a: Double, b: Double): Unit = {
    assert(Utils.nanSafeCompareDoubles(a, b) === JDouble.compare(a, b))
    assert(Utils.nanSafeCompareDoubles(b, a) === JDouble.compare(b, a))
  }
  checkAgreesWithJava(0d, 0d)
  checkAgreesWithJava(0d, 1d)
  checkAgreesWithJava(Double.MinValue, Double.MaxValue)
  // NaN compares equal to itself and above every other value, including the infinities.
  assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) === 0)
  Seq(Double.PositiveInfinity, Double.NegativeInfinity).foreach { v =>
    assert(Utils.nanSafeCompareDoubles(Double.NaN, v) === 1)
    assert(Utils.nanSafeCompareDoubles(v, Double.NaN) === -1)
  }
}

test("nanSafeCompareFloats") {
  // For non-NaN inputs the ordering must agree with java.lang.Float.compare in both directions.
  def checkAgreesWithJava(a: Float, b: Float): Unit = {
    assert(Utils.nanSafeCompareFloats(a, b) === JFloat.compare(a, b))
    assert(Utils.nanSafeCompareFloats(b, a) === JFloat.compare(b, a))
  }
  checkAgreesWithJava(0f, 0f)
  checkAgreesWithJava(1f, 1f)
  checkAgreesWithJava(Float.MinValue, Float.MaxValue)
  // NaN compares equal to itself and above every other value, including the infinities.
  assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NaN) === 0)
  Seq(Float.PositiveInfinity, Float.NegativeInfinity).foreach { v =>
    assert(Utils.nanSafeCompareFloats(Float.NaN, v) === 1)
    assert(Utils.nanSafeCompareFloats(v, Float.NaN) === -1)
  }
}

test("isDynamicAllocationEnabled") {
val conf = new SparkConf()
conf.set("spark.master", "yarn")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -609,8 +609,8 @@ class CodegenContext {
def genComp(dataType: DataType, c1: String, c2: String): String = dataType match {
// java boolean doesn't support > or < operator
case BooleanType => s"($c1 == $c2 ? 0 : ($c1 ? 1 : -1))"
case DoubleType => s"org.apache.spark.util.Utils.nanSafeCompareDoubles($c1, $c2)"
case FloatType => s"org.apache.spark.util.Utils.nanSafeCompareFloats($c1, $c2)"
case DoubleType => s"java.lang.Double.compare($c1, $c2)"
case FloatType => s"java.lang.Float.compare($c1, $c2)"
// use c1 - c2 may overflow
case dt: DataType if isPrimitiveType(dt) => s"($c1 > $c2 ? 1 : $c1 < $c2 ? -1 : 0)"
case BinaryType => s"org.apache.spark.sql.catalyst.util.TypeUtils.compareBinary($c1, $c2)"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class DoubleType private() extends FractionalType {
private[sql] val numeric = implicitly[Numeric[Double]]
private[sql] val fractional = implicitly[Fractional[Double]]
private[sql] val ordering = new Ordering[Double] {
override def compare(x: Double, y: Double): Int = Utils.nanSafeCompareDoubles(x, y)
override def compare(x: Double, y: Double): Int = java.lang.Double.compare(x, y)
}
private[sql] val asIntegral = DoubleAsIfIntegral

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ class FloatType private() extends FractionalType {
private[sql] val numeric = implicitly[Numeric[Float]]
private[sql] val fractional = implicitly[Fractional[Float]]
private[sql] val ordering = new Ordering[Float] {
override def compare(x: Float, y: Float): Int = Utils.nanSafeCompareFloats(x, y)
override def compare(x: Float, y: Float): Int = java.lang.Float.compare(x, y)
}
private[sql] val asIntegral = FloatAsIfIntegral

Expand Down