@@ -17,17 +17,22 @@
 
 package org.apache.spark.sql.catalyst.util
 
+import java.lang.{Double => JDouble, Float => JFloat}
+
 import org.apache.spark.SparkFunSuite
 
 class TypeUtilsSuite extends SparkFunSuite {
 
   import TypeUtils._
 
   test("compareDoubles") {
-    assert(compareDoubles(0, 0) === 0)
-    assert(compareDoubles(1, 0) === -1)
-    assert(compareDoubles(0, 1) === 1)
-    assert(compareDoubles(Double.MinValue, Double.MaxValue) === 1)
+    def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
+      assert(compareDoubles(a, b) === JDouble.compare(a, b))
+      assert(compareDoubles(b, a) === JDouble.compare(b, a))
+    }
+    shouldMatchDefaultOrder(0d, 0d)
+    shouldMatchDefaultOrder(0d, 1d)
+    shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
     assert(compareDoubles(Double.NaN, Double.NaN) === 0)
     assert(compareDoubles(Double.NaN, Double.PositiveInfinity) === 1)
     assert(compareDoubles(Double.NaN, Double.NegativeInfinity) === 1)
@@ -36,10 +41,13 @@ class TypeUtilsSuite extends SparkFunSuite {
   }
 
   test("compareFloats") {
-    assert(compareFloats(0, 0) === 0)
-    assert(compareFloats(1, 0) === -1)
-    assert(compareFloats(0, 1) === 1)
-    assert(compareFloats(Float.MinValue, Float.MaxValue) === 1)
+    def shouldMatchDefaultOrder(a: Float, b: Float): Unit = {
+      assert(compareFloats(a, b) === JFloat.compare(a, b))
+      assert(compareFloats(b, a) === JFloat.compare(b, a))
+    }
+    shouldMatchDefaultOrder(0f, 0f)
+    shouldMatchDefaultOrder(1f, 1f)
+    shouldMatchDefaultOrder(Float.MinValue, Float.MaxValue)
     assert(compareFloats(Float.NaN, Float.NaN) === 0)
     assert(compareFloats(Float.NaN, Float.PositiveInfinity) === 1)
     assert(compareFloats(Float.NaN, Float.NegativeInfinity) === 1)
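For readers unfamiliar with the JDK ordering that the test now delegates to, the standalone sketch below (not part of this change; the object name and its local compareDoubles are hypothetical stand-ins for TypeUtils.compareDoubles) illustrates the same check-against-java.lang.Double.compare pattern, whose total order places NaN above PositiveInfinity and -0.0 below 0.0.

// Illustrative, self-contained sketch of the comparator-vs-JDK-order check.
// compareDoubles here simply delegates to the JDK total order; in the suite
// above it would be the TypeUtils implementation under test.
object CompareAgainstDefaultOrderSketch {
  def compareDoubles(a: Double, b: Double): Int = java.lang.Double.compare(a, b)

  def main(args: Array[String]): Unit = {
    // Check both argument orders, mirroring shouldMatchDefaultOrder above.
    def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
      assert(compareDoubles(a, b) == java.lang.Double.compare(a, b))
      assert(compareDoubles(b, a) == java.lang.Double.compare(b, a))
    }
    shouldMatchDefaultOrder(0d, 1d)
    shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
    shouldMatchDefaultOrder(Double.NaN, Double.PositiveInfinity)
    println("all comparisons match java.lang.Double.compare")
  }
}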