Skip to content

Commit 7196f6e

Browse files
committed
add withDefaultTimeZone and epochDaysToMicros => DateTimeUtils.epochDaysToMicros
1 parent 871867b commit 7196f6e

File tree

2 files changed

+11
-9
lines changed

2 files changed

+11
-9
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2629,8 +2629,8 @@ object Sequence {
2629 2629
(num.toLong(start), num.toLong(stop))
2630 2630
}
2631 2631
else {
2632-
(epochDaysToMicros(num.toInt(start), zoneId),
2633-
epochDaysToMicros(num.toInt(stop), zoneId))
2632+
(DateTimeUtils.epochDaysToMicros(num.toInt(start), zoneId),
2633+
DateTimeUtils.epochDaysToMicros(num.toInt(stop), zoneId))
2634 2634
}
2635 2635

2636 2636
val maxEstimatedArrayLength =

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1839,13 +1839,15 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
1839 1839

1840 1840
test("SPARK-31982: sequence doesn't handle date increments that cross DST") {
1841 1841
Array("America/Chicago", "GMT", "Asia/Shanghai").foreach(tz => {
1842-
checkEvaluation(Sequence(
1843-
Cast(Literal("2011-03-01"), DateType),
1844-
Cast(Literal("2011-04-01"), DateType),
1845-
Option(Literal(stringToInterval("interval 1 month"))),
1846-
Option(tz)),
1847-
Seq(
1848-
Date.valueOf("2011-03-01"), Date.valueOf("2011-04-01")))
1842+
DateTimeTestUtils.withDefaultTimeZone(DateTimeUtils.getTimeZone(tz).toZoneId) {
1843+
checkEvaluation(Sequence(
1844+
Cast(Literal("2011-03-01"), DateType),
1845+
Cast(Literal("2011-04-01"), DateType),
1846+
Option(Literal(stringToInterval("interval 1 month"))),
1847+
Option(tz)),
1848+
Seq(
1849+
Date.valueOf("2011-03-01"), Date.valueOf("2011-04-01")))
1850+
}
1849 1851
})
1850 1852
}
1851 1853
}

0 commit comments

Comments (0)