
Commit 7e64d1e

Author: 云峤
Commit message: Update

1 parent 7b9b858, commit 7e64d1e

3 files changed: 10 additions, 20 deletions

python/pyspark/sql/tests.py
Lines changed: 0 additions & 1 deletion

@@ -445,7 +445,6 @@ def test_between_function(self):
         self.assertEqual([False, True, True],
                          df.select(df.a.between(df.b, df.c)).collect())
 
-
     def test_save_and_load(self):
         df = self.df
         tmpPath = tempfile.mkdtemp()

sql/core/src/main/scala/org/apache/spark/sql/Column.scala
Lines changed: 1 addition & 10 deletions

@@ -300,16 +300,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
    *
    * @group java_expr_ops
    */
-  def between(lowerBound: String, upperBound: String): Column = {
-    between(Column(lowerBound), Column(upperBound))
-  }
-
-  /**
-   * True if the current column is between the lower bound and upper bound, inclusive.
-   *
-   * @group java_expr_ops
-   */
-  def between(lowerBound: Column, upperBound: Column): Column = {
+  def between(lowerBound: Any, upperBound: Any): Column = {
     (this >= lowerBound) && (this <= upperBound)
   }
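Note on the change above: the two earlier overloads, between(lowerBound: String, upperBound: String) and between(lowerBound: Column, upperBound: Column), are collapsed into a single between(lowerBound: Any, upperBound: Any). A minimal usage sketch, not part of the commit; it assumes a SQLContext named sqlContext is already available, and the DataFrame contents and column names are illustrative:

// Sketch only: assumes a SQLContext named `sqlContext` exists.
import sqlContext.implicits._

val df = Seq((1, 0, 2), (5, 2, 3)).toDF("a", "b", "c")

// Column bounds and plain literals both go through the single between(Any, Any) overload;
// the >= and <= operators lift non-Column arguments to literals.
df.filter($"a".between($"b", $"c")).show()   // keeps rows where b <= a <= c
df.filter($"a".between(0, 3)).show()         // literal bounds, inclusive on both ends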

sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
Lines changed: 9 additions & 9 deletions

@@ -211,15 +211,15 @@ class ColumnExpressionSuite extends QueryTest {
   test("between") {
     val testData = TestSQLContext.sparkContext.parallelize(
       (0, 1, 2) ::
-      (1, 2, 3) ::
-      (2, 1, 0) ::
-      (2, 2, 4) ::
-      (3, 1, 6) ::
-      (3, 2, 0) :: Nil).toDF("a", "b", "c")
-    testData.registerTempTable("TestData4")
-    checkAnswer(
-      testData.filter($"a".between($"b", $"c")),
-      testData.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
+      (1, 2, 3) ::
+      (2, 1, 0) ::
+      (2, 2, 4) ::
+      (3, 1, 6) ::
+      (3, 2, 0) :: Nil).toDF("a", "b", "c")
+    val expectAnswer = testData.collect().toSeq.
+      filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2))
+
+    checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer)
   }
 
   val booleanData = TestSQLContext.createDataFrame(TestSQLContext.sparkContext.parallelize(
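Note on the test refactor above: the registerTempTable("TestData4") call is dropped and the locally computed expected rows are named expectAnswer, so the assertion no longer depends on a registered temp table. The same checkAnswer pattern also covers literal bounds through the new Any-typed overload; a hypothetical extra case in the same style, not part of the commit, with illustrative bound values and TestSQLContext implicits assumed in scope:

// Hypothetical additional check in the same style as the test above.
val expectLiteral = testData.collect().toSeq.filter(r => r.getInt(0) >= 1 && r.getInt(0) <= 2)
checkAnswer(testData.filter($"a".between(1, 2)), expectLiteral)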
