diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 000dab99ea730..074eae5cb2b16 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -445,7 +445,6 @@ def test_between_function(self):
         self.assertEqual([False, True, True],
                          df.select(df.a.between(df.b, df.c)).collect())
 
-
     def test_save_and_load(self):
         df = self.df
         tmpPath = tempfile.mkdtemp()
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index 590c9c2db97a9..c0503bf047052 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -300,16 +300,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
    *
    * @group java_expr_ops
    */
-  def between(lowerBound: String, upperBound: String): Column = {
-    between(Column(lowerBound), Column(upperBound))
-  }
-
-  /**
-   * True if the current column is between the lower bound and upper bound, inclusive.
-   *
-   * @group java_expr_ops
-   */
-  def between(lowerBound: Column, upperBound: Column): Column = {
+  def between(lowerBound: Any, upperBound: Any): Column = {
     (this >= lowerBound) && (this <= upperBound)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index dcea32f97c840..3c1ad656fc855 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -211,15 +211,15 @@ class ColumnExpressionSuite extends QueryTest {
   test("between") {
     val testData = TestSQLContext.sparkContext.parallelize(
       (0, 1, 2) ::
-      (1, 2, 3) ::
-      (2, 1, 0) ::
-      (2, 2, 4) ::
-      (3, 1, 6) ::
-      (3, 2, 0) :: Nil).toDF("a", "b", "c")
-    testData.registerTempTable("TestData4")
-    checkAnswer(
-      testData.filter($"a".between($"b", $"c")),
-      testData.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
+        (1, 2, 3) ::
+        (2, 1, 0) ::
+        (2, 2, 4) ::
+        (3, 1, 6) ::
+        (3, 2, 0) :: Nil).toDF("a", "b", "c")
+    val expectAnswer = testData.collect().toSeq.
+      filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2))
+
+    checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer)
   }
 
   val booleanData = TestSQLContext.createDataFrame(TestSQLContext.sparkContext.parallelize(