From 7b9b8583b25f3417e9b6c5672598d325678c7769 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E4=BA=91=E5=B3=A4?=
Date: Sun, 3 May 2015 14:04:08 +0800
Subject: [PATCH] undo

---
 .../main/scala/org/apache/spark/sql/Column.scala   |  3 +--
 .../apache/spark/sql/ColumnExpressionSuite.scala   | 14 +++++++++++---
 .../test/scala/org/apache/spark/sql/TestData.scala | 11 -----------
 3 files changed, 12 insertions(+), 16 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index b51b6368eeb56..590c9c2db97a9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -310,8 +310,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
    * @group java_expr_ops
    */
   def between(lowerBound: Column, upperBound: Column): Column = {
-    And(GreaterThanOrEqual(this.expr, lowerBound.expr),
-      LessThanOrEqual(this.expr, upperBound.expr))
+    (this >= lowerBound) && (this <= upperBound)
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
index b63c1814adc3d..dcea32f97c840 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ColumnExpressionSuite.scala
@@ -209,9 +209,17 @@ class ColumnExpressionSuite extends QueryTest {
   }
 
   test("between") {
-    checkAnswer(
-      testData4.filter($"a".between($"b", $"c")),
-      testData4.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
+    val testData = TestSQLContext.sparkContext.parallelize(
+      (0, 1, 2) ::
+      (1, 2, 3) ::
+      (2, 1, 0) ::
+      (2, 2, 4) ::
+      (3, 1, 6) ::
+      (3, 2, 0) :: Nil).toDF("a", "b", "c")
+    testData.registerTempTable("TestData4")
+    checkAnswer(
+      testData.filter($"a".between($"b", $"c")),
+      testData.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
   }
 
   val booleanData = TestSQLContext.createDataFrame(TestSQLContext.sparkContext.parallelize(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 487d07249922f..225b51bd73d6c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -57,17 +57,6 @@ object TestData {
       TestData2(3, 2) :: Nil, 2).toDF()
   testData2.registerTempTable("testData2")
 
-  case class TestData4(a: Int, b: Int, c: Int)
-  val testData4 =
-    TestSQLContext.sparkContext.parallelize(
-      TestData4(0, 1, 2) ::
-      TestData4(1, 2, 3) ::
-      TestData4(2, 1, 0) ::
-      TestData4(2, 2, 4) ::
-      TestData4(3, 1, 6) ::
-      TestData4(3, 2, 0) :: Nil, 2).toDF()
-  testData4.registerTempTable("TestData4")
-
   case class DecimalData(a: BigDecimal, b: BigDecimal)
 
   val decimalData =