Skip to content

Commit

Permalink
Update
Browse files Browse the repository at this point in the history
  • Loading branch information
云峤 committed May 4, 2015
1 parent 7b9b858 commit 7e64d1e
Show file tree
Hide file tree
Showing 3 changed files with 10 additions and 20 deletions.
1 change: 0 additions & 1 deletion python/pyspark/sql/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -445,7 +445,6 @@ def test_between_function(self):
self.assertEqual([False, True, True],
df.select(df.a.between(df.b, df.c)).collect())


def test_save_and_load(self):
df = self.df
tmpPath = tempfile.mkdtemp()
Expand Down
11 changes: 1 addition & 10 deletions sql/core/src/main/scala/org/apache/spark/sql/Column.scala
Original file line number Diff line number Diff line change
Expand Up @@ -300,16 +300,7 @@ class Column(protected[sql] val expr: Expression) extends Logging {
*
* @group java_expr_ops
*/
def between(lowerBound: String, upperBound: String): Column = {
between(Column(lowerBound), Column(upperBound))
}

/**
* True if the current column is between the lower bound and upper bound, inclusive.
*
* @group java_expr_ops
*/
def between(lowerBound: Column, upperBound: Column): Column = {
/** Returns a boolean column that is true when this column's value lies in
  * `[lowerBound, upperBound]` (both endpoints inclusive). Bounds may be
  * literals or other Columns.
  */
def between(lowerBound: Any, upperBound: Any): Column =
  (this >= lowerBound) && (this <= upperBound)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -211,15 +211,15 @@ class ColumnExpressionSuite extends QueryTest {
test("between") {
val testData = TestSQLContext.sparkContext.parallelize(
(0, 1, 2) ::
(1, 2, 3) ::
(2, 1, 0) ::
(2, 2, 4) ::
(3, 1, 6) ::
(3, 2, 0) :: Nil).toDF("a", "b", "c")
testData.registerTempTable("TestData4")
checkAnswer(
testData.filter($"a".between($"b", $"c")),
testData.collect().toSeq.filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2)))
(1, 2, 3) ::
(2, 1, 0) ::
(2, 2, 4) ::
(3, 1, 6) ::
(3, 2, 0) :: Nil).toDF("a", "b", "c")
val expectAnswer = testData.collect().toSeq.
filter(r => r.getInt(0) >= r.getInt(1) && r.getInt(0) <= r.getInt(2))

checkAnswer(testData.filter($"a".between($"b", $"c")), expectAnswer)
}

val booleanData = TestSQLContext.createDataFrame(TestSQLContext.sparkContext.parallelize(
Expand Down

0 comments on commit 7e64d1e

Please sign in to comment.