update test cases for map()
add test cases for filter()
kiszk committed Mar 10, 2017
1 parent 35ba2c6 commit 4c88c03
Showing 1 changed file with 31 additions and 14 deletions.
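
In the diff below, the map() updates only switch the test lambdas from explicit parameters (e => e + 1) to Scala placeholder syntax (_ + 1); the two forms compile to the same function. As a minimal standalone sketch of the behaviour these cases exercise, assuming a hypothetical local Spark 2.x SparkSession (the setup is illustrative and not part of this commit):

// Illustrative sketch only; not taken from this commit.
import org.apache.spark.sql.SparkSession

object MapPrimitiveSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("map-sketch").getOrCreate()
    import spark.implicits._

    val dsInt = Seq(1, 2, 3).toDS()
    // Placeholder syntax and an explicit lambda produce the same results.
    assert(dsInt.map(_ + 1).collect().sameElements(dsInt.map(e => e + 1).collect()))
    // Mapping an Int Dataset with a Boolean-returning function yields a Dataset[Boolean].
    assert(dsInt.map(_ > 1).collect().sameElements(Array(false, true, true)))

    spark.stop()
  }
}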
@@ -64,33 +64,33 @@ class DatasetPrimitiveSuite extends QueryTest with SharedSQLContext {
 
   test("mapPrimitive") {
     val dsInt = Seq(1, 2, 3).toDS()
-    checkDataset(dsInt.map(e => e > 1), false, true, true)
-    checkDataset(dsInt.map(e => e + 1), 2, 3, 4)
-    checkDataset(dsInt.map(e => e + 8589934592L), 8589934593L, 8589934594L, 8589934595L)
-    checkDataset(dsInt.map(e => e + 1.1F), 2.1F, 3.1F, 4.1F)
-    checkDataset(dsInt.map(e => e + 1.23D), 2.23D, 3.23D, 4.23D)
+    checkDataset(dsInt.map(_ > 1), false, true, true)
+    checkDataset(dsInt.map(_ + 1), 2, 3, 4)
+    checkDataset(dsInt.map(_ + 8589934592L), 8589934593L, 8589934594L, 8589934595L)
+    checkDataset(dsInt.map(_ + 1.1F), 2.1F, 3.1F, 4.1F)
+    checkDataset(dsInt.map(_ + 1.23D), 2.23D, 3.23D, 4.23D)
 
     val dsLong = Seq(1L, 2L, 3L).toDS()
-    checkDataset(dsLong.map(e => e > 1), false, true, true)
+    checkDataset(dsLong.map(_ > 1), false, true, true)
     checkDataset(dsLong.map(e => (e + 1).toInt), 2, 3, 4)
-    checkDataset(dsLong.map(e => e + 8589934592L), 8589934593L, 8589934594L, 8589934595L)
-    checkDataset(dsLong.map(e => e + 1.1F), 2.1F, 3.1F, 4.1F)
-    checkDataset(dsLong.map(e => e + 1.23D), 2.23D, 3.23D, 4.23D)
+    checkDataset(dsLong.map(_ + 8589934592L), 8589934593L, 8589934594L, 8589934595L)
+    checkDataset(dsLong.map(_ + 1.1F), 2.1F, 3.1F, 4.1F)
+    checkDataset(dsLong.map(_ + 1.23D), 2.23D, 3.23D, 4.23D)
 
     val dsFloat = Seq(1F, 2F, 3F).toDS()
-    checkDataset(dsFloat.map(e => e > 1), false, true, true)
+    checkDataset(dsFloat.map(_ > 1), false, true, true)
     checkDataset(dsFloat.map(e => (e + 1).toInt), 2, 3, 4)
     checkDataset(dsFloat.map(e => (e + 123456L).toLong), 123457L, 123458L, 123459L)
-    checkDataset(dsFloat.map(e => e + 1.1F), 2.1F, 3.1F, 4.1F)
-    checkDataset(dsFloat.map(e => e + 1.23D), 2.23D, 3.23D, 4.23D)
+    checkDataset(dsFloat.map(_ + 1.1F), 2.1F, 3.1F, 4.1F)
+    checkDataset(dsFloat.map(_ + 1.23D), 2.23D, 3.23D, 4.23D)
 
     val dsDouble = Seq(1D, 2D, 3D).toDS()
-    checkDataset(dsDouble.map(e => e > 1), false, true, true)
+    checkDataset(dsDouble.map(_ > 1), false, true, true)
     checkDataset(dsDouble.map(e => (e + 1).toInt), 2, 3, 4)
     checkDataset(dsDouble.map(e => (e + 8589934592L).toLong),
       8589934593L, 8589934594L, 8589934595L)
     checkDataset(dsDouble.map(e => (e + 1.1F).toFloat), 2.1F, 3.1F, 4.1F)
-    checkDataset(dsDouble.map(e => e + 1.23D), 2.23D, 3.23D, 4.23D)
+    checkDataset(dsDouble.map(_ + 1.23D), 2.23D, 3.23D, 4.23D)
 
     val dsBoolean = Seq(true, false).toDS()
     checkDataset(dsBoolean.map(e => !e), false, true)
@@ -103,6 +103,23 @@ class DatasetPrimitiveSuite extends QueryTest with SharedSQLContext {
       2, 4)
   }
 
+  test("filterPrimitive") {
+    val dsInt = Seq(1, 2, 3).toDS()
+    checkDataset(dsInt.filter(_ > 1), 2, 3)
+
+    val dsLong = Seq(1L, 2L, 3L).toDS()
+    checkDataset(dsLong.filter(_ > 1), 2L, 3L)
+
+    val dsFloat = Seq(1F, 2F, 3F).toDS()
+    checkDataset(dsFloat.filter(_ > 1), 2F, 3F)
+
+    val dsDouble = Seq(1D, 2D, 3D).toDS()
+    checkDataset(dsDouble.filter(_ > 1), 2D, 3D)
+
+    val dsBoolean = Seq(true, false).toDS()
+    checkDataset(dsBoolean.filter(e => !e), false)
+  }
+
   test("foreach") {
     val ds = Seq(1, 2, 3).toDS()
     val acc = sparkContext.longAccumulator
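The new filterPrimitive cases rely on Dataset.filter keeping exactly the elements for which the predicate returns true, with the element type unchanged. A minimal sketch of the same behaviour outside the test suite, again assuming a hypothetical local SparkSession (setup code is illustrative, not part of this commit):

// Illustrative sketch only; not taken from this commit.
import org.apache.spark.sql.SparkSession

object FilterPrimitiveSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("filter-sketch").getOrCreate()
    import spark.implicits._

    // filter drops elements whose predicate is false; the result is still a Dataset of the same type.
    assert(Seq(1, 2, 3).toDS().filter(_ > 1).collect().sameElements(Array(2, 3)))
    assert(Seq(1L, 2L, 3L).toDS().filter(_ > 1).collect().sameElements(Array(2L, 3L)))
    assert(Seq(true, false).toDS().filter(e => !e).collect().sameElements(Array(false)))

    spark.stop()
  }
}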
