update scalatest to version 2.1.5
witgo committed May 9, 2014
1 parent 191279c commit c458928
Showing 8 changed files with 22 additions and 21 deletions.
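The ScalaTest upgrade from 1.9.1 to 2.1.5 (with ScalaCheck moving from 1.10.0 to 1.11.3) is mostly a version bump in pom.xml and project/SparkBuild.scala; the remaining changes adapt test code to the 2.x API: the eventually timeout type moves to PatienceConfiguration.Timeout, the beforeAll/afterAll overloads that took a config map give way to their zero-argument variants, suites that already extend another class mix in FunSuiteLike rather than FunSuite, and a few assertions swap ScalaTest's === for plain equals.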
6 changes: 3 additions & 3 deletions core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
@@ -25,7 +25,7 @@ import scala.language.postfixOps
import scala.util.Random

import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.concurrent.Eventually
+import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._

@@ -76,7 +76,7 @@ class ContextCleanerSuite extends FunSuite with BeforeAndAfter with LocalSparkCo
tester.assertCleanup()

// Verify that shuffles can be re-executed after cleaning up
-assert(rdd.collect().toList === collected)
+assert(rdd.collect().toList.equals(collected))
}

test("cleanup broadcast") {
@@ -285,7 +285,7 @@ class CleanerTester(
sc.cleaner.get.attachListener(cleanerListener)

/** Assert that all the stuff has been cleaned up */
-def assertCleanup()(implicit waitTimeout: Eventually.Timeout) {
+def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
try {
eventually(waitTimeout, interval(100 millis)) {
assert(isAllCleanedUp)
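For context, a minimal sketch of the ScalaTest 2.x idiom this hunk migrates to (the suite and flag below are hypothetical, not part of this commit): Timeout now lives on PatienceConfiguration, and eventually takes the timeout together with a polling interval.

import scala.language.postfixOps

import org.scalatest.FunSuite
import org.scalatest.concurrent.{PatienceConfiguration, Eventually}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._

// Hypothetical suite illustrating the ScalaTest 2.x timeout type.
class CleanupExampleSuite extends FunSuite {
  @volatile private var cleaned = false

  /** Poll every 100 ms until cleanup is observed or the timeout expires. */
  def assertCleanup()(implicit waitTimeout: PatienceConfiguration.Timeout) {
    eventually(waitTimeout, interval(100 millis)) {
      assert(cleaned, "resource was not cleaned up in time")
    }
  }

  test("cleanup eventually completes") {
    new Thread(new Runnable {
      def run() { Thread.sleep(200); cleaned = true }
    }).start()
    assertCleanup()(timeout(10 seconds))
  }
}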
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -23,11 +23,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {

// This test suite should run all tests in ShuffleSuite with Netty shuffle mode.

-override def beforeAll(configMap: Map[String, Any]) {
+override def beforeAll() {
System.setProperty("spark.shuffle.use.netty", "true")
}

-override def afterAll(configMap: Map[String, Any]) {
+override def afterAll() {
System.setProperty("spark.shuffle.use.netty", "false")
}
}
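As a hedged aside (hypothetical suite, not part of this commit): ScalaTest 2.x dropped the beforeAll/afterAll overloads that took a Map[String, Any], so a suite that ignores the config map overrides the zero-argument variants; a suite that does need it would reach for the separate BeforeAndAfterAllConfigMap trait instead.

import org.scalatest.{BeforeAndAfterAll, FunSuite}

// Hypothetical suite: the no-argument overrides are the ScalaTest 2.x way.
class NettyModeExampleSuite extends FunSuite with BeforeAndAfterAll {

  override def beforeAll() {
    System.setProperty("spark.shuffle.use.netty", "true")
  }

  override def afterAll() {
    System.setProperty("spark.shuffle.use.netty", "false")
  }

  test("the property is visible while the suite runs") {
    assert(System.getProperty("spark.shuffle.use.netty") === "true")
  }
}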
5 changes: 3 additions & 2 deletions core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -235,8 +235,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {

// we can optionally shuffle to keep the upstream parallel
val coalesced5 = data.coalesce(1, shuffle = true)
-assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-  null)
+val bool = coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
+  null
+assert(bool)

// when shuffling, we can increase the number of partitions
val coalesced6 = data.coalesce(20, shuffle = true)
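One plausible reading of the split above: as of 2.x, ScalaTest's assert is a macro that analyzes the asserted expression to build its failure message, and hoisting a sprawling multi-line condition into a val keeps the asserted expression trivial. A minimal sketch of the same bind-then-assert pattern on hypothetical data:

import org.scalatest.FunSuite

// Hypothetical suite showing the bind-then-assert pattern used above.
class AssertStyleExampleSuite extends FunSuite {
  test("bind a multi-line condition to a val before asserting") {
    val xs = Seq(3, 1, 2)
    val sortedAndPositive = xs.sorted == Seq(1, 2, 3) &&
      xs.forall(_ > 0)
    assert(sortedAndPositive)
  }
}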
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -23,7 +23,7 @@ import scala.language.reflectiveCalls

import akka.actor._
import akka.testkit.{ImplicitSender, TestKit, TestActorRef}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}

import org.apache.spark._
import org.apache.spark.rdd.RDD
@@ -37,7 +37,7 @@ class BuggyDAGEventProcessActor extends Actor {
}
}

-class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuite
+class DAGSchedulerSuite extends TestKit(ActorSystem("DAGSchedulerSuite")) with FunSuiteLike
with ImplicitSender with BeforeAndAfter with LocalSparkContext {

val conf = new SparkConf
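The FunSuite-to-FunSuiteLike swap follows from a ScalaTest 2.x design change: the style traits became classes (for faster compiles), each backed by a -Like trait, so a suite that must already extend another class mixes in the trait instead. A minimal sketch with a hypothetical actor suite:

import akka.actor.ActorSystem
import akka.testkit.TestKit
import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}

// Hypothetical suite: TestKit is a class, so FunSuiteLike is mixed in
// where ScalaTest 1.x code would have used FunSuite.
class ActorExampleSuite extends TestKit(ActorSystem("ActorExampleSuite"))
  with FunSuiteLike with BeforeAndAfterAll {

  override def afterAll() {
    system.shutdown()  // TestKit exposes its ActorSystem as `system`
  }

  test("the actor system is running") {
    assert(!system.isTerminated)
  }
}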
4 changes: 2 additions & 2 deletions pom.xml
@@ -447,7 +447,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
-<version>1.9.1</version>
+<version>2.1.5</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -465,7 +465,7 @@
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
-<version>1.10.0</version>
+<version>1.11.3</version>
<scope>test</scope>
</dependency>
<dependency>
4 changes: 2 additions & 2 deletions project/SparkBuild.scala
@@ -271,8 +271,8 @@ object SparkBuild extends Build {
"org.eclipse.jetty" % "jetty-util" % jettyVersion,
"org.eclipse.jetty" % "jetty-plus" % jettyVersion,
"org.eclipse.jetty" % "jetty-security" % jettyVersion,
-"org.scalatest" %% "scalatest" % "1.9.1" % "test",
-"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
+"org.scalatest" %% "scalatest" % "2.1.5" % "test",
+"org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
"com.novocode" % "junit-interface" % "0.10" % "test",
"org.easymock" % "easymock" % "3.1" % "test",
"org.mockito" % "mockito-all" % "1.8.5" % "test"
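The sbt and Maven changes are deliberately in lockstep: in sbt, %% appends the Scala binary version to the artifact name, so the lines above resolve to the same scalatest_2.10 / scalacheck_2.10 artifacts that the pom selects through ${scala.binary.version}. A minimal standalone sketch of the sbt form:

// Equivalent declaration in a bare build.sbt (sketch, not Spark's build):
// %% expands to scalatest_<scala-binary-version>, e.g. scalatest_2.10.
libraryDependencies ++= Seq(
  "org.scalatest"  %% "scalatest"  % "2.1.5"  % "test",
  "org.scalacheck" %% "scalacheck" % "1.11.3" % "test"
)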
4 changes: 2 additions & 2 deletions sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.parquet

import java.io.File

-import org.scalatest.{BeforeAndAfterAll, FunSuite}
+import org.scalatest.{BeforeAndAfterAll, FunSuiteLike}

import org.apache.hadoop.fs.{Path, FileSystem}
import org.apache.hadoop.mapreduce.Job
@@ -42,7 +42,7 @@ import org.apache.spark.sql.test.TestSQLContext._

case class TestRDDEntry(key: Int, value: String)

-class ParquetQuerySuite extends QueryTest with FunSuite with BeforeAndAfterAll {
+class ParquetQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
import TestData._
TestData // Load test data tables.

12 changes: 6 additions & 6 deletions streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -92,9 +92,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 5)
assert(third.size === 5)

-assert(first.flatten.toSet === (1 to 100).toSet)
-assert(second.flatten.toSet === (101 to 200).toSet)
-assert(third.flatten.toSet === (201 to 300).toSet)
+assert(first.flatten.toSet.equals((1 to 100).toSet))
+assert(second.flatten.toSet.equals((101 to 200).toSet))
+assert(third.flatten.toSet.equals((201 to 300).toSet))
}

test("repartition (fewer partitions)") {
@@ -111,9 +111,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 2)
assert(third.size === 2)

-assert(first.flatten.toSet === (1 to 100).toSet)
-assert(second.flatten.toSet === (101 to 200).toSet)
-assert(third.flatten.toSet === (201 to 300).toSet)
+assert(first.flatten.toSet.equals((1 to 100).toSet))
+assert(second.flatten.toSet.equals((101 to 200).toSet))
+assert(third.flatten.toSet.equals((201 to 300).toSet))
}

test("groupByKey") {
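A closing note on the ===-to-equals rewrites here and in ContextCleanerSuite: ScalaTest's === reports both operands when an assertion fails, while plain equals only reports that the whole condition was false; the commit trades that richer message for insulation from the reworked === implicits in ScalaTest 2.x. A minimal sketch contrasting the two forms (hypothetical suite):

import org.scalatest.FunSuite

// Hypothetical suite contrasting the two assertion styles in this commit.
class EqualityStyleExampleSuite extends FunSuite {
  test("set equality, two ways") {
    val produced = (1 to 100).toSet
    assert(produced === (1 to 100).toSet)      // fails with "x did not equal y"
    assert(produced.equals((1 to 100).toSet))  // fails with "... was false"
  }
}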
