[SPARK-1841]: update scalatest to version 2.1.5
Author: witgo <witgo@qq.com>

Closes apache#713 from witgo/scalatest and squashes the following commits:

b627a6a [witgo] merge master
51fb3d6 [witgo] merge master
3771474 [witgo] fix RDDSuite
996d6f9 [witgo] fix TimeStampedWeakValueHashMap test
9dfa4e7 [witgo] merge bug
1479b22 [witgo] merge master
29b9194 [witgo] fix code style
022a7a2 [witgo] fix test dependency
a52c0fa [witgo] fix test dependency
cd8f59d [witgo] Merge branch 'master' of https://github.com/apache/spark into scalatest
046540d [witgo] fix RDDSuite.scala
2c543b9 [witgo] fix ReplSuite.scala
c458928 [witgo] update scalatest to version 2.1.5

Conflicts:
	core/pom.xml
	core/src/test/scala/org/apache/spark/ContextCleanerSuite.scala
	core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
	core/src/test/scala/org/apache/spark/util/TimeStampedHashMapSuite.scala
	pom.xml
	project/SparkBuild.scala
	sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetQuerySuite.scala
witgo authored and markhamstra committed Jun 13, 2014
1 parent fc8b3e0 commit 2b77e24
Showing 8 changed files with 35 additions and 23 deletions.
2 changes: 1 addition & 1 deletion core/pom.xml
@@ -174,7 +174,7 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
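The diff swaps the EasyMock artifact for its class-extension flavor, which can mock concrete classes as well as interfaces (the commit does not state whether ScalaTest 2.x forced this exact artifact choice). A minimal sketch of class mocking through ScalaTest's EasyMockSugar, with an illustrative Recorder class that is not from Spark:

import org.scalatest.FunSuite
import org.scalatest.mock.EasyMockSugar

// A concrete class (not an interface): mocking it is what the
// class-extension flavor of EasyMock provides.
class Recorder {
  def record(event: String): Unit = ()
}

class RecorderSuite extends FunSuite with EasyMockSugar {
  test("mocking a concrete class") {
    val recorder = mock[Recorder]
    expecting {
      recorder.record("started")   // declare the expected call
    }
    whenExecuting(recorder) {
      recorder.record("started")   // exercise and verify
    }
  }
}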
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala
@@ -24,11 +24,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {

   // This test suite should run all tests in ShuffleSuite with Netty shuffle mode.

-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
     System.setProperty("spark.shuffle.use.netty", "true")
   }

-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
     System.setProperty("spark.shuffle.use.netty", "false")
   }
 }
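In ScalaTest 2.x, BeforeAndAfterAll declares beforeAll()/afterAll() without parameters, which is what forces the signature change above. A suite that still needs the config map can mix in BeforeAndAfterAllConfigMap instead; a minimal sketch, with an illustrative suite name:

import org.scalatest.{BeforeAndAfterAllConfigMap, ConfigMap, FunSuite}

class NettyConfigSuite extends FunSuite with BeforeAndAfterAllConfigMap {
  // ScalaTest 2.x passes a typed ConfigMap instead of Map[String, Any].
  override def beforeAll(configMap: ConfigMap) {
    System.setProperty("spark.shuffle.use.netty", "true")
  }

  override def afterAll(configMap: ConfigMap) {
    System.setProperty("spark.shuffle.use.netty", "false")
  }
}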
5 changes: 3 additions & 2 deletions core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -233,8 +233,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {

     // we can optionally shuffle to keep the upstream parallel
     val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)

     // when shuffling, we can increase the number of partitions
     val coalesced6 = data.coalesce(20, shuffle = true)
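A plausible reading of the hoist above (the commit message only says "fix RDDSuite"): ScalaTest 2.x reimplements assert as a macro that decomposes the asserted expression to build its failure message, so naming the condition first keeps the expression the macro sees trivial. A small illustration of the macro's behavior, with arbitrary values:

import org.scalatest.Assertions._

val a = 1
val b = 2
// With ScalaTest 2.x the assert macro reports subexpression values on
// failure; assert(a == b) here would fail with "1 did not equal 2".
val isEqual = a == b
assert(!isEqual)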
4 changes: 2 additions & 2 deletions core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{FetchFailed, Success, TaskEndReason}
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}

 /**
  * Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
@@ -45,7 +45,7 @@ import org.scalatest.{BeforeAndAfter, FunSuiteLike}
  * DAGScheduler#submitWaitingStages (via test utility functions like runEvent or respondToTaskSet)
  * and capturing the resulting TaskSets from the mock TaskScheduler.
  */
-class DAGSchedulerSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
+class DAGSchedulerSuite extends FunSuiteLike with BeforeAndAfter with LocalSparkContext {
   val conf = new SparkConf
   /** Set of TaskSets the DAGScheduler has requested executed. */
   val taskSets = scala.collection.mutable.Buffer[TaskSet]()
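In ScalaTest 2.0+, FunSuite became a class extending the FunSuiteLike trait; the trait is the mixable form, needed whenever a suite must also extend some other class. No class parent appears in this diff, so the switch mainly keeps the declaration mix-in friendly. A sketch of the case where the trait is actually required, using akka's TestKit as an illustrative class parent (not something this diff uses):

import akka.actor.ActorSystem
import akka.testkit.TestKit
import org.scalatest.FunSuiteLike

// TestKit is a class, so the suite cannot also extend the FunSuite class;
// it mixes in the FunSuiteLike trait instead.
class MyActorSuite extends TestKit(ActorSystem("MyActorSuite")) with FunSuiteLike {
  test("the test kit exposes its actor system") {
    assert(system.name == "MyActorSuite")
  }
}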
16 changes: 12 additions & 4 deletions pom.xml
@@ -373,7 +373,7 @@
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>1.9.1</version>
+      <version>2.1.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -389,20 +389,26 @@
     </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
       <version>3.1</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
+      <version>1.9.0</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
-      <version>1.10.0</version>
+      <version>1.11.3</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <version>4.10</version>
+      <scope>test</scope>
+    </dependency>
     <dependency>
@@ -609,6 +615,8 @@
           <args>
             <arg>-unchecked</arg>
             <arg>-deprecation</arg>
+            <arg>-feature</arg>
+            <arg>-language:postfixOps</arg>
           </args>
           <jvmArgs>
             <jvmArg>-Xms64m</jvmArg>
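The two new compiler flags go together: -feature makes Scala 2.10 warn about uses of optional language features, and -language:postfixOps enables the postfix-operator syntax that the upgraded test code presumably relies on, for example duration literals. A small illustration with arbitrary values:

import scala.concurrent.duration._
import scala.language.postfixOps // same effect as the -language:postfixOps flag

// Postfix operator syntax: a no-argument method applied without a dot.
// Under -feature, Scala 2.10 flags this unless postfixOps is enabled.
val timeout = 100 millis // equivalent to 100.millis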
9 changes: 5 additions & 4 deletions project/SparkBuild.scala
@@ -224,12 +224,13 @@ object SparkBuild extends Build {
         "org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
         /** Workaround for SPARK-959. Dependency used by org.eclipse.jetty. Fixed in ivy 2.3.0. */
         "org.eclipse.jetty.orbit" % "javax.servlet" % "2.5.0.v201103041518" artifacts Artifact("javax.servlet", "jar", "jar"),
-        "org.scalatest" %% "scalatest" % "1.9.1" % "test",
-        "org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
+        "org.scalatest" %% "scalatest" % "2.1.5" % "test",
+        "org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
         "com.novocode" % "junit-interface" % "0.10" % "test",
-        "org.easymock" % "easymock" % "3.1" % "test",
-        "org.mockito" % "mockito-all" % "1.8.5" % "test",
+        "org.easymock" % "easymockclassextension" % "3.1" % "test",
+        "org.mockito" % "mockito-all" % "1.9.0" % "test",
         "commons-io" % "commons-io" % "2.4" % "test"
+        "junit" % "junit" % "4.10" % "test"
       ),

       testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
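For context: junit-interface is the sbt runner that executes JUnit tests, and the newly pinned junit 4.10 fixes the version it runs against. A minimal build.sbt sketch using the same test stack (these settings are an assumption for illustration, not Spark's full build):

libraryDependencies ++= Seq(
  "org.scalatest" %% "scalatest" % "2.1.5" % "test",
  "com.novocode" % "junit-interface" % "0.10" % "test",
  "junit" % "junit" % "4.10" % "test"
)

// junit-interface flags: -v for verbose output, -a to show stack traces
// when assertions fail.
testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a")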
6 changes: 4 additions & 2 deletions repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -56,12 +56,14 @@ class ReplSuite extends FunSuite {
   }

   def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
       "Interpreter output did not contain '" + message + "':\n" + output)
   }

   def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
       "Interpreter output contained '" + message + "':\n" + output)
   }
12 changes: 6 additions & 6 deletions streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -90,9 +90,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 5)
     assert(third.size === 5)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet) )
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }

   test("repartition (fewer partitions)") {
@@ -109,9 +109,9 @@ class BasicOperationsSuite extends TestSuiteBase {
     assert(second.size === 2)
     assert(third.size === 2)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals( (101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
   }

   test("groupByKey") {
