REL-319 REL-320 And catch up with latest Apache 0.9 bugfixes #18

Merged · 3 commits · Jun 13, 2014
core/pom.xml: 1 addition & 1 deletion
@@ -174,7 +174,7 @@
</dependency>
<dependency>
<groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
+      <artifactId>easymockclassextension</artifactId>
<scope>test</scope>
</dependency>
<dependency>
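The swap above trades the core `easymock` artifact for `easymockclassextension`, the EasyMock 3.1 artifact that supports mocking concrete classes rather than only interfaces, presumably because some suites mock classes directly. A minimal sketch of what the class extension enables; the `Greeter` class and `ClassMockDemo` object are hypothetical, not Spark code:

```scala
// Hypothetical demo; assumes org.easymock:easymockclassextension:3.1 on the classpath.
import org.easymock.classextension.{EasyMock => CEasyMock}
import org.easymock.EasyMock

class Greeter {
  def greet(name: String): String = "Hello, " + name
}

object ClassMockDemo extends App {
  // The class extension generates a bytecode subclass at runtime, so a
  // concrete (non-final) class can be mocked, not just an interface.
  val mock = CEasyMock.createMock(classOf[Greeter])
  EasyMock.expect(mock.greet("Spark")).andReturn("stubbed")
  EasyMock.replay(mock)
  println(mock.greet("Spark")) // prints "stubbed"
  EasyMock.verify(mock)
}
```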
core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala: 2 additions & 2 deletions
@@ -24,11 +24,11 @@ class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll {

// This test suite should run all tests in ShuffleSuite with Netty shuffle mode.

-  override def beforeAll(configMap: Map[String, Any]) {
+  override def beforeAll() {
System.setProperty("spark.shuffle.use.netty", "true")
}

-  override def afterAll(configMap: Map[String, Any]) {
+  override def afterAll() {
System.setProperty("spark.shuffle.use.netty", "false")
}
}
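This signature change tracks the ScalaTest 2.x upgrade elsewhere in this PR: `BeforeAndAfterAll` now declares no-arg `beforeAll()`/`afterAll()` (the config-map variants moved to a separate trait), so the old `beforeAll(configMap: Map[String, Any])` overloads no longer override anything. A minimal sketch of the 2.x pattern, using a hypothetical `ExampleSuite`:

```scala
// Assumes org.scalatest:scalatest_2.10:2.2.0.
import org.scalatest.{BeforeAndAfterAll, FunSuite}

class ExampleSuite extends FunSuite with BeforeAndAfterAll {
  override def beforeAll() {                       // no-arg form in ScalaTest 2.x
    System.setProperty("example.use.netty", "true")
  }

  override def afterAll() {
    System.clearProperty("example.use.netty")
  }

  test("property set in beforeAll is visible") {
    assert(sys.props("example.use.netty") === "true")
  }
}
```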
core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala: 3 additions & 2 deletions
@@ -233,8 +233,9 @@ class RDDSuite extends FunSuite with SharedSparkContext {

// we can optionally shuffle to keep the upstream parallel
val coalesced5 = data.coalesce(1, shuffle = true)
-    assert(coalesced5.dependencies.head.rdd.dependencies.head.rdd.asInstanceOf[ShuffledRDD[_, _, _]] !=
-      null)
+    val isEquals = coalesced5.dependencies.head.rdd.dependencies.head.rdd.
+      asInstanceOf[ShuffledRDD[_, _, _]] != null
+    assert(isEquals)

// when shuffling, we can increase the number of partitions
val coalesced6 = data.coalesce(20, shuffle = true)
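The rewrite binds the long dependency chain to a `val` before asserting, which keeps each line short and plays better with ScalaTest 2.x, where `assert` is a macro that analyzes the asserted expression. (Note the cast itself does the checking here: `asInstanceOf` throws a `ClassCastException` on mismatch rather than returning null.) The same hoisting pattern in a self-contained form, with illustrative names:

```scala
// Illustrative only; not Spark code.
object HoistedAssertDemo extends App {
  val chain: Seq[Any] = Seq(1, "two", 3.0)

  // Rather than inlining a multi-line expression inside assert(...),
  // bind it to a val and assert on the val:
  val firstString = chain.collectFirst { case s: String => s }
  val hasNonEmptyString = firstString.exists(_.nonEmpty)
  assert(hasNonEmptyString, "expected a non-empty String in the chain")
}
```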
core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -31,7 +31,7 @@ import org.apache.spark.{FetchFailed, Success, TaskEndReason}
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.{BeforeAndAfter, FunSuiteLike}

/**
* Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
@@ -45,7 +45,7 @@ import org.scalatest.{BeforeAndAfter, FunSuite}
* DAGScheduler#submitWaitingStages (via test utility functions like runEvent or respondToTaskSet)
* and capturing the resulting TaskSets from the mock TaskScheduler.
*/
-class DAGSchedulerSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
+class DAGSchedulerSuite extends FunSuiteLike with BeforeAndAfter with LocalSparkContext {
val conf = new SparkConf
/** Set of TaskSets the DAGScheduler has requested executed. */
val taskSets = scala.collection.mutable.Buffer[TaskSet]()
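`FunSuiteLike` appears in ScalaTest 2.x, where each style was split into a class (`FunSuite`, faster to compile against) and a `-Like` trait carrying the implementation. Since Scala permits only one superclass, the trait form is the one that still composes freely with an existing class hierarchy; a sketch of the difference, with hypothetical `TestHarness` and `MySuite`:

```scala
// Assumes scalatest 2.2.0; TestHarness and MySuite are hypothetical.
import org.scalatest.FunSuiteLike

// Some pre-existing base class a suite must extend:
abstract class TestHarness {
  def setUp(): Unit = ()
}

// FunSuite is a class in ScalaTest 2.x, so it could not be combined
// with TestHarness here; the FunSuiteLike trait can.
class MySuite extends TestHarness with FunSuiteLike {
  test("trait form composes with a superclass") {
    assert(1 + 1 === 2)
  }
}
```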
pom.xml: 13 additions & 5 deletions
@@ -107,7 +107,7 @@

<java.version>1.6</java.version>

-    <scala.version>2.10.3</scala.version>
+    <scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
<mesos.version>0.13.0</mesos.version>
<cdh.version>4.1.2</cdh.version>
@@ -373,7 +373,7 @@
<dependency>
<groupId>org.scalatest</groupId>
<artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>1.9.1</version>
+        <version>2.2.0</version>
<scope>test</scope>
</dependency>
<dependency>
@@ -389,20 +389,26 @@
</dependency>
<dependency>
<groupId>org.easymock</groupId>
-        <artifactId>easymock</artifactId>
+        <artifactId>easymockclassextension</artifactId>
<version>3.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
-        <version>1.8.5</version>
+        <version>1.9.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.scalacheck</groupId>
<artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.10.0</version>
+        <version>1.11.3</version>
<scope>test</scope>
</dependency>
+      <dependency>
+        <groupId>junit</groupId>
+        <artifactId>junit</artifactId>
+        <version>4.10</version>
+        <scope>test</scope>
+      </dependency>
<dependency>
@@ -609,6 +615,8 @@
<args>
<arg>-unchecked</arg>
<arg>-deprecation</arg>
+            <arg>-feature</arg>
+            <arg>-language:postfixOps</arg>
</args>
<jvmArgs>
<jvmArg>-Xms64m</jvmArg>
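The two new compiler flags work as a pair: `-feature` makes scalac warn on uses of optional language features, and `-language:postfixOps` pre-enables the postfix-operator feature build-wide so existing call sites (for example duration DSL expressions in tests) compile without warnings. A small self-contained illustration; `PostfixDemo` is hypothetical:

```scala
// Hypothetical demo (Scala 2.10). The import is the per-file
// equivalent of passing -language:postfixOps to scalac.
import scala.concurrent.duration._
import scala.language.postfixOps

object PostfixDemo extends App {
  // Postfix call: a method applied with no dot and no parentheses.
  // Under -feature alone this would trigger a feature warning.
  val timeout = 10 seconds; // semicolon keeps the parser from consuming the next line
  println(timeout)          // prints "10 seconds"
}
```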
project/SparkBuild.scala: 6 additions & 5 deletions
@@ -135,7 +135,7 @@ object SparkBuild extends Build {
def sharedSettings = Defaults.defaultSettings ++ Seq(
organization := "org.apache.spark",
version := "0.9.2-SNAPSHOT",
-    scalaVersion := "2.10.3",
+    scalaVersion := "2.10.4",
scalacOptions := Seq("-Xmax-classfile-name", "120", "-unchecked", "-deprecation",
"-target:" + SCALAC_JVM_VERSION),
javacOptions := Seq("-target", JAVAC_JVM_VERSION, "-source", JAVAC_JVM_VERSION),
@@ -224,12 +224,13 @@
"org.eclipse.jetty" % "jetty-server" % "7.6.8.v20121106",
/** Workaround for SPARK-959. Dependency used by org.eclipse.jetty. Fixed in ivy 2.3.0. */
"org.eclipse.jetty.orbit" % "javax.servlet" % "2.5.0.v201103041518" artifacts Artifact("javax.servlet", "jar", "jar"),
"org.scalatest" %% "scalatest" % "1.9.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.10.0" % "test",
"org.scalatest" %% "scalatest" % "2.2.0" % "test",
"org.scalacheck" %% "scalacheck" % "1.11.3" % "test",
"com.novocode" % "junit-interface" % "0.10" % "test",
"org.easymock" % "easymock" % "3.1" % "test",
"org.mockito" % "mockito-all" % "1.8.5" % "test",
"org.easymock" % "easymockclassextension" % "3.1" % "test",
"org.mockito" % "mockito-all" % "1.9.0" % "test",
"commons-io" % "commons-io" % "2.4" % "test"
"junit" % "junit" % "4.10" % "test"
),

testOptions += Tests.Argument(TestFrameworks.JUnit, "-v", "-a"),
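Note that the commons-io line gains a trailing comma here: junit is appended after it, and without the comma the `Seq(...)` of dependencies would not compile. A side note on the sbt syntax, for readers comparing this with the pom.xml changes: `%%` appends the Scala binary version to the artifact name, which is why the Scala libraries (scalatest, scalacheck) use it while the Java libraries (easymockclassextension, mockito-all, junit) use plain `%`. A sketch of the equivalence, assuming `scalaVersion := "2.10.4"`:

```scala
// In an sbt build definition (illustrative):
libraryDependencies += "org.scalatest" %% "scalatest" % "2.2.0" % "test"
// resolves to the same artifact as the fully spelled-out form:
libraryDependencies += "org.scalatest" % "scalatest_2.10" % "2.2.0" % "test"
// Java artifacts carry no Scala-version suffix, hence the single %:
libraryDependencies += "junit" % "junit" % "4.10" % "test"
```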
repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala: 4 additions & 2 deletions
@@ -56,12 +56,14 @@ class ReplSuite extends FunSuite {
}

def assertContains(message: String, output: String) {
-    assert(output.contains(message),
+    val isContain = output.contains(message)
+    assert(isContain,
"Interpreter output did not contain '" + message + "':\n" + output)
}

def assertDoesNotContain(message: String, output: String) {
-    assert(!output.contains(message),
+    val isContain = output.contains(message)
+    assert(!isContain,
"Interpreter output contained '" + message + "':\n" + output)
}

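This is the same hoisting pattern as in RDDSuite: the boolean is computed into `isContain` first, then handed to the two-argument `assert(condition, clue)`, whose clue string is appended to the failure output. A standalone sketch of the helper's shape; `AssertClueDemo` and the sample strings are illustrative, not Spark code:

```scala
object AssertClueDemo extends App {
  def assertContains(message: String, output: String) {
    val isContain = output.contains(message)
    assert(isContain,
      "Interpreter output did not contain '" + message + "':\n" + output)
  }

  assertContains("res0: Int = 3", "scala> 1 + 2\nres0: Int = 3")
  println("assertion passed")
}
```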
streaming/src/test/scala/org/apache/spark/streaming/BasicOperationsSuite.scala
@@ -90,9 +90,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 5)
assert(third.size === 5)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
}

test("repartition (fewer partitions)") {
@@ -109,9 +109,9 @@ class BasicOperationsSuite extends TestSuiteBase {
assert(second.size === 2)
assert(third.size === 2)

-    assert(first.flatten.toSet === (1 to 100).toSet)
-    assert(second.flatten.toSet === (101 to 200).toSet)
-    assert(third.flatten.toSet === (201 to 300).toSet)
+    assert(first.flatten.toSet.equals((1 to 100).toSet))
+    assert(second.flatten.toSet.equals((101 to 200).toSet))
+    assert(third.flatten.toSet.equals((201 to 300).toSet))
}

test("groupByKey") {
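These assertions move from ScalaTest's `===` to plain `.equals`, presumably to sidestep the `===` operator, which under ScalaTest 2.x resolves through an implicit `Equality` type class; `.equals` is ordinary value equality with a plainer failure message. A side-by-side sketch with illustrative values (the trade-off is diagnostics: on failure, `.equals` yields only "assertion failed", while `===` reports both values):

```scala
// Assumes scalatest 2.2.0 for the === form; EqualityDemo is hypothetical.
import org.scalatest.Assertions._

object EqualityDemo extends App {
  val got = (1 to 3).toSet

  // Plain equality: Boolean in, generic "assertion failed" out.
  assert(got.equals(Set(1, 2, 3)))

  // ScalaTest ===: routed through Equality[Set[Int]] and yields a
  // descriptive "x did not equal y" message on failure.
  assert(got === Set(1, 2, 3))
}
```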