Use ResetSystemProperties in SparkContextSchedulerCreationSuite
JoshRosen committed Dec 19, 2014
1 parent c83ded8 · commit 0995c4b
Showing 1 changed file with 3 additions and 7 deletions.

@@ -23,9 +23,11 @@ import org.apache.spark.scheduler.{SchedulerBackend, TaskScheduler, TaskSchedulerImpl}
 import org.apache.spark.scheduler.cluster.{SimrSchedulerBackend, SparkDeploySchedulerBackend}
 import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, MesosSchedulerBackend}
 import org.apache.spark.scheduler.local.LocalBackend
+import org.apache.spark.util.ResetSystemProperties
 
 class SparkContextSchedulerCreationSuite
-  extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging {
+  extends FunSuite with LocalSparkContext with PrivateMethodTester with Logging
+  with ResetSystemProperties {
 
   def createTaskScheduler(master: String): TaskSchedulerImpl = {
     // Create local SparkContext to setup a SparkEnv. We don't actually want to start() the
@@ -102,19 +104,13 @@ class SparkContextSchedulerCreationSuite
   }
 
   test("local-default-parallelism") {
-    val defaultParallelism = System.getProperty("spark.default.parallelism")
     System.setProperty("spark.default.parallelism", "16")
     val sched = createTaskScheduler("local")
 
     sched.backend match {
       case s: LocalBackend => assert(s.defaultParallelism() === 16)
       case _ => fail()
     }
-
-    Option(defaultParallelism) match {
-      case Some(v) => System.setProperty("spark.default.parallelism", v)
-      case _ => System.clearProperty("spark.default.parallelism")
-    }
   }
 
   test("simr") {
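
The ResetSystemProperties trait imported above lives in org.apache.spark.util and restores java.lang.System properties after each test, which is why the manual save/restore code in local-default-parallelism can simply be deleted. As a rough illustration of the pattern only (not Spark's actual implementation; the trait and field names below are made up), a ScalaTest mixin along these lines snapshots the properties in beforeEach and reinstates them in afterEach:

import java.util.Properties

import scala.collection.JavaConverters._

import org.scalatest.{BeforeAndAfterEach, Suite}

// Hypothetical mixin sketching the save/restore pattern; Spark's real
// ResetSystemProperties trait may copy the Properties object differently.
trait SystemPropertySnapshot extends BeforeAndAfterEach { this: Suite =>
  private var savedProperties: Properties = _

  override def beforeEach(): Unit = {
    // Snapshot every currently visible system property before the test runs.
    savedProperties = new Properties()
    for (name <- System.getProperties.stringPropertyNames().asScala) {
      savedProperties.setProperty(name, System.getProperty(name))
    }
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      // Discard whatever the test set and reinstate the snapshot.
      System.setProperties(savedProperties)
    }
  }
}

With a mixin like this on the suite, the spark.default.parallelism value set to "16" at the start of the test is rolled back automatically once the test finishes, so later tests running in the same JVM see the original value (or none at all).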
