Skip to content

Commit

Permalink
Remove use of system properties in SparkContextSuite
Browse files · Browse the repository at this point in the history
  • Loading branch information
JoshRosen committed Dec 24, 2014
1 parent 8783ab0 commit cfe9cce
Showing 1 changed file with 6 additions and 7 deletions.
13 changes: 6 additions & 7 deletions core/src/test/scala/org/apache/spark/SparkContextSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,16 @@

package org.apache.spark

import org.apache.spark.util.ResetSystemProperties
import org.scalatest.FunSuite

import org.apache.hadoop.io.BytesWritable

class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSparkContext {
class SparkContextSuite extends FunSuite with LocalSparkContext {

test("Only one SparkContext may be active at a time") {
// Regression test for SPARK-4180
System.setProperty("spark.driver.allowMultipleContexts", "false")
val conf = new SparkConf().setAppName("test").setMaster("local")
.set("spark.driver.allowMultipleContexts", "false")
sc = new SparkContext(conf)
// A SparkContext is already running, so we shouldn't be able to create a second one
intercept[SparkException] { new SparkContext(conf) }
Expand All @@ -37,20 +36,20 @@ class SparkContextSuite extends FunSuite with ResetSystemProperties with LocalSp
}

test("Can still construct a new SparkContext after failing to construct a previous one") {
  // Regression-style check: a failed SparkContext construction must not poison
  // the process and prevent a later, valid context from being created.
  // allowMultipleContexts is set on the SparkConf itself (not via
  // System.setProperty) so the test leaves no global JVM state behind.
  val conf = new SparkConf().set("spark.driver.allowMultipleContexts", "false")
  // This is an invalid configuration (no app name or master URL), so
  // construction is expected to fail with a SparkException.
  intercept[SparkException] {
    new SparkContext(conf)
  }
  // Even though the earlier call failed, we should still be able to create a
  // new context once the conf is made valid. `sc` is managed (and stopped) by
  // the LocalSparkContext trait mixed into this suite.
  sc = new SparkContext(conf.setMaster("local").setAppName("test"))
}

test("Check for multiple SparkContexts can be disabled via undocumented debug option") {
System.setProperty("spark.driver.allowMultipleContexts", "true")
var secondSparkContext: SparkContext = null
try {
val conf = new SparkConf().setAppName("test").setMaster("local")
.set("spark.driver.allowMultipleContexts", "true")
sc = new SparkContext(conf)
secondSparkContext = new SparkContext(conf)
} finally {
Expand Down

0 comments on commit cfe9cce

Please sign in to comment.