Skip to content

Commit

Permalink
Remove use of system properties in FileServerSuite
Browse files · Browse the repository at this point in the history
  • Branch information:
JoshRosen committed Dec 24, 2014
1 parent 25bfce2 commit 633a84a
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions core/src/test/scala/org/apache/spark/FileServerSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -31,11 +31,11 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
@transient var tmpFile: File = _
@transient var tmpJarUrl: String = _

override def withFixture(test: NoArgTest) = {
def newConf: SparkConf = new SparkConf(loadDefaults = false).set("spark.authenticate", "false")

override def beforeEach() {
super.beforeEach()
resetSparkContext()
TestUtils.withSystemProperty("spark.authenticate", "false") {
super.withFixture(test)
}
}

override def beforeAll() {
Expand Down Expand Up @@ -74,7 +74,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
}

test("Distributing files locally") {
sc = new SparkContext("local[4]", "test")
sc = new SparkContext("local[4]", "test", newConf)
sc.addFile(tmpFile.toString)
val testData = Array((1,1), (1,1), (2,1), (3,5), (2,2), (3,0))
val result = sc.parallelize(testData).reduceByKey {
Expand Down Expand Up @@ -108,7 +108,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {

test("Distributing files locally using URL as input") {
// addFile("file:///....")
sc = new SparkContext("local[4]", "test")
sc = new SparkContext("local[4]", "test", newConf)
sc.addFile(new File(tmpFile.toString).toURI.toString)
val testData = Array((1,1), (1,1), (2,1), (3,5), (2,2), (3,0))
val result = sc.parallelize(testData).reduceByKey {
Expand All @@ -122,7 +122,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
}

test ("Dynamically adding JARS locally") {
sc = new SparkContext("local[4]", "test")
sc = new SparkContext("local[4]", "test", newConf)
sc.addJar(tmpJarUrl)
val testData = Array((1, 1))
sc.parallelize(testData).foreach { x =>
Expand All @@ -133,7 +133,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
}

test("Distributing files on a standalone cluster") {
sc = new SparkContext("local-cluster[1,1,512]", "test")
sc = new SparkContext("local-cluster[1,1,512]", "test", newConf)
sc.addFile(tmpFile.toString)
val testData = Array((1,1), (1,1), (2,1), (3,5), (2,2), (3,0))
val result = sc.parallelize(testData).reduceByKey {
Expand All @@ -147,7 +147,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
}

test ("Dynamically adding JARS on a standalone cluster") {
sc = new SparkContext("local-cluster[1,1,512]", "test")
sc = new SparkContext("local-cluster[1,1,512]", "test", newConf)
sc.addJar(tmpJarUrl)
val testData = Array((1,1))
sc.parallelize(testData).foreach { x =>
Expand All @@ -158,7 +158,7 @@ class FileServerSuite extends FunSuite with LocalSparkContext {
}

test ("Dynamically adding JARS on a standalone cluster using local: URL") {
sc = new SparkContext("local-cluster[1,1,512]", "test")
sc = new SparkContext("local-cluster[1,1,512]", "test", newConf)
sc.addJar(tmpJarUrl.replace("file", "local"))
val testData = Array((1,1))
sc.parallelize(testData).foreach { x =>
Expand Down

0 comments on commit 633a84a

Please sign in to comment.