Replace setProperty uses in two example programs / tools
JoshRosen committed Dec 25, 2014
1 parent 3888fe3 commit 0236d66
Showing 2 changed files with 8 additions and 10 deletions.

BroadcastTest.scala
@@ -28,11 +28,9 @@ object BroadcastTest {
     val bcName = if (args.length > 2) args(2) else "Http"
     val blockSize = if (args.length > 3) args(3) else "4096"

-    System.setProperty("spark.broadcast.factory", "org.apache.spark.broadcast." + bcName +
-      "BroadcastFactory")
-    System.setProperty("spark.broadcast.blockSize", blockSize)
     val sparkConf = new SparkConf().setAppName("Broadcast Test")
-
+      .set("spark.broadcast.factory", s"org.apache.spark.broadcast.${bcName}BroadcastFactory")
+      .set("spark.broadcast.blockSize", blockSize)
     val sc = new SparkContext(sparkConf)

     val slices = if (args.length > 0) args(0).toInt else 2
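
As a standalone illustration of the pattern this hunk adopts, the sketch below builds all broadcast settings on a SparkConf before the SparkContext is created, instead of mutating global JVM properties. The hard-coded bcName, blockSize, and local master are assumptions standing in for the example's command-line arguments.

import org.apache.spark.{SparkConf, SparkContext}

object BroadcastConfSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical stand-ins for the example's command-line arguments.
    val bcName = "Http"
    val blockSize = "4096"

    // All configuration goes onto the SparkConf before the context is created,
    // rather than being pushed into global JVM properties via System.setProperty.
    val sparkConf = new SparkConf()
      .setAppName("Broadcast Test")
      .setMaster("local[2]") // assumption: run locally for the sketch
      .set("spark.broadcast.factory", s"org.apache.spark.broadcast.${bcName}BroadcastFactory")
      .set("spark.broadcast.blockSize", blockSize)

    val sc = new SparkContext(sparkConf)
    // ... broadcast workload elided ...
    sc.stop()
  }
}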

StoragePerfTester.scala
@@ -21,7 +21,7 @@ import java.util.concurrent.{CountDownLatch, Executors}
 import java.util.concurrent.atomic.AtomicLong

 import org.apache.spark.executor.ShuffleWriteMetrics
-import org.apache.spark.SparkContext
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.shuffle.hash.HashShuffleManager
 import org.apache.spark.util.Utils
@@ -49,13 +49,13 @@ object StoragePerfTester {
     val writeData = "1" * recordLength
     val executor = Executors.newFixedThreadPool(numMaps)

-    System.setProperty("spark.shuffle.compress", "false")
-    System.setProperty("spark.shuffle.sync", "true")
-    System.setProperty("spark.shuffle.manager",
-      "org.apache.spark.shuffle.hash.HashShuffleManager")
+    val conf = new SparkConf()
+      .set("spark.shuffle.compress", "false")
+      .set("spark.shuffle.sync", "true")
+      .set("spark.shuffle.manager", "org.apache.spark.shuffle.hash.HashShuffleManager")

     // This is only used to instantiate a BlockManager. All thread scheduling is done manually.
-    val sc = new SparkContext("local[4]", "Write Tester")
+    val sc = new SparkContext("local[4]", "Write Tester", conf)
     val hashShuffleManager = sc.env.shuffleManager.asInstanceOf[HashShuffleManager]

     def writeOutputBytes(mapId: Int, total: AtomicLong) = {
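
For reference, this is roughly how the explicit SparkConf flows into the three-argument SparkContext constructor used above, and how a setting can be read back through the context's own configuration instead of System.getProperty. A minimal sketch; the local master and the println are illustrative only.

import org.apache.spark.{SparkConf, SparkContext}

object ShuffleConfSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .set("spark.shuffle.compress", "false")
      .set("spark.shuffle.sync", "true")
      .set("spark.shuffle.manager", "org.apache.spark.shuffle.hash.HashShuffleManager")

    // The (master, appName, conf) constructor carries the explicit conf into the context.
    val sc = new SparkContext("local[4]", "Write Tester", conf)

    // The setting is visible on the context's configuration,
    // with no reliance on global JVM system properties.
    println(sc.getConf.get("spark.shuffle.manager"))

    sc.stop()
  }
}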
