
Commit

solve the shuffle problem for HashSet
WangTaoTheTonic committed Sep 9, 2014
1 parent f674e59 commit b6560cf
Showing 1 changed file with 6 additions and 7 deletions.
13 changes: 6 additions & 7 deletions core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -482,26 +482,25 @@ private[spark] class Master(
 
     // First schedule drivers, they take strict precedence over applications
     // Randomization helps balance drivers
-    val shuffledWorkers = Random.shuffle(workers).toArray
-    val workerNum = shuffledWorkers.size
+    val shuffledAliveWorkers = Random.shuffle(workers.toSeq.filter(_.state == WorkerState.ALIVE))
+    val aliveWorkerNum = shuffledAliveWorkers.size
     var curPos = 0
     for (driver <- List(waitingDrivers: _*)) { // iterate over a copy of waitingDrivers
       // For each waiting driver we pick a worker that has enough resources to launch it.
       // The picking is done in a round-robin fashion, starting from the position behind the
       // last worker on which a driver was just launched, and ending when the driver is
       // launched or we have iterated over all workers.
       val startPos = curPos
-      curPos = (curPos + 1) % workerNum
+      curPos = (curPos + 1) % aliveWorkerNum
       var launched = false
       while (curPos - 1 != startPos && !launched) {
-        val worker = shuffledWorkers(curPos)
-        if (worker.state == WorkerState.ALIVE
-          && worker.memoryFree >= driver.desc.mem && worker.coresFree >= driver.desc.cores) {
+        val worker = shuffledAliveWorkers(curPos)
+        if (worker.memoryFree >= driver.desc.mem && worker.coresFree >= driver.desc.cores) {
           launchDriver(worker, driver)
           waitingDrivers -= driver
           launched = true
         }
-        curPos = (curPos + 1) % workerNum
+        curPos = (curPos + 1) % aliveWorkerNum
       }
     }
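
The commit title refers to a quirk of scala.util.Random.shuffle: it rebuilds a collection of the same type as its input, so shuffling a HashSet just produces another HashSet, whose iteration order is dictated by element hashing rather than by the random permutation. The "shuffle" therefore has no effect on the order the old code's toArray later indexes into, which is why the patch converts the worker set to a Seq (and filters to ALIVE workers) before shuffling. Below is a minimal sketch of the difference, not taken from the commit: it uses plain Ints in place of Spark's WorkerInfo objects, and the ShuffleDemo name is only for illustration.

import scala.collection.mutable.HashSet
import scala.util.Random

object ShuffleDemo {
  def main(args: Array[String]): Unit = {
    // Stand-in for Master's `workers` field, which is a mutable HashSet in Spark.
    val workers = HashSet(1, 2, 3, 4, 5, 6, 7, 8)

    // Shuffling the HashSet directly returns another HashSet, so the element
    // order is re-derived from hashing and the random permutation is lost:
    // in practice both calls print the same order.
    println(Random.shuffle(workers).toList)
    println(Random.shuffle(workers).toList)

    // Converting to a Seq first keeps the random permutation, which is what
    // the patched line does before the round-robin walk over alive workers.
    println(Random.shuffle(workers.toSeq).toList)
    println(Random.shuffle(workers.toSeq).toList)
  }
}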

