
Commit

Fix for SparkContext stop behavior
rekhajoshm committed Jun 30, 2015
1 parent 446b0a4 commit 277043e
Showing 1 changed file with 1 addition and 6 deletions.
7 changes: 1 addition & 6 deletions in core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -22,7 +22,6 @@ import java.net.Socket
 
 import akka.actor.ActorSystem
 
-import scala.collection.JavaConversions._
 import scala.collection.mutable
 import scala.util.Properties
 
@@ -45,8 +44,6 @@ import org.apache.spark.storage._
 import org.apache.spark.unsafe.memory.{ExecutorMemoryManager, MemoryAllocator}
 import org.apache.spark.util.{RpcUtils, Utils}
 
-import scala.util.control.NonFatal
-
 /**
  * :: DeveloperApi ::
  * Holds all the runtime environment objects for a running Spark instance (either master or worker),
@@ -95,8 +92,7 @@ class SparkEnv (
 
     if (!isStopped) {
       isStopped = true
-
-      pythonWorkers.foreach { case (key, worker) => worker.stop()}
+      pythonWorkers.values.foreach(_.stop())
       Option(httpFileServer).foreach(_.stop())
       mapOutputTracker.stop()
       shuffleManager.stop()
@@ -107,7 +103,6 @@ class SparkEnv (
       outputCommitCoordinator.stop()
       rpcEnv.shutdown()
 
-
       // Unfortunately Akka's awaitTermination doesn't actually wait for the Netty server to shut
       // down, but let's call it anyway in case it gets fixed in a later release
       // UPDATE: In Akka 2.1.x, this hangs if there are remote actors, so we can't call it.
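
The one substantive edit in the stop() body replaces a destructuring foreach over (key, worker) pairs with iteration over the map's values; the rest of the diff removes unused imports and stray blank lines. A minimal sketch of why the two calls are equivalent, using a hypothetical Worker class as a stand-in for Spark's Python worker factory (illustration only, not the real types):

    // Hypothetical stand-in for Spark's Python worker factory.
    class Worker(name: String) {
      def stop(): Unit = println(s"stopped $name")
    }

    val pythonWorkers = scala.collection.mutable.Map(
      "w1" -> new Worker("w1"),
      "w2" -> new Worker("w2"))

    // Before: destructure each (key, worker) pair and ignore the key.
    pythonWorkers.foreach { case (key, worker) => worker.stop() }

    // After: iterate the values directly; same effect, no unused binding.
    pythonWorkers.values.foreach(_.stop())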
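The unchanged context lines also show the guard that makes SparkEnv.stop() safe to call repeatedly: the isStopped flag is flipped before any resource is released, so a second call falls through. A sketch of that pattern in isolation, on a toy Service class rather than SparkEnv itself:

    // Idempotent stop: flip the flag first so re-entry is a no-op.
    class Service {
      private var isStopped = false

      def stop(): Unit = synchronized {
        if (!isStopped) {
          isStopped = true
          println("releasing resources exactly once")
        }
      }
    }

    val s = new Service()
    s.stop() // releases resources
    s.stop() // second call does nothing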
