From 5b0c58a49b60ff7e01ee12aa14d6ce487005377c Mon Sep 17 00:00:00 2001
From: Andrew Or
Date: Wed, 14 May 2014 15:57:09 -0700
Subject: [PATCH] Do not throw exception if YARN_LOCAL_DIRS is not set for
 local mode

---
 .../scala/org/apache/spark/executor/Executor.scala | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/executor/Executor.scala b/core/src/main/scala/org/apache/spark/executor/Executor.scala
index baee7a216a7c3..9160a7dcfc691 100644
--- a/core/src/main/scala/org/apache/spark/executor/Executor.scala
+++ b/core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -26,7 +26,6 @@ import scala.collection.JavaConversions._
 import scala.collection.mutable.HashMap

 import org.apache.spark._
-import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.scheduler._
 import org.apache.spark.storage.{StorageLevel, TaskResultBlockId}
 import org.apache.spark.util.{AkkaUtils, Utils}
@@ -39,8 +38,8 @@ private[spark] class Executor(
     slaveHostname: String,
     properties: Seq[(String, String)],
     isLocal: Boolean = false)
-  extends Logging
-{
+  extends Logging {
+
   // Application dependencies (added through SparkContext) that we've fetched so far on this node.
   // Each map holds the master's timestamp for the version of that file or JAR we got.
   private val currentFiles: HashMap[String, Long] = new HashMap[String, Long]()
@@ -63,9 +62,10 @@ private[spark] class Executor(
   // If we are in yarn mode, systems can have different disk layouts so we must set it
   // to what Yarn on this system said was available. This will be used later when SparkEnv
   // created.
-  if (java.lang.Boolean.valueOf(
-      System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE")))) {
-    conf.set("spark.local.dir", getYarnLocalDirs())
+  val sparkYarnMode = java.lang.Boolean.valueOf(
+    System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE")))
+  if (sparkYarnMode && !isLocal) {
+    conf.set("spark.local.dir", getYarnLocalDirs)
   } else if (sys.env.contains("SPARK_LOCAL_DIRS")) {
     conf.set("spark.local.dir", sys.env("SPARK_LOCAL_DIRS"))
   }
@@ -121,7 +121,7 @@ private[spark] class Executor(
   }

   /** Get the Yarn approved local directories. */
-  private def getYarnLocalDirs(): String = {
+  private def getYarnLocalDirs: String = {
     // Hadoop 0.23 and 2.x have different Environment variable names for the
     // local dirs, so lets check both. We assume one of the 2 is set.
     // LOCAL_DIRS => 2.X, YARN_LOCAL_DIRS => 0.23.X
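
Note: the body of getYarnLocalDirs is cut off by the final hunk above. For
reference, a minimal sketch of the lookup the trailing comments describe,
assuming the method checks both environment variables and throws only when
neither LOCAL_DIRS (Hadoop 2.x) nor YARN_LOCAL_DIRS (Hadoop 0.23.x) is set;
the object name and exception message here are illustrative, and the actual
body in Executor.scala may differ:

    // Sketch only, not part of the patch: resolve the YARN-approved local
    // directories from whichever environment variable this Hadoop version sets.
    object YarnLocalDirsSketch {
      def getYarnLocalDirs: String = {
        val localDirs = Option(System.getenv("YARN_LOCAL_DIRS")) // 0.23.x
          .orElse(Option(System.getenv("LOCAL_DIRS")))           // 2.x
          .getOrElse("")
        if (localDirs.isEmpty) {
          // Hypothetical message; reached only when neither variable is set,
          // which the patch above now avoids triggering in local mode.
          throw new Exception("Yarn local dirs can't be empty")
        }
        localDirs
      }
    }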