Allow environment variables to be mocked in tests.
JoshRosen committed Aug 17, 2014
1 parent 6d9259b commit 007298b
Showing 2 changed files with 13 additions and 7 deletions.
6 changes: 6 additions & 0 deletions core/src/main/scala/org/apache/spark/SparkConf.scala
@@ -210,6 +210,12 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     new SparkConf(false).setAll(settings)
   }
 
+  /**
+   * By using this instead of System.getenv(), environment variables can be mocked
+   * in unit tests.
+   */
+  private[spark] def getenv(name: String): String = System.getenv(name)
+
   /** Checks for illegal or deprecated config settings. Throws an exception for the former. Not
    * idempotent - may mutate this conf object to convert deprecated settings to supported ones. */
   private[spark] def validateSettings() {
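The new `getenv` hook exists so that a test can substitute a fake environment instead of reading the real one via `System.getenv()`. Below is a minimal sketch of how a test might take advantage of it; it is not part of this commit, `MockableSparkConf` and the values are hypothetical, and because `getenv` is `private[spark]` the override would have to live somewhere inside the `org.apache.spark` package.

// Hypothetical test helper (not in this commit): a SparkConf whose "environment"
// is an in-memory map. Must be defined in the org.apache.spark package so the
// private[spark] override is legal.
class MockableSparkConf(env: Map[String, String]) extends SparkConf(false) {
  // Look the variable up in the supplied map; like System.getenv, return null
  // when it is absent.
  override private[spark] def getenv(name: String): String =
    env.getOrElse(name, null)
}

// In a test body:
val conf = new MockableSparkConf(Map("SPARK_LOCAL_DIRS" -> "/tmp/spark-test"))
assert(conf.getenv("SPARK_LOCAL_DIRS") == "/tmp/spark-test")
assert(conf.getenv("NOT_SET") == null)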
14 changes: 7 additions & 7 deletions core/src/main/scala/org/apache/spark/executor/Executor.scala
@@ -66,10 +66,10 @@ private[spark] class Executor(
   // to what Yarn on this system said was available. This will be used later when SparkEnv
   // created.
   if (java.lang.Boolean.valueOf(
-      System.getProperty("SPARK_YARN_MODE", System.getenv("SPARK_YARN_MODE")))) {
-    conf.set("spark.local.dir", getYarnLocalDirs())
-  } else if (sys.env.contains("SPARK_LOCAL_DIRS")) {
-    conf.set("spark.local.dir", sys.env("SPARK_LOCAL_DIRS"))
+      System.getProperty("SPARK_YARN_MODE", conf.getenv("SPARK_YARN_MODE")))) {
+    conf.set("spark.local.dir", getYarnLocalDirs(conf))
+  } else if (conf.getenv("SPARK_LOCAL_DIRS") != null) {
+    conf.set("spark.local.dir", conf.getenv("SPARK_LOCAL_DIRS"))
   }
 
   if (!isLocal) {
@@ -135,12 +135,12 @@ private[spark] class Executor(
   }
 
   /** Get the Yarn approved local directories. */
-  private def getYarnLocalDirs(): String = {
+  private def getYarnLocalDirs(conf: SparkConf): String = {
     // Hadoop 0.23 and 2.x have different Environment variable names for the
     // local dirs, so lets check both. We assume one of the 2 is set.
     // LOCAL_DIRS => 2.X, YARN_LOCAL_DIRS => 0.23.X
-    val localDirs = Option(System.getenv("YARN_LOCAL_DIRS"))
-      .getOrElse(Option(System.getenv("LOCAL_DIRS"))
+    val localDirs = Option(conf.getenv("YARN_LOCAL_DIRS"))
+      .getOrElse(Option(conf.getenv("LOCAL_DIRS"))
       .getOrElse(""))
 
     if (localDirs.isEmpty) {
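With `getYarnLocalDirs` now taking the conf explicitly, the YARN directory lookup also flows through the mockable hook. The following standalone sketch (not part of this commit, and reusing the hypothetical `MockableSparkConf` from the earlier sketch) mirrors the fallback expression to show that `YARN_LOCAL_DIRS` takes precedence over `LOCAL_DIRS` and that an empty string means neither is set.

// Assumed: MockableSparkConf from the earlier sketch.
val conf = new MockableSparkConf(Map(
  "YARN_LOCAL_DIRS" -> "/data1/yarn,/data2/yarn",
  "LOCAL_DIRS" -> "/ignored"))

// Mirrors the expression inside getYarnLocalDirs(conf).
val localDirs = Option(conf.getenv("YARN_LOCAL_DIRS"))
  .getOrElse(Option(conf.getenv("LOCAL_DIRS"))
  .getOrElse(""))

assert(localDirs == "/data1/yarn,/data2/yarn")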
