From d9c69519d1e6eb108be281320612534044eb1324 Mon Sep 17 00:00:00 2001
From: Andrew Rowson
Date: Tue, 10 Feb 2015 15:55:44 +0000
Subject: [PATCH] [SPARK-5655] Don't chmod700 application files if running in a YARN container

---
 .../main/scala/org/apache/spark/deploy/worker/Worker.scala | 2 +-
 .../scala/org/apache/spark/storage/DiskBlockManager.scala  | 2 +-
 core/src/main/scala/org/apache/spark/util/Utils.scala      | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
index 10929eb516041..7e27337923240 100755
--- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala
@@ -349,7 +349,7 @@ private[spark] class Worker(
           // application finishes.
           val appLocalDirs = appDirectories.get(appId).getOrElse {
             Utils.getOrCreateLocalRootDirs(conf).map { dir =>
-              Utils.createDirectory(dir).getAbsolutePath()
+              Utils.createDirectory(dir, conf=conf).getAbsolutePath()
             }.toSeq
           }
           appDirectories(appId) = appLocalDirs
diff --git a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
index 53eaedacbf291..50f5b82696075 100644
--- a/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
+++ b/core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
@@ -123,7 +123,7 @@ private[spark] class DiskBlockManager(blockManager: BlockManager, conf: SparkCon
   private def createLocalDirs(conf: SparkConf): Array[File] = {
     Utils.getOrCreateLocalRootDirs(conf).flatMap { rootDir =>
       try {
-        val localDir = Utils.createDirectory(rootDir, "blockmgr")
+        val localDir = Utils.createDirectory(rootDir, "blockmgr", conf)
         logInfo(s"Created local directory at $localDir")
         Some(localDir)
       } catch {
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 61d287ca9c3ac..656fc469c9274 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -268,7 +268,7 @@ private[spark] object Utils extends Logging {
    * Create a directory inside the given parent directory. The directory is guaranteed to be
    * newly created, and is not marked for automatic deletion.
    */
-  def createDirectory(root: String, namePrefix: String = "spark"): File = {
+  def createDirectory(root: String, namePrefix: String = "spark", conf: SparkConf = null): File = {
     var attempts = 0
     val maxAttempts = MAX_DIR_CREATION_ATTEMPTS
     var dir: File = null
@@ -285,7 +285,7 @@
       } else {
         // Restrict file permissions via chmod if available.
         // For Windows this step is ignored.
-        if (!isWindows && !chmod700(dir)) {
+        if (!isWindows && (conf==null || !isRunningInYarnContainer(conf)) && !chmod700(dir)) {
           dir.delete()
           dir = null
         }
@@ -702,7 +702,7 @@
       try {
         val rootDir = new File(root)
         if (rootDir.exists || rootDir.mkdirs()) {
-          Some(createDirectory(root).getAbsolutePath())
+          Some(createDirectory(root, conf=conf).getAbsolutePath())
         } else {
           logError(s"Failed to create dir in $root. Ignoring this directory.")
           None