From 4b6284319c0c5616c997176747d644ca9ff14fae Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Wed, 30 Aug 2017 13:14:59 -0700
Subject: [PATCH 1/2] [SPARK-21728][core] Follow up: fix user config, auth in
 SparkSubmit logging.

- SecurityManager complains when auth is enabled but no secret is defined;
  SparkSubmit doesn't use the auth functionality of the SecurityManager,
  so use a dummy secret to work around the exception.

- Only reset the log4j configuration when Spark was the one initializing
  it, otherwise user-defined log configuration may be lost.
---
 .../src/main/scala/org/apache/spark/deploy/SparkSubmit.scala | 3 +++
 core/src/main/scala/org/apache/spark/internal/Logging.scala  | 5 ++++-
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index 38604fe93998c..ea9c9bdaede76 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -352,6 +352,9 @@ object SparkSubmit extends CommandLineUtils with Logging {
     var localJars: String = null
     var localPyFiles: String = null
     if (deployMode == CLIENT) {
+      // This security manager will not need an auth secret, but set a dummy value in case
+      // spark.authenticate is enabled, otherwise an exception is thrown.
+      sparkConf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, "unused")
       val secMgr = new SecurityManager(sparkConf)
       localPrimaryResource = Option(args.primaryResource).map {
         downloadFile(_, targetDir, sparkConf, hadoopConf, secMgr)
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index cea9964ea8c91..fec4fe364d8f7 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -120,6 +120,7 @@ trait Logging {
       val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
       // scalastyle:off println
       if (!log4j12Initialized) {
+        Logging.defaultSparkLog4jConfig = true
         val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
         Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
           case Some(url) =>
@@ -164,6 +165,7 @@ trait Logging {
 private[spark] object Logging {
   @volatile private var initialized = false
   @volatile private var defaultRootLevel: Level = null
+  @volatile private var defaultSparkLog4jConfig = false

   val initLock = new Object()
   try {
@@ -185,7 +187,8 @@ private[spark] object Logging {
    * initialization again.
    */
   def uninitialize(): Unit = initLock.synchronized {
-    if (isLog4j12()) {
+    if (isLog4j12() && defaultSparkLog4jConfig) {
+      defaultSparkLog4jConfig = false
       LogManager.resetConfiguration()
     }
     this.initialized = false
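
A note on the SparkSubmit hunk above: with spark.authenticate=true and no
secret configured, the SecurityManager constructor rejects the configuration
with an exception, even though SparkSubmit only uses the instance to download
dependencies. The sketch below is not part of the patch (and only compiles
from Spark's own code, since SecurityManager is private[spark]); it
illustrates the failure mode and the workaround:

    import org.apache.spark.{SecurityManager, SparkConf}

    val sparkConf = new SparkConf().set("spark.authenticate", "true")

    // Without a secret this throws IllegalArgumentException at construction
    // time, which is what SparkSubmit was hitting:
    //   val secMgr = new SecurityManager(sparkConf)

    // The patch sets a placeholder first; the value is never used, since
    // SparkSubmit performs no actual authentication with this instance.
    sparkConf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, "unused")
    val secMgr = new SecurityManager(sparkConf)  // constructed successfully
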
From 31d6c776cfad48c1835effc417ec2116fada757f Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Thu, 31 Aug 2017 09:46:53 -0700
Subject: [PATCH 2/2] Restore root logger level when uninitializing.

---
 .../main/scala/org/apache/spark/internal/Logging.scala | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index fec4fe364d8f7..c0d709ad25f29 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -187,9 +187,13 @@ private[spark] object Logging {
    * initialization again.
    */
   def uninitialize(): Unit = initLock.synchronized {
-    if (isLog4j12() && defaultSparkLog4jConfig) {
-      defaultSparkLog4jConfig = false
-      LogManager.resetConfiguration()
+    if (isLog4j12()) {
+      if (defaultSparkLog4jConfig) {
+        defaultSparkLog4jConfig = false
+        LogManager.resetConfiguration()
+      } else {
+        LogManager.getRootLogger().setLevel(defaultRootLevel)
+      }
     }
     this.initialized = false
   }
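
A note on the second patch: when the user supplies their own log4j 1.2
configuration, Spark leaves it in place and, when it overrides the root
level (e.g. for the shell), records the previous level in defaultRootLevel;
uninitialize() now restores that level instead of wiping the configuration.
The sketch below is illustrative only and assumes that shell-style flow:

    import org.apache.log4j.{ConsoleAppender, Level, LogManager, PatternLayout}

    // A user-provided log4j setup: the root logger already has an appender,
    // so Spark's Logging trait will not install log4j-defaults.properties.
    val root = LogManager.getRootLogger
    root.addAppender(new ConsoleAppender(new PatternLayout("%d %p %c: %m%n")))
    root.setLevel(Level.WARN)

    // Spark initializes logging, saves WARN in defaultRootLevel, and may set
    // a different root level. After Logging.uninitialize(), the appender is
    // still attached and the root level is WARN again; before this patch a
    // user-supplied setup kept whatever level Spark had set, since only the
    // Spark-installed default configuration was reset.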