From 6f3c1d3269f0e3083aea538fb21cb50fbefe4588 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Wed, 23 Jul 2014 14:59:10 +0100
Subject: [PATCH 1/4] Remove log statement in logging initialization, and
 distinguish log4j 1.2 from 2.0, to avoid stack overflow in initialization

---
 .../main/scala/org/apache/spark/Logging.scala | 17 ++++++++++-------
 1 file changed, 10 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 50d8e93e1f0d7..7d9bde8f45f06 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -110,23 +110,26 @@ trait Logging {
   }
 
   private def initializeLogging() {
-    // If Log4j is being used, but is not initialized, load a default properties file
-    val binder = StaticLoggerBinder.getSingleton
-    val usingLog4j = binder.getLoggerFactoryClassStr.endsWith("Log4jLoggerFactory")
-    val log4jInitialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
-    if (!log4jInitialized && usingLog4j) {
+    // If Log4j 1.2 is being used, but is not initialized, load a default properties file
+    val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
+    // This minimally distinguishes the log4j 1.2 binding, currently
+    // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
+    // org.apache.logging.slf4j.Log4jLoggerFactory
+    val usingLog4j12 =
+      binderClass.endsWith("Log4jLoggerFactory") && binderClass.startsWith("org.slf4j.")
+    val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+    if (!log4j12Initialized && usingLog4j12) {
       val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
       Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
         case Some(url) =>
           PropertyConfigurator.configure(url)
-          log.info(s"Using Spark's default log4j profile: $defaultLogProps")
         case None =>
           System.err.println(s"Spark was unable to load $defaultLogProps")
       }
     }
     Logging.initialized = true
 
-    // Force a call into slf4j to initialize it. Avoids this happening from mutliple threads
+    // Force a call into slf4j to initialize it. Avoids this happening from multiple threads
     // and triggering this: http://mailman.qos.ch/pipermail/slf4j-dev/2010-April/002956.html
     log
   }

From a7f88760eee383d345e746ca201c627066c94222 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Wed, 30 Jul 2014 11:48:07 +0100
Subject: [PATCH 2/4] Updates from review

---
 core/src/main/scala/org/apache/spark/Logging.scala | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 7d9bde8f45f06..88fceb331bdfd 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -45,10 +45,7 @@ trait Logging {
       initializeIfNecessary()
       var className = this.getClass.getName
       // Ignore trailing $'s in the class names for Scala objects
-      if (className.endsWith("$")) {
-        className = className.substring(0, className.length - 1)
-      }
-      log_ = LoggerFactory.getLogger(className)
+      log_ = LoggerFactory.getLogger(className.stripSuffix("$"))
     }
     log_
   }
@@ -112,11 +109,10 @@ trait Logging {
   private def initializeLogging() {
     // If Log4j 1.2 is being used, but is not initialized, load a default properties file
     val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
-    // This minimally distinguishes the log4j 1.2 binding, currently
+    // This distinguishes the log4j 1.2 binding, currently
     // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
     // org.apache.logging.slf4j.Log4jLoggerFactory
-    val usingLog4j12 =
-      binderClass.endsWith("Log4jLoggerFactory") && binderClass.startsWith("org.slf4j.")
+    val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
     val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
     if (!log4j12Initialized && usingLog4j12) {
       val defaultLogProps = "org/apache/spark/log4j-defaults.properties"

From 94be4c7019025cf0f7fa901d9c24bcaff44004b7 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Wed, 30 Jul 2014 19:57:32 +0100
Subject: [PATCH 3/4] Add back log message as System.out, with informational
 comment

---
 core/src/main/scala/org/apache/spark/Logging.scala | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 88fceb331bdfd..6e7c86fa9f593 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -107,6 +107,7 @@ trait Logging {
   }
 
   private def initializeLogging() {
+    // Don't use a logger in here, as this is itself occurring during initialization of a logger
     // If Log4j 1.2 is being used, but is not initialized, load a default properties file
     val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
     // This distinguishes the log4j 1.2 binding, currently
@@ -119,6 +120,7 @@ trait Logging {
       Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
         case Some(url) =>
           PropertyConfigurator.configure(url)
+          System.out.println(s"Using Spark's default log4j profile: $defaultLogProps")
         case None =>
           System.err.println(s"Spark was unable to load $defaultLogProps")
       }

From 92a9898a4b112d1e94982898cb9bca2daa6a4769 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Thu, 31 Jul 2014 09:47:36 +0100
Subject: [PATCH 4/4] System.out -> System.err

---
 core/src/main/scala/org/apache/spark/Logging.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 6e7c86fa9f593..807ef3e9c9d60 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -120,7 +120,7 @@ trait Logging {
       Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
         case Some(url) =>
           PropertyConfigurator.configure(url)
-          System.out.println(s"Using Spark's default log4j profile: $defaultLogProps")
+          System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
         case None =>
           System.err.println(s"Spark was unable to load $defaultLogProps")
       }
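
For reference, a sketch of initializeLogging() as it reads once all four patches above are
applied. It is reconstructed from the diffs in this series rather than copied from a Spark
release; the rest of the Logging trait and its imports (LogManager, PropertyConfigurator,
StaticLoggerBinder, Utils, LoggerFactory) are omitted here.

  private def initializeLogging() {
    // Don't use a logger in here, as this is itself occurring during initialization of a logger
    // If Log4j 1.2 is being used, but is not initialized, load a default properties file
    val binderClass = StaticLoggerBinder.getSingleton.getLoggerFactoryClassStr
    // This distinguishes the log4j 1.2 binding, currently
    // org.slf4j.impl.Log4jLoggerFactory, from the log4j 2.0 binding, currently
    // org.apache.logging.slf4j.Log4jLoggerFactory
    val usingLog4j12 = "org.slf4j.impl.Log4jLoggerFactory".equals(binderClass)
    val log4j12Initialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
    if (!log4j12Initialized && usingLog4j12) {
      val defaultLogProps = "org/apache/spark/log4j-defaults.properties"
      Option(Utils.getSparkClassLoader.getResource(defaultLogProps)) match {
        case Some(url) =>
          PropertyConfigurator.configure(url)
          // Report the default profile on stderr, not through the logger being initialized
          System.err.println(s"Using Spark's default log4j profile: $defaultLogProps")
        case None =>
          System.err.println(s"Spark was unable to load $defaultLogProps")
      }
    }
    Logging.initialized = true

    // Force a call into slf4j to initialize it. Avoids this happening from multiple threads
    // and triggering this: http://mailman.qos.ch/pipermail/slf4j-dev/2010-April/002956.html
    log
  }

Net effect of the series: the log4j 1.2 binding is matched by the exact binder class name, so the
log4j 2.0 binding no longer trips the default-configuration path, and the "default log4j profile"
message goes to System.err instead of through the logger, since the logger itself is still being
initialized at that point (the original log.info call was what could recurse into initialization).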