From 46c55799b3190d5e769047c53246886d98a92494 Mon Sep 17 00:00:00 2001
From: Allen
Date: Sat, 9 Apr 2016 16:26:22 +0800
Subject: [PATCH 1/3] fix spark context recreate bug

---
 core/src/main/scala/org/apache/spark/SparkContext.scala      | 2 +-
 core/src/test/scala/org/apache/spark/SparkContextSuite.scala | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 9ec5cedf258fc..3c4cec8a831fe 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2192,8 +2192,8 @@ object SparkContext extends Logging {
       sc: SparkContext,
       allowMultipleContexts: Boolean): Unit = {
     SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
-      assertNoOtherContextIsRunning(sc, allowMultipleContexts)
       contextBeingConstructed = Some(sc)
+      assertNoOtherContextIsRunning(sc, allowMultipleContexts)
     }
   }
 
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 841fd02ae8bb6..a759f364fe059 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -39,8 +39,12 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
     val conf = new SparkConf().setAppName("test").setMaster("local")
       .set("spark.driver.allowMultipleContexts", "false")
     sc = new SparkContext(conf)
+    val envBefore = SparkEnv.get
     // A SparkContext is already running, so we shouldn't be able to create a second one
     intercept[SparkException] { new SparkContext(conf) }
+    val envAfter = SparkEnv.get
+    // SparkEnv and other context variables should be the same
+    assert(envBefore == envAfter)
     // After stopping the running context, we should be able to create a new one
     resetSparkContext()
     sc = new SparkContext(conf)

From 66664c635f22e7cc235596a25296894ce71e90c2 Mon Sep 17 00:00:00 2001
From: Allen
Date: Sun, 17 Apr 2016 14:35:01 +0800
Subject: [PATCH 2/3] update change

---
 .../scala/org/apache/spark/SparkContext.scala | 29 +++++++++----------
 1 file changed, 13 insertions(+), 16 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 3c4cec8a831fe..79e1eff38704c 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2119,21 +2119,7 @@ object SparkContext extends Logging {
       sc: SparkContext,
       allowMultipleContexts: Boolean): Unit = {
     SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
-      contextBeingConstructed.foreach { otherContext =>
-        if (otherContext ne sc) { // checks for reference equality
-          // Since otherContext might point to a partially-constructed context, guard against
-          // its creationSite field being null:
-          val otherContextCreationSite =
-            Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
-          val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
-            " constructor). This may indicate an error, since only one SparkContext may be" +
-            " running in this JVM (see SPARK-2243)." +
-            s" The other SparkContext was created at:\n$otherContextCreationSite"
-          logWarning(warnMsg)
-        }
-
-        if (activeContext.get() != null) {
-          val ctx = activeContext.get()
+      Option(activeContext.get()).foreach { ctx =>
         val errMsg = "Only one SparkContext may be running in this JVM (see SPARK-2243)." +
           " To ignore this error, set spark.driver.allowMultipleContexts = true. " +
           s"The currently running SparkContext was created at:\n${ctx.creationSite.longForm}"
@@ -2144,6 +2130,17 @@ object SparkContext extends Logging {
           throw exception
         }
       }
+
+      contextBeingConstructed.filter(_ ne sc).foreach { otherContext =>
+        // Since otherContext might point to a partially-constructed context, guard against
+        // its creationSite field being null:
+        val otherContextCreationSite =
+          Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
+        val warnMsg = "Another SparkContext is being constructed (or threw an exception in its" +
+          " constructor). This may indicate an error, since only one SparkContext may be" +
+          " running in this JVM (see SPARK-2243)." +
+          s" The other SparkContext was created at:\n$otherContextCreationSite"
+        logWarning(warnMsg)
       }
     }
   }
@@ -2192,8 +2189,8 @@ object SparkContext extends Logging {
       sc: SparkContext,
       allowMultipleContexts: Boolean): Unit = {
     SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
-      contextBeingConstructed = Some(sc)
       assertNoOtherContextIsRunning(sc, allowMultipleContexts)
+      contextBeingConstructed = Some(sc)
     }
   }
 
From 49a507e41f5836b5cb6f8659ea15f2bdca0b346b Mon Sep 17 00:00:00 2001
From: Allen
Date: Sun, 24 Apr 2016 09:40:44 +0800
Subject: [PATCH 3/3] should filter the same sc

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 79e1eff38704c..6b038f6519aac 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2119,7 +2119,7 @@ object SparkContext extends Logging {
       sc: SparkContext,
       allowMultipleContexts: Boolean): Unit = {
     SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
-      Option(activeContext.get()).foreach { ctx =>
+      Option(activeContext.get()).filter(_ ne sc).foreach { ctx =>
        val errMsg = "Only one SparkContext may be running in this JVM (see SPARK-2243)." +
          " To ignore this error, set spark.driver.allowMultipleContexts = true. " +
          s"The currently running SparkContext was created at:\n${ctx.creationSite.longForm}"