From 7ff12e049377f0bcfe6491a33eea2f020cde77d9 Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Sun, 5 Apr 2015 13:56:34 -0700
Subject: [PATCH 1/3] Change SparkContext.DRIVER_IDENTIFIER from <driver> to driver

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 3b73a8a8fd850..0d17d402a9b95 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1900,7 +1900,7 @@ object SparkContext extends Logging {
 
   private[spark] val SPARK_JOB_INTERRUPT_ON_CANCEL = "spark.job.interruptOnCancel"
 
-  private[spark] val DRIVER_IDENTIFIER = "<driver>"
+  private[spark] val DRIVER_IDENTIFIER = "driver"
 
   // The following deprecated objects have already been copied to `object AccumulatorParam` to
   // make the compiler find them automatically. They are duplicate codes only for backward

From 0c5d04bf0b261f4de3cec69cfb06a5289bc6f17f Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Sun, 5 Apr 2015 17:33:47 -0700
Subject: [PATCH 2/3] Add backwards-compatibility in BlockManagerId.isDriver

---
 .../src/main/scala/org/apache/spark/SparkContext.scala | 10 ++++++++++
 .../org/apache/spark/storage/BlockManagerId.scala      |  5 ++++-
 .../org/apache/spark/storage/BlockManagerSuite.scala   |  6 ++++++
 3 files changed, 20 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0d17d402a9b95..19dd7a480b1c3 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1900,8 +1900,18 @@ object SparkContext extends Logging {
 
   private[spark] val SPARK_JOB_INTERRUPT_ON_CANCEL = "spark.job.interruptOnCancel"
 
+  /**
+   * Executor id for the driver. In earlier versions of Spark, this was `<driver>`, but this was
+   * changed to `driver` because the angle brackets caused escaping issues in URLs and XML (see
+   * SPARK-6716 for more details).
+   */
   private[spark] val DRIVER_IDENTIFIER = "driver"
 
+  /**
+   * Legacy version of DRIVER_IDENTIFIER, retained for use in backwards-compatibility tests.
+   */
+  private[spark] val LEGACY_DRIVER_IDENTIFIER = "<driver>"
+
   // The following deprecated objects have already been copied to `object AccumulatorParam` to
   // make the compiler find them automatically. They are duplicate codes only for backward
   // compatibility, please update `object AccumulatorParam` accordingly if you plan to modify the

diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
index a6f1ebf325a7c..69ac37511e730 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala
@@ -60,7 +60,10 @@ class BlockManagerId private (
 
   def port: Int = port_
 
-  def isDriver: Boolean = { executorId == SparkContext.DRIVER_IDENTIFIER }
+  def isDriver: Boolean = {
+    executorId == SparkContext.DRIVER_IDENTIFIER ||
+      executorId == SparkContext.LEGACY_DRIVER_IDENTIFIER
+  }
 
   override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
     out.writeUTF(executorId_)

diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index ecd1cba5b5abe..024faa7fa9209 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -148,6 +148,12 @@ class BlockManagerSuite extends FunSuite with Matchers with BeforeAndAfterEach
     assert(id2_.eq(id1), "Deserialized id2 is not the same object as original id1")
   }
 
+  test("BlockManagerId.isDriver() backwards-compatibility with legacy driver ids (SPARK-6716)") {
+    assert(BlockManagerId(SparkContext.DRIVER_IDENTIFIER, "XXX", 1).isDriver)
+    assert(BlockManagerId(SparkContext.LEGACY_DRIVER_IDENTIFIER, "XXX", 1).isDriver)
+    assert(!BlockManagerId("notADriverIdentifier", "XXX", 1).isDriver)
+  }
+
   test("master + 1 manager interaction") {
     store = makeBlockManager(20000)
     val a1 = new Array[Byte](4000)

From 42d3c10beffdeb637de4a8f77fbd1a7fc0473d9b Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Mon, 6 Apr 2015 09:10:36 -0700
Subject: [PATCH 3/3] Clarify comment

---
 core/src/main/scala/org/apache/spark/SparkContext.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 19dd7a480b1c3..138b36b3441fa 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -1908,7 +1908,7 @@ object SparkContext extends Logging {
   private[spark] val DRIVER_IDENTIFIER = "driver"
 
   /**
-   * Legacy version of DRIVER_IDENTIFIER, retained for use in backwards-compatibility tests.
+   * Legacy version of DRIVER_IDENTIFIER, retained for backwards-compatibility.
    */
   private[spark] val LEGACY_DRIVER_IDENTIFIER = "<driver>"
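
For reference, a minimal standalone Scala sketch of the compatibility behavior these patches give BlockManagerId.isDriver. It is not part of the patches above; the object name is hypothetical, and only the two constant values and the either-id check are taken from the diffs.

    // Standalone sketch; DriverIdCompatSketch is a hypothetical name.
    object DriverIdCompatSketch {
      // New identifier introduced by SPARK-6716.
      val DRIVER_IDENTIFIER = "driver"
      // Pre-change identifier; its angle brackets caused URL/XML escaping issues.
      val LEGACY_DRIVER_IDENTIFIER = "<driver>"

      // Mirrors the updated isDriver check: an executor id counts as the driver
      // if it matches either the new or the legacy identifier, so ids written by
      // older Spark versions still resolve correctly.
      def isDriver(executorId: String): Boolean =
        executorId == DRIVER_IDENTIFIER || executorId == LEGACY_DRIVER_IDENTIFIER

      def main(args: Array[String]): Unit = {
        assert(isDriver("driver"))        // new-style id
        assert(isDriver("<driver>"))      // legacy id still accepted
        assert(!isDriver("executor-42"))  // ordinary executor id is not the driver
        println("driver-id compatibility checks passed")
      }
    }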