From ff402f7c4b1f57194425ac9e095fc2380c080cd5 Mon Sep 17 00:00:00 2001 From: beliefer Date: Sat, 15 Feb 2020 14:46:39 +0800 Subject: [PATCH 01/10] Add version to config --- .../spark/internal/config/ConfigBuilder.scala | 16 ++++++--- .../spark/internal/config/ConfigEntry.scala | 35 +++++++++++++------ 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala index 68e1994f0f94f..8d5959a0c8b7f 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala @@ -129,7 +129,7 @@ private[spark] class TypedConfigBuilder[T]( def createOptional: OptionalConfigEntry[T] = { val entry = new OptionalConfigEntry[T](parent.key, parent._prependedKey, parent._prependSeparator, parent._alternatives, converter, stringConverter, parent._doc, - parent._public) + parent._public, parent._version) parent._onCreate.foreach(_(entry)) entry } @@ -144,7 +144,7 @@ private[spark] class TypedConfigBuilder[T]( val transformedDefault = converter(stringConverter(default)) val entry = new ConfigEntryWithDefault[T](parent.key, parent._prependedKey, parent._prependSeparator, parent._alternatives, transformedDefault, converter, - stringConverter, parent._doc, parent._public) + stringConverter, parent._doc, parent._public, parent._version) parent._onCreate.foreach(_(entry)) entry } @@ -154,7 +154,7 @@ private[spark] class TypedConfigBuilder[T]( def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = { val entry = new ConfigEntryWithDefaultFunction[T](parent.key, parent._prependedKey, parent._prependSeparator, parent._alternatives, defaultFunc, converter, stringConverter, - parent._doc, parent._public) + parent._doc, parent._public, parent._version) parent._onCreate.foreach(_ (entry)) entry } @@ -166,7 +166,7 @@ private[spark] class TypedConfigBuilder[T]( def createWithDefaultString(default: String): ConfigEntry[T] = { val entry = new ConfigEntryWithDefaultString[T](parent.key, parent._prependedKey, parent._prependSeparator, parent._alternatives, default, converter, stringConverter, - parent._doc, parent._public) + parent._doc, parent._public, parent._version) parent._onCreate.foreach(_(entry)) entry } @@ -186,6 +186,7 @@ private[spark] case class ConfigBuilder(key: String) { private[config] var _prependSeparator: String = "" private[config] var _public = true private[config] var _doc = "" + private[config] var _version = "" private[config] var _onCreate: Option[ConfigEntry[_] => Unit] = None private[config] var _alternatives = List.empty[String] @@ -199,6 +200,11 @@ private[spark] case class ConfigBuilder(key: String) { this } + def version(v: String): ConfigBuilder = { + _version = v + this + } + /** * Registers a callback for when the config entry is finally instantiated. Currently used by * SQLConf to keep track of SQL configuration entries. 
@@ -255,7 +261,7 @@ private[spark] case class ConfigBuilder(key: String) { def fallbackConf[T](fallback: ConfigEntry[T]): ConfigEntry[T] = { val entry = new FallbackConfigEntry(key, _prependedKey, _prependSeparator, _alternatives, _doc, - _public, fallback) + _public, _version, fallback) _onCreate.foreach(_(entry)) entry } diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala index c5df4c8820098..67b2db4db2961 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala @@ -39,6 +39,7 @@ package org.apache.spark.internal.config * @param doc the documentation for the configuration * @param isPublic if this configuration is public to the user. If it's `false`, this * configuration is only used internally and we should not expose it to users. + * @tparam version the spark version when the configuration was released. * @tparam T the value type */ private[spark] abstract class ConfigEntry[T] ( @@ -49,7 +50,8 @@ private[spark] abstract class ConfigEntry[T] ( val valueConverter: String => T, val stringConverter: T => String, val doc: String, - val isPublic: Boolean) { + val isPublic: Boolean, + val version: String) { import ConfigEntry._ @@ -74,7 +76,8 @@ private[spark] abstract class ConfigEntry[T] ( def defaultValue: Option[T] = None override def toString: String = { - s"ConfigEntry(key=$key, defaultValue=$defaultValueString, doc=$doc, public=$isPublic)" + s"ConfigEntry(key=$key, defaultValue=$defaultValueString, doc=$doc, " + + s"public=$isPublic, version = $version)" } } @@ -87,7 +90,8 @@ private class ConfigEntryWithDefault[T] ( valueConverter: String => T, stringConverter: T => String, doc: String, - isPublic: Boolean) + isPublic: Boolean, + version: String) extends ConfigEntry( key, prependedKey, @@ -96,7 +100,8 @@ private class ConfigEntryWithDefault[T] ( valueConverter, stringConverter, doc, - isPublic + isPublic, + version ) { override def defaultValue: Option[T] = Some(_defaultValue) @@ -117,7 +122,8 @@ private class ConfigEntryWithDefaultFunction[T] ( valueConverter: String => T, stringConverter: T => String, doc: String, - isPublic: Boolean) + isPublic: Boolean, + version: String) extends ConfigEntry( key, prependedKey, @@ -126,7 +132,8 @@ private class ConfigEntryWithDefaultFunction[T] ( valueConverter, stringConverter, doc, - isPublic + isPublic, + version ) { override def defaultValue: Option[T] = Some(_defaultFunction()) @@ -147,7 +154,8 @@ private class ConfigEntryWithDefaultString[T] ( valueConverter: String => T, stringConverter: T => String, doc: String, - isPublic: Boolean) + isPublic: Boolean, + version: String) extends ConfigEntry( key, prependedKey, @@ -156,7 +164,8 @@ private class ConfigEntryWithDefaultString[T] ( valueConverter, stringConverter, doc, - isPublic + isPublic, + version ) { override def defaultValue: Option[T] = Some(valueConverter(_defaultValue)) @@ -181,7 +190,8 @@ private[spark] class OptionalConfigEntry[T]( val rawValueConverter: String => T, val rawStringConverter: T => String, doc: String, - isPublic: Boolean) + isPublic: Boolean, + version: String) extends ConfigEntry[Option[T]]( key, prependedKey, @@ -190,7 +200,8 @@ private[spark] class OptionalConfigEntry[T]( s => Some(rawValueConverter(s)), v => v.map(rawStringConverter).orNull, doc, - isPublic + isPublic, + version ) { override def defaultValueString: String = ConfigEntry.UNDEFINED @@ -210,6 
+221,7 @@ private[spark] class FallbackConfigEntry[T] ( alternatives: List[String], doc: String, isPublic: Boolean, + version: String, val fallback: ConfigEntry[T]) extends ConfigEntry[T]( key, @@ -219,7 +231,8 @@ private[spark] class FallbackConfigEntry[T] ( fallback.valueConverter, fallback.stringConverter, doc, - isPublic + isPublic, + version ) { override def defaultValueString: String = s"" From d216d43afb832fd04ed454888d3cc1b0546471fa Mon Sep 17 00:00:00 2001 From: beliefer Date: Sat, 15 Feb 2020 20:59:30 +0800 Subject: [PATCH 02/10] Support SET -v --- .../scala/org/apache/spark/sql/internal/SQLConf.scala | 4 ++-- .../org/apache/spark/sql/api/python/PythonSQLUtils.scala | 2 +- .../apache/spark/sql/execution/command/SetCommand.scala | 7 ++++--- .../org/apache/spark/sql/internal/SQLConfSuite.scala | 8 ++++---- sql/gen-sql-config-docs.py | 8 ++++++-- 5 files changed, 17 insertions(+), 12 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 2214e03f34f0b..20fe5eab3adfa 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -2857,10 +2857,10 @@ class SQLConf extends Serializable with Logging { * Return all the configuration definitions that have been defined in [[SQLConf]]. Each * definition contains key, defaultValue and doc. */ - def getAllDefinedConfs: Seq[(String, String, String)] = sqlConfEntries.synchronized { + def getAllDefinedConfs: Seq[(String, String, String, String)] = sqlConfEntries.synchronized { sqlConfEntries.values.asScala.filter(_.isPublic).map { entry => val displayValue = Option(getConfString(entry.key, null)).getOrElse(entry.defaultValueString) - (entry.key, displayValue, entry.doc) + (entry.key, displayValue, entry.doc, entry.version) }.toSeq } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala index bf3055d5e3e09..03f5a60aec438 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/api/python/PythonSQLUtils.scala @@ -40,7 +40,7 @@ private[sql] object PythonSQLUtils { FunctionRegistry.functionSet.flatMap(f => FunctionRegistry.builtin.lookupFunction(f)).toArray } - def listSQLConfigs(): Array[(String, String, String)] = { + def listSQLConfigs(): Array[(String, String, String, String)] = { val conf = new SQLConf() // Py4J doesn't seem to translate Seq well, so we convert to an Array. 
conf.getAllDefinedConfs.toArray diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala index 39b08e2894dcd..dd3721c0e4c02 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala @@ -115,14 +115,15 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm case Some(("-v", None)) => val runFunc = (sparkSession: SparkSession) => { sparkSession.sessionState.conf.getAllDefinedConfs.sorted.map { - case (key, defaultValue, doc) => - Row(key, Option(defaultValue).getOrElse(""), doc) + case (key, defaultValue, doc, version) => + Row(key, Option(defaultValue).getOrElse(""), doc, version) } } val schema = StructType( StructField("key", StringType, nullable = false) :: StructField("value", StringType, nullable = false) :: - StructField("meaning", StringType, nullable = false) :: Nil) + StructField("meaning", StringType, nullable = false) :: + StructField("version", StringType, nullable = false) :: Nil) (schema.toAttributes, runFunc) // Queries the deprecated "mapred.reduce.tasks" property. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 61be3672f3ebe..b540b9aa12011 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -285,8 +285,8 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { assert(spark.sessionState.conf.getConfString(fallback.key, "lzo") === "lzo") val displayValue = spark.sessionState.conf.getAllDefinedConfs - .find { case (key, _, _) => key == fallback.key } - .map { case (_, v, _) => v } + .find { case (key, _, _, _) => key == fallback.key } + .map { case (_, v, _, _) => v } .get assert(displayValue === fallback.defaultValueString) @@ -297,8 +297,8 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { assert(spark.sessionState.conf.getConfString(fallback.key) === "lzo") val newDisplayValue = spark.sessionState.conf.getAllDefinedConfs - .find { case (key, _, _) => key == fallback.key } - .map { case (_, v, _) => v } + .find { case (key, _, _, _) => key == fallback.key } + .map { case (_, v, _, _) => v } .get assert(newDisplayValue === "lzo") diff --git a/sql/gen-sql-config-docs.py b/sql/gen-sql-config-docs.py index 04f5a850c9980..751c980b670c1 100644 --- a/sql/gen-sql-config-docs.py +++ b/sql/gen-sql-config-docs.py @@ -34,6 +34,7 @@ def get_public_sql_configs(jvm): name=_sql_config._1(), default=_sql_config._2(), description=_sql_config._3(), + version=_sql_config._4() ) for _sql_config in jvm.org.apache.spark.sql.api.python.PythonSQLUtils.listSQLConfigs() ] @@ -49,12 +50,13 @@ def generate_sql_configs_table(sql_configs, path): ```html - + + ... @@ -68,7 +70,7 @@ def generate_sql_configs_table(sql_configs, path): f.write(dedent( """
-      <tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
+      <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Version</th></tr>
 
       <tr>
         <td><code>spark.sql.adaptive.enabled</code></td>
         <td>false</td>
         <td><p>When true, enable adaptive query execution.</p></td>
+        <td>2.1.0</td>
       </tr>
 
       ...
 
@@ -68,7 +70,7 @@ def generate_sql_configs_table(sql_configs, path):
         f.write(dedent(
             """
             <table class="table">
-            <tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
+            <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Version</th></tr>
             """
         ))
         for config in sorted(sql_configs, key=lambda x: x.name):
@@ -96,12 +98,14 @@ def generate_sql_configs_table(sql_configs, path):
                     <td><code>{name}</code></td>
                     <td>{default}</td>
                     <td>{description}</td>
+                    <td>{version}</td>
                 </tr>
                 """
                 .format(
                     name=config.name,
                     default=default,
                     description=markdown.markdown(config.description),
+                    version=config.version
                 )
             ))
         f.write("</table>
\n") From d55a88d0202e01a879859ae8c50806dd1ccc855b Mon Sep 17 00:00:00 2001 From: beliefer Date: Sun, 16 Feb 2020 16:44:47 +0800 Subject: [PATCH 03/10] Optimize code --- .../scala/org/apache/spark/internal/config/ConfigEntry.scala | 2 +- sql/gen-sql-config-docs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala index 67b2db4db2961..5185d87193ad7 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala @@ -77,7 +77,7 @@ private[spark] abstract class ConfigEntry[T] ( override def toString: String = { s"ConfigEntry(key=$key, defaultValue=$defaultValueString, doc=$doc, " + - s"public=$isPublic, version = $version)" + s"public=$isPublic, version=$version)" } } diff --git a/sql/gen-sql-config-docs.py b/sql/gen-sql-config-docs.py index 751c980b670c1..715db47d1f6f5 100644 --- a/sql/gen-sql-config-docs.py +++ b/sql/gen-sql-config-docs.py @@ -25,7 +25,7 @@ from pyspark.java_gateway import launch_gateway SQLConfEntry = namedtuple( - "SQLConfEntry", ["name", "default", "description"]) + "SQLConfEntry", ["name", "default", "description", "version"]) def get_public_sql_configs(jvm): From 4da0b911e064f288433e14ec004528f0837de492 Mon Sep 17 00:00:00 2001 From: beliefer Date: Mon, 17 Feb 2020 17:28:15 +0800 Subject: [PATCH 04/10] Optimize code --- .../scala/org/apache/spark/internal/config/ConfigEntry.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala index 5185d87193ad7..8c0b11d46312e 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala @@ -39,7 +39,7 @@ package org.apache.spark.internal.config * @param doc the documentation for the configuration * @param isPublic if this configuration is public to the user. If it's `false`, this * configuration is only used internally and we should not expose it to users. - * @tparam version the spark version when the configuration was released. + * @param version the spark version when the configuration was released. 
* @tparam T the value type */ private[spark] abstract class ConfigEntry[T] ( From 6d8eb75f0c29962962f994d8f212fafae8577cfc Mon Sep 17 00:00:00 2001 From: beliefer Date: Tue, 18 Feb 2020 14:35:49 +0800 Subject: [PATCH 05/10] Version -> Since Version --- sql/gen-sql-config-docs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/gen-sql-config-docs.py b/sql/gen-sql-config-docs.py index 715db47d1f6f5..306fd3168d433 100644 --- a/sql/gen-sql-config-docs.py +++ b/sql/gen-sql-config-docs.py @@ -50,7 +50,7 @@ def generate_sql_configs_table(sql_configs, path): ```html - + From 57127032c2d53ee56ae1014d9112cca779acd9d6 Mon Sep 17 00:00:00 2001 From: beliefer Date: Wed, 19 Feb 2020 16:46:45 +0800 Subject: [PATCH 06/10] Optimize code --- .../apache/spark/sql/execution/command/SetCommand.scala | 8 ++++++-- sql/gen-sql-config-docs.py | 2 +- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala index dd3721c0e4c02..c55ff4ffefa02 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala @@ -116,14 +116,18 @@ case class SetCommand(kv: Option[(String, Option[String])]) extends RunnableComm val runFunc = (sparkSession: SparkSession) => { sparkSession.sessionState.conf.getAllDefinedConfs.sorted.map { case (key, defaultValue, doc, version) => - Row(key, Option(defaultValue).getOrElse(""), doc, version) + Row( + key, + Option(defaultValue).getOrElse(""), + doc, + Option(version).getOrElse("")) } } val schema = StructType( StructField("key", StringType, nullable = false) :: StructField("value", StringType, nullable = false) :: StructField("meaning", StringType, nullable = false) :: - StructField("version", StringType, nullable = false) :: Nil) + StructField("Since version", StringType, nullable = false) :: Nil) (schema.toAttributes, runFunc) // Queries the deprecated "mapred.reduce.tasks" property. diff --git a/sql/gen-sql-config-docs.py b/sql/gen-sql-config-docs.py index 306fd3168d433..98212ad373370 100644 --- a/sql/gen-sql-config-docs.py +++ b/sql/gen-sql-config-docs.py @@ -70,7 +70,7 @@ def generate_sql_configs_table(sql_configs, path): f.write(dedent( """
-      <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Version</th></tr>
+      <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr>
spark.sql.adaptive.enabled
- + """ )) for config in sorted(sql_configs, key=lambda x: x.name): From 2eec92e220e76360911f87710c4f1b0b3beb75bf Mon Sep 17 00:00:00 2001 From: beliefer Date: Fri, 21 Feb 2020 20:08:29 +0800 Subject: [PATCH 07/10] Add version info for deploy. --- .../org/apache/spark/internal/config/Deploy.scala | 11 +++++++++++ docs/configuration.md | 5 ++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala index ceab957b36634..d494c5ec019c7 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala @@ -19,48 +19,59 @@ package org.apache.spark.internal.config private[spark] object Deploy { val RECOVERY_MODE = ConfigBuilder("spark.deploy.recoveryMode") + .version("0.8.1") .stringConf .createWithDefault("NONE") val RECOVERY_MODE_FACTORY = ConfigBuilder("spark.deploy.recoveryMode.factory") + .version("1.2.0") .stringConf .createWithDefault("") val RECOVERY_DIRECTORY = ConfigBuilder("spark.deploy.recoveryDirectory") + .version("0.8.1") .stringConf .createWithDefault("") val ZOOKEEPER_URL = ConfigBuilder("spark.deploy.zookeeper.url") .doc(s"When `${RECOVERY_MODE.key}` is set to ZOOKEEPER, this " + "configuration is used to set the zookeeper URL to connect to.") + .version("0.8.1") .stringConf .createOptional val ZOOKEEPER_DIRECTORY = ConfigBuilder("spark.deploy.zookeeper.dir") + .version("0.8.1") .stringConf .createOptional val RETAINED_APPLICATIONS = ConfigBuilder("spark.deploy.retainedApplications") + .version("0.8.0") .intConf .createWithDefault(200) val RETAINED_DRIVERS = ConfigBuilder("spark.deploy.retainedDrivers") + .version("1.1.0") .intConf .createWithDefault(200) val REAPER_ITERATIONS = ConfigBuilder("spark.dead.worker.persistence") + .version("0.8.0") .intConf .createWithDefault(15) val MAX_EXECUTOR_RETRIES = ConfigBuilder("spark.deploy.maxExecutorRetries") + .version("1.6.3") .intConf .createWithDefault(10) val SPREAD_OUT_APPS = ConfigBuilder("spark.deploy.spreadOut") + .version("0.6.1") .booleanConf .createWithDefault(true) val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores") + .version("0.9.0") .intConf .createWithDefault(Int.MaxValue) diff --git a/docs/configuration.md b/docs/configuration.md index 1343755f9d87f..c8841efb22abf 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -2597,22 +2597,25 @@ Spark subsystems. ### Deploy
Property NameDefaultMeaningVersion
Property NameDefaultMeaningSince Version
 <table class="table">
-  <tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
+  <tr><th>Property Name</th><th>Default</th><th>Meaning</th><th>Since Version</th></tr>
   <tr>
     <td><code>spark.deploy.recoveryMode</code></td>
     <td>NONE</td>
     <td>The recovery mode setting to recover submitted Spark jobs with cluster mode when it failed and relaunches.
     This is only applicable for cluster mode when running with Standalone or Mesos.</td>
+    <td>0.8.1</td>
   </tr>
   <tr>
     <td><code>spark.deploy.zookeeper.url</code></td>
     <td>None</td>
     <td>When `spark.deploy.recoveryMode` is set to ZOOKEEPER, this configuration is used to set the zookeeper URL to connect to.</td>
+    <td>0.8.1</td>
   </tr>
   <tr>
     <td><code>spark.deploy.zookeeper.dir</code></td>
     <td>None</td>
     <td>When `spark.deploy.recoveryMode` is set to ZOOKEEPER, this configuration is used to set the zookeeper directory to store recovery state.</td>
+    <td>0.8.1</td>
   </tr>
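A minimal sketch (not part of the patch series, and assuming only a local SparkSession named `spark`) of how the "Since version" column introduced by the SetCommand changes above can be observed at runtime:

```scala
import org.apache.spark.sql.SparkSession

object SinceVersionDemo {
  def main(args: Array[String]): Unit = {
    // Local session purely for illustration.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("since-version-demo")
      .getOrCreate()

    // After the SetCommand change, SET -v returns four columns:
    // key, value, meaning and "Since version".
    spark.sql("SET -v")
      .filter("key = 'spark.sql.adaptive.enabled'")
      .show(truncate = false)

    spark.stop()
  }
}
```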
From ee3cdd27c1e05a10dc851f6e6ef8252b6395bd39 Mon Sep 17 00:00:00 2001 From: beliefer Date: Sat, 22 Feb 2020 11:31:34 +0800 Subject: [PATCH 08/10] Add JIRA ID and commit ID. --- .../apache/spark/internal/config/Deploy.scala | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala index d494c5ec019c7..724abe5ce1cef 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala @@ -19,59 +19,59 @@ package org.apache.spark.internal.config private[spark] object Deploy { val RECOVERY_MODE = ConfigBuilder("spark.deploy.recoveryMode") - .version("0.8.1") + .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createWithDefault("NONE") val RECOVERY_MODE_FACTORY = ConfigBuilder("spark.deploy.recoveryMode.factory") - .version("1.2.0") + .version("1.2.0") // SPARK-1830, commit ID is: deefd9d7377a8091a1d184b99066febd0e9f6afd .stringConf .createWithDefault("") val RECOVERY_DIRECTORY = ConfigBuilder("spark.deploy.recoveryDirectory") - .version("0.8.1") + .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createWithDefault("") val ZOOKEEPER_URL = ConfigBuilder("spark.deploy.zookeeper.url") .doc(s"When `${RECOVERY_MODE.key}` is set to ZOOKEEPER, this " + "configuration is used to set the zookeeper URL to connect to.") - .version("0.8.1") + .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createOptional val ZOOKEEPER_DIRECTORY = ConfigBuilder("spark.deploy.zookeeper.dir") - .version("0.8.1") + .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createOptional val RETAINED_APPLICATIONS = ConfigBuilder("spark.deploy.retainedApplications") - .version("0.8.0") + .version("0.8.0") // No JIRA ID, commit ID is: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef .intConf .createWithDefault(200) val RETAINED_DRIVERS = ConfigBuilder("spark.deploy.retainedDrivers") - .version("1.1.0") + .version("1.1.0") // No JIRA ID, commit ID is: 7446f5ff93142d2dd5c79c63fa947f47a1d4db8b .intConf .createWithDefault(200) val REAPER_ITERATIONS = ConfigBuilder("spark.dead.worker.persistence") - .version("0.8.0") + .version("0.8.0") // No JIRA ID, commit ID is: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef .intConf .createWithDefault(15) val MAX_EXECUTOR_RETRIES = ConfigBuilder("spark.deploy.maxExecutorRetries") - .version("1.6.3") + .version("1.6.3") // SPARK-16956, commit ID is: ace458f0330f22463ecf7cbee7c0465e10fba8a8 .intConf .createWithDefault(10) val SPREAD_OUT_APPS = ConfigBuilder("spark.deploy.spreadOut") - .version("0.6.1") + .version("0.6.1") // No JIRA ID, commit ID is: bb2b9ff37cd2503cc6ea82c5dd395187b0910af0 .booleanConf .createWithDefault(true) val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores") - .version("0.9.0") + .version("0.9.0") // No JIRA ID, commit ID is: d8bcc8e9a095c1b20dd7a17b6535800d39bff80e .intConf .createWithDefault(Int.MaxValue) From bd6416b4b015c5f9fd81f6ff4ccf838536bbfb9c Mon Sep 17 00:00:00 2001 From: beliefer Date: Sat, 22 Feb 2020 11:59:49 +0800 Subject: [PATCH 09/10] Update comment. 
--- .../apache/spark/internal/config/Deploy.scala | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala index 724abe5ce1cef..2f7ac52f9dd54 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala @@ -19,59 +19,59 @@ package org.apache.spark.internal.config private[spark] object Deploy { val RECOVERY_MODE = ConfigBuilder("spark.deploy.recoveryMode") - .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createWithDefault("NONE") val RECOVERY_MODE_FACTORY = ConfigBuilder("spark.deploy.recoveryMode.factory") - .version("1.2.0") // SPARK-1830, commit ID is: deefd9d7377a8091a1d184b99066febd0e9f6afd + .version("1.2.0") // SPARK-1830, commit ID: deefd9d7377a8091a1d184b99066febd0e9f6afd .stringConf .createWithDefault("") val RECOVERY_DIRECTORY = ConfigBuilder("spark.deploy.recoveryDirectory") - .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createWithDefault("") val ZOOKEEPER_URL = ConfigBuilder("spark.deploy.zookeeper.url") .doc(s"When `${RECOVERY_MODE.key}` is set to ZOOKEEPER, this " + "configuration is used to set the zookeeper URL to connect to.") - .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createOptional val ZOOKEEPER_DIRECTORY = ConfigBuilder("spark.deploy.zookeeper.dir") - .version("0.8.1") // No JIRA ID, commit ID is: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 .stringConf .createOptional val RETAINED_APPLICATIONS = ConfigBuilder("spark.deploy.retainedApplications") - .version("0.8.0") // No JIRA ID, commit ID is: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef + .version("0.8.0") // No JIRA ID, commit ID: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef .intConf .createWithDefault(200) val RETAINED_DRIVERS = ConfigBuilder("spark.deploy.retainedDrivers") - .version("1.1.0") // No JIRA ID, commit ID is: 7446f5ff93142d2dd5c79c63fa947f47a1d4db8b + .version("1.1.0") // No JIRA ID, commit ID: 7446f5ff93142d2dd5c79c63fa947f47a1d4db8b .intConf .createWithDefault(200) val REAPER_ITERATIONS = ConfigBuilder("spark.dead.worker.persistence") - .version("0.8.0") // No JIRA ID, commit ID is: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef + .version("0.8.0") // No JIRA ID, commit ID: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef .intConf .createWithDefault(15) val MAX_EXECUTOR_RETRIES = ConfigBuilder("spark.deploy.maxExecutorRetries") - .version("1.6.3") // SPARK-16956, commit ID is: ace458f0330f22463ecf7cbee7c0465e10fba8a8 + .version("1.6.3") // SPARK-16956, commit ID: ace458f0330f22463ecf7cbee7c0465e10fba8a8 .intConf .createWithDefault(10) val SPREAD_OUT_APPS = ConfigBuilder("spark.deploy.spreadOut") - .version("0.6.1") // No JIRA ID, commit ID is: bb2b9ff37cd2503cc6ea82c5dd395187b0910af0 + .version("0.6.1") // No JIRA ID, commit ID: bb2b9ff37cd2503cc6ea82c5dd395187b0910af0 .booleanConf .createWithDefault(true) val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores") 
- .version("0.9.0") // No JIRA ID, commit ID is: d8bcc8e9a095c1b20dd7a17b6535800d39bff80e + .version("0.9.0") // No JIRA ID, commit ID: d8bcc8e9a095c1b20dd7a17b6535800d39bff80e .intConf .createWithDefault(Int.MaxValue) From 5891872e090d3a31ac4492073b5ff9177195b48f Mon Sep 17 00:00:00 2001 From: beliefer Date: Mon, 24 Feb 2020 12:40:23 +0800 Subject: [PATCH 10/10] Remove comments no need --- .../apache/spark/internal/config/Deploy.scala | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala index 2f7ac52f9dd54..d494c5ec019c7 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala @@ -19,59 +19,59 @@ package org.apache.spark.internal.config private[spark] object Deploy { val RECOVERY_MODE = ConfigBuilder("spark.deploy.recoveryMode") - .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") .stringConf .createWithDefault("NONE") val RECOVERY_MODE_FACTORY = ConfigBuilder("spark.deploy.recoveryMode.factory") - .version("1.2.0") // SPARK-1830, commit ID: deefd9d7377a8091a1d184b99066febd0e9f6afd + .version("1.2.0") .stringConf .createWithDefault("") val RECOVERY_DIRECTORY = ConfigBuilder("spark.deploy.recoveryDirectory") - .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") .stringConf .createWithDefault("") val ZOOKEEPER_URL = ConfigBuilder("spark.deploy.zookeeper.url") .doc(s"When `${RECOVERY_MODE.key}` is set to ZOOKEEPER, this " + "configuration is used to set the zookeeper URL to connect to.") - .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") .stringConf .createOptional val ZOOKEEPER_DIRECTORY = ConfigBuilder("spark.deploy.zookeeper.dir") - .version("0.8.1") // No JIRA ID, commit ID: d66c01f2b6defb3db6c1be99523b734a4d960532 + .version("0.8.1") .stringConf .createOptional val RETAINED_APPLICATIONS = ConfigBuilder("spark.deploy.retainedApplications") - .version("0.8.0") // No JIRA ID, commit ID: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef + .version("0.8.0") .intConf .createWithDefault(200) val RETAINED_DRIVERS = ConfigBuilder("spark.deploy.retainedDrivers") - .version("1.1.0") // No JIRA ID, commit ID: 7446f5ff93142d2dd5c79c63fa947f47a1d4db8b + .version("1.1.0") .intConf .createWithDefault(200) val REAPER_ITERATIONS = ConfigBuilder("spark.dead.worker.persistence") - .version("0.8.0") // No JIRA ID, commit ID: 46eecd110a4017ea0c86cbb1010d0ccd6a5eb2ef + .version("0.8.0") .intConf .createWithDefault(15) val MAX_EXECUTOR_RETRIES = ConfigBuilder("spark.deploy.maxExecutorRetries") - .version("1.6.3") // SPARK-16956, commit ID: ace458f0330f22463ecf7cbee7c0465e10fba8a8 + .version("1.6.3") .intConf .createWithDefault(10) val SPREAD_OUT_APPS = ConfigBuilder("spark.deploy.spreadOut") - .version("0.6.1") // No JIRA ID, commit ID: bb2b9ff37cd2503cc6ea82c5dd395187b0910af0 + .version("0.6.1") .booleanConf .createWithDefault(true) val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores") - .version("0.9.0") // No JIRA ID, commit ID: d8bcc8e9a095c1b20dd7a17b6535800d39bff80e + .version("0.9.0") .intConf .createWithDefault(Int.MaxValue)
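Taken together, the series lets a configuration entry carry its introducing release from the point of declaration. Below is a minimal sketch of how an entry would be declared with the new `version()` builder method; the object and key are made up for illustration and are not real Spark configurations.

```scala
package org.apache.spark.internal.config

// Hypothetical example; ConfigBuilder and ConfigEntry are private[spark],
// so this sketch lives in the same package as the real config objects.
private[spark] object ExampleConfig {

  // version() records the Spark release that introduced the key. The value is
  // exposed through ConfigEntry.version and, for SQL configs registered in
  // SQLConf, through SET -v and the generated documentation table.
  val EXAMPLE_FLAG = ConfigBuilder("spark.example.illustrativeFlag")
    .doc("An illustrative flag; not a real Spark configuration.")
    .version("3.0.0")
    .booleanConf
    .createWithDefault(false)
}
```

Reading `ExampleConfig.EXAMPLE_FLAG.version` would then return `"3.0.0"`.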