From f2dd082544aeba5978d0c140d0194eedb969d132 Mon Sep 17 00:00:00 2001
From: Maxim Gekk
Date: Tue, 4 Feb 2020 21:17:05 +0800
Subject: [PATCH] [SPARK-30725][SQL] Mark legacy SQL configs as internal configs

### What changes were proposed in this pull request?

All legacy SQL configs are marked as internal. In particular, the following
configs become internal:
- spark.sql.legacy.sizeOfNull
- spark.sql.legacy.replaceDatabricksSparkAvro.enabled
- spark.sql.legacy.typeCoercion.datetimeToString.enabled
- spark.sql.legacy.looseUpcast
- spark.sql.legacy.arrayExistsFollowsThreeValuedLogic

### Why are the changes needed?

In the general case, users shouldn't change legacy configs, so they can be
marked as internal.

### Does this PR introduce any user-facing change?

No.

### How was this patch tested?

By the Jenkins build and the existing test suites.

Closes #27448 from MaxGekk/legacy-internal-sql-conf.

Authored-by: Maxim Gekk
Signed-off-by: Wenchen Fan
---
 .../scala/org/apache/spark/sql/internal/SQLConf.scala | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 3ad3416256c7d..b94ddbdc0fc9a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1916,6 +1916,7 @@ object SQLConf {
     .createWithDefault(Deflater.DEFAULT_COMPRESSION)
 
   val LEGACY_SIZE_OF_NULL = buildConf("spark.sql.legacy.sizeOfNull")
+    .internal()
     .doc("If it is set to true, size of null returns -1. This behavior was inherited from Hive. " +
       "The size function returns null for null input if the flag is disabled.")
     .booleanConf
@@ -1923,6 +1924,7 @@ object SQLConf {
 
   val LEGACY_REPLACE_DATABRICKS_SPARK_AVRO_ENABLED =
     buildConf("spark.sql.legacy.replaceDatabricksSparkAvro.enabled")
+      .internal()
       .doc("If it is set to true, the data source provider com.databricks.spark.avro is mapped " +
         "to the built-in but external Avro data source module for backward compatibility.")
       .booleanConf
@@ -2048,10 +2050,11 @@ object SQLConf {
 
   val LEGACY_CAST_DATETIME_TO_STRING =
     buildConf("spark.sql.legacy.typeCoercion.datetimeToString.enabled")
+      .internal()
       .doc("If it is set to true, date/timestamp will cast to string in binary comparisons " +
         "with String")
-    .booleanConf
-    .createWithDefault(false)
+      .booleanConf
+      .createWithDefault(false)
 
   val DEFAULT_CATALOG = buildConf("spark.sql.defaultCatalog")
     .doc("Name of the default catalog. This will be the current catalog if users have not " +
@@ -2071,6 +2074,7 @@ object SQLConf {
     .createOptional
 
   val LEGACY_LOOSE_UPCAST = buildConf("spark.sql.legacy.looseUpcast")
+    .internal()
     .doc("When true, the upcast will be loose and allows string to atomic types.")
     .booleanConf
     .createWithDefault(false)
@@ -2083,6 +2087,7 @@ object SQLConf {
 
   val LEGACY_ARRAY_EXISTS_FOLLOWS_THREE_VALUED_LOGIC =
     buildConf("spark.sql.legacy.arrayExistsFollowsThreeValuedLogic")
+      .internal()
       .doc("When true, the ArrayExists will follow the three-valued boolean logic.")
       .booleanConf
       .createWithDefault(true)
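
Usage sketch (not part of the patch; the app name and session settings below are illustrative): `.internal()` marks a config entry as non-public, which keeps it out of the generated configuration docs and out of `SET -v` output (which lists only public entries); it does not prevent anyone from setting it. One of the flags above can still be toggled explicitly, e.g.:

```scala
import org.apache.spark.sql.SparkSession

// Internal configs remain settable; here the legacy Hive-compatible
// behavior of size(NULL) is enabled explicitly.
val spark = SparkSession.builder()
  .appName("legacy-config-demo") // arbitrary name for this sketch
  .master("local[*]")
  .config("spark.sql.legacy.sizeOfNull", "true")
  .getOrCreate()

// With the legacy flag set to true, this prints -1; with it set to
// false, the size function returns NULL for NULL input.
spark.sql("SELECT size(CAST(NULL AS ARRAY<INT>))").show()

spark.stop()
```

The same flag can also be flipped at runtime with `spark.conf.set("spark.sql.legacy.sizeOfNull", "false")`, since these are session-scoped SQL configs.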