Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-12926][SQL] SQLContext to display warning message when non-sql configs are being set #10849

Closed
wants to merge 3 commits into from
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 11 additions & 3 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import scala.collection.JavaConverters._

import org.apache.parquet.hadoop.ParquetOutputCommitter

import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.catalyst.parser.ParserConf
import org.apache.spark.util.Utils
Expand Down Expand Up @@ -511,7 +512,7 @@ private[spark] object SQLConf {
*
* SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).
*/
private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf {
private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf with Logging {
import SQLConf._

/** Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap. */
Expand Down Expand Up @@ -618,15 +619,15 @@ private[sql] class SQLConf extends Serializable with CatalystConf with ParserCon
// Only verify configs in the SQLConf object
entry.valueConverter(value)
}
settings.put(key, value)
setConfWithCheck(key, value)
}

/**
 * Set the given Spark SQL configuration property using a typed [[SQLConfEntry]].
 *
 * Validates that the entry and value are non-null and that the entry is one of the
 * registered entries, then delegates to `setConfWithCheck` with the stringified value.
 */
def setConf[T](entry: SQLConfEntry[T], value: T): Unit = {
  require(entry != null, "entry cannot be null")
  require(value != null, s"value cannot be null for key: ${entry.key}")
  // Only entries registered in the SQLConf companion object may be set through this overload.
  require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
  val stringValue = entry.stringConverter(value)
  setConfWithCheck(entry.key, stringValue)
}

/** Return the value of Spark SQL configuration property for the given key. */
Expand Down Expand Up @@ -689,6 +690,13 @@ private[sql] class SQLConf extends Serializable with CatalystConf with ParserCon
}.toSeq
}

/**
 * Store the (key, value) pair into `settings`, first logging a warning when the key
 * looks like a general Spark config (prefix "spark.") rather than a Spark SQL config
 * (prefix "spark.sql."). The value is stored either way.
 */
private def setConfWithCheck(key: String, value: String): Unit = {
  val looksLikeNonSqlSparkConf = key.startsWith("spark.") && !key.startsWith("spark.sql.")
  if (looksLikeNonSqlSparkConf) {
    logWarning(s"Attempt to set non-Spark SQL config in SQLConf: key = $key, value = $value")
  }
  settings.put(key, value)
}

/** Remove the configuration entry for `key` from this conf, if present. */
private[spark] def unsetConf(key: String): Unit = {
  // Explicitly discard the previous value returned by the underlying map.
  val _ = settings.remove(key)
}
Expand Down