[SPARK-12926][SQL] SQLContext to display warning message when non-sql configs are being set

Users unknowingly try to set core Spark configs through SQLContext and only later realise that the setting had no effect, e.g. sqlContext.sql("SET spark.shuffle.memoryFraction=0.4"). This PR adds a warning message when such an operation is attempted.

Author: Tejas Patil <tejasp@fb.com>

Closes #10849 from tejasapatil/SPARK-12926.
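
A minimal sketch of the scenario described above, assuming the Spark 1.6-era SQLContext API (the local session setup is illustrative, not part of this change):

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Illustrative local setup, not part of this commit.
val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local[*]"))
val sqlContext = new SQLContext(sc)

// This only updates SQLConf's local settings map; the core shuffle setting is
// unaffected. After this commit, the non-SQL key triggers the new logWarning.
sqlContext.sql("SET spark.shuffle.memoryFraction=0.4")

// A spark.sql.* key is a genuine SQL config and is set without a warning.
sqlContext.sql("SET spark.sql.shuffle.partitions=8")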
tejasapatil authored and marmbrus committed Jan 28, 2016
1 parent 415d0a8 commit 6768039
Showing 1 changed file with 11 additions and 3 deletions.
sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala (11 additions, 3 deletions)
@@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.parquet.hadoop.ParquetOutputCommitter
 
+import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.CatalystConf
 import org.apache.spark.sql.catalyst.parser.ParserConf
 import org.apache.spark.util.Utils
@@ -519,7 +520,7 @@ private[spark] object SQLConf {
  *
  * SQLConf is thread-safe (internally synchronized, so safe to be used in multiple threads).
  */
-private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf {
+private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf with Logging {
   import SQLConf._
 
   /** Only low degree of contention is expected for conf, thus NOT using ConcurrentHashMap. */
@@ -628,15 +629,15 @@ private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf {
       // Only verify configs in the SQLConf object
       entry.valueConverter(value)
     }
-    settings.put(key, value)
+    setConfWithCheck(key, value)
   }
 
   /** Set the given Spark SQL configuration property. */
   def setConf[T](entry: SQLConfEntry[T], value: T): Unit = {
     require(entry != null, "entry cannot be null")
     require(value != null, s"value cannot be null for key: ${entry.key}")
     require(sqlConfEntries.get(entry.key) == entry, s"$entry is not registered")
-    settings.put(entry.key, entry.stringConverter(value))
+    setConfWithCheck(entry.key, entry.stringConverter(value))
   }
 
   /** Return the value of Spark SQL configuration property for the given key. */
@@ -699,6 +700,13 @@ private[sql] class SQLConf extends Serializable with CatalystConf with ParserConf {
     }.toSeq
   }
 
+  private def setConfWithCheck(key: String, value: String): Unit = {
+    if (key.startsWith("spark.") && !key.startsWith("spark.sql.")) {
+      logWarning(s"Attempt to set non-Spark SQL config in SQLConf: key = $key, value = $value")
+    }
+    settings.put(key, value)
+  }
+
   private[spark] def unsetConf(key: String): Unit = {
     settings.remove(key)
   }
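
The heart of the change is the key-prefix test inside setConfWithCheck. A standalone sketch of the same predicate (the helper name isNonSqlSparkConfig is hypothetical, introduced here only for illustration):

// Flags keys in the core "spark." namespace that are not Spark SQL configs.
def isNonSqlSparkConfig(key: String): Boolean =
  key.startsWith("spark.") && !key.startsWith("spark.sql.")

assert(isNonSqlSparkConfig("spark.shuffle.memoryFraction"))  // warned
assert(!isNonSqlSparkConfig("spark.sql.shuffle.partitions")) // set silently
assert(!isNonSqlSparkConfig("hive.exec.dynamic.partition"))  // non-"spark." keys are also set silently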
