[SPARK-20204] SQLConf should react to change in default timezone settings
dilipbiswal committed Apr 5, 2017
1 parent 402bf2a commit 2eb900b
Showing 3 changed files with 27 additions and 1 deletion.
@@ -147,6 +147,15 @@ private[spark] class TypedConfigBuilder[T](
}
}

  /** Creates a [[ConfigEntry]] with a function to determine the default value */
  def createWithDefaultFunction(defaultFunc: () => T): ConfigEntry[T] = {
    val entry =
      new ConfigEntryWithDefaultFunction[T](parent.key, defaultFunc, converter,
        stringConverter, parent._doc, parent._public)
    parent._onCreate.foreach(_ (entry))
    entry
  }

  /**
   * Creates a [[ConfigEntry]] that has a default value. The default value is provided as a
   * [[String]] and must be a valid value for the entry.
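The difference introduced above: createWithDefault captures its value once at definition time, while the new createWithDefaultFunction re-evaluates the supplied function whenever the default is needed. A minimal standalone sketch of that distinction (not Spark code; EagerEntry, FunctionEntry and DefaultFunctionSketch are illustrative names):

import java.util.TimeZone

// Default captured once, when the entry is constructed.
final case class EagerEntry[T](key: String, default: T) {
  def resolve: T = default
}

// Default recomputed from the supplied function on every read.
final case class FunctionEntry[T](key: String, defaultFunc: () => T) {
  def resolve: T = defaultFunc()
}

object DefaultFunctionSketch extends App {
  val eager  = EagerEntry("spark.sql.session.timeZone", TimeZone.getDefault.getID)
  val byFunc = FunctionEntry("spark.sql.session.timeZone", () => TimeZone.getDefault.getID)

  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))

  println(eager.resolve)  // the ID captured before setDefault, now stale
  println(byFunc.resolve) // "America/Los_Angeles" -- tracks the current JVM default
}

This is the property the commit relies on for spark.sql.session.timeZone: if the JVM default timezone changes after the config is defined, the entry's default should follow it.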
@@ -78,7 +78,24 @@ private class ConfigEntryWithDefault[T] (
  def readFrom(reader: ConfigReader): T = {
    reader.get(key).map(valueConverter).getOrElse(_defaultValue)
  }
}

private class ConfigEntryWithDefaultFunction[T] (
    key: String,
    _defaultFunction: () => T,
    valueConverter: String => T,
    stringConverter: T => String,
    doc: String,
    isPublic: Boolean)
  extends ConfigEntry(key, valueConverter, stringConverter, doc, isPublic) {

  override def defaultValue: Option[T] = Some(_defaultFunction())

  override def defaultValueString: String = stringConverter(_defaultFunction())

  def readFrom(reader: ConfigReader): T = {
    reader.get(key).map(valueConverter).getOrElse(_defaultFunction())
  }
}

private class ConfigEntryWithDefaultString[T] (
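In the new entry type, an explicitly set value still wins; the default function is consulted, and re-evaluated, only when the key is absent, and defaultValue/defaultValueString re-run it as well. A small sketch of that fallback behaviour, using an assumed map-backed reader in place of Spark's ConfigReader (MapReader, FunctionDefaultEntry and ReadFromSketch are illustrative names):

import java.util.TimeZone

// Map-backed stand-in for Spark's ConfigReader (illustrative only).
class MapReader(settings: Map[String, String]) {
  def get(key: String): Option[String] = settings.get(key)
}

// Simplified mirror of the entry above: every path that needs the default
// goes through defaultFunc(), so nothing caches a stale value.
class FunctionDefaultEntry[T](
    key: String,
    defaultFunc: () => T,
    valueConverter: String => T,
    stringConverter: T => String) {

  def defaultValue: Option[T] = Some(defaultFunc())
  def defaultValueString: String = stringConverter(defaultFunc())
  def readFrom(reader: MapReader): T =
    reader.get(key).map(valueConverter).getOrElse(defaultFunc())
}

object ReadFromSketch extends App {
  val entry = new FunctionDefaultEntry[String](
    "spark.sql.session.timeZone",
    () => TimeZone.getDefault.getID,
    (s: String) => s,
    (s: String) => s)

  // An explicitly set value always wins over the default function.
  println(entry.readFrom(new MapReader(Map("spark.sql.session.timeZone" -> "GMT")))) // GMT

  // With nothing set, the fallback re-runs the function on every read.
  TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo"))
  println(entry.readFrom(new MapReader(Map.empty))) // Asia/Tokyo
}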
@@ -752,7 +752,7 @@ object SQLConf {
buildConf("spark.sql.session.timeZone")
.doc("""The ID of session local timezone, e.g. "GMT", "America/Los_Angeles", etc.""")
.stringConf
.createWithDefault(TimeZone.getDefault().getID())
.createWithDefaultFunction(() => TimeZone.getDefault().getID())

val WINDOW_EXEC_BUFFER_SPILL_THRESHOLD =
buildConf("spark.sql.windowExec.buffer.spill.threshold")
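Net effect of the one-line change above: an unset spark.sql.session.timeZone now reports the JVM's current default timezone at read time rather than the value captured when SQLConf was first loaded. A hedged end-to-end sketch, assuming a local SparkSession and that the config is never set explicitly or via spark-defaults:

import java.util.TimeZone
import org.apache.spark.sql.SparkSession

object SessionTimeZoneDefaultDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("session-timezone-default")
      .getOrCreate()

    // Nothing set explicitly, so the entry falls back to its default function.
    println(spark.conf.get("spark.sql.session.timeZone"))

    // Change the JVM default; the next read of the still-unset config reflects it.
    TimeZone.setDefault(TimeZone.getTimeZone("Asia/Tokyo"))
    println(spark.conf.get("spark.sql.session.timeZone"))

    spark.stop()
  }
}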
