Skip to content

Commit

Permalink
Minor changes per review comments.
Browse files Browse the repository at this point in the history
  • Loading branch information
concretevitamin committed Jun 6, 2014
1 parent 1ce8a5e commit f8983d1
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 9 deletions.
13 changes: 6 additions & 7 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
Expand Up @@ -22,7 +22,10 @@ import java.util.Properties
import scala.collection.mutable

/**
 * SQLConf holds mutable config parameters and hints. These can be set and
 * queried either by passing SET commands into Spark SQL's DSL
 * functions (sql(), hql(), etc.), or by programmatically using setters and
 * getters of this class.
 */
class SQLConf {

Expand All @@ -39,12 +42,8 @@ class SQLConf {
}

/**
 * Binds `value` to `key` in this SQLConf's settings map.
 *
 * @param key the configuration key; must not be null
 * @param value the configuration value; must not be null
 * @return this SQLConf, to allow chained set(...) calls
 * @throws IllegalArgumentException if `key` or `value` is null
 */
def set(key: String, value: String): SQLConf = {
  // Use require (IllegalArgumentException) instead of hand-rolled
  // NullPointerException throws: more concise and idiomatic Scala.
  require(key != null, "key cannot be null")
  require(value != null, s"value cannot be null for $key")
  settings(key) = value
  this
}
Expand Down
Expand Up @@ -29,20 +29,25 @@ import org.apache.spark.sql.catalyst.expressions.{GenericRow, Attribute}
case class SetCommandPhysical(key: Option[String], value: Option[String])
(@transient context: SQLContext) extends LeafNode {
/**
 * Produces the result rows for a SET command. The (key, value) option pair
 * encodes which of the three SET forms was parsed.
 */
def execute(): RDD[Row] = (key, value) match {
  // SET key=value: the assignment itself was already performed eagerly in
  // QueryExecution, so nothing remains to be returned here. The bound
  // values are unused, so match with wildcards rather than names.
  case (Some(_), Some(_)) => context.emptyResult
  // SET key: report the value currently bound to key k, if any.
  case (Some(k), None) =>
    val resultString = context.sqlConf.getOption(k) match {
      case Some(v) => s"$k=$v"
      case None => s"$k is undefined"
    }
    context.sparkContext.parallelize(Seq(new GenericRow(Array[Any](resultString))), 1)
  // SET (no arguments): list every key-value pair held in the SQLConf.
  case (None, None) =>
    val pairs = context.sqlConf.getAll
    val rows = pairs.map { case (k, v) =>
      new GenericRow(Array[Any](s"$k=$v"))
    }.toSeq
    // Assume config parameters can fit into one split (machine) ;)
    context.sparkContext.parallelize(rows, 1)
  // (None, Some(_)) is semantically invalid and should never be produced by
  // the parser; return an empty result defensively.
  case _ => context.emptyResult
}

Expand Down
Expand Up @@ -133,8 +133,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
// Hive-aware SQLConf: each set() call is first propagated to Hive via a
// native "SET key=value" command, then recorded locally by delegating to the
// base implementation (which updates the settings map and returns this).
@transient override lazy val sqlConf: SQLConf = new SQLConf(hiveconf.getAllProperties) {
  override def set(key: String, value: String): SQLConf = {
    runSqlHive(s"SET $key=$value")
    // Delegate instead of duplicating the superclass's bookkeeping here.
    super.set(key, value)
  }
}
@transient protected[hive] lazy val sessionState = new SessionState(hiveconf)
Expand Down
Expand Up @@ -208,6 +208,8 @@ private[hive] object HiveQl {
def parseSql(sql: String): LogicalPlan = {
try {
if (sql.trim.toLowerCase.startsWith("set")) {
// Split into two parts, since we treat the part before the first "="
// as the key and the part after it as the value, which may itself contain "=" signs.
sql.trim.drop(3).split("=", 2).map(_.trim) match {
case Array("") => // "set"
SetCommand(None, None)
Expand Down

0 comments on commit f8983d1

Please sign in to comment.