Skip to content

Commit

Permalink
fixes for @rxin's suggestions
Browse files Browse the repository at this point in the history
  • Loading branch information
JD committed Jul 27, 2015
1 parent 60e120b commit e49da48
Show file tree
Hide file tree
Showing 2 changed files with 7 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ object FunctionRegistry {
}

/** See usage above. */
private[sql] def expression[T <: Expression](name: String)
def expression[T <: Expression](name: String)
(implicit tag: ClassTag[T]): (String, FunctionBuilder) = {

// See if we can find a constructor that accepts Seq[Expression]
Expand Down
13 changes: 6 additions & 7 deletions sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,6 @@ import java.beans.Introspector
import java.util.Properties
import java.util.concurrent.atomic.AtomicReference

import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.{expression, FunctionBuilder}
import org.apache.spark.sql.execution.expressions.SparkPartitionID

import scala.collection.JavaConversions._
import scala.collection.immutable
import scala.language.implicitConversions
Expand All @@ -34,6 +31,8 @@ import org.apache.spark.SparkContext
import org.apache.spark.annotation.{DeveloperApi, Experimental}
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.{expression => FunctionExpression, FunctionBuilder}
import org.apache.spark.sql.execution.expressions.SparkPartitionID
import org.apache.spark.sql.SQLConf.SQLConfEntry
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.errors.DialectException
Expand Down Expand Up @@ -141,14 +140,14 @@ class SQLContext(@transient val sparkContext: SparkContext)
@transient
protected[sql] lazy val catalog: Catalog = new SimpleCatalog(conf)

protected[sql] val extendedFunctions: Map[String, FunctionBuilder] = Map(
expression[SparkPartitionID]("spark__partition__id")
)

// TODO how to handle the temp function per user session?
@transient
// Lazily-built registry of SQL functions for this SQLContext: starts from
// Spark's builtin registry and registers context-specific functions on top.
protected[sql] lazy val functionRegistry: FunctionRegistry = {
val reg = FunctionRegistry.builtin
// Functions exposed by this SQLContext beyond the builtin set.
// FunctionExpression is the renamed import of FunctionRegistry.expression
// (see the import rename above in this file); it produces a
// (name, FunctionBuilder) pair from the expression's class tag.
val extendedFunctions = List[(String, FunctionBuilder)](
FunctionExpression[SparkPartitionID]("spark__partition__id")
)

// NOTE(review): FunctionRegistry.builtin looks like a shared singleton —
// registering into it here would mutate global state visible to every
// SQLContext; confirm whether builtin returns a fresh copy per call.
extendedFunctions.foreach { case(name, fun) => reg.registerFunction(name, fun) }
reg
}
Expand Down

0 comments on commit e49da48

Please sign in to comment.