diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
index 5978d1c931f37..478ee997a96a2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/BoundAttribute.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.errors.attachTree
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, Code, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, Code, CodeGenContext}
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.catalyst.trees
 
@@ -43,7 +43,7 @@ case class BoundReference(ordinal: Int, dataType: DataType, nullable: Boolean)
 
   override def exprId: ExprId = throw new UnsupportedOperationException
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     s"""
       final boolean ${ev.nullTerm} = i.isNullAt($ordinal);
       final ${ctx.primitiveType(dataType)} ${ev.primitiveTerm} = ${ev.nullTerm} ?
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index bcd7781c09e00..d31e004b9c348 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -21,7 +21,7 @@ import java.sql.{Date, Timestamp}
 import java.text.{DateFormat, SimpleDateFormat}
 
 import org.apache.spark.Logging
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, Code, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, Code, CodeGenContext}
 import org.apache.spark.sql.catalyst.util.DateUtils
 import org.apache.spark.sql.types._
 
@@ -435,15 +435,15 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
     if (evaluated == null) null else cast(evaluated)
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = this match {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = this match {
     case Cast(child @ BinaryType(), StringType) =>
       castOrNull (ctx, ev, c =>
-        s"new org.apache.spark.sql.types.UTF8String().set($c)")
+        s"new ${ctx.stringType}().set($c)")
 
     case Cast(child @ DateType(), StringType) =>
       castOrNull(ctx, ev, c =>
-        s"""new org.apache.spark.sql.types.UTF8String().set(
+        s"""new ${ctx.stringType}().set(
           org.apache.spark.sql.catalyst.util.DateUtils.toString($c))""")
 
     case Cast(child @ BooleanType(), dt: NumericType) if !dt.isInstanceOf[DecimalType] =>
@@ -462,7 +462,7 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression w
     // does not match the expected output.
     case Cast(e, StringType) if e.dataType != TimestampType =>
       castOrNull(ctx, ev, c =>
-        s"new org.apache.spark.sql.types.UTF8String().set(String.valueOf($c))")
+        s"new ${ctx.stringType}().set(String.valueOf($c))")
 
     case other => super.genCode(ctx, ev)
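The Cast hunks above replace the hard-coded `org.apache.spark.sql.types.UTF8String` class name in the generated-Java templates with `${ctx.stringType}`. As a minimal standalone sketch (plain Scala, not Catalyst code; the `stringType` value and the child term name are illustrative assumptions), this is how such a template expands into a Java source fragment:

```scala
// Standalone sketch of the template interpolation used in Cast.genCode.
// `stringType` mirrors what ctx.stringType is assumed to resolve to; the
// child result term name is a made-up placeholder.
object CastTemplateSketch extends App {
  val stringType = "org.apache.spark.sql.types.UTF8String" // assumed value of ctx.stringType
  val c = "primitiveTerm4"                                 // hypothetical child result term

  // Same shape as the BinaryType -> StringType template above.
  val javaFragment = s"new ${stringType}().set($c)"

  println(javaFragment)
  // prints: new org.apache.spark.sql.types.UTF8String().set(primitiveTerm4)
}
```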
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index f4f866331f569..1f1a2fc9694af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, UnresolvedAttribute}
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, Code, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, Code, CodeGenContext}
 import org.apache.spark.sql.catalyst.trees
 import org.apache.spark.sql.catalyst.trees.TreeNode
 import org.apache.spark.sql.types._
@@ -53,17 +53,17 @@ abstract class Expression extends TreeNode[Expression] {
   def eval(input: Row = null): Any
 
   /**
-   * Returns an [[EvaluatedExpression]], which contains Java source code that
+   * Returns a [[GeneratedExpressionCode]], which contains Java source code that
    * can be used to generate the result of evaluating the expression on an input row.
-   * 
+   *
    * @param ctx a [[CodeGenContext]]
-   * @return [[EvaluatedExpression]]
+   * @return [[GeneratedExpressionCode]]
    */
-  def gen(ctx: CodeGenContext): EvaluatedExpression = {
+  def gen(ctx: CodeGenContext): GeneratedExpressionCode = {
     val nullTerm = ctx.freshName("nullTerm")
     val primitiveTerm = ctx.freshName("primitiveTerm")
     val objectTerm = ctx.freshName("objectTerm")
-    val ve = EvaluatedExpression("", nullTerm, primitiveTerm, objectTerm)
+    val ve = GeneratedExpressionCode("", nullTerm, primitiveTerm, objectTerm)
     ve.code = genCode(ctx, ve)
     ve
   }
@@ -72,10 +72,10 @@ abstract class Expression extends TreeNode[Expression] {
    * Returns Java source code for this expression.
    *
    * @param ctx a [[CodeGenContext]]
-   * @param ev an [[EvaluatedExpression]] with unique terms.
+   * @param ev a [[GeneratedExpressionCode]] with unique terms.
    * @return Java source code
    */
-  def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val e = this.asInstanceOf[Expression]
     ctx.references += e
     s"""
@@ -164,7 +164,7 @@ abstract class BinaryExpression extends Expression with trees.BinaryNode[Express
    * @param f a function from two primitive term names to a tree that evaluates them.
    */
   def evaluate(ctx: CodeGenContext,
-      ev: EvaluatedExpression,
+      ev: GeneratedExpressionCode,
       f: (String, String) => String): String = {
     // TODO: Right now some timestamp tests fail if we enforce this...
     if (left.dataType != right.dataType) {
@@ -198,7 +198,7 @@ abstract class LeafExpression extends Expression with trees.LeafNode[Expression]
 abstract class UnaryExpression extends Expression with trees.UnaryNode[Expression] {
   self: Product =>
 
   def castOrNull(ctx: CodeGenContext,
-      ev: EvaluatedExpression,
+      ev: GeneratedExpressionCode,
       f: String => String): String = {
     val eval = child.gen(ctx)
     eval.code + s"""
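The `gen`/`genCode` contract above is the core of the rename: `gen` allocates unique term names from the context, wraps them in a `GeneratedExpressionCode`, and then asks `genCode` for the Java fragment that assigns those terms. A small self-contained model of the same pattern (plain Scala, independent of Catalyst; everything other than the names taken from the hunk above, such as the `Col` class and the exact Java text it emits, is invented for illustration):

```scala
import java.util.concurrent.atomic.AtomicInteger

// Toy model of Expression.gen / genCode and GeneratedExpressionCode.
object CodegenModel extends App {
  type Code = String
  type Term = String

  case class GeneratedExpressionCode(var code: Code, nullTerm: Term, primitiveTerm: Term, objectTerm: Term)

  class Context {
    private val curId = new AtomicInteger()
    // Hands out unique term names, like CodeGenContext.freshName.
    def freshName(prefix: String): Term = s"$prefix${curId.getAndIncrement()}"
  }

  trait Expr {
    // Allocate fresh terms, then let the concrete expression fill in the code.
    def gen(ctx: Context): GeneratedExpressionCode = {
      val ev = GeneratedExpressionCode("",
        ctx.freshName("nullTerm"), ctx.freshName("primitiveTerm"), ctx.freshName("objectTerm"))
      ev.code = genCode(ctx, ev)
      ev
    }
    def genCode(ctx: Context, ev: GeneratedExpressionCode): Code
  }

  // A toy leaf expression reading column `ordinal` of an int row `i`;
  // the emitted Java here is illustrative only, not Catalyst's exact output.
  case class Col(ordinal: Int) extends Expr {
    def genCode(ctx: Context, ev: GeneratedExpressionCode): Code =
      s"""final boolean ${ev.nullTerm} = i.isNullAt($ordinal);
         |final int ${ev.primitiveTerm} = ${ev.nullTerm} ? -1 : i.getInt($ordinal);
         |""".stripMargin
  }

  println(Col(0).gen(new Context()).code)
}
```

Parent expressions compose in the same way: they call `child.gen(ctx)`, prepend the child's `code`, and refer to the child's `nullTerm`/`primitiveTerm` in their own fragment, which is exactly the shape of `castOrNull` and the `eval.code + s"""..."""` bodies elsewhere in this patch.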
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 6ae815e1d0096..aad8479dafe41 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
-import org.apache.spark.sql.catalyst.expressions.codegen.{Code, EvaluatedExpression, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{Code, GeneratedExpressionCode, CodeGenContext}
 import org.apache.spark.sql.catalyst.util.TypeUtils
 import org.apache.spark.sql.types._
 
@@ -117,7 +117,7 @@ abstract class BinaryArithmetic extends BinaryExpression {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     if (left.dataType.isInstanceOf[DecimalType]) {
       evaluate(ctx, ev, { case (eval1, eval2) => s"$eval1.$decimalMethod($eval2)" } )
     } else {
@@ -205,7 +205,7 @@ case class Divide(left: Expression, right: Expression) extends BinaryArithmetic
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     val test = if (left.dataType.isInstanceOf[DecimalType]) {
@@ -263,7 +263,7 @@ case class Remainder(left: Expression, right: Expression) extends BinaryArithmet
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     val test = if (left.dataType.isInstanceOf[DecimalType]) {
@@ -406,7 +406,7 @@ case class MaxOf(left: Expression, right: Expression) extends BinaryArithmetic {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     if (ctx.isNativeType(left.dataType)) {
       val eval1 = left.gen(ctx)
       val eval2 = right.gen(ctx)
@@ -460,7 +460,7 @@ case class MinOf(left: Expression, right: Expression) extends BinaryArithmetic {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     if (ctx.isNativeType(left.dataType)) {
      val eval1 = left.gen(ctx)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index c87258c622664..0a47957bec23c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -41,10 +41,10 @@ class LongHashSet extends org.apache.spark.util.collection.OpenHashSet[Long]
  * valid if `nullTerm` is set to `true`.
  * @param objectTerm A possibly boxed version of the result of evaluating this expression.
  */
-case class EvaluatedExpression(var code: Code,
-    nullTerm: Term,
-    primitiveTerm: Term,
-    objectTerm: Term)
+case class GeneratedExpressionCode(var code: Code,
+    nullTerm: Term,
+    primitiveTerm: Term,
+    objectTerm: Term)
 
 /**
  * A context for codegen, which is used to bookkeeping the expressions those are not supported
@@ -58,8 +58,8 @@ class CodeGenContext {
    */
   val references: Seq[Expression] = new mutable.ArrayBuffer[Expression]()
 
-  protected val stringType = classOf[UTF8String].getName
-  protected val decimalType = classOf[Decimal].getName
+  val stringType = classOf[UTF8String].getName
+  val decimalType = classOf[Decimal].getName
 
   private val curId = new java.util.concurrent.atomic.AtomicInteger()
 
@@ -75,7 +75,7 @@ class CodeGenContext {
   def getColumn(dataType: DataType, ordinal: Int): Code = {
     dataType match {
-      case StringType => s"(org.apache.spark.sql.types.UTF8String)i.apply($ordinal)"
+      case StringType => s"($stringType)i.apply($ordinal)"
       case dt: DataType if isNativeType(dt) => s"i.${accessorForType(dt)}($ordinal)"
       case _ => s"(${boxedType(dataType)})i.apply($ordinal)"
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
index d88cdc7dd2c12..80c51cb3588ad 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/decimalFunctions.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, Code, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, Code, CodeGenContext}
 import org.apache.spark.sql.types._
 
 /** Return the unscaled Long value of a Decimal, assuming it fits in a Long */
@@ -37,7 +37,7 @@ case class UnscaledValue(child: Expression) extends UnaryExpression {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval = child.gen(ctx)
     eval.code + s"""
       boolean ${ev.nullTerm} = ${eval.nullTerm};
@@ -63,7 +63,7 @@ case class MakeDecimal(child: Expression, precision: Int, scale: Int) extends Un
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval = child.gen(ctx)
     eval.code + s"""
       boolean ${ev.nullTerm} = ${eval.nullTerm};
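The CodeGenerator.scala hunk above does two things: it renames the case class, and it widens `stringType`/`decimalType` from `protected` to public so that `genCode` implementations in other files (the Cast and Literal hunks in this patch) can interpolate `ctx.stringType` rather than hard-coding the fully qualified class name. A minimal illustration of the `classOf[X].getName` idiom this relies on, using a JDK class as a stand-in since `UTF8String`/`Decimal` live in Spark:

```scala
// Sketch of the classOf[X].getName idiom behind ctx.stringType / ctx.decimalType.
// java.math.BigDecimal stands in for Spark's Decimal; the term name is hypothetical.
object ClassNameSketch extends App {
  val decimalLikeType: String = classOf[java.math.BigDecimal].getName

  val term = "primitiveTerm7" // hypothetical generated term name
  println(s"$decimalLikeType $term = new $decimalLikeType(\"0\");")
  // prints: java.math.BigDecimal primitiveTerm7 = new java.math.BigDecimal("0");
}
```

Keeping the class name in one place means a later change to the runtime string or decimal representation only has to touch `CodeGenContext`, not every template that mentions the type.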
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 5cb3f26e9dc50..21e21000c9437 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
 import java.sql.{Date, Timestamp}
 
 import org.apache.spark.sql.catalyst.CatalystTypeConverters
-import org.apache.spark.sql.catalyst.expressions.codegen.{Code, CodeGenContext, EvaluatedExpression}
+import org.apache.spark.sql.catalyst.expressions.codegen.{Code, CodeGenContext, GeneratedExpressionCode}
 import org.apache.spark.sql.catalyst.util.DateUtils
 import org.apache.spark.sql.types._
 
@@ -81,7 +81,7 @@ case class Literal protected (value: Any, dataType: DataType) extends LeafExpres
 
   override def eval(input: Row): Any = value
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     if (value == null) {
       s"""
         final boolean ${ev.nullTerm} = true;
@@ -94,8 +94,7 @@ case class Literal protected (value: Any, dataType: DataType) extends LeafExpres
           val arr = s"new byte[]{${v.getBytes.map(_.toString).mkString(", ")}}"
           s"""
             final boolean ${ev.nullTerm} = false;
-            org.apache.spark.sql.types.UTF8String ${ev.primitiveTerm} =
-              new org.apache.spark.sql.types.UTF8String().set(${arr});
+            ${ctx.stringType} ${ev.primitiveTerm} = new ${ctx.stringType}().set(${arr});
           """
         case FloatType =>
           s"""
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
index 46582173e93b0..d4b35edb33b4c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullFunctions.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, CodeGenContext}
 import org.apache.spark.sql.catalyst.trees
 import org.apache.spark.sql.catalyst.analysis.UnresolvedException
 import org.apache.spark.sql.types.DataType
@@ -53,7 +53,7 @@ case class Coalesce(children: Seq[Expression]) extends Expression {
     result
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     s"""
       boolean ${ev.nullTerm} = true;
      ${ctx.primitiveType(dataType)} ${ev.primitiveTerm} = ${ctx.defaultValue(dataType)};
@@ -81,7 +81,7 @@ case class IsNull(child: Expression) extends Predicate with trees.UnaryNode[Expr
     child.eval(input) == null
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval = child.gen(ctx)
     eval.code + s"""
       final boolean ${ev.nullTerm} = false;
@@ -101,7 +101,7 @@ case class IsNotNull(child: Expression) extends Predicate with trees.UnaryNode[E
     child.eval(input) != null
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval = child.gen(ctx)
     eval.code + s"""
       boolean ${ev.nullTerm} = false;
@@ -132,7 +132,7 @@ case class AtLeastNNonNulls(n: Int, children: Seq[Expression]) extends Predicate
     numNonNulls >= n
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val nonnull = ctx.freshName("nonnull")
     val code = children.map { e =>
       val eval = e.gen(ctx)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 1a89f5bdb4dea..ad4535a09e04e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.expressions
 
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.util.TypeUtils
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, Code, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, Code, CodeGenContext}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.types._
 
@@ -84,7 +84,7 @@ case class Not(child: Expression) extends UnaryExpression with Predicate with Ex
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     castOrNull(ctx, ev, c => s"!($c)")
   }
 }
@@ -146,7 +146,7 @@ case class And(left: Expression, right: Expression)
       }
     }
   }
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     s"""
@@ -192,7 +192,7 @@ case class Or(left: Expression, right: Expression)
       }
     }
   }
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     s"""
@@ -218,7 +218,7 @@ case class Or(left: Expression, right: Expression)
 abstract class BinaryComparison extends BinaryExpression with Predicate {
   self: Product =>
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     left.dataType match {
       case dt: NumericType if ctx.isNativeType(dt) => evaluate (ctx, ev, {
         (c1, c3) => s"$c1 $symbol $c3"
@@ -276,7 +276,7 @@ case class EqualTo(left: Expression, right: Expression) extends BinaryComparison
     if (left.dataType != BinaryType) l == r
     else java.util.Arrays.equals(l.asInstanceOf[Array[Byte]], r.asInstanceOf[Array[Byte]])
   }
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     evaluate(ctx, ev, ctx.equalFunc(left.dataType))
   }
 }
@@ -300,7 +300,7 @@ case class EqualNullSafe(left: Expression, right: Expression) extends BinaryComp
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val eval1 = left.gen(ctx)
     val eval2 = right.gen(ctx)
     val equalCode = ctx.equalFunc(left.dataType)(eval1.primitiveTerm, eval2.primitiveTerm)
@@ -383,7 +383,7 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi
       falseValue.eval(input)
     }
   }
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val condEval = predicate.gen(ctx)
     val trueEval = trueValue.gen(ctx)
     val falseEval = falseValue.gen(ctx)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
index d62212d669276..55fd748f96b12 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/sets.scala
@@ -17,7 +17,7 @@
 package org.apache.spark.sql.catalyst.expressions
 
-import org.apache.spark.sql.catalyst.expressions.codegen.{EvaluatedExpression, CodeGenContext}
+import org.apache.spark.sql.catalyst.expressions.codegen.{GeneratedExpressionCode, CodeGenContext}
 import org.apache.spark.sql.types._
 import org.apache.spark.util.collection.OpenHashSet
 
@@ -61,7 +61,7 @@ case class NewSet(elementType: DataType) extends LeafExpression {
     new OpenHashSet[Any]()
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     elementType match {
       case IntegerType | LongType =>
         s"""
@@ -104,7 +104,7 @@ case class AddItemToSet(item: Expression, set: Expression) extends Expression {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val elementType = set.dataType.asInstanceOf[OpenHashSetUDT].elementType
     elementType match {
       case IntegerType | LongType =>
@@ -157,7 +157,7 @@ case class CombineSets(left: Expression, right: Expression) extends BinaryExpres
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     val elementType = left.dataType.asInstanceOf[OpenHashSetUDT].elementType
     elementType match {
       case IntegerType | LongType =>
@@ -191,7 +191,7 @@ case class CountSet(child: Expression) extends UnaryExpression {
     }
   }
 
-  override def genCode(ctx: CodeGenContext, ev: EvaluatedExpression): Code = {
+  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): Code = {
     castOrNull(ctx, ev, c => s"$c.size().toLong()")
   }
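One detail worth keeping in mind across all of these hunks is the fallback visible in the default Expression.genCode body earlier in the patch: an expression without dedicated codegen is appended to `ctx.references`, and the generated code then presumably reaches back to it by position and calls the interpreted `eval` path. A small standalone sketch of that idea (the buffer, the index scheme, and the emitted Java shape below are assumptions for illustration, not Catalyst's exact output):

```scala
import scala.collection.mutable

// Sketch of the "fall back to interpreted eval" pattern behind ctx.references.
object FallbackSketch extends App {
  val references = mutable.ArrayBuffer[String]() // stands in for CodeGenContext.references

  def fallbackCode(exprDescription: String, objectTerm: String): String = {
    references += exprDescription                // remember the expression object ...
    val idx = references.size - 1                // ... and refer to it by position
    s"Object $objectTerm = expressions[$idx].eval(i);" // illustrative Java shape
  }

  println(fallbackCode("someUnsupportedExpr", "objectTerm3"))
  // prints: Object objectTerm3 = expressions[0].eval(i);
}
```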