diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
index 0e0453b517d92..c62d5ead86925 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
@@ -194,7 +194,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy
   }
 
   /** Returns all of the expressions present in this query plan operator. */
-  def expressions: Seq[Expression] = {
+  final def expressions: Seq[Expression] = {
     // Recursively find all expressions from a traversable.
     def seqToExpressions(seq: Traversable[Any]): Traversable[Expression] = seq.flatMap {
       case e: Expression => e :: Nil
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index 522348735aadf..411594c95166c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -89,9 +89,7 @@ case class Generate(
       generatorOutput.forall(_.resolved)
   }
 
-  // we don't want the gOutput to be taken as part of the expressions
-  // as that will cause exceptions like unresolved attributes etc.
-  override def expressions: Seq[Expression] = generator :: Nil
+  override def producedAttributes: AttributeSet = AttributeSet(generatorOutput)
 
   def output: Seq[Attribute] = {
     val qualified = qualifier.map(q =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/object.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/object.scala
index 3f97662957b8e..da7f81c785461 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/object.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/object.scala
@@ -208,8 +208,6 @@ case class CoGroup(
     left: LogicalPlan,
     right: LogicalPlan) extends BinaryNode with ObjectOperator {
 
-  override def producedAttributes: AttributeSet = outputSet
-
   override def deserializers: Seq[(Expression, Seq[Attribute])] =
     // The `leftGroup` and `rightGroup` are guaranteed te be of same schema, so it's safe to resolve
     // the `keyDeserializer` based on either of them, here we pick the left one.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/Generate.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/Generate.scala
index 6bc4649d432ae..9938d2169f1c3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/Generate.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/Generate.scala
@@ -58,7 +58,7 @@ case class Generate(
   private[sql] override lazy val metrics = Map(
     "numOutputRows" -> SQLMetrics.createLongMetric(sparkContext, "number of output rows"))
 
-  override def expressions: Seq[Expression] = generator :: Nil
+  override def producedAttributes: AttributeSet = AttributeSet(output)
 
   val boundGenerator = BindReferences.bindReference(generator, child.output)
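
For context, below is a minimal, self-contained Scala sketch of the pattern this patch moves to. It uses toy classes (Attribute, Generator, PlanNode, GenerateLike are invented here, not the real Spark types): `expressions` is derived generically from the operator's case-class fields and can therefore be made final, while an operator that introduces new attributes declares them through `producedAttributes` so they are excluded from `missingInput` instead of the operator overriding `expressions` to hide them.

object ProducedAttributesSketch {

  sealed trait Expression { def references: Set[Attribute] }

  // As in Catalyst, an attribute is itself an expression, which is why the
  // generic `expressions` scan would otherwise pick up `generatorOutput`.
  case class Attribute(name: String) extends Expression {
    def references: Set[Attribute] = Set(this)
  }

  case class Generator(children: Seq[Expression]) extends Expression {
    def references: Set[Attribute] = children.flatMap(_.references).toSet
  }

  abstract class PlanNode extends Product {
    // Derived once for every operator by scanning the case-class fields.
    // Keeping this final means subclasses cannot hide expressions from
    // analysis/optimization rules.
    final def expressions: Seq[Expression] =
      productIterator.flatMap {
        case e: Expression => Seq(e)
        case s: Seq[_]     => s.collect { case e: Expression => e }
        case _             => Nil
      }.toSeq

    def children: Seq[PlanNode]
    def output: Seq[Attribute]

    // Attributes this operator itself creates; by default, none.
    def producedAttributes: Set[Attribute] = Set.empty

    def inputSet: Set[Attribute] = children.flatMap(_.output).toSet
    def references: Set[Attribute] = expressions.flatMap(_.references).toSet

    // Referenced attributes that neither come from children nor are produced
    // by this operator; these are the ones reported as unresolved.
    def missingInput: Set[Attribute] = references -- inputSet -- producedAttributes
  }

  case class LeafRelation(output: Seq[Attribute]) extends PlanNode {
    def children: Seq[PlanNode] = Nil
    // A leaf produces its own output (mirrors how CoGroup no longer needs its
    // own override once the shared base provides it).
    override def producedAttributes: Set[Attribute] = output.toSet
  }

  // A Generate-like operator: it evaluates `generator` against the child's
  // output and appends freshly created attributes (`generatorOutput`).
  case class GenerateLike(
      generator: Generator,
      generatorOutput: Seq[Attribute],
      child: PlanNode) extends PlanNode {
    def children: Seq[PlanNode] = child :: Nil
    def output: Seq[Attribute] = child.output ++ generatorOutput
    // Declaring the generated attributes here keeps them out of `missingInput`,
    // so there is no need to override `expressions` to hide `generatorOutput`.
    override def producedAttributes: Set[Attribute] = generatorOutput.toSet
  }

  def main(args: Array[String]): Unit = {
    val a = Attribute("a")
    val col = Attribute("col")
    val plan = GenerateLike(Generator(Seq(a)), Seq(col), LeafRelation(Seq(a)))

    println(plan.expressions)   // the generator plus the produced attribute `col`
    println(plan.missingInput)  // Set(): `a` comes from the child, `col` is produced
  }
}

With the default (empty) producedAttributes, the same GenerateLike node would report `col` as missing input, which is the "unresolved attributes" problem the removed comment in basicOperators.scala described; declaring the produced attributes fixes that without special-casing `expressions`.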