[SPARK-12957][SQL] Initial support for constraint propagation in SparkSQL #10844

Closed
wants to merge 15 commits
@@ -17,15 +17,39 @@

package org.apache.spark.sql.catalyst.plans

import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, Expression, VirtualColumn}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.trees.TreeNode
import org.apache.spark.sql.types.{DataType, StructType}

abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {
abstract class QueryPlan[PlanType <: TreeNode[PlanType]]
extends TreeNode[PlanType] with PredicateHelper {
self: PlanType =>

def output: Seq[Attribute]

/**
* Extracts the relevant constraints from a given set of constraints based on the attributes that
* appear in the [[outputSet]].
*/
private def getRelevantConstraints(constraints: Set[Expression]): Set[Expression] = {
constraints.filter(_.references.subsetOf(outputSet))
}

/**
* A set of expressions that describes the data properties of the output rows of this
* operator. For example, if the output of this operator is column `a`, an example `constraints`
* can be `Set(a > 10, a < 20)`.
*/
lazy val constraints: Set[Expression] = getRelevantConstraints(validConstraints)

/**
* This method can be overridden by any child class of QueryPlan to specify a set of constraints
* based on the given operator's constraint propagation logic. These constraints are then
* canonicalized and filtered automatically to contain only those attributes that appear in the
* [[outputSet]].
*/
protected def validConstraints: Set[Expression] = Set.empty
Contributor:
Can we add doc here suggesting that child classes override this method, along with the semantics (i.e. if we go with the suggestion above, you are expected to output any constraint that might be valid, and it will be canonicalized and filtered automatically).
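
To make that contract concrete, here is a small hypothetical example (the operator name and behavior are invented for illustration and are not part of this patch): the override may return any constraint it can justify, even over attributes that are later pruned, and QueryPlan.constraints filters by outputSet.

// Hypothetical unary operator that keeps only a subset of its child's columns.
// Its validConstraints can simply return everything the child guarantees; any
// constraint referring to a dropped column is removed automatically when
// QueryPlan.constraints applies getRelevantConstraints.
case class KeepColumns(kept: Seq[Attribute], child: LogicalPlan) extends UnaryNode {
  override def output: Seq[Attribute] = kept

  // UnaryNode in this patch already provides exactly this default; it is spelled
  // out here only to illustrate the override contract.
  override protected def validConstraints: Set[Expression] = child.constraints
}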


/**
* Returns the set of attributes that are output by this node.
*/
@@ -59,6 +83,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy
* Runs [[transform]] with `rule` on all expressions present in this query operator.
* Users should not expect a specific directionality. If a specific directionality is needed,
* transformExpressionsDown or transformExpressionsUp should be used.
*
* @param rule the rule to be applied to every expression in this operator.
*/
def transformExpressions(rule: PartialFunction[Expression, Expression]): this.type = {
@@ -67,6 +92,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy

/**
* Runs [[transformDown]] with `rule` on all expressions present in this query operator.
*
* @param rule the rule to be applied to every expression in this operator.
*/
def transformExpressionsDown(rule: PartialFunction[Expression, Expression]): this.type = {
@@ -99,6 +125,7 @@ abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanTy

/**
* Runs [[transformUp]] with `rule` on all expressions present in this query operator.
*
* @param rule the rule to be applied to every expression in this operator.
* @return
*/
@@ -301,10 +301,12 @@ abstract class LeafNode extends LogicalPlan {
/**
* A logical plan node with single child.
*/
abstract class UnaryNode extends LogicalPlan {
abstract class UnaryNode extends LogicalPlan with PredicateHelper {
Contributor:
Looks like we do not need PredicateHelper here? Maybe it is better to mix in PredicateHelper only for Filter and Join?

Member Author:
Sounds good; added this only for Filter and Join, which need splitConjunctivePredicates. (See the sketch after this class.)

def child: LogicalPlan

override def children: Seq[LogicalPlan] = child :: Nil

override protected def validConstraints: Set[Expression] = child.constraints
}
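
Following up on that exchange, a hedged sketch of what moving PredicateHelper off UnaryNode might look like; Join would pick up the same mixin, and the exact final declarations may differ:

// UnaryNode no longer needs PredicateHelper: it only forwards its child's constraints.
abstract class UnaryNode extends LogicalPlan {
  def child: LogicalPlan

  override def children: Seq[LogicalPlan] = child :: Nil

  override protected def validConstraints: Set[Expression] = child.constraints
}

// Filter (and likewise Join) mixes in PredicateHelper because it calls
// splitConjunctivePredicates on its condition.
case class Filter(condition: Expression, child: LogicalPlan)
  extends UnaryNode with PredicateHelper {
  override def output: Seq[Attribute] = child.output
}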

/**
@@ -89,9 +89,27 @@ case class Generate(

case class Filter(condition: Expression, child: LogicalPlan) extends UnaryNode {
override def output: Seq[Attribute] = child.output

override protected def validConstraints: Set[Expression] = {
val newConstraint = splitConjunctivePredicates(condition)
.filter(_.references.subsetOf(outputSet))
Contributor:
not needed, right?

Member Author:
yes, removed! (See the sketch after this class.)

.toSet
newConstraint.union(child.constraints)
}
}
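
As agreed in the thread above, the subsetOf filter inside Filter is redundant because QueryPlan.constraints already restricts constraints to the output attributes. A hedged sketch of the simplified override:

// Simplified sketch: add the split conjuncts of the filter condition directly;
// QueryPlan.constraints (via getRelevantConstraints) drops anything whose
// references are not covered by outputSet.
override protected def validConstraints: Set[Expression] =
  child.constraints.union(splitConjunctivePredicates(condition).toSet)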

abstract class SetOperation(left: LogicalPlan, right: LogicalPlan) extends BinaryNode
abstract class SetOperation(left: LogicalPlan, right: LogicalPlan) extends BinaryNode {

protected def leftConstraints: Set[Expression] = left.constraints

protected def rightConstraints: Set[Expression] = {
require(left.output.size == right.output.size)
val attributeRewrites = AttributeMap(right.output.zip(left.output))
right.constraints.map(_ transform {
case a: Attribute => attributeRewrites(a)
})
}
}

private[sql] object SetOperation {
def unapply(p: SetOperation): Option[(LogicalPlan, LogicalPlan)] = Some((p.left, p.right))
@@ -106,6 +124,10 @@ case class Intersect(left: LogicalPlan, right: LogicalPlan) extends SetOperation
leftAttr.withNullability(leftAttr.nullable && rightAttr.nullable)
}

override protected def validConstraints: Set[Expression] = {
leftConstraints.union(rightConstraints)
}

// Intersect are only resolved if they don't introduce ambiguous expression ids,
// since the Optimizer will convert Intersect to Join.
override lazy val resolved: Boolean =
@@ -119,6 +141,8 @@ case class Except(left: LogicalPlan, right: LogicalPlan) extends SetOperation(le
/** We don't use right.output because those rows get excluded from the set. */
override def output: Seq[Attribute] = left.output

override protected def validConstraints: Set[Expression] = leftConstraints

override lazy val resolved: Boolean =
childrenResolved &&
left.output.length == right.output.length &&
@@ -157,6 +181,23 @@ case class Union(children: Seq[LogicalPlan]) extends LogicalPlan {
val sizeInBytes = children.map(_.statistics.sizeInBytes).sum
Statistics(sizeInBytes = sizeInBytes)
}

def rewriteConstraints(
planA: LogicalPlan,
Contributor:
Maybe give it a more informative name? (How about reference?)

Member Author:
Renamed and added doc. (See the sketch after this method.)

planB: LogicalPlan,
constraints: Set[Expression]): Set[Expression] = {
require(planA.output.size == planB.output.size)
val attributeRewrites = AttributeMap(planB.output.zip(planA.output))
constraints.map(_ transform {
case a: Attribute => attributeRewrites(a)
})
}
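
A sketch of the renamed helper with the doc the author mentions; the parameter names reference and original just follow the reviewer's suggestion and need not match the final patch:

/**
 * Rewrites `constraints`, which were derived from `original`'s output attributes, so that
 * they refer to the corresponding output attributes of `reference`. This lets each child's
 * constraints be expressed over the first child's attributes before intersecting them.
 */
private def rewriteConstraints(
    reference: LogicalPlan,
    original: LogicalPlan,
    constraints: Set[Expression]): Set[Expression] = {
  require(reference.output.size == original.output.size)
  val attributeRewrites = AttributeMap(original.output.zip(reference.output))
  constraints.map(_ transform { case a: Attribute => attributeRewrites(a) })
}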

override protected def validConstraints: Set[Expression] = {
children
.map(child => rewriteConstraints(children.head, child, child.constraints))
.reduce(_ intersect _)
}
}

case class Join(
@@ -180,6 +221,46 @@ case class Join(
}
}

private def constructIsNotNullConstraints(condition: Expression): Set[Expression] = {
Contributor:
Why is this done as a special case here, instead of doing it as part of getRelevantConstraints?

Member Author:
That's a great point. Moved the common logic to QueryPlan. (See the sketch after this method.)

// Currently we only infer IsNotNull constraints when the condition consists of equality
// and range comparisons. For all other predicates, we return an empty set of constraints.
splitConjunctivePredicates(condition).map {
case EqualTo(l, r) =>
Set(IsNotNull(l), IsNotNull(r))
case GreaterThan(l, r) =>
Set(IsNotNull(l), IsNotNull(r))
case GreaterThanOrEqual(l, r) =>
Set(IsNotNull(l), IsNotNull(r))
case LessThan(l, r) =>
Set(IsNotNull(l), IsNotNull(r))
case LessThanOrEqual(l, r) =>
Set(IsNotNull(l), IsNotNull(r))
case _ =>
Set.empty[Expression]
}.foldLeft(Set.empty[Expression])(_ union _.toSet)
}
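
Picking up the reviewer's point above, one hedged sketch of what hoisting this logic into QueryPlan could look like (the helper name and exact shape are assumptions; the final patch may differ): getRelevantConstraints both adds the inferred IsNotNull constraints and filters by outputSet, so individual operators only contribute their raw constraints.

// Sketch, inside QueryPlan: infer IsNotNull for the operands of any binary
// comparison constraint, then keep only constraints over this operator's output.
private def getRelevantConstraints(constraints: Set[Expression]): Set[Expression] = {
  constraints
    .union(constructIsNotNullConstraints(constraints))
    .filter(_.references.subsetOf(outputSet))
}

private def constructIsNotNullConstraints(constraints: Set[Expression]): Set[Expression] =
  constraints.collect {
    case bc: BinaryComparison => Set[Expression](IsNotNull(bc.left), IsNotNull(bc.right))
  }.flatten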

override protected def validConstraints: Set[Expression] = {
joinType match {
case Inner if condition.isDefined =>
left.constraints
.union(right.constraints)
.union(constructIsNotNullConstraints(condition.get))
Contributor:
We should also be including the split form of the condition here and below, right? (See the sketch after this method.)

case LeftSemi if condition.isDefined =>
left.constraints
.union(right.constraints)
Contributor:
For left semi join, maybe it is not necessary to union the constraints from the right side, since we will not output any columns from the right table? (See the sketch after this method.)

.union(constructIsNotNullConstraints(condition.get))
case LeftOuter =>
left.constraints
case RightOuter =>
right.constraints
case FullOuter =>
Set.empty[Expression]
}
}
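
Combining the two suggestions above (add the split form of the condition; drop the right side's constraints for LeftSemi), a hedged sketch of how validConstraints might end up, assuming the IsNotNull inference has moved into QueryPlan as discussed earlier so Join no longer calls constructIsNotNullConstraints itself:

override protected def validConstraints: Set[Expression] = joinType match {
  case Inner if condition.isDefined =>
    left.constraints
      .union(right.constraints)
      .union(splitConjunctivePredicates(condition.get).toSet)
  case LeftSemi if condition.isDefined =>
    // No columns from the right side are output, so right.constraints are not
    // propagated; conjuncts of the condition that mention right-side attributes
    // are filtered out later against outputSet.
    left.constraints
      .union(splitConjunctivePredicates(condition.get).toSet)
  case LeftOuter =>
    left.constraints
  case RightOuter =>
    right.constraints
  case _ =>
    // FullOuter, or joins without a condition, conservatively contribute nothing
    // in this sketch.
    Set.empty[Expression]
}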

def selfJoinResolved: Boolean = left.outputSet.intersect(right.outputSet).isEmpty
Contributor:
I think this was a merging mistake, as it's duplicated by the method below.

Member Author:
oops, fixed!


def duplicateResolved: Boolean = left.outputSet.intersect(right.outputSet).isEmpty

// Joins are only resolved if they don't introduce ambiguous expression ids.
@@ -0,0 +1,138 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.plans

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._

class ConstraintPropagationSuite extends SparkFunSuite {

private def resolveColumn(tr: LocalRelation, columnName: String): Expression =
tr.analyze.resolveQuoted(columnName, caseInsensitiveResolution).get

private def verifyConstraints(a: Set[Expression], b: Set[Expression]): Unit = {
assert(a.forall(i => b.map(_.semanticEquals(i)).reduce(_ || _)))
assert(b.forall(i => a.map(_.semanticEquals(i)).reduce(_ || _)))
Contributor:
I would make this function manually call fail with the condition that we can't find, and also differentiate between missing and found-but-not-expected.

Member Author:
sure, good idea. (See the sketch after this method.)

}
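
Following that suggestion, a hedged sketch of a stricter verifyConstraints that calls fail and distinguishes missing constraints from found-but-not-expected ones (the parameter names and message layout here are illustrative):

private def verifyConstraints(found: Set[Expression], expected: Set[Expression]): Unit = {
  // exists over an empty set is simply false, so this also avoids the
  // reduce-on-empty problem of the original boolean assertion.
  val missing = expected.filterNot(e => found.exists(_.semanticEquals(e)))
  val extra = found.filterNot(f => expected.exists(_.semanticEquals(f)))
  if (missing.nonEmpty || extra.nonEmpty) {
    fail(
      s"""
         |== Constraints do not match ==
         |Found:    ${found.mkString(", ")}
         |Expected: ${expected.mkString(", ")}
         |Missing:  ${if (missing.isEmpty) "none" else missing.mkString(", ")}
         |Found but not expected: ${if (extra.isEmpty) "none" else extra.mkString(", ")}
       """.stripMargin)
  }
}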

test("propagating constraints in filters") {
val tr = LocalRelation('a.int, 'b.string, 'c.int)
assert(tr.analyze.constraints.isEmpty)
assert(tr.where('a.attr > 10).select('c.attr, 'b.attr).analyze.constraints.isEmpty)
verifyConstraints(tr.where('a.attr > 10).analyze.constraints, Set(resolveColumn(tr, "a") > 10))
verifyConstraints(tr
.where('a.attr > 10)
.select('c.attr, 'a.attr)
.where('c.attr < 100)
.analyze.constraints,
Set(resolveColumn(tr, "a") > 10, resolveColumn(tr, "c") < 100))
}

test("propagating constraints in union") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
val tr2 = LocalRelation('d.int, 'e.int, 'f.int)
val tr3 = LocalRelation('g.int, 'h.int, 'i.int)
assert(tr1
.where('a.attr > 10)
.unionAll(tr2.where('e.attr > 10)
.unionAll(tr3.where('i.attr > 10)))
.analyze.constraints.isEmpty)
verifyConstraints(tr1
.where('a.attr > 10)
.unionAll(tr2.where('d.attr > 10)
.unionAll(tr3.where('g.attr > 10)))
.analyze.constraints,
Set(resolveColumn(tr1, "a") > 10))
}

test("propagating constraints in intersect") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
val tr2 = LocalRelation('a.int, 'b.int, 'c.int)
verifyConstraints(tr1
.where('a.attr > 10)
.intersect(tr2.where('b.attr < 100))
.analyze.constraints,
Set(resolveColumn(tr1, "a") > 10, resolveColumn(tr1, "b") < 100))
}

test("propagating constraints in except") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
val tr2 = LocalRelation('a.int, 'b.int, 'c.int)
verifyConstraints(tr1
.where('a.attr > 10)
.except(tr2.where('b.attr < 100))
.analyze.constraints,
Set(resolveColumn(tr1, "a") > 10))
}

test("propagating constraints in inner join") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
verifyConstraints(tr1
.where('a.attr > 10)
.join(tr2.where('d.attr < 100), Inner, Some("tr1.a".attr === "tr2.a".attr))
.analyze.constraints,
Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10,
tr2.resolveQuoted("d", caseInsensitiveResolution).get < 100,
IsNotNull(tr2.resolveQuoted("a", caseInsensitiveResolution).get),
IsNotNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get)))
}

test("propagating constraints in left-semi join") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
verifyConstraints(tr1
.where('a.attr > 10)
.join(tr2.where('d.attr < 100), LeftSemi, Some("tr1.a".attr === "tr2.a".attr))
.analyze.constraints,
Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10,
IsNotNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get)))
}

test("propagating constraints in left-outer join") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
verifyConstraints(tr1
.where('a.attr > 10)
.join(tr2.where('d.attr < 100), LeftOuter, Some("tr1.a".attr === "tr2.a".attr))
.analyze.constraints,
Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10))
}

test("propagating constraints in right-outer join") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
verifyConstraints(tr1
.where('a.attr > 10)
.join(tr2.where('d.attr < 100), RightOuter, Some("tr1.a".attr === "tr2.a".attr))
.analyze.constraints,
Set(tr2.resolveQuoted("d", caseInsensitiveResolution).get < 100))
}

test("propagating constraints in full-outer join") {
val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
assert(tr1.where('a.attr > 10)
.join(tr2.where('d.attr < 100), FullOuter, Some("tr1.a".attr === "tr2.a".attr))
.analyze.constraints.isEmpty)
}
}