Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-12957][SQL] Initial support for constraint propagation in SparkSQL #10844

Closed
wants to merge 15 commits into from
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,30 @@

package org.apache.spark.sql.catalyst.plans

import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, Expression, VirtualColumn}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.trees.TreeNode
import org.apache.spark.sql.types.{DataType, StructType}

abstract class QueryPlan[PlanType <: TreeNode[PlanType]] extends TreeNode[PlanType] {
abstract class QueryPlan[PlanType <: TreeNode[PlanType]]
extends TreeNode[PlanType] with PredicateHelper {
self: PlanType =>

def output: Seq[Attribute]

/**
 * Returns the subset of `child`'s constraints that remain valid for this operator:
 * those whose referenced attributes are all contained in this node's `outputSet`.
 * Constraints over columns that this node no longer outputs are dropped, since
 * they can no longer be expressed in terms of the node's own output.
 */
def extractConstraintsFromChild(child: QueryPlan[PlanType]): Set[Expression] = {
  child.constraints.filter(_.references.subsetOf(outputSet))
}

/**
 * A set of expressions that describes properties guaranteed to hold for every
 * output row of this operator. For example, if the output of this operator is
 * column `a`, an example `constraints` can be `Set(a > 10, a < 20)`.
 */
def constraints: Set[Expression] = Set.empty
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is probably going to be nontrivial to calculate for a large tree. We might consider having an internal method, private def validConstraints or something, that we expand / canonicalize into a lazy val constraints


/**
* Returns the set of attributes that are output by this node.
*/
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -301,10 +301,14 @@ abstract class LeafNode extends LogicalPlan {
/**
* A logical plan node with single child.
*/
abstract class UnaryNode extends LogicalPlan {
abstract class UnaryNode extends LogicalPlan with PredicateHelper {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looks like we do not need PredicateHelper here. Maybe it is better to mix in PredicateHelper only for Filter and Join?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sounds good, added this only for filter and join that needed splitConjunctivePredicates

def child: LogicalPlan

override def children: Seq[LogicalPlan] = child :: Nil

// A unary node contributes no constraints of its own; it simply inherits
// whichever of its child's constraints still apply to its output.
override def constraints: Set[Expression] = extractConstraintsFromChild(child)
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,13 +88,33 @@ case class Generate(

case class Filter(condition: Expression, child: LogicalPlan) extends UnaryNode {
  override def output: Seq[Attribute] = child.output

  /**
   * The filter condition itself constrains the output: every conjunct of
   * `condition` that only references output attributes holds for all emitted
   * rows, in addition to the constraints propagated from the child.
   */
  override def constraints: Set[Expression] = {
    // Break between calls rather than mid-call (reviewer style fix).
    val newConstraints = splitConjunctivePredicates(condition)
      .filter(_.references.subsetOf(outputSet))
      .toSet
    newConstraints.union(extractConstraintsFromChild(child))
  }
}

abstract class SetOperation(left: LogicalPlan, right: LogicalPlan) extends BinaryNode {

  // Resolved only when both children are resolved and their schemas line up
  // column-for-column with identical data types.
  final override lazy val resolved: Boolean =
    childrenResolved &&
      left.output.length == right.output.length &&
      left.output.zip(right.output).forall { case (l, r) => l.dataType == r.dataType }

  // Keep only the constraints that reference attributes the child itself outputs.
  override def extractConstraintsFromChild(child: QueryPlan[LogicalPlan]): Set[Expression] = {
    child.constraints.filter(_.references.subsetOf(child.outputSet))
  }

  // Constraints contributed by the left child, over the left child's attributes.
  protected def leftConstraints: Set[Expression] = extractConstraintsFromChild(left)

  // Constraints contributed by the right child, rewritten in terms of the left
  // child's attributes (a set operation's output uses the left schema).
  protected def rightConstraints: Set[Expression] = {
    require(left.output.size == right.output.size)
    val toLeftAttribute = AttributeMap(right.output.zip(left.output))
    extractConstraintsFromChild(right).map { constraint =>
      constraint transform { case a: Attribute => toLeftAttribute(a) }
    }
  }
}

private[sql] object SetOperation {
Expand All @@ -107,11 +127,17 @@ case class Intersect(left: LogicalPlan, right: LogicalPlan) extends SetOperation
left.output.zip(right.output).map { case (leftAttr, rightAttr) =>
leftAttr.withNullability(leftAttr.nullable && rightAttr.nullable)
}

override def constraints: Set[Expression] = {
leftConstraints.union(rightConstraints)
}
}

case class Except(left: LogicalPlan, right: LogicalPlan) extends SetOperation(left, right) {
  // Rows from the right side are only removed from the result, so the output
  // schema is exactly the left child's output (right.output is never emitted).
  override def output: Seq[Attribute] = left.output

  // Only left-side constraints survive: every output row originates from the left.
  override def constraints: Set[Expression] = leftConstraints
}

/** Factory for constructing new `Union` nodes. */
Expand Down Expand Up @@ -146,6 +172,26 @@ case class Union(children: Seq[LogicalPlan]) extends LogicalPlan {
val sizeInBytes = children.map(_.statistics.sizeInBytes).sum
Statistics(sizeInBytes = sizeInBytes)
}

// Restrict a child's constraints to attributes that the child actually outputs.
override def extractConstraintsFromChild(child: QueryPlan[LogicalPlan]): Set[Expression] =
  child.constraints.filter(c => c.references.subsetOf(child.outputSet))

/**
 * Rewrites `constraints` (expressed over `planB`'s output attributes) so that
 * they refer to the corresponding attributes of `planA` instead. `planA` acts
 * as the reference plan: both plans must have the same number of output
 * columns, which are matched positionally.
 */
def rewriteConstraints(
    planA: LogicalPlan,
    planB: LogicalPlan,
    constraints: Set[Expression]): Set[Expression] = {
  require(planA.output.size == planB.output.size)
  // Positional mapping from planB's attributes to planA's.
  val attributeRewrites = AttributeMap(planB.output.zip(planA.output))
  constraints.map(_ transform {
    case a: Attribute => attributeRewrites(a)
  })
}

/**
 * A row of the union may come from any child, so only constraints common to
 * every child (rewritten onto the first child's attributes, which form the
 * union's output schema) are guaranteed to hold.
 */
override def constraints: Set[Expression] = {
  // Break between calls rather than mid-call (reviewer style fix).
  children
    .map(child => rewriteConstraints(children.head, child, extractConstraintsFromChild(child)))
    .reduce(_ intersect _)
}
}

case class Join(
Expand All @@ -169,6 +215,52 @@ case class Join(
}
}

/**
 * Constraints propagated through a join, depending on the join type:
 *  - Inner/LeftSemi with a condition: both children's constraints, plus
 *    `IsNotNull` on the operands of any equality/range conjunct of the
 *    condition (a null operand can never satisfy such a comparison).
 *  - LeftOuter: left constraints plus `IsNull` on right-side output columns.
 *  - RightOuter: the mirror image.
 *  - FullOuter: `IsNull` on both sides' output columns.
 *  - Any other join type: no constraints are propagated.
 * The result is restricted to constraints expressible over this join's output.
 */
override def constraints: Set[Expression] = {
  // Currently we only derive not-null facts from equality and range
  // comparisons; any other conjunct contributes nothing. The catch-all case
  // is required — without it splitConjunctivePredicates output such as
  // EqualNullSafe or In would throw a MatchError.
  def isNotNullConstraints(condition: Expression): Set[Expression] =
    splitConjunctivePredicates(condition).flatMap {
      case EqualTo(l, r) => Seq(IsNotNull(l), IsNotNull(r))
      case GreaterThan(l, r) => Seq(IsNotNull(l), IsNotNull(r))
      case GreaterThanOrEqual(l, r) => Seq(IsNotNull(l), IsNotNull(r))
      case LessThan(l, r) => Seq(IsNotNull(l), IsNotNull(r))
      case LessThanOrEqual(l, r) => Seq(IsNotNull(l), IsNotNull(r))
      case _ => Nil
    }.toSet

  // Outer-joined sides may be null-padded, so every column can be null.
  def isNullConstraints(plan: LogicalPlan): Set[Expression] =
    plan.output.map(IsNull).toSet

  val allConstraints: Set[Expression] = joinType match {
    case Inner if condition.isDefined =>
      extractConstraintsFromChild(left)
        .union(extractConstraintsFromChild(right))
        .union(isNotNullConstraints(condition.get))
    case LeftSemi if condition.isDefined =>
      extractConstraintsFromChild(left)
        .union(extractConstraintsFromChild(right))
        .union(isNotNullConstraints(condition.get))
    case LeftOuter =>
      extractConstraintsFromChild(left).union(isNullConstraints(right))
    case RightOuter =>
      extractConstraintsFromChild(right).union(isNullConstraints(left))
    case FullOuter =>
      isNullConstraints(left).union(isNullConstraints(right))
    case _ =>
      // NOTE(review): covers e.g. condition-less inner joins; consider failing
      // loudly here if new join types are added — TODO confirm desired policy.
      Set.empty[Expression]
  }

  allConstraints.filter(_.references.subsetOf(outputSet))
}

def selfJoinResolved: Boolean = left.outputSet.intersect(right.outputSet).isEmpty
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this was a merging mistake, as it's duplicated with the method below.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

oops, fixed!


// Joins are only resolved if they don't introduce ambiguous expression ids.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.catalyst.plans

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._

class ConstraintPropagationSuite extends SparkFunSuite {

  // Resolves `columnName` against the analyzed form of `tr` (case-insensitively).
  private def resolveColumn(tr: LocalRelation, columnName: String): Expression =
    tr.analyze.resolveQuoted(columnName, caseInsensitiveResolution).get

  /**
   * Asserts that `found` and `expected` contain pairwise semantically equal
   * constraints. Fails with an explicit message listing both the missing and
   * the unexpected constraints, instead of a bare boolean assert. Unlike the
   * previous `reduce(_ || _)` formulation, this never throws
   * UnsupportedOperationException when one of the sets is empty.
   */
  private def verifyConstraints(found: Set[Expression], expected: Set[Expression]): Unit = {
    val missing = expected.filterNot(e => found.exists(_.semanticEquals(e)))
    val extra = found.filterNot(f => expected.exists(_.semanticEquals(f)))
    if (missing.nonEmpty || extra.nonEmpty) {
      fail(
        s"""
           |== FAIL: constraints do not match ==
           |Found:    ${found.mkString(", ")}
           |Expected: ${expected.mkString(", ")}
           |Missing expected constraints: ${missing.mkString(", ")}
           |Found but not expected:       ${extra.mkString(", ")}
         """.stripMargin)
    }
  }

  test("propagating constraints in filter/project") {
    val tr = LocalRelation('a.int, 'b.string, 'c.int)
    assert(tr.analyze.constraints.isEmpty)
    assert(tr.select('a.attr).analyze.constraints.isEmpty)
    assert(tr.where('a.attr > 10).select('c.attr, 'b.attr).analyze.constraints.isEmpty)
    verifyConstraints(tr.where('a.attr > 10).analyze.constraints, Set(resolveColumn(tr, "a") > 10))
    verifyConstraints(tr.where('a.attr > 10).select('c.attr, 'a.attr).where('c.attr < 100)
      .analyze.constraints, Set(resolveColumn(tr, "a") > 10, resolveColumn(tr, "c") < 100))
  }

  test("propagating constraints in union") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
    val tr2 = LocalRelation('d.int, 'e.int, 'f.int)
    val tr3 = LocalRelation('g.int, 'h.int, 'i.int)
    assert(tr1.where('a.attr > 10).unionAll(tr2.where('e.attr > 10)
      .unionAll(tr3.where('i.attr > 10))).analyze.constraints.isEmpty)
    verifyConstraints(tr1.where('a.attr > 10).unionAll(tr2.where('d.attr > 10)
      .unionAll(tr3.where('g.attr > 10))).analyze.constraints, Set(resolveColumn(tr1, "a") > 10))
  }

  test("propagating constraints in intersect") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
    val tr2 = LocalRelation('a.int, 'b.int, 'c.int)
    verifyConstraints(tr1.where('a.attr > 10).intersect(tr2.where('b.attr < 100))
      .analyze.constraints, Set(resolveColumn(tr1, "a") > 10, resolveColumn(tr1, "b") < 100))
  }

  test("propagating constraints in except") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int)
    val tr2 = LocalRelation('a.int, 'b.int, 'c.int)
    verifyConstraints(tr1.where('a.attr > 10).except(tr2.where('b.attr < 100)).analyze.constraints,
      Set(resolveColumn(tr1, "a") > 10))
  }

  test("propagating constraints in inner join") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
    val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
    verifyConstraints(tr1.where('a.attr > 10).join(tr2.where('d.attr < 100), Inner,
      Some("tr1.a".attr === "tr2.a".attr)).analyze.constraints,
      Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10,
        tr2.resolveQuoted("d", caseInsensitiveResolution).get < 100,
        IsNotNull(tr2.resolveQuoted("a", caseInsensitiveResolution).get),
        IsNotNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get)))
  }

  test("propagating constraints in left-semi join") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
    val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
    verifyConstraints(tr1.where('a.attr > 10).join(tr2.where('d.attr < 100), LeftSemi,
      Some("tr1.a".attr === "tr2.a".attr)).analyze.constraints,
      Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10,
        IsNotNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get)))
  }

  test("propagating constraints in left-outer join") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
    val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
    verifyConstraints(tr1.where('a.attr > 10).join(tr2.where('d.attr < 100), LeftOuter,
      Some("tr1.a".attr === "tr2.a".attr)).analyze.constraints,
      Set(tr1.resolveQuoted("a", caseInsensitiveResolution).get > 10,
        IsNull(tr2.resolveQuoted("a", caseInsensitiveResolution).get),
        IsNull(tr2.resolveQuoted("d", caseInsensitiveResolution).get),
        IsNull(tr2.resolveQuoted("e", caseInsensitiveResolution).get)))
  }

  test("propagating constraints in right-outer join") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
    val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
    verifyConstraints(tr1.where('a.attr > 10).join(tr2.where('d.attr < 100), RightOuter,
      Some("tr1.a".attr === "tr2.a".attr)).analyze.constraints,
      Set(tr2.resolveQuoted("d", caseInsensitiveResolution).get < 100,
        IsNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get),
        IsNull(tr1.resolveQuoted("b", caseInsensitiveResolution).get),
        IsNull(tr1.resolveQuoted("c", caseInsensitiveResolution).get)))
  }

  test("propagating constraints in full-outer join") {
    val tr1 = LocalRelation('a.int, 'b.int, 'c.int).subquery('tr1)
    val tr2 = LocalRelation('a.int, 'd.int, 'e.int).subquery('tr2)
    verifyConstraints(tr1.where('a.attr > 10).join(tr2.where('d.attr < 100), FullOuter,
      Some("tr1.a".attr === "tr2.a".attr)).analyze.constraints,
      Set(IsNull(tr1.resolveQuoted("a", caseInsensitiveResolution).get),
        IsNull(tr1.resolveQuoted("b", caseInsensitiveResolution).get),
        IsNull(tr1.resolveQuoted("c", caseInsensitiveResolution).get),
        IsNull(tr2.resolveQuoted("a", caseInsensitiveResolution).get),
        IsNull(tr2.resolveQuoted("d", caseInsensitiveResolution).get),
        IsNull(tr2.resolveQuoted("e", caseInsensitiveResolution).get)))
  }
}