Commit

add tests
yaooqinn committed Jan 21, 2021
1 parent 783932f commit 3a739c9
Showing 3 changed files with 7 additions and 6 deletions.
EstimationUtils.scala
@@ -23,7 +23,7 @@ import scala.math.BigDecimal.RoundingMode
 import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeMap, Expression}
 import org.apache.spark.sql.catalyst.expressions.objects.StaticInvoke
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.util.CharVarcharUtils
+import org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils
 import org.apache.spark.sql.types.{DecimalType, _}
 
 object EstimationUtils {
@@ -82,8 +82,8 @@ object EstimationUtils {
     expressions.flatMap {
       case alias @ Alias(attr: Attribute, _) if attributeStats.contains(attr) =>
         Some(alias.toAttribute -> attributeStats(attr))
-      case alias @ Alias(s: StaticInvoke, _) if alias.explicitMetadata.nonEmpty &&
-          CharVarcharUtils.getRawType(alias.explicitMetadata.get).nonEmpty => s.children.flatMap {
+      case alias @ Alias(StaticInvoke(cls, _, _, arguments, _, _), _)
+          if cls.isAssignableFrom(classOf[CharVarcharCodegenUtils]) => arguments.head match {
         case attr: Attribute if attributeStats.contains(attr) =>
           Some(alias.toAttribute -> attributeStats(attr))
         case _ => None
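
For context, the new case in EstimationUtils implements a "look through the wrapper" rule: when a projected column is really the original attribute wrapped in the read-side length-check call, its column statistics are forwarded unchanged to the alias. The following is a minimal, self-contained sketch of that idea using toy types (not Spark's Alias, StaticInvoke, or ColumnStat classes):

object LookThroughSketch {
  // Toy expression tree; these types are illustrative only, not Spark's.
  sealed trait Expr
  final case class Attr(name: String) extends Expr
  // Stands in for the read-side padding/length-check call around a char/varchar column.
  final case class LengthCheck(child: Expr, limit: Int) extends Expr
  final case class Aliased(child: Expr, outName: String) extends Expr

  final case class ColStat(distinctCount: Long, maxLen: Int)

  // Forward the wrapped attribute's stats to the alias, mirroring the new case above.
  def aliasStats(exprs: Seq[Expr], stats: Map[String, ColStat]): Map[String, ColStat] =
    exprs.flatMap {
      case Aliased(a: Attr, out) if stats.contains(a.name) =>
        Some(out -> stats(a.name))
      case Aliased(LengthCheck(a: Attr, _), out) if stats.contains(a.name) =>
        Some(out -> stats(a.name))
      case _ => None
    }.toMap

  def main(args: Array[String]): Unit = {
    val stats = Map("v" -> ColStat(distinctCount = 10, maxLen = 3))
    val projected = Seq(Aliased(LengthCheck(Attr("v"), limit = 3), "v"))
    println(aliasStats(projected, stats)) // Map(v -> ColStat(10,3))
  }
}

Without the second case, the aliased column would lose its statistics and the optimizer would fall back to size-only estimation.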
Empty file.
HiveCharVarcharTestSuite.scala
@@ -54,7 +54,7 @@ class HiveCharVarcharTestSuite extends CharVarcharTestSuite with TestHiveSingleton
     }
   }
 
-  test("cbo") {
+  test("SPARK-34188: read side length check should not block CBO size estimating") {
     withTable("t") {
       sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(4)) USING $format")
       val stats = Some(CatalogStatistics(400L, Some(20L), Map(
@@ -64,8 +64,9 @@ class HiveCharVarcharTestSuite extends CharVarcharTestSuite with TestHiveSingleton
          None, CatalogColumnStat.VERSION))
       ))
       spark.sessionState.catalog.alterTableStats(TableIdentifier("t"), stats)
-      withSQLConf((SQLConf.CBO_ENABLED.key, "true")) {
-        spark.table("t").where("v > '123'")
+      withSQLConf((SQLConf.CBO_ENABLED.key, "true"), (SQLConf.PLAN_STATS_ENABLED.key, "true")) {
+        val newStat = spark.table("t").where("v = '124'").queryExecution.optimizedPlan.stats
+        assert(newStat.rowCount.get === 1)
       }
     }
   }
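
The strengthened assertion can be reproduced interactively in roughly the same way. A hedged, spark-shell style sketch, assuming the table t and the column statistics registered by the test body above are in place:

import org.apache.spark.sql.internal.SQLConf

spark.conf.set(SQLConf.CBO_ENABLED.key, "true")
spark.conf.set(SQLConf.PLAN_STATS_ENABLED.key, "true")

// With per-column stats still visible through the read-side length check,
// the filter estimate keeps a row count instead of degrading to size-only stats.
val estimated = spark.table("t").where("v = '124'").queryExecution.optimizedPlan.stats
println(estimated.rowCount) // expected: Some(1)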
