Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-41798][BUILD] Upgrade hive-storage-api to 2.8.1 #39322

Closed
wants to merge 2 commits into the target branch from the source branch (branch names lost in page extraction)
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-2-hive-2.3
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ hive-shims-0.23/2.3.9//hive-shims-0.23-2.3.9.jar
hive-shims-common/2.3.9//hive-shims-common-2.3.9.jar
hive-shims-scheduler/2.3.9//hive-shims-scheduler-2.3.9.jar
hive-shims/2.3.9//hive-shims-2.3.9.jar
hive-storage-api/2.7.3//hive-storage-api-2.7.3.jar
hive-storage-api/2.8.1//hive-storage-api-2.8.1.jar
hk2-api/2.6.1//hk2-api-2.6.1.jar
hk2-locator/2.6.1//hk2-locator-2.6.1.jar
hk2-utils/2.6.1//hk2-utils-2.6.1.jar
Expand Down
2 changes: 1 addition & 1 deletion dev/deps/spark-deps-hadoop-3-hive-2.3
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ hive-shims-0.23/2.3.9//hive-shims-0.23-2.3.9.jar
hive-shims-common/2.3.9//hive-shims-common-2.3.9.jar
hive-shims-scheduler/2.3.9//hive-shims-scheduler-2.3.9.jar
hive-shims/2.3.9//hive-shims-2.3.9.jar
hive-storage-api/2.7.3//hive-storage-api-2.7.3.jar
hive-storage-api/2.8.1//hive-storage-api-2.8.1.jar
hk2-api/2.6.1//hk2-api-2.6.1.jar
hk2-locator/2.6.1//hk2-locator-2.6.1.jar
hk2-utils/2.6.1//hk2-utils-2.6.1.jar
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@
-->
<hadoop.deps.scope>compile</hadoop.deps.scope>
<hive.deps.scope>compile</hive.deps.scope>
<hive.storage.version>2.7.3</hive.storage.version>
<hive.storage.version>2.8.1</hive.storage.version>
<hive.storage.scope>compile</hive.storage.scope>
<hive.common.scope>compile</hive.common.scope>
<hive.llap.scope>compile</hive.llap.scope>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import java.time.{Duration, LocalDateTime, Period}

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.ql.io.sarg.{PredicateLeaf, SearchArgument}
import org.apache.hadoop.hive.ql.io.sarg.{PredicateLeaf, SearchArgument, SearchArgumentImpl}
import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentFactory.newBuilder

import org.apache.spark.{SparkConf, SparkException}
Expand Down Expand Up @@ -86,7 +86,8 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
(predicate: Predicate, stringExpr: String)
(implicit df: DataFrame): Unit = {
def checkLogicalOperator(filter: SearchArgument) = {
assert(filter.toString == stringExpr)
// HIVE-24458 changes toString output and provides `toOldString` for old style.
assert(filter.asInstanceOf[SearchArgumentImpl].toOldString == stringExpr)
}
checkFilterPredicate(df, predicate, checkLogicalOperator)
}
Expand Down Expand Up @@ -543,7 +544,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
OrcFilters.createFilter(schema, Array(
LessThan("a", 10),
StringContains("b", "prefix")
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}

// The `LessThan` should be converted while the whole inner `And` shouldn't
Expand All @@ -554,7 +555,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
GreaterThan("a", 1),
StringContains("b", "prefix")
))
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}

// Safely remove unsupported `StringContains` predicate and push down `LessThan`
Expand All @@ -564,7 +565,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
LessThan("a", 10),
StringContains("b", "prefix")
)
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}

// Safely remove unsupported `StringContains` predicate, push down `LessThan` and `GreaterThan`.
Expand All @@ -578,7 +579,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
),
GreaterThan("a", 1)
)
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}
}

Expand All @@ -601,7 +602,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
LessThan("a", 1)
)
)
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}

assertResult("leaf-0 = (LESS_THAN_EQUALS a 10), leaf-1 = (LESS_THAN a 1)," +
Expand All @@ -617,7 +618,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
LessThan("a", 1)
)
)
)).get.toString
)).get.asInstanceOf[SearchArgumentImpl].toOldString
}

assert(OrcFilters.createFilter(schema, Array(
Expand All @@ -639,7 +640,7 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession {
LessThan(
"a",
new java.math.BigDecimal(3.14, MathContext.DECIMAL64).setScale(2)))
).get.toString
).get.asInstanceOf[SearchArgumentImpl].toOldString
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@ package org.apache.spark.sql.execution.datasources.orc

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.ql.io.sarg.SearchArgumentImpl

import org.apache.spark.SparkConf
import org.apache.spark.sql.{Column, DataFrame}
import org.apache.spark.sql.catalyst.dsl.expressions._
Expand Down Expand Up @@ -74,7 +76,8 @@ class OrcV1FilterSuite extends OrcFilterSuite {
(predicate: Predicate, stringExpr: String)
(implicit df: DataFrame): Unit = {
def checkLogicalOperator(filter: SearchArgument) = {
assert(filter.toString == stringExpr)
// HIVE-24458 changes toString format and provides `toOldString` for old style.
assert(filter.asInstanceOf[SearchArgumentImpl].toOldString == stringExpr)
}
checkFilterPredicate(df, predicate, checkLogicalOperator)
}
Expand Down