[SPARK-34772][TESTS][FOLLOWUP] Disable a test case using Hive 1.2.1 in Java9+ environment

This PR aims to disable a new test case that uses Hive 1.2.1 when running in a Java 9+ test environment.

[HIVE-6113](https://issues.apache.org/jira/browse/HIVE-6113) upgraded DataNucleus to 4.x in Hive 2.0, and DataNucleus 3.x does not support Java 9+.
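For context, ScalaTest's `assume` cancels (rather than fails) a test when its condition does not hold, which is how the suite skips this case on Java 9+. A minimal, self-contained sketch of the pattern, with a hypothetical suite name:

```scala
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical suite illustrating the guard used in HiveSparkSubmitSuite.
class JavaVersionGuardSuite extends AnyFunSuite {
  test("needs the Hive 1.2.1 / DataNucleus 3.x stack") {
    // assume() throws TestCanceledException when false, so the test is
    // reported as CANCELED instead of FAILED on Java 9+.
    assume(!SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
    // ... test body that requires Java 8 ...
  }
}
```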

**Before (Java 9+ environment):**
```
$ build/sbt "hive/testOnly *.HiveSparkSubmitSuite -- -z SPARK-34772" -Phive
...
[info] *** 1 TEST FAILED ***
[error] Failed: Total 1, Failed 1, Errors 0, Passed 0
[error] Failed tests:
[error] 	org.apache.spark.sql.hive.HiveSparkSubmitSuite
[error] (hive / Test / testOnly) sbt.TestsFailedException: Tests unsuccessful
[error] Total time: 328 s (05:28), completed Mar 21, 2021, 5:32:39 PM
```

This fixes the UT in Java 9+ environments.

Tested manually in the same Java 9+ environment:

```
$ build/sbt "hive/testOnly *.HiveSparkSubmitSuite -- -z SPARK-34772" -Phive
...
[info] HiveSparkSubmitSuite:
[info] - SPARK-34772: RebaseDateTime loadRebaseRecords should use Spark classloader instead of context !!! CANCELED !!! (26 milliseconds)
[info]   org.apache.commons.lang3.SystemUtils.isJavaVersionAtLeast(JAVA_9) was true (HiveSparkSubmitSuite.scala:344)
```

Closes apache#31916 from dongjoon-hyun/SPARK-HiveSparkSubmitSuite.

Authored-by: Dongjoon Hyun <dhyun@apple.com>
Signed-off-by: Dongjoon Hyun <dhyun@apple.com>
dongjoon-hyun authored and Yi.Wang committed Oct 25, 2021
1 parent dcc79aa commit 0d2c1b8
Showing 1 changed file (sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala) with 47 additions and 6 deletions.
```diff
@@ -23,6 +23,7 @@ import scala.util.Properties
 
 import org.apache.commons.lang3.{JavaVersion, SystemUtils}
 import org.apache.hadoop.fs.Path
+import org.apache.hadoop.hive.common.FileUtils
 import org.scalatest.Assertions._
 import org.scalatest.BeforeAndAfterEach
 import org.scalatest.matchers.must.Matchers
@@ -337,6 +338,30 @@ class HiveSparkSubmitSuite
       unusedJar.toString)
     runSparkSubmit(argsForShowTables)
   }
+
+  test("SPARK-34772: RebaseDateTime loadRebaseRecords should use Spark classloader " +
+    "instead of context") {
+    assume(!SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
+    val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+
+    // We need to specify the metastore database location in case of conflict with other hive
+    // versions.
+    withTempDir { file =>
+      file.delete()
+      val metastore = s"jdbc:derby:;databaseName=${file.getAbsolutePath};create=true"
+
+      val args = Seq(
+        "--class", SPARK_34772.getClass.getName.stripSuffix("$"),
+        "--name", "SPARK-34772",
+        "--master", "local-cluster[2,1,1024]",
+        "--conf", s"${LEGACY_TIME_PARSER_POLICY.key}=LEGACY",
+        "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=1.2.1",
+        "--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
+        "--conf", s"spark.hadoop.javax.jdo.option.ConnectionURL=$metastore",
+        unusedJar.toString)
+      runSparkSubmit(args)
+    }
+  }
 }
 
 object SetMetastoreURLTest extends Logging {
@@ -408,10 +433,11 @@ object SetWarehouseLocationTest extends Logging {
 
     }
 
-    if (sparkSession.conf.get(WAREHOUSE_PATH.key) != expectedWarehouseLocation) {
+    val qualifiedWHPath = FileUtils.makeQualified(
+      new Path(expectedWarehouseLocation), sparkSession.sparkContext.hadoopConfiguration).toString
+    if (sparkSession.conf.get(WAREHOUSE_PATH.key) != qualifiedWHPath) {
       throw new Exception(
-        s"${WAREHOUSE_PATH.key} is not set to the expected warehouse location " +
-          s"$expectedWarehouseLocation.")
+        s"${WAREHOUSE_PATH.key} is not set to the expected warehouse location $qualifiedWHPath.")
     }
 
     val catalog = sparkSession.sessionState.catalog
@@ -424,7 +450,7 @@ object SetWarehouseLocationTest extends Logging {
       val tableMetadata =
         catalog.getTableMetadata(TableIdentifier("testLocation", Some("default")))
       val expectedLocation =
-        CatalogUtils.stringToURI(s"file:${expectedWarehouseLocation.toString}/testlocation")
+        CatalogUtils.stringToURI(s"$qualifiedWHPath/testlocation")
       val actualLocation = tableMetadata.location
       if (actualLocation != expectedLocation) {
         throw new Exception(
@@ -440,7 +466,7 @@ object SetWarehouseLocationTest extends Logging {
       val tableMetadata =
         catalog.getTableMetadata(TableIdentifier("testLocation", Some("testLocationDB")))
       val expectedLocation = CatalogUtils.stringToURI(
-        s"file:${expectedWarehouseLocation.toString}/testlocationdb.db/testlocation")
+        s"$qualifiedWHPath/testlocationdb.db/testlocation")
       val actualLocation = tableMetadata.location
       if (actualLocation != expectedLocation) {
         throw new Exception(
@@ -711,7 +737,7 @@ object SPARK_9757 extends QueryTest {
       val df =
         hiveContext
           .range(10)
-          .select(callUDF("struct", ($"id" + 0.2) cast DecimalType(10, 3)) as "dec_struct")
+          .select(call_udf("struct", ($"id" + 0.2) cast DecimalType(10, 3)) as "dec_struct")
       df.write.option("path", dir.getCanonicalPath).mode("overwrite").saveAsTable("t")
       checkAnswer(hiveContext.table("t"), df)
     }
@@ -845,3 +871,18 @@ object SPARK_18989_DESC_TABLE {
     }
   }
 }
+
+object SPARK_34772 {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession.builder()
+      .config(UI_ENABLED.key, "false")
+      .enableHiveSupport()
+      .getOrCreate()
+    try {
+      spark.sql("CREATE TABLE t (c int) PARTITIONED BY (p date)")
+      spark.sql("SELECT * FROM t WHERE p='2021-01-01'").collect()
+    } finally {
+      spark.sql("DROP TABLE IF EXISTS t")
+    }
+  }
+}
```

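Aside from the test change, one hunk above replaces `callUDF` with `call_udf`, its snake_case successor in `org.apache.spark.sql.functions`. A small sketch of calling a registered UDF by name this way, assuming Spark 3.2+ where `call_udf` is available:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.call_udf

object CallUdfExample {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("call_udf example")
      .getOrCreate()
    import spark.implicits._

    // Register a UDF under a name, then invoke it by that name with call_udf.
    spark.udf.register("plusOne", (x: Long) => x + 1)
    Seq(1L, 2L, 3L).toDF("id").select(call_udf("plusOne", $"id")).show()

    spark.stop()
  }
}
```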