diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala
index 46860ae1771de..2999d475fc8f6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/RebaseDateTime.scala
@@ -29,6 +29,7 @@ import com.fasterxml.jackson.module.scala.{DefaultScalaModule, ScalaObjectMapper
 
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
+import org.apache.spark.util.Utils
 
 /**
  * The collection of functions for rebasing days and microseconds from/to the hybrid calendar
@@ -263,7 +264,7 @@ object RebaseDateTime {
   // `JsonRebaseRecord`. AnyRefMap is used here instead of Scala's immutable map because
   // it is 2 times faster in DateTimeRebaseBenchmark.
   private[sql] def loadRebaseRecords(fileName: String): AnyRefMap[String, RebaseInfo] = {
-    val file = Thread.currentThread().getContextClassLoader.getResource(fileName)
+    val file = Utils.getSparkClassLoader.getResource(fileName)
     val mapper = new ObjectMapper() with ScalaObjectMapper
     mapper.registerModule(DefaultScalaModule)
     val jsonRebaseRecords = mapper.readValue[Seq[JsonRebaseRecord]](file)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index 77d54ed45a5de..a3bff6bb26a6b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -36,7 +36,7 @@ import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.expressions.Window
 import org.apache.spark.sql.hive.test.{HiveTestJars, TestHiveContext}
-import org.apache.spark.sql.internal.SQLConf.SHUFFLE_PARTITIONS
+import org.apache.spark.sql.internal.SQLConf.{LEGACY_TIME_PARSER_POLICY, SHUFFLE_PARTITIONS}
 import org.apache.spark.sql.internal.StaticSQLConf.WAREHOUSE_PATH
 import org.apache.spark.sql.types.{DecimalType, StructType}
 import org.apache.spark.tags.{ExtendedHiveTest, SlowHiveTest}
@@ -337,6 +337,29 @@ class HiveSparkSubmitSuite
       unusedJar.toString)
     runSparkSubmit(argsForShowTables)
   }
+
+  test("SPARK-34772: RebaseDateTime loadRebaseRecords should use Spark classloader " +
+    "instead of context") {
+    val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
+
+    // We need to specify the metastore database location in case of conflict with other hive
+    // versions.
+    withTempDir { file =>
+      file.delete()
+      val metastore = s"jdbc:derby:;databaseName=${file.getAbsolutePath};create=true"
+
+      val args = Seq(
+        "--class", SPARK_34772.getClass.getName.stripSuffix("$"),
+        "--name", "SPARK-34772",
+        "--master", "local-cluster[2,1,1024]",
+        "--conf", s"${LEGACY_TIME_PARSER_POLICY.key}=LEGACY",
+        "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=1.2.1",
+        "--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
+        "--conf", s"spark.hadoop.javax.jdo.option.ConnectionURL=$metastore",
+        unusedJar.toString)
+      runSparkSubmit(args)
+    }
+  }
 }
 
 object SetMetastoreURLTest extends Logging {
@@ -845,3 +868,18 @@ object SPARK_18989_DESC_TABLE {
     }
   }
 }
+
+object SPARK_34772 {
+  def main(args: Array[String]): Unit = {
+    val spark = SparkSession.builder()
+      .config(UI_ENABLED.key, "false")
+      .enableHiveSupport()
+      .getOrCreate()
+    try {
+      spark.sql("CREATE TABLE t (c int) PARTITIONED BY (p date)")
+      spark.sql("SELECT * FROM t WHERE p='2021-01-01'").collect()
+    } finally {
+      spark.sql("DROP TABLE IF EXISTS t")
+    }
+  }
+}
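
Note on why the one-line fix matters: with spark.sql.hive.metastore.jars=maven (as the new test configures), Hive metastore calls run under Spark's IsolatedClientLoader, which swaps the thread's context classloader. If RebaseDateTime is first initialized on such a thread, Thread.currentThread().getContextClassLoader.getResource(...) cannot see the rebase JSON files bundled with spark-catalyst and returns null, so loading the records fails. Utils.getSparkClassLoader instead resolves against the loader that loaded Spark itself, which always sees them. Below is a minimal Scala sketch of the difference, not part of the patch: the object name is illustrative, the resource name is the actual file loadRebaseRecords reads, and since Utils is private[spark] the sketch uses getClass.getClassLoader, which is what Utils.getSparkClassLoader returns internally. It assumes spark-catalyst is on the application classpath.

    // Illustrative sketch only: shows how the two classloader lookups can diverge.
    object ClassLoaderLookupSketch {
      def main(args: Array[String]): Unit = {
        // Actual resource that RebaseDateTime.loadRebaseRecords reads from the
        // spark-catalyst jar.
        val resource = "gregorian-julian-rebase-micros.json"

        // Whatever loader was last set on this thread; under an isolated Hive
        // metastore client it may not see Spark's resources and yields null.
        val viaContext = Thread.currentThread().getContextClassLoader.getResource(resource)

        // The loader that defined this class; for code inside Spark this is
        // exactly what Utils.getSparkClassLoader returns, and it always sees
        // resources shipped on Spark's own classpath.
        val viaDefiningLoader = getClass.getClassLoader.getResource(resource)

        println(s"context classloader  -> $viaContext")
        println(s"defining classloader -> $viaDefiningLoader")
      }
    }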