From 4aecbbaeb9da221a205a002739e1eca008b9756c Mon Sep 17 00:00:00 2001
From: Yin Huai
Date: Fri, 21 Aug 2015 13:57:24 -0700
Subject: [PATCH] Expose initialConf.

---
 .../org/apache/spark/sql/hive/client/ClientInterface.scala   | 4 ++++
 .../org/apache/spark/sql/hive/client/ClientWrapper.scala     | 2 ++
 .../scala/org/apache/spark/sql/hive/execution/commands.scala | 3 ++-
 3 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
index 3811c152a7ae6..082f10d7a2823 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientInterface.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.hive.client
 import java.io.PrintStream
 import java.util.{Map => JMap}
 
+import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException}
 import org.apache.spark.sql.catalyst.expressions.Expression
 
@@ -91,6 +92,9 @@ private[hive] trait ClientInterface {
   /** Returns the Hive Version of this client. */
   def version: HiveVersion
 
+  /** Returns the initial HiveConf used to create this client. */
+  def initialConf: HiveConf
+
   /** Returns the configuration for the given key in the current session. */
   def getConf(key: String, defaultValue: String): String
 
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
index f49c97de8ff4e..6cc1dbc4adf69 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala
@@ -184,6 +184,8 @@ private[hive] class ClientWrapper(
   /** Returns the configuration for the current session. */
   def conf: HiveConf = SessionState.get().getConf
 
+  override def initialConf: HiveConf = state.getConf
+
   override def getConf(key: String, defaultValue: String): String = {
     conf.get(key, defaultValue)
   }
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
index d1699dd536817..d17e9ed9edd67 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala
@@ -100,8 +100,9 @@ case class AddJar(path: String) extends RunnableCommand {
     // returns the value of a thread local variable and its HiveConf may not be the HiveConf
     // associated with `executionHive.state` (for example, HiveContext is created in one thread
     // and then add jar is called from another thread).
-    hiveContext.executionHive.state.getConf.setClassLoader(newClassLoader)
+    hiveContext.executionHive.initialConf.setClassLoader(newClassLoader)
     // Add jar to isolated hive (metadataHive) class loader.
+    hiveContext.metadataHive.initialConf.setClassLoader(newClassLoader)
     hiveContext.runSqlHive(s"ADD JAR $path")
 
     // Add jar to executors
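
Note on the commands.scala hunk: the comment above the edited lines relies on SessionState.get() being backed by a thread local, so the HiveConf it returns depends on which thread runs AddJar, while the new `initialConf` (implemented in ClientWrapper as `state.getConf`) is bound to the client instance itself. Below is a minimal, self-contained sketch of that thread-local behavior; it is illustration only, not part of the patch, and `ThreadLocalSketch` / `sessionStateConf` are hypothetical names standing in for SessionState.get():

object ThreadLocalSketch {
  // Hypothetical stand-in for SessionState's thread-local registry.
  private val sessionStateConf = new ThreadLocal[String]()

  def main(args: Array[String]): Unit = {
    // The thread that "creates the HiveContext" registers state on itself.
    sessionStateConf.set("conf-created-on-main-thread")

    // A different thread (e.g. one running ADD JAR) does not see that state.
    val other = new Thread(new Runnable {
      override def run(): Unit =
        println(s"other thread sees: ${Option(sessionStateConf.get())}") // None
    })
    other.start()
    other.join()

    println(s"main thread sees: ${Option(sessionStateConf.get())}") // Some(conf-created-on-main-thread)
  }
}

Because the class loader has to land on the HiveConf that the execution and metadata clients actually hold, setting it through `initialConf` sidesteps the thread-local lookup entirely.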