From 7844933bd8ca61f49b482f154d1b7ee1e66fa045 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Fri, 18 Dec 2015 14:49:38 +0800 Subject: [PATCH 1/6] pass the user parameters to metadataHive --- .../hive/thriftserver/SparkSQLCLIDriver.scala | 1 + .../hive/thriftserver/SparkSQLDriver.scala | 1 - .../sql/hive/thriftserver/SparkSQLEnv.scala | 9 +++++++-- .../apache/spark/sql/hive/HiveContext.scala | 19 ++++++++++++++----- .../spark/sql/hive/client/ClientWrapper.scala | 13 +++++++++---- .../hive/client/IsolatedClientLoader.scala | 8 ++++---- 6 files changed, 35 insertions(+), 16 deletions(-) diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index 03bb2c222503f..580d8e37cc956 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -263,6 +263,7 @@ private[hive] object SparkSQLCLIDriver extends Logging { } + private[hive] class SparkSQLCLIDriver extends CliDriver with Logging { private val sessionState = SessionState.get().asInstanceOf[CliSessionState] diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala index f1ec7238520ac..0c937420345c1 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala @@ -18,7 +18,6 @@ package org.apache.spark.sql.hive.thriftserver import java.util.{Arrays, ArrayList => JArrayList, List => JList} -import org.apache.log4j.LogManager import org.apache.spark.sql.AnalysisException import scala.collection.JavaConverters._ diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala index bacf6cc458fd5..e4306c0239192 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala @@ -21,8 +21,10 @@ import java.io.PrintStream import scala.collection.JavaConverters._ +import org.apache.hadoop.hive.ql.session.SessionState + import org.apache.spark.scheduler.StatsReportListener -import org.apache.spark.sql.hive.HiveContext +import org.apache.spark.sql.hive.{UserInput, HiveContext} import org.apache.spark.{Logging, SparkConf, SparkContext} import org.apache.spark.util.Utils @@ -55,7 +57,10 @@ private[hive] object SparkSQLEnv extends Logging { sparkContext = new SparkContext(sparkConf) sparkContext.addSparkListener(new StatsReportListener()) - hiveContext = new HiveContext(sparkContext) + + val sessionState = SessionState.get() + hiveContext = new HiveContext(sparkContext, + Some(UserInput(sessionState.getIsSilent, sessionState.getIsVerbose))) hiveContext.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8")) hiveContext.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8")) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala index 
5958777b0d064..ae75f9d423c9c 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala @@ -56,6 +56,10 @@ import org.apache.spark.unsafe.types.UTF8String import org.apache.spark.util.Utils import org.apache.spark.{Logging, SparkContext} +/** + * Used to encapsulate the user input parameters from the spark-sql CLI. + */ +private[hive] case class UserInput (isSilent: Boolean, isVerbose: Boolean) /** * This is the HiveQL Dialect, this dialect is strongly bind with HiveContext @@ -93,13 +97,19 @@ class HiveContext private[hive]( listener: SQLListener, @transient private val execHive: ClientWrapper, @transient private val metaHive: ClientInterface, - isRootContext: Boolean) + isRootContext: Boolean, + userInput: Option[UserInput] = None) extends SQLContext(sc, cacheManager, listener, isRootContext) with Logging { self => def this(sc: SparkContext) = { - this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true) + this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true, None) + } + + def this(sc: SparkContext, userInput: Option[UserInput]) = { + this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true, userInput) } + def this(sc: JavaSparkContext) = this(sc.sc) import org.apache.spark.sql.hive.HiveContext._ @@ -215,7 +225,7 @@ class HiveContext private[hive]( config = newTemporaryConfiguration(useInMemoryDerby = true), isolationOn = false, baseClassLoader = Utils.getContextOrSparkClassLoader) - loader.createClient().asInstanceOf[ClientWrapper] + loader.createClient(userInput).asInstanceOf[ClientWrapper] } /** @@ -324,7 +334,7 @@ class HiveContext private[hive]( barrierPrefixes = hiveMetastoreBarrierPrefixes, sharedPrefixes = hiveMetastoreSharedPrefixes) } - isolatedLoader.createClient() + isolatedLoader.createClient(userInput) } protected[sql] override def parseSql(sql: String): LogicalPlan = { @@ -656,7 +666,6 @@ class HiveContext private[hive]( } } - private[hive] object HiveContext { /** The version of hive used internally by Spark SQL. */ val hiveExecutionVersion: String = "1.2.1" diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala index 598ccdeee4ad2..34babebc18aae 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala @@ -36,6 +36,7 @@ import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.util.VersionInfo import org.apache.spark.{SparkConf, SparkException, Logging} +import org.apache.spark.sql.hive.UserInput import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.execution.QueryExecutionException import org.apache.spark.util.{CircularBuffer, Utils} @@ -61,10 +62,10 @@ private[hive] class ClientWrapper( override val version: HiveVersion, config: Map[String, String], initClassLoader: ClassLoader, - val clientLoader: IsolatedClientLoader) + val clientLoader: IsolatedClientLoader, + userInput: Option[UserInput]) extends ClientInterface with Logging { - overrideHadoopShims() // !! HACK ALERT !! 
@@ -194,6 +195,10 @@ private[hive] class ClientWrapper( SessionState.start(state) state.out = new PrintStream(outputBuffer, true, "UTF-8") state.err = new PrintStream(outputBuffer, true, "UTF-8") + userInput.foreach { input => + state.setIsSilent(input.isSilent) + state.setIsVerbose(input.isVerbose) + } state } finally { Thread.currentThread().setContextClassLoader(original) @@ -494,7 +499,7 @@ private[hive] class ClientWrapper( results case _ => - if (state.out != null) { + if (state.out != null && !state.getIsSilent) { // scalastyle:off println state.out.println(tokens(0) + " " + cmd_1) // scalastyle:on println @@ -582,7 +587,7 @@ private[hive] class ClientWrapper( } def newSession(): ClientWrapper = { - clientLoader.createClient().asInstanceOf[ClientWrapper] + clientLoader.createClient(userInput).asInstanceOf[ClientWrapper] } def reset(): Unit = withHiveState { diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala index 010051d255fdc..0effcb1fb3f62 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala @@ -30,7 +30,7 @@ import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.spark.Logging import org.apache.spark.deploy.SparkSubmitUtils import org.apache.spark.sql.catalyst.util.quietly -import org.apache.spark.sql.hive.HiveContext +import org.apache.spark.sql.hive.{UserInput, HiveContext} import org.apache.spark.util.{MutableURLClassLoader, Utils} /** Factory for `IsolatedClientLoader` with specific versions of hive. */ @@ -233,9 +233,9 @@ private[hive] class IsolatedClientLoader( } /** The isolated client interface to Hive. */ - private[hive] def createClient(): ClientInterface = { + private[hive] def createClient(userInput: Option[UserInput] = None): ClientInterface = { if (!isolationOn) { - return new ClientWrapper(version, config, baseClassLoader, this) + return new ClientWrapper(version, config, baseClassLoader, this, userInput) } // Pre-reflective instantiation setup. 
logDebug("Initializing the logger to avoid disaster...") @@ -246,7 +246,7 @@ private[hive] class IsolatedClientLoader( classLoader .loadClass(classOf[ClientWrapper].getName) .getConstructors.head - .newInstance(version, config, classLoader, this) + .newInstance(version, config, classLoader, this, userInput) .asInstanceOf[ClientInterface] } catch { case e: InvocationTargetException => From a8edabbac8fd63336e8c44c9618b33063d8951a6 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Mon, 21 Dec 2015 08:45:24 -0800 Subject: [PATCH 2/6] minimize changes --- .../hive/thriftserver/SparkSQLDriver.scala | 1 + .../sql/hive/thriftserver/SparkSQLEnv.scala | 9 ++------- .../apache/spark/sql/hive/HiveContext.scala | 19 +++++-------------- .../spark/sql/hive/client/ClientWrapper.scala | 8 +++----- .../hive/client/IsolatedClientLoader.scala | 19 +++++++++++++++---- 5 files changed, 26 insertions(+), 30 deletions(-) diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala index 0c937420345c1..f1ec7238520ac 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLDriver.scala @@ -18,6 +18,7 @@ package org.apache.spark.sql.hive.thriftserver import java.util.{Arrays, ArrayList => JArrayList, List => JList} +import org.apache.log4j.LogManager import org.apache.spark.sql.AnalysisException import scala.collection.JavaConverters._ diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala index e4306c0239192..bacf6cc458fd5 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala @@ -21,10 +21,8 @@ import java.io.PrintStream import scala.collection.JavaConverters._ -import org.apache.hadoop.hive.ql.session.SessionState - import org.apache.spark.scheduler.StatsReportListener -import org.apache.spark.sql.hive.{UserInput, HiveContext} +import org.apache.spark.sql.hive.HiveContext import org.apache.spark.{Logging, SparkConf, SparkContext} import org.apache.spark.util.Utils @@ -57,10 +55,7 @@ private[hive] object SparkSQLEnv extends Logging { sparkContext = new SparkContext(sparkConf) sparkContext.addSparkListener(new StatsReportListener()) - - val sessionState = SessionState.get() - hiveContext = new HiveContext(sparkContext, - Some(UserInput(sessionState.getIsSilent, sessionState.getIsVerbose))) + hiveContext = new HiveContext(sparkContext) hiveContext.metadataHive.setOut(new PrintStream(System.out, true, "UTF-8")) hiveContext.metadataHive.setInfo(new PrintStream(System.err, true, "UTF-8")) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala index ae75f9d423c9c..5958777b0d064 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala @@ -56,10 +56,6 @@ import org.apache.spark.unsafe.types.UTF8String import org.apache.spark.util.Utils import org.apache.spark.{Logging, SparkContext} -/** - * Use to encapsulate the user input parameters from spark-sql 
CLI. - */ -private[hive] case class UserInput (isSilent: Boolean, isVerbose: Boolean) /** * This is the HiveQL Dialect, this dialect is strongly bind with HiveContext @@ -97,19 +93,13 @@ class HiveContext private[hive]( listener: SQLListener, @transient private val execHive: ClientWrapper, @transient private val metaHive: ClientInterface, - isRootContext: Boolean, - userInput: Option[UserInput] = None) + isRootContext: Boolean) extends SQLContext(sc, cacheManager, listener, isRootContext) with Logging { self => def this(sc: SparkContext) = { - this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true, None) - } - - def this(sc: SparkContext, userInput: Option[UserInput]) = { - this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true, userInput) + this(sc, new CacheManager, SQLContext.createListenerAndUI(sc), null, null, true) } - def this(sc: JavaSparkContext) = this(sc.sc) import org.apache.spark.sql.hive.HiveContext._ @@ -225,7 +215,7 @@ class HiveContext private[hive]( config = newTemporaryConfiguration(useInMemoryDerby = true), isolationOn = false, baseClassLoader = Utils.getContextOrSparkClassLoader) - loader.createClient(userInput).asInstanceOf[ClientWrapper] + loader.createClient().asInstanceOf[ClientWrapper] } /** @@ -334,7 +324,7 @@ class HiveContext private[hive]( barrierPrefixes = hiveMetastoreBarrierPrefixes, sharedPrefixes = hiveMetastoreSharedPrefixes) } - isolatedLoader.createClient(userInput) + isolatedLoader.createClient() } protected[sql] override def parseSql(sql: String): LogicalPlan = { @@ -666,6 +656,7 @@ class HiveContext private[hive]( } } + private[hive] object HiveContext { /** The version of hive used internally by Spark SQL. */ val hiveExecutionVersion: String = "1.2.1" diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala index 34babebc18aae..ab18ccb929de1 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala @@ -36,7 +36,6 @@ import org.apache.hadoop.security.UserGroupInformation import org.apache.hadoop.util.VersionInfo import org.apache.spark.{SparkConf, SparkException, Logging} -import org.apache.spark.sql.hive.UserInput import org.apache.spark.sql.catalyst.expressions.Expression import org.apache.spark.sql.execution.QueryExecutionException import org.apache.spark.util.{CircularBuffer, Utils} @@ -62,8 +61,7 @@ private[hive] class ClientWrapper( override val version: HiveVersion, config: Map[String, String], initClassLoader: ClassLoader, - val clientLoader: IsolatedClientLoader, - userInput: Option[UserInput]) + val clientLoader: IsolatedClientLoader) extends ClientInterface with Logging { overrideHadoopShims() @@ -195,7 +193,7 @@ private[hive] class ClientWrapper( SessionState.start(state) state.out = new PrintStream(outputBuffer, true, "UTF-8") state.err = new PrintStream(outputBuffer, true, "UTF-8") - userInput.foreach { input => + IsolatedClientLoader.userInput.foreach { input => state.setIsSilent(input.isSilent) state.setIsVerbose(input.isVerbose) } @@ -587,7 +585,7 @@ private[hive] class ClientWrapper( } def newSession(): ClientWrapper = { - clientLoader.createClient(userInput).asInstanceOf[ClientWrapper] + clientLoader.createClient().asInstanceOf[ClientWrapper] } def reset(): Unit = withHiveState { diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala index 0effcb1fb3f62..e2e43ea4822cd 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala @@ -26,15 +26,23 @@ import scala.language.reflectiveCalls import scala.util.Try import org.apache.commons.io.{FileUtils, IOUtils} +import org.apache.hadoop.hive.ql.session.SessionState import org.apache.spark.Logging import org.apache.spark.deploy.SparkSubmitUtils import org.apache.spark.sql.catalyst.util.quietly -import org.apache.spark.sql.hive.{UserInput, HiveContext} +import org.apache.spark.sql.hive.HiveContext import org.apache.spark.util.{MutableURLClassLoader, Utils} +/** + * Used to encapsulate the user input parameters from the spark-sql CLI. + */ +private[hive] case class UserInput (isSilent: Boolean, isVerbose: Boolean) + + /** Factory for `IsolatedClientLoader` with specific versions of hive. */ private[hive] object IsolatedClientLoader extends Logging { + var userInput: Option[UserInput] = null /** * Creates isolated Hive client loaders by downloading the requested version from maven. */ @@ -233,9 +241,12 @@ private[hive] class IsolatedClientLoader( } /** The isolated client interface to Hive. */ - private[hive] def createClient(userInput: Option[UserInput] = None): ClientInterface = { + private[hive] def createClient(): ClientInterface = { if (!isolationOn) { - return new ClientWrapper(version, config, baseClassLoader, this, userInput) + val ss = baseClassLoader.loadClass("org.apache.hadoop.hive.ql.session.SessionState") + .getMethod("get").invoke(null).asInstanceOf[SessionState] + IsolatedClientLoader.userInput = Some(UserInput(ss.getIsSilent, ss.getIsVerbose)) + return new ClientWrapper(version, config, baseClassLoader, this) } // Pre-reflective instantiation setup. logDebug("Initializing the logger to avoid disaster...") @@ -246,7 +257,7 @@ private[hive] class IsolatedClientLoader( classLoader .loadClass(classOf[ClientWrapper].getName) .getConstructors.head - .newInstance(version, config, classLoader, this, userInput) + .newInstance(version, config, classLoader, this) .asInstanceOf[ClientInterface] } catch { case e: InvocationTargetException => From 06efe5d5300ae57179a0004b38c7aab1037fe002 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Tue, 22 Dec 2015 09:06:51 +0800 Subject: [PATCH 3/6] minimize the code changes --- .../org/apache/spark/sql/hive/client/IsolatedClientLoader.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala index e2e43ea4822cd..176bc4d1cc86c 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala @@ -243,6 +243,7 @@ private[hive] class IsolatedClientLoader( /** The isolated client interface to Hive. */ private[hive] def createClient(): ClientInterface = { if (!isolationOn) { + // Store the user input parameters from the Spark SQL CLI before they are wiped out in ClientWrapper. 
val ss = baseClassLoader.loadClass("org.apache.hadoop.hive.ql.session.SessionState") .getMethod("get").invoke(null).asInstanceOf[SessionState] IsolatedClientLoader.userInput = Some(UserInput(ss.getIsSilent, ss.getIsVerbose)) From 0d3c5969251f37dbcb909f37ff4aea266f087707 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Tue, 22 Dec 2015 09:11:40 +0800 Subject: [PATCH 4/6] blank line --- .../apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala | 1 - .../org/apache/spark/sql/hive/client/IsolatedClientLoader.scala | 1 - 2 files changed, 2 deletions(-) diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala index 580d8e37cc956..03bb2c222503f 100644 --- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala +++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala @@ -263,7 +263,6 @@ private[hive] object SparkSQLCLIDriver extends Logging { } - private[hive] class SparkSQLCLIDriver extends CliDriver with Logging { private val sessionState = SessionState.get().asInstanceOf[CliSessionState] diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala index 176bc4d1cc86c..85cb0ab1758f2 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala @@ -39,7 +39,6 @@ import org.apache.spark.util.{MutableURLClassLoader, Utils} */ private[hive] case class UserInput (isSilent: Boolean, isVerbose: Boolean) - /** Factory for `IsolatedClientLoader` with specific versions of hive. */ private[hive] object IsolatedClientLoader extends Logging { var userInput: Option[UserInput] = null From c1a7ef5df01bf43fa05b27c9ca07b187d8acdcd4 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Tue, 22 Dec 2015 09:16:04 +0800 Subject: [PATCH 5/6] change null to None --- .../org/apache/spark/sql/hive/client/IsolatedClientLoader.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala index 85cb0ab1758f2..ccddbc8b58e93 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala @@ -41,7 +41,7 @@ private[hive] case class UserInput (isSilent: Boolean, isVerbose: Boolean) /** Factory for `IsolatedClientLoader` with specific versions of hive. */ private[hive] object IsolatedClientLoader extends Logging { - var userInput: Option[UserInput] = null + var userInput: Option[UserInput] = None /** * Creates isolated Hive client loaders by downloading the requested version from maven. 
*/ From 592ed63295c3ffe91fd2c384a4d80f95e0b08e65 Mon Sep 17 00:00:00 2001 From: "zhichao.li" Date: Tue, 22 Dec 2015 09:33:48 +0800 Subject: [PATCH 6/6] blank line --- .../scala/org/apache/spark/sql/hive/client/ClientWrapper.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala index ab18ccb929de1..64ec7882625d2 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/ClientWrapper.scala @@ -64,6 +64,7 @@ private[hive] class ClientWrapper( val clientLoader: IsolatedClientLoader) extends ClientInterface with Logging { + overrideHadoopShims() // !! HACK ALERT !!
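Net effect of the series: instead of threading a userInput constructor parameter through HiveContext, IsolatedClientLoader, and ClientWrapper (PATCH 1), the CLI's silent/verbose flags are captured once on the IsolatedClientLoader companion object and re-applied when ClientWrapper starts its own SessionState (PATCHes 2-6). The following is a minimal, self-contained Scala sketch of that hand-off, not code from the patch; CliFlags, FlagStore, and MetaClient are illustrative stand-ins for UserInput, IsolatedClientLoader.userInput, and ClientWrapper.

// Illustrative sketch only: CliFlags, FlagStore, and MetaClient stand in for
// UserInput, IsolatedClientLoader.userInput, and ClientWrapper respectively.
case class CliFlags(isSilent: Boolean, isVerbose: Boolean)

object FlagStore {
  // Written once by the loader before the isolated client replaces the
  // driver-side session state (mirrors `var userInput: Option[UserInput] = None`).
  var flags: Option[CliFlags] = None
}

class MetaClient {
  private var silent = false
  private var verbose = false

  // Mirrors ClientWrapper re-applying the stashed flags to its fresh session:
  // state.setIsSilent(input.isSilent); state.setIsVerbose(input.isVerbose).
  def startSession(): Unit = FlagStore.flags.foreach { f =>
    silent = f.isSilent
    verbose = f.isVerbose
  }

  // Mirrors the `state.out != null && !state.getIsSilent` guard: the command
  // echo is suppressed when the user asked for silent mode.
  def runCommand(cmd: String): Unit = {
    if (!silent) println(s"> $cmd")
    if (verbose) println(s"(verbose) first token: ${cmd.takeWhile(_ != ' ')}")
  }
}

object Demo extends App {
  // The CLI entry point captures the user's silent/verbose options up front...
  FlagStore.flags = Some(CliFlags(isSilent = true, isVerbose = false))
  // ...and every client created afterwards inherits them.
  val client = new MetaClient
  client.startSession()
  client.runCommand("ADD JAR /tmp/example.jar") // prints nothing: silent is on
}

The shared var is the trade-off the series settles on: it avoids widening four constructor signatures (and the reflective newInstance call), at the cost of process-wide mutable state.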