Skip to content

Commit

Permalink
fix
Browse files Browse the repository at this point in the history
  • Loading branch information
gatorsmile committed Jan 19, 2018
1 parent 5b97119 commit b9aa879
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 12 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -176,16 +176,17 @@ private[hive] class TestHiveSparkSession(
}

{ // set the metastore temporary configuration
var metastoreTempConf = HiveUtils.newTemporaryConfiguration(useInMemoryDerby = false) ++ Map(
val metastoreTempConf = HiveUtils.newTemporaryConfiguration(useInMemoryDerby = false) ++ Map(
ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN.varname -> "true",
// scratch directory used by Hive's metastore client
ConfVars.SCRATCHDIR.varname -> TestHiveContext.makeScratchDir().toURI.toString,
ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY.varname -> "1")
// After session cloning, the JDBC connect string for a JDBC metastore should not be changed.
existingSharedState.foreach { state =>
metastoreTempConf += ConfVars.METASTORECONNECTURLKEY.varname ->
state.sparkContext.hadoopConfiguration.get(ConfVars.METASTORECONNECTURLKEY.varname)
}
ConfVars.METASTORE_CLIENT_CONNECT_RETRY_DELAY.varname -> "1") ++
// After session cloning, the JDBC connect string for a JDBC metastore should not be changed.
existingSharedState.map { state =>
val connKey =
state.sparkContext.hadoopConfiguration.get(ConfVars.METASTORECONNECTURLKEY.varname)
ConfVars.METASTORECONNECTURLKEY.varname -> connKey
}

metastoreTempConf.foreach { case (k, v) =>
sc.hadoopConfiguration.set(k, v)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,12 @@ class HiveSessionStateSuite extends SessionStateSuite with TestHiveSingleton {

test("Clone then newSession") {
  val sparkSession = hiveContext.sparkSession
  // Read the Hadoop configuration once; both the before- and after- reads below
  // must consult the same Configuration instance to be comparable.
  val conf = sparkSession.sparkContext.hadoopConfiguration
  // Metastore JDBC connect string (points at the Derby directory) before any
  // session operations are performed.
  val oldValue = conf.get(ConfVars.METASTORECONNECTURLKEY.varname)
  sparkSession.cloneSession()
  sparkSession.sharedState.externalCatalog.client.newSession()
  // Neither cloning the session nor opening a new metastore client session may
  // rewrite the connect string — i.e. the Derby directory must stay in place.
  val newValue = conf.get(ConfVars.METASTORECONNECTURLKEY.varname)
  assert(oldValue == newValue,
    "cloneSession and then newSession should not affect the Derby directory")
}
}

0 comments on commit b9aa879

Please sign in to comment.