Skip to content
Browse files

[SPARK-23000][TEST] Keep Derby DB Location Unchanged After Session Cloning


## What changes were proposed in this pull request?
After session cloning in `TestHive`, the conf of the singleton SparkContext for derby DB location is changed to a new directory. The new directory is created in `HiveUtils.newTemporaryConfiguration(useInMemoryDerby = false)`.

This PR is to keep the conf value of `ConfVars.METASTORECONNECTURLKEY.varname` unchanged during the session clone.

## How was this patch tested?
The issue can be reproduced by the command:
> build/sbt -Phive "hive/test-only org.apache.spark.sql.hive.HiveSessionStateSuite org.apache.spark.sql.hive.DataSourceWithHiveMetastoreCatalogSuite"

Also added a test case.

Author: gatorsmile <>

Closes #20328 from gatorsmile/fixTestFailure.

(cherry picked from commit 6c39654)
Signed-off-by: Wenchen Fan <>
  • Loading branch information...
gatorsmile authored and cloud-fan committed Jan 19, 2018
1 parent b7a8199 commit 8d6845cf926a14e21ca29a43f2cc9a3a9475afd5
@@ -17,8 +17,6 @@

package org.apache.spark.sql

import org.scalatest.BeforeAndAfterAll
import org.scalatest.BeforeAndAfterEach
import scala.collection.mutable.ArrayBuffer

import org.apache.spark.SparkFunSuite
@@ -28,8 +26,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.util.QueryExecutionListener

class SessionStateSuite extends SparkFunSuite
with BeforeAndAfterEach with BeforeAndAfterAll {
class SessionStateSuite extends SparkFunSuite {

* A shared SparkSession for all tests in this suite. Make sure you reset any changes to this
@@ -180,7 +180,13 @@ private[hive] class TestHiveSparkSession(
ConfVars.METASTORE_INTEGER_JDO_PUSHDOWN.varname -> "true",
// scratch directory used by Hive's metastore client
ConfVars.SCRATCHDIR.varname -> TestHiveContext.makeScratchDir().toURI.toString,
// After session cloning, the JDBC connect string for a JDBC metastore should not be changed. { state =>
val connKey =
ConfVars.METASTORECONNECTURLKEY.varname -> connKey

metastoreTempConf.foreach { case (k, v) =>
sc.hadoopConfiguration.set(k, v)
@@ -17,16 +17,15 @@

package org.apache.spark.sql.hive

import org.scalatest.BeforeAndAfterEach
import org.apache.hadoop.hive.conf.HiveConf.ConfVars

import org.apache.spark.sql._
import org.apache.spark.sql.hive.test.TestHiveSingleton

* Run all tests from `SessionStateSuite` with a Hive based `SessionState`.
class HiveSessionStateSuite extends SessionStateSuite
with TestHiveSingleton with BeforeAndAfterEach {
class HiveSessionStateSuite extends SessionStateSuite with TestHiveSingleton {

override def beforeAll(): Unit = {
// Reuse the singleton session
@@ -38,4 +37,15 @@ class HiveSessionStateSuite extends SessionStateSuite
activeSession = null

test("Clone then newSession") {
val sparkSession = hiveContext.sparkSession
val conf = sparkSession.sparkContext.hadoopConfiguration
val oldValue = conf.get(ConfVars.METASTORECONNECTURLKEY.varname)
val newValue = conf.get(ConfVars.METASTORECONNECTURLKEY.varname)
assert(oldValue == newValue,
"cloneSession and then newSession should not affect the Derby directory")

0 comments on commit 8d6845c

Please sign in to comment.
You can’t perform that action at this time.