Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[SPARK-21345][SQL][TEST][test-maven] SparkSessionBuilderSuite should clean up stopped sessions. #18567

Closed
wants to merge 6 commits into from
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,50 +17,49 @@

package org.apache.spark.sql

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.sql.internal.SQLConf

/**
* Test cases for the builder pattern of [[SparkSession]].
*/
class SparkSessionBuilderSuite extends SparkFunSuite {
class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach {

private var initialSession: SparkSession = _
override def afterEach(): Unit = {
// This suite should not interfere with the other test suites.
SparkSession.getActiveSession.foreach(_.stop())
SparkSession.clearActiveSession()
SparkSession.getDefaultSession.foreach(_.stop())
SparkSession.clearDefaultSession()
}

private lazy val sparkContext: SparkContext = {
initialSession = SparkSession.builder()
test("create with config options and propagate them to SparkContext and SparkSession") {
val session = SparkSession.builder()
.master("local")
.config("spark.ui.enabled", value = false)
.config("some-config", "v2")
.getOrCreate()
initialSession.sparkContext
}

test("create with config options and propagate them to SparkContext and SparkSession") {
// Creating a new session with config - this works by just calling the lazy val
sparkContext
assert(initialSession.sparkContext.conf.get("some-config") == "v2")
assert(initialSession.conf.get("some-config") == "v2")
SparkSession.clearDefaultSession()
assert(session.sparkContext.conf.get("some-config") == "v2")
assert(session.conf.get("some-config") == "v2")
}

test("use global default session") {
val session = SparkSession.builder().getOrCreate()
val session = SparkSession.builder().master("local").getOrCreate()
assert(SparkSession.builder().getOrCreate() == session)
SparkSession.clearDefaultSession()
}

test("config options are propagated to existing SparkSession") {
val session1 = SparkSession.builder().config("spark-config1", "a").getOrCreate()
val session1 = SparkSession.builder().master("local").config("spark-config1", "a").getOrCreate()
assert(session1.conf.get("spark-config1") == "a")
val session2 = SparkSession.builder().config("spark-config1", "b").getOrCreate()
assert(session1 == session2)
assert(session1.conf.get("spark-config1") == "b")
SparkSession.clearDefaultSession()
}

test("use session from active thread session and propagate config options") {
val defaultSession = SparkSession.builder().getOrCreate()
val defaultSession = SparkSession.builder().master("local").getOrCreate()
val activeSession = defaultSession.newSession()
SparkSession.setActiveSession(activeSession)
val session = SparkSession.builder().config("spark-config2", "a").getOrCreate()
Expand All @@ -73,16 +72,14 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
SparkSession.clearActiveSession()

assert(SparkSession.builder().getOrCreate() == defaultSession)
SparkSession.clearDefaultSession()
}

test("create a new session if the default session has been stopped") {
val defaultSession = SparkSession.builder().getOrCreate()
val defaultSession = SparkSession.builder().master("local").getOrCreate()
SparkSession.setDefaultSession(defaultSession)
defaultSession.stop()
val newSession = SparkSession.builder().master("local").getOrCreate()
assert(newSession != defaultSession)
newSession.stop()
}

test("create a new session if the active thread session has been stopped") {
Expand All @@ -91,11 +88,9 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
activeSession.stop()
val newSession = SparkSession.builder().master("local").getOrCreate()
assert(newSession != activeSession)
newSession.stop()
}

test("create SparkContext first then SparkSession") {
sparkContext.stop()
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: the `session.stop()` call at the end of each test case can also be removed, since `afterEach` now stops the sessions.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thank you. All of them are removed.

val conf = new SparkConf().setAppName("test").setMaster("local").set("key1", "value1")
val sparkContext2 = new SparkContext(conf)
val session = SparkSession.builder().config("key2", "value2").getOrCreate()
Expand All @@ -105,11 +100,9 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
// We won't update conf for existing `SparkContext`
assert(!sparkContext2.conf.contains("key2"))
assert(sparkContext2.conf.get("key1") == "value1")
session.stop()
}

test("create SparkContext first then pass context to SparkSession") {
sparkContext.stop()
val conf = new SparkConf().setAppName("test").setMaster("local").set("key1", "value1")
val newSC = new SparkContext(conf)
val session = SparkSession.builder().sparkContext(newSC).config("key2", "value2").getOrCreate()
Expand All @@ -121,14 +114,12 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
// the conf of this sparkContext will not contain the conf set through the API config.
assert(!session.sparkContext.conf.contains("key2"))
assert(session.sparkContext.conf.get("spark.app.name") == "test")
session.stop()
}

test("SPARK-15887: hive-site.xml should be loaded") {
val session = SparkSession.builder().master("local").getOrCreate()
assert(session.sessionState.newHadoopConf().get("hive.in.test") == "true")
assert(session.sparkContext.hadoopConfiguration.get("hive.in.test") == "true")
session.stop()
}

test("SPARK-15991: Set global Hadoop conf") {
Expand All @@ -140,7 +131,6 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
assert(session.sessionState.newHadoopConf().get(mySpecialKey) == mySpecialValue)
} finally {
session.sparkContext.hadoopConfiguration.unset(mySpecialKey)
session.stop()
}
}
}