Address comments.
ueshin committed Jul 3, 2020
1 parent 5a9befa commit aeed617
Showing 3 changed files with 3 additions and 2 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2562,7 +2562,7 @@ object SparkContext extends Logging {
    *
    * Throws an exception if a SparkContext is about to be created in executors.
    */
-  private[spark] def assertOnDriver(): Unit = {
+  private def assertOnDriver(): Unit = {
     if (TaskContext.get != null) {
       // we're accessing it during task execution, fail.
       throw new IllegalStateException(
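For context, a rough sketch of how this guard operates (the call site and the exact message below are assumptions for illustration, not part of this diff): SparkContext initialization is expected to invoke assertOnDriver(), and TaskContext.get is non-null only while a task is running on an executor, so a non-null value means the context is being created in the wrong place. Narrowing the visibility from private[spark] to private matches the fact that the only intended caller lives inside SparkContext itself.

import org.apache.spark.TaskContext

// Hedged sketch, not the actual Spark sources; the constructor call site and
// the message text are assumed for illustration.
object DriverOnlyGuardSketch {
  def assertOnDriver(): Unit = {
    // TaskContext.get returns a non-null value only inside a running task,
    // i.e. on an executor; on the driver it is null.
    if (TaskContext.get != null) {
      throw new IllegalStateException(
        "SparkContext should only be created and accessed on the driver.")
    }
  }
  // Assumed usage: called near the start of SparkContext initialization.
}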
2 changes: 1 addition & 1 deletion core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -935,7 +935,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
     }
   }

-  test("Disallow to create SparkContext in executors") {
+  test("SPARK-32160: Disallow to create SparkContext in executors") {
     sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local-cluster[3, 1, 1024]"))

     val error = intercept[SparkException] {
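The body of the renamed test is collapsed above; as a hedged illustration (modeled on the pyspark test later in this commit, not taken from the actual suite code), constructing a SparkContext inside a task should now fail on the executor and surface on the driver as the intercepted SparkException:

// Hypothetical continuation, assuming the local-cluster `sc` created above
// and the suite's existing imports (SparkConf, SparkException, intercept).
val error = intercept[SparkException] {
  sc.range(0, 2).foreach { _ =>
    new SparkContext(new SparkConf())  // triggers the driver-only guard on the executor
  }
}
// The executor-side IllegalStateException is expected to arrive here wrapped
// in (or as the cause of) the intercepted SparkException.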
1 change: 1 addition & 0 deletions python/pyspark/tests/test_context.py
@@ -268,6 +268,7 @@ def test_resources(self):
         self.assertEqual(len(resources), 0)

     def test_disallow_to_create_spark_context_in_executors(self):
+        # SPARK-32160: SparkContext should not be created in executors.
         with SparkContext("local-cluster[3, 1, 1024]") as sc:
             with self.assertRaises(Exception) as context:
                 sc.range(2).foreach(lambda _: SparkContext())
