diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
index 599ea8955491f..74b4f19f1b069 100644
--- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
+++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala
@@ -32,6 +32,8 @@ trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self
   override def beforeAll(): Unit = {
     super.beforeAll()
     InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE)
+    System.setProperty("javax.xml.parsers.SAXParserFactory",
+      "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl")
   }
 
   override def afterEach(): Unit = {
diff --git a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
index 02e67c0af1258..d61d30efb0aa5 100644
--- a/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkFunSuite.scala
@@ -87,6 +87,8 @@ abstract class SparkFunSuite
     if (enableAutoThreadAudit) {
       doThreadPreAudit()
     }
+    System.setProperty("javax.xml.parsers.SAXParserFactory",
+      "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl")
     super.beforeAll()
   }
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index c5a72efcb786b..24d54f021a6d3 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -671,6 +671,8 @@ class SparkSubmitSuite
 
   // SPARK-7287
   test("includes jars passed in through --packages") {
+    System.setProperty("javax.xml.parsers.SAXParserFactory",
+      "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl")
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
     val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
@@ -682,6 +684,8 @@ class SparkSubmitSuite
       "--packages", Seq(main, dep).mkString(","),
       "--repositories", repo,
       "--conf", "spark.ui.enabled=false",
+      "--conf", "spark.driver.extraJavaOptions=-Djavax.xml.parsers.SAXParserFactory=" +
+        "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl",
       "--conf", "spark.master.rest.enabled=false",
       "--conf", s"spark.jars.ivySettings=${emptyIvySettings.getAbsolutePath()}",
       unusedJar.toString,
@@ -691,6 +695,8 @@ class SparkSubmitSuite
   }
 
   test("includes jars passed through spark.jars.packages and spark.jars.repositories") {
+    System.setProperty("javax.xml.parsers.SAXParserFactory",
+      "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl")
     val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
     val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
     val dep = MavenCoordinate("my.great.dep", "mylib", "0.1")
@@ -702,6 +708,8 @@ class SparkSubmitSuite
       "--conf", "spark.jars.packages=my.great.lib:mylib:0.1,my.great.dep:mylib:0.1",
       "--conf", s"spark.jars.repositories=$repo",
       "--conf", "spark.ui.enabled=false",
+      "--conf", "spark.driver.extraJavaOptions=-Djavax.xml.parsers.SAXParserFactory=" +
+        "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl",
       "--conf", "spark.master.rest.enabled=false",
       "--conf", s"spark.jars.ivySettings=${emptyIvySettings.getAbsolutePath()}",
       unusedJar.toString,
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
index 57328942cc620..ba05c088f3b8a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
@@ -112,7 +112,7 @@ class PlannerSuite extends SharedSparkSession with AdaptiveSparkPlanHelper {
         val planned = sql(
           """
             |SELECT l.a, l.b
-            |FROM testData2 l JOIN (SELECT * FROM testLimit LIMIT 1) r ON (l.a = r.key)
+            |FROM testData2 l JOIN (SELECT * FROM testLimit LIMIT 2) r ON (l.a = r.key)
           """.stripMargin).queryExecution.sparkPlan
 
         val broadcastHashJoins = planned.collect { case join: BroadcastHashJoinExec => join }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSparkSession.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSparkSession.scala
index ed2e309fa075a..f2a8df392715a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSparkSession.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SharedSparkSession.scala
@@ -43,6 +43,9 @@ trait SharedSparkSession extends SQLTestUtils with SharedSparkSessionBase {
 
   protected override def beforeAll(): Unit = {
     doThreadPreAudit()
+    System.setProperty("javax.xml.parsers.SAXParserFactory",
+      "com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl")
+
     super.beforeAll()
   }
 