diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index a46cb6b3f4013..4849c7cdc71be 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -18,9 +18,7 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
 
-import scala.collection.mutable.ArrayBuffer
 import scala.tools.nsc.interpreter.SimpleReader
 
 import org.apache.log4j.{Level, LogManager}
@@ -34,25 +32,17 @@ class ReplSuite extends SparkFunSuite {
 
   def runInterpreter(master: String, input: String): String = {
     val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
 
-    val in = new BufferedReader(new StringReader(input + "\n"))
-    val out = new StringWriter()
-    val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
     val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
+    val classpath = System.getProperty("java.class.path")
     System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
 
+    Main.sparkContext = null
     Main.sparkSession = null // causes recreation of SparkContext for each test.
     Main.conf.set("spark.master", master)
+
+    val in = new BufferedReader(new StringReader(input + "\n"))
+    val out = new StringWriter()
     Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
 
     if (oldExecutorClasspath != null) {
@@ -60,7 +50,8 @@ class ReplSuite extends SparkFunSuite {
     } else {
       System.clearProperty(CONF_EXECUTOR_CLASSPATH)
     }
-    return out.toString
+
+    out.toString
   }
 
   // Simulate the paste mode in Scala REPL.
diff --git a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
index d49e0fd85229f..039fc627f52f8 100644
--- a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
@@ -18,9 +18,6 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
-
-import scala.collection.mutable.ArrayBuffer
 
 import org.apache.commons.lang3.StringEscapeUtils
 
@@ -42,19 +39,9 @@ class SingletonReplSuite extends SparkFunSuite {
 
   override def beforeAll(): Unit = {
     super.beforeAll()
-    val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-
+    val classpath = System.getProperty("java.class.path")
     System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
     Main.conf.set("spark.master", "local-cluster[2,1,1024]")
     val interp = new SparkILoop(
       new BufferedReader(new InputStreamReader(new PipedInputStream(in))),