From a90cbde817cd149004a751d80d1e676837bce6f9 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Thu, 28 Mar 2019 19:28:16 -0500
Subject: [PATCH 1/2] First attempt to fix Java 11 REPL tests

---
 .../org/apache/spark/repl/ReplSuite.scala     | 41 +++++++++----------
 .../spark/repl/SingletonReplSuite.scala       | 20 ++++-----
 2 files changed, 28 insertions(+), 33 deletions(-)

diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index a46cb6b3f4013..b5c992bb5cf3a 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.repl
 import java.io._
 import java.net.URLClassLoader
 
-import scala.collection.mutable.ArrayBuffer
 import scala.tools.nsc.interpreter.SimpleReader
 
 import org.apache.log4j.{Level, LogManager}
@@ -34,33 +33,33 @@ class ReplSuite extends SparkFunSuite {
 
   def runInterpreter(master: String, input: String): String = {
     val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
-    val in = new BufferedReader(new StringReader(input + "\n"))
-    val out = new StringWriter()
+
     val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
 
     val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
-    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+    if (oldExecutorClasspath == null) {
+      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
+    } else {
+      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
+    }
+
+    val classpath = cl match {
+      case urlLoader: URLClassLoader =>
+        val paths = urlLoader.getURLs.filter(_.getProtocol == "file").map(_.getFile)
+        val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
+        System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+        classpath
+      case _ => System.getProperty("java.class.path")
+    }
+
     Main.sparkContext = null
     Main.sparkSession = null // causes recreation of SparkContext for each test.
     Main.conf.set("spark.master", master)
-    Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
 
-    if (oldExecutorClasspath != null) {
-      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
-    } else {
-      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
-    }
-    return out.toString
+    val in = new BufferedReader(new StringReader(input + "\n"))
+    val out = new StringWriter()
+    Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
+    out.toString
   }
 
   // Simulate the paste mode in Scala REPL.
diff --git a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
index d49e0fd85229f..19b36dac94b40 100644
--- a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
@@ -20,8 +20,6 @@ package org.apache.spark.repl
 import java.io._
 import java.net.URLClassLoader
 
-import scala.collection.mutable.ArrayBuffer
-
 import org.apache.commons.lang3.StringEscapeUtils
 
 import org.apache.spark.SparkFunSuite
@@ -43,18 +41,16 @@ class SingletonReplSuite extends SparkFunSuite {
     super.beforeAll()
 
     val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
+
+    val classpath = cl match {
+      case urlLoader: URLClassLoader =>
+        val paths = urlLoader.getURLs.filter(_.getProtocol == "file").map(_.getFile)
+        val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
+        System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+        classpath
+      case _ => System.getProperty("java.class.path")
     }
-    val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
 
     Main.conf.set("spark.master", "local-cluster[2,1,1024]")
     val interp = new SparkILoop(
       new BufferedReader(new InputStreamReader(new PipedInputStream(in))),

From fb5ed124f34cd85f99d127d1701663c561af0750 Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Fri, 29 Mar 2019 09:55:40 -0500
Subject: [PATCH 2/2] Simplify classpath logic and fix restoring classpath

---
 .../org/apache/spark/repl/ReplSuite.scala     | 27 +++++++------------
 .../spark/repl/SingletonReplSuite.scala       | 13 ++-------
 2 files changed, 11 insertions(+), 29 deletions(-)

diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index b5c992bb5cf3a..4849c7cdc71be 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
 
 import scala.tools.nsc.interpreter.SimpleReader
 
@@ -33,24 +32,9 @@ class ReplSuite extends SparkFunSuite {
 
   def runInterpreter(master: String, input: String): String = {
     val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
-
-    val cl = getClass.getClassLoader
-
     val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
-    if (oldExecutorClasspath == null) {
-      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
-    } else {
-      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
-    }
-
-    val classpath = cl match {
-      case urlLoader: URLClassLoader =>
-        val paths = urlLoader.getURLs.filter(_.getProtocol == "file").map(_.getFile)
-        val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-        System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
-        classpath
-      case _ => System.getProperty("java.class.path")
-    }
+    val classpath = System.getProperty("java.class.path")
+    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
 
     Main.sparkContext = null
     Main.sparkSession = null // causes recreation of SparkContext for each test.
@@ -59,6 +43,13 @@ class ReplSuite extends SparkFunSuite {
     val in = new BufferedReader(new StringReader(input + "\n"))
     val out = new StringWriter()
     Main.doMain(Array("-classpath", classpath), new SparkILoop(in, new PrintWriter(out)))
+
+    if (oldExecutorClasspath != null) {
+      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
+    } else {
+      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
+    }
+
     out.toString
   }
 
diff --git a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
index 19b36dac94b40..039fc627f52f8 100644
--- a/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.repl
 
 import java.io._
-import java.net.URLClassLoader
 
 import org.apache.commons.lang3.StringEscapeUtils
 
@@ -40,16 +39,8 @@ class SingletonReplSuite extends SparkFunSuite {
   override def beforeAll(): Unit = {
     super.beforeAll()
 
-    val cl = getClass.getClassLoader
-
-    val classpath = cl match {
-      case urlLoader: URLClassLoader =>
-        val paths = urlLoader.getURLs.filter(_.getProtocol == "file").map(_.getFile)
-        val classpath = paths.map(new File(_).getAbsolutePath).mkString(File.pathSeparator)
-        System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
-        classpath
-      case _ => System.getProperty("java.class.path")
-    }
+    val classpath = System.getProperty("java.class.path")
+    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
 
     Main.conf.set("spark.master", "local-cluster[2,1,1024]")
     val interp = new SparkILoop(