From 117348827d223822cb70864ebada0594d5edd8ad Mon Sep 17 00:00:00 2001
From: Sean Owen
Date: Tue, 5 Apr 2016 02:41:31 +0100
Subject: [PATCH] First pass at changes needed if Java 8 is required

---
 build/mvn                                          |   2 +-
 build/sbt-launch-lib.bash                          |   2 +-
 .../spark/launcher/WorkerCommandBuilder.scala      |   1 -
 .../java/org/apache/spark/Java8APISuite.java       |   0
 .../org/apache/spark/JDK8ScalaSuite.scala          |   0
 dev/create-release/release-build.sh                |   1 -
 dev/make-distribution.sh                           |   2 +-
 dev/mima                                           |   1 -
 dev/run-tests.py                                   |   3 -
 docs/building-spark.md                             |  19 +--
 docs/index.md                                      |   2 +-
 docs/programming-guide.md                          |   6 +-
 external/java8-tests/README.md                     |  22 ----
 external/java8-tests/pom.xml                       | 108 ------------------
 .../src/test/resources/log4j.properties            |  27 -----
 .../spark/launcher/ChildProcAppHandle.java         |   9 +-
 .../spark/launcher/CommandBuilderUtils.java        |  36 ------
 .../apache/spark/launcher/SparkAppHandle.java      |   3 -
 .../launcher/SparkClassCommandBuilder.java         |   1 -
 .../launcher/SparkSubmitCommandBuilder.java        |   1 -
 .../SparkSubmitCommandBuilderSuite.java            |   8 +-
 .../src/test/resources/spark-defaults.conf         |   2 +-
 pom.xml                                            |  43 +------
 project/SparkBuild.scala                           |   7 +-
 .../scala/org/apache/spark/sql/Dataset.scala       |   2 +-
 .../org/apache/spark/sql/SQLContext.scala          |  11 --
 .../org/apache/spark/sql/JavaUDFSuite.java         |  12 +-
 sql/hive/pom.xml                                   |   2 +-
 .../apache/spark/streaming/Java8APISuite.java      |   0
 .../org/apache/spark/deploy/yarn/Client.scala      |   1 -
 .../spark/deploy/yarn/ExecutorRunnable.scala       |   1 -
 31 files changed, 25 insertions(+), 310 deletions(-)
 rename {external/java8-tests => core}/src/test/java/org/apache/spark/Java8APISuite.java (100%)
 rename {external/java8-tests => core}/src/test/scala/org/apache/spark/JDK8ScalaSuite.scala (100%)
 delete mode 100644 external/java8-tests/README.md
 delete mode 100644 external/java8-tests/pom.xml
 delete mode 100644 external/java8-tests/src/test/resources/log4j.properties
 rename {external/java8-tests => streaming}/src/test/java/org/apache/spark/streaming/Java8APISuite.java (100%)

diff --git a/build/mvn b/build/mvn
index 58058c04b891..c22f2fa59813 100755
--- a/build/mvn
+++ b/build/mvn
@@ -22,7 +22,7 @@ _DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 # Preserve the calling directory
 _CALLING_DIR="$(pwd)"
 # Options used during compilation
-_COMPILE_JVM_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
+_COMPILE_JVM_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=512m"
 
 # Installs any application tarball given a URL, the expected tarball name,
 # and, optionally, a checkable binary path to determine if the binary has
diff --git a/build/sbt-launch-lib.bash b/build/sbt-launch-lib.bash
index 615f84839465..4732669ee651 100755
--- a/build/sbt-launch-lib.bash
+++ b/build/sbt-launch-lib.bash
@@ -117,7 +117,7 @@ get_mem_opts () {
   (( $perm < 4096 )) || perm=4096
   local codecache=$(( $perm / 2 ))
 
-  echo "-Xms${mem}m -Xmx${mem}m -XX:MaxPermSize=${perm}m -XX:ReservedCodeCacheSize=${codecache}m"
+  echo "-Xms${mem}m -Xmx${mem}m -XX:ReservedCodeCacheSize=${codecache}m"
 }
 
 require_arg () {
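JDK 8 removed the permanent generation entirely (JEP 122), so a Java 8 VM ignores `-XX:MaxPermSize` flags like the ones deleted above and only prints a warning. A minimal, hypothetical probe (not part of this patch) that makes the difference visible by listing the JVM's memory pools:

    import java.lang.management.ManagementFactory;
    import java.lang.management.MemoryPoolMXBean;

    public class PermGenProbe {
      public static void main(String[] args) {
        // A Java 7 VM reports a "Perm Gen" pool here; a Java 8+ VM reports
        // "Metaspace" instead, which grows natively and needs no size cap.
        for (MemoryPoolMXBean pool : ManagementFactory.getMemoryPoolMXBeans()) {
          System.out.println(pool.getName());
        }
      }
    }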
diff --git a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
index a2add6161728..9f259f0a7638 100644
--- a/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/launcher/WorkerCommandBuilder.scala
@@ -40,7 +40,6 @@ private[spark] class WorkerCommandBuilder(sparkHome: String, memoryMb: Int, comm
     cmd.add(s"-Xms${memoryMb}M")
     cmd.add(s"-Xmx${memoryMb}M")
     command.javaOpts.foreach(cmd.add)
-    CommandBuilderUtils.addPermGenSizeOpt(cmd)
     addOptionString(cmd, getenv("SPARK_JAVA_OPTS"))
     cmd
   }
diff --git a/external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java b/core/src/test/java/org/apache/spark/Java8APISuite.java
similarity index 100%
rename from external/java8-tests/src/test/java/org/apache/spark/Java8APISuite.java
rename to core/src/test/java/org/apache/spark/Java8APISuite.java
diff --git a/external/java8-tests/src/test/scala/org/apache/spark/JDK8ScalaSuite.scala b/core/src/test/scala/org/apache/spark/JDK8ScalaSuite.scala
similarity index 100%
rename from external/java8-tests/src/test/scala/org/apache/spark/JDK8ScalaSuite.scala
rename to core/src/test/scala/org/apache/spark/JDK8ScalaSuite.scala
diff --git a/dev/create-release/release-build.sh b/dev/create-release/release-build.sh
index 65e80fc76056..28c67bf2a45b 100755
--- a/dev/create-release/release-build.sh
+++ b/dev/create-release/release-build.sh
@@ -216,7 +216,6 @@ if [[ "$1" == "docs" ]]; then
   echo "Building Spark docs"
   dest_dir="$REMOTE_PARENT_DIR/${DEST_DIR_NAME}-docs"
   cd docs
-  # Compile docs with Java 7 to use nicer format
   # TODO: Make configurable to add this: PRODUCTION=1
   PRODUCTION=1 RELEASE_VERSION="$SPARK_VERSION" jekyll build
   echo "Copying release documentation to $dest_dir"
diff --git a/dev/make-distribution.sh b/dev/make-distribution.sh
index 4f7544f6ea78..9d7d244e2991 100755
--- a/dev/make-distribution.sh
+++ b/dev/make-distribution.sh
@@ -145,7 +145,7 @@ fi
 # Build uber fat JAR
 cd "$SPARK_HOME"
 
-export MAVEN_OPTS="${MAVEN_OPTS:--Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m}"
+export MAVEN_OPTS="${MAVEN_OPTS:--Xmx2g -XX:ReservedCodeCacheSize=512m}"
 
 # Store the command as an array because $MVN variable might have spaces in it.
 # Normal quoting tricks don't work.
diff --git a/dev/mima b/dev/mima
index c3553490451c..1e479411255c 100755
--- a/dev/mima
+++ b/dev/mima
@@ -31,7 +31,6 @@ OLD_DEPS_CLASSPATH="$(build/sbt -DcopyDependencies=false $SPARK_PROFILES "export
 rm -f .generated-mima*
 
 java \
-  -XX:MaxPermSize=1g \
   -Xmx2g \
   -cp "$TOOLS_CLASSPATH:$OLD_DEPS_CLASSPATH" \
   org.apache.spark.tools.GenerateMIMAIgnore
diff --git a/dev/run-tests.py b/dev/run-tests.py
index cbe347274e62..a216865d0693 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -489,9 +489,6 @@ def main():
 
     java_version = determine_java_version(java_exe)
 
-    if java_version.minor < 8:
-        print("[warn] Java 8 tests will not run because JDK version is < 1.8.")
-
     # install SparkR
     if which("R"):
         run_cmd([os.path.join(SPARK_HOME, "R", "install-dev.sh")])
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 13aa80496eae..038e6ceead9e 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -7,7 +7,7 @@ redirect_from: "building-with-maven.html"
 * This will become a table of contents (this text will be scraped).
 {:toc}
 
-Building Spark using Maven requires Maven 3.3.9 or newer and Java 7+.
+Building Spark using Maven requires Maven 3.3.9 or newer and Java 8+.
 The Spark build can supply a suitable Maven binary; see below.
 
 # Building with `build/mvn`
@@ -47,23 +47,10 @@ For more information on usage, run `./dev/make-distribution.sh --help`
 You'll need to configure Maven to use more memory than usual by setting `MAVEN_OPTS`.
 We recommend the following settings:
 
 {% highlight bash %}
-export MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m"
+export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=512m"
 {% endhighlight %}
 
-If you don't run this, you may see errors like the following:
-
-    [INFO] Compiling 203 Scala sources and 9 Java sources to /Users/me/Development/spark/core/target/scala-{{site.SCALA_BINARY_VERSION}}/classes...
-    [ERROR] PermGen space -> [Help 1]
-
-    [INFO] Compiling 203 Scala sources and 9 Java sources to /Users/me/Development/spark/core/target/scala-{{site.SCALA_BINARY_VERSION}}/classes...
-    [ERROR] Java heap space -> [Help 1]
-
-You can fix this by setting the `MAVEN_OPTS` variable as discussed before.
-
-**Note:**
-
-* For Java 8 and above this step is not required.
-* If using `build/mvn` with no `MAVEN_OPTS` set, the script will automate this for you.
+If using `build/mvn` with no `MAVEN_OPTS` set, the script will automate this for you.
 
 # Specifying the Hadoop Version
diff --git a/docs/index.md b/docs/index.md
index 20eab567a50d..1c6ce45f6cdc 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -24,7 +24,7 @@ Spark runs on both Windows and UNIX-like systems (e.g. Linux, Mac OS). It's easy
 locally on one machine --- all you need is to have `java` installed on your system `PATH`,
 or the `JAVA_HOME` environment variable pointing to a Java installation.
 
-Spark runs on Java 7+, Python 2.6+ and R 3.1+. For the Scala API, Spark {{site.SPARK_VERSION}} uses
+Spark runs on Java 8+, Python 2.6+ and R 3.1+. For the Scala API, Spark {{site.SPARK_VERSION}} uses
 Scala {{site.SCALA_BINARY_VERSION}}. You will need to use a compatible Scala version
 ({{site.SCALA_BINARY_VERSION}}.x).
diff --git a/docs/programming-guide.md b/docs/programming-guide.md
index 2f0ed5eca2b2..4f8200902d4a 100644
--- a/docs/programming-guide.md
+++ b/docs/programming-guide.md
@@ -53,9 +53,9 @@ import org.apache.spark.SparkConf
 
-Spark {{site.SPARK_VERSION}} works with Java 7 and higher. If you are using Java 8, Spark supports
+Spark {{site.SPARK_VERSION}} works with Java 8 and higher, and supports
 [lambda expressions](http://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html)
-for concisely writing functions, otherwise you can use the classes in the
+for concisely writing functions. Otherwise you can use the classes in the
 [org.apache.spark.api.java.function](api/java/index.html?org/apache/spark/api/java/function/package-summary.html) package.
 
 To write a Spark application in Java, you need to add a dependency on Spark. Spark is available through Maven Central at:
 
@@ -650,7 +650,7 @@ There are two ways to create such functions:
 
 * Implement the Function interfaces in your own class, either as an anonymous inner class or a named
   one, and pass an instance of it to Spark.
-* In Java 8, use [lambda expressions](http://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html)
+* Use [lambda expressions](http://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html)
   to concisely define an implementation.
 
 While much of this guide uses lambda syntax for conciseness, it is easy to use all the same APIs
diff --git a/external/java8-tests/README.md b/external/java8-tests/README.md
deleted file mode 100644
index aa87901695c2..000000000000
--- a/external/java8-tests/README.md
+++ /dev/null
@@ -1,22 +0,0 @@
-# Java 8 Test Suites
-
-These tests require having Java 8 installed and are isolated from the main Spark build.
-If Java 8 is not your system's default Java version, you will need to point Spark's build
-to your Java location. The set-up depends a bit on the build system:
-
-* Sbt users can either set JAVA_HOME to the location of a Java 8 JDK or explicitly pass
-  `-java-home` to the sbt launch script. If a Java 8 JDK is detected sbt will automatically
-  include the Java 8 test project.
-
-  `$ JAVA_HOME=/opt/jdk1.8.0/ build/sbt clean java8-tests/test
-
-* For Maven users,
-
-  Maven users can also refer to their Java 8 directory using JAVA_HOME.
-
-  `$ JAVA_HOME=/opt/jdk1.8.0/ mvn clean install -DskipTests`
-  `$ JAVA_HOME=/opt/jdk1.8.0/ mvn -pl :java8-tests_2.11 test`
-
-  Note that the above command can only be run from project root directory since this module
-  depends on core and the test-jars of core and streaming. This means an install step is
-  required to make the test dependencies visible to the Java 8 sub-project.
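With the Java 8-only test module above folded back into the main build, the programming guide now treats lambda expressions as the default way to write functions, with the `org.apache.spark.api.java.function` classes as the fallback. A small, hypothetical comparison of the two styles (not part of this patch):

    import java.util.Arrays;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.Function;

    public class LambdaStyles {
      public static void main(String[] args) {
        JavaSparkContext sc = new JavaSparkContext(
            new SparkConf().setAppName("LambdaStyles").setMaster("local[2]"));
        JavaRDD<String> lines = sc.parallelize(Arrays.asList("a", "bb", "ccc"));

        // Java 8 lambda expression.
        JavaRDD<Integer> lengths = lines.map(s -> s.length());

        // Equivalent pre-lambda style using the function classes.
        JavaRDD<Integer> lengths2 = lines.map(new Function<String, Integer>() {
          @Override
          public Integer call(String s) {
            return s.length();
          }
        });

        System.out.println(lengths.collect());
        System.out.println(lengths2.collect());
        sc.stop();
      }
    }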
diff --git a/external/java8-tests/pom.xml b/external/java8-tests/pom.xml
deleted file mode 100644
index f5a06467ee59..000000000000
--- a/external/java8-tests/pom.xml
+++ /dev/null
@@ -1,108 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~
-  ~    http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.spark</groupId>
-    <artifactId>spark-parent_2.11</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <groupId>org.apache.spark</groupId>
-  <artifactId>java8-tests_2.11</artifactId>
-  <packaging>pom</packaging>
-  <name>Spark Project Java 8 Tests</name>
-
-  <properties>
-    <sbt.project.name>java8-tests</sbt.project.name>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-test-tags_${scala.binary.version}</artifactId>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-deploy-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-install-plugin</artifactId>
-        <configuration>
-          <skip>true</skip>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <forceJavacCompilerUse>true</forceJavacCompilerUse>
-          <source>1.8</source>
-          <target>1.8</target>
-          <testSource>1.8</testSource>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>net.alchim31.maven</groupId>
-        <artifactId>scala-maven-plugin</artifactId>
-        <configuration>
-          <javacArgs>
-            <javacArg>-source</javacArg>
-            <javacArg>1.8</javacArg>
-            <javacArg>-target</javacArg>
-            <javacArg>1.8</javacArg>
-            <javacArg>-Xlint:all,-serial,-path</javacArg>
-          </javacArgs>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-</project>
diff --git a/external/java8-tests/src/test/resources/log4j.properties b/external/java8-tests/src/test/resources/log4j.properties
deleted file mode 100644
index edbecdae9209..000000000000
--- a/external/java8-tests/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Set everything to be logged to the file target/unit-tests.log
-log4j.rootCategory=INFO, file
-log4j.appender.file=org.apache.log4j.FileAppender
-log4j.appender.file.append=true
-log4j.appender.file.file=target/unit-tests.log
-log4j.appender.file.layout=org.apache.log4j.PatternLayout
-log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
-
-# Ignore messages below warning level from Jetty, because it's a bit verbose
-log4j.logger.org.spark-project.jetty=WARN
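The next hunk removes the reflective probe for `Process.destroyForcibly()`, a method that only exists on Java 8: once Java 8 is the baseline, `ChildProcAppHandle.kill()` can call it directly. A standalone, hypothetical sketch (not part of this patch) of the logic before and after:

    import java.lang.reflect.Method;

    public class KillChildSketch {
      // Java 8 baseline: call destroyForcibly() directly.
      static void kill(Process childProc) {
        try {
          childProc.exitValue(); // throws if the child is still running
        } catch (IllegalThreadStateException e) {
          childProc.destroyForcibly();
        }
      }

      // The removed Java 7-compatible path: probe for the method reflectively
      // and fall back to plain destroy() when it is absent.
      static void killOnJava7(Process childProc) {
        try {
          Method destroy = childProc.getClass().getMethod("destroyForcibly");
          destroy.invoke(childProc);
        } catch (Exception inner) {
          childProc.destroy();
        }
      }
    }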
diff --git a/launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java b/launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java
index 1bfda289dec3..fce646d78464 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/ChildProcAppHandle.java
@@ -106,14 +106,7 @@ public synchronized void kill() {
       try {
         childProc.exitValue();
       } catch (IllegalThreadStateException e) {
-        // Child is still alive. Try to use Java 8's "destroyForcibly()" if available,
-        // fall back to the old API if it's not there.
-        try {
-          Method destroy = childProc.getClass().getMethod("destroyForcibly");
-          destroy.invoke(childProc);
-        } catch (Exception inner) {
-          childProc.destroy();
-        }
+        childProc.destroyForcibly();
       } finally {
         childProc = null;
       }
diff --git a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
index 91586aad7b70..306cbbd0ab42 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/CommandBuilderUtils.java
@@ -112,21 +112,6 @@ static boolean isWindows() {
     return os.startsWith("Windows");
   }
 
-  /** Returns an enum value indicating whose JVM is being used. */
-  static JavaVendor getJavaVendor() {
-    String vendorString = System.getProperty("java.vendor");
-    if (vendorString.contains("Oracle")) {
-      return JavaVendor.Oracle;
-    }
-    if (vendorString.contains("IBM")) {
-      return JavaVendor.IBM;
-    }
-    if (vendorString.contains("OpenJDK")) {
-      return JavaVendor.OpenJDK;
-    }
-    return JavaVendor.Unknown;
-  }
-
   /**
    * Updates the user environment, appending the given pathList to the existing value of the given
    * environment variable (or setting it if it hasn't yet been set).
@@ -312,27 +297,6 @@ static String quoteForCommandString(String s) {
     return quoted.append('"').toString();
   }
 
-  /**
-   * Adds the default perm gen size option for Spark if the VM requires it and the user hasn't
-   * set it.
-   */
-  static void addPermGenSizeOpt(List<String> cmd) {
-    // Don't set MaxPermSize for IBM Java, or Oracle Java 8 and later.
-    if (getJavaVendor() == JavaVendor.IBM) {
-      return;
-    }
-    if (javaMajorVersion(System.getProperty("java.version")) > 7) {
-      return;
-    }
-    for (String arg : cmd) {
-      if (arg.startsWith("-XX:MaxPermSize=")) {
-        return;
-      }
-    }
-
-    cmd.add("-XX:MaxPermSize=256m");
-  }
-
   /**
    * Get the major version of the java version string supplied. This method
    * accepts any JEP-223-compliant strings (9-ea, 9+100), as well as legacy
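The `javaMajorVersion` helper left in place above accepts both legacy version strings (`1.8.0_92`) and JEP-223 strings (`9-ea`, `9+100`). A hypothetical re-implementation of that parsing idea, not the actual Spark code:

    public class JavaVersionSketch {
      static int javaMajorVersion(String version) {
        String[] parts = version.split("[+.\\-]");
        // Legacy "1.x" strings put the major version second; JEP-223 strings
        // ("9-ea", "9+100") put it first.
        return "1".equals(parts[0]) ? Integer.parseInt(parts[1]) : Integer.parseInt(parts[0]);
      }

      public static void main(String[] args) {
        System.out.println(javaMajorVersion("1.8.0_92")); // 8
        System.out.println(javaMajorVersion("9-ea"));     // 9
        System.out.println(javaMajorVersion("9+100"));    // 9
      }
    }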
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkAppHandle.java b/launcher/src/main/java/org/apache/spark/launcher/SparkAppHandle.java
index 625d02632114..efa6cbdeb8ad 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkAppHandle.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkAppHandle.java
@@ -89,9 +89,6 @@ public boolean isFinal() {
    * Tries to kill the underlying application. Implies {@link #disconnect()}. This will not send
    * a {@link #stop()} message to the application, so it's recommended that users first try to
    * stop the application cleanly and only resort to this method if that fails.
-   * <p>
-   * Note that if the application is running as a child process, this method fail to kill the
-   * process when using Java 7. This may happen if, for example, the application is deadlocked.
    */
   void kill();
 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
index 6b9d36cc0b0c..ab3213f76f26 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkClassCommandBuilder.java
@@ -87,7 +87,6 @@ public List<String> buildCommand(Map<String, String> env) throws IOException {
     String mem = firstNonEmpty(memKey != null ? System.getenv(memKey) : null, DEFAULT_MEM);
     cmd.add("-Xms" + mem);
     cmd.add("-Xmx" + mem);
-    addPermGenSizeOpt(cmd);
     cmd.add(className);
     cmd.addAll(classArgs);
     return cmd;
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
index c31c42cd3a41..eec10ad613f5 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java
@@ -247,7 +247,6 @@ private List<String> buildSparkSubmitCommand(Map<String, String> env) throws IOE
         config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH));
     }
 
-    addPermGenSizeOpt(cmd);
     cmd.add("org.apache.spark.deploy.SparkSubmit");
     cmd.addAll(buildSparkSubmitArgs());
     return cmd;
diff --git a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
index 29cbbe825bce..d047828dd1df 100644
--- a/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
+++ b/launcher/src/test/java/org/apache/spark/launcher/SparkSubmitCommandBuilderSuite.java
@@ -189,7 +189,7 @@ private void testCmdBuilder(boolean isDriver, boolean useDefaultPropertyFile) th
       launcher.setPropertiesFile(dummyPropsFile.getAbsolutePath());
       launcher.conf.put(SparkLauncher.DRIVER_MEMORY, "1g");
       launcher.conf.put(SparkLauncher.DRIVER_EXTRA_CLASSPATH, "/driver");
-      launcher.conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Ddriver -XX:MaxPermSize=256m");
+      launcher.conf.put(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Ddriver");
       launcher.conf.put(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH, "/native");
     } else {
       launcher.childEnv.put("SPARK_CONF_DIR", System.getProperty("spark.test.home")
@@ -215,12 +215,6 @@ private void testCmdBuilder(boolean isDriver, boolean useDefaultPropertyFile) th
       assertFalse("Memory arguments should not be set.", found);
     }
 
-    for (String arg : cmd) {
-      if (arg.startsWith("-XX:MaxPermSize=")) {
-        assertEquals("-XX:MaxPermSize=256m", arg);
-      }
-    }
-
     String[] cp = findArgValue(cmd, "-cp").split(Pattern.quote(File.pathSeparator));
     if (isDriver) {
       assertTrue("Driver classpath should contain provided entry.", contains("/driver", cp));
diff --git a/launcher/src/test/resources/spark-defaults.conf b/launcher/src/test/resources/spark-defaults.conf
index 239fc57883e9..3a51208c7c24 100644
--- a/launcher/src/test/resources/spark-defaults.conf
+++ b/launcher/src/test/resources/spark-defaults.conf
@@ -17,5 +17,5 @@
 
 spark.driver.memory=1g
 spark.driver.extraClassPath=/driver
-spark.driver.extraJavaOptions=-Ddriver -XX:MaxPermSize=256m
+spark.driver.extraJavaOptions=-Ddriver
 spark.driver.extraLibraryPath=/native
\ No newline at end of file
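With the launcher's automatic `-XX:MaxPermSize` injection gone, the only driver JVM options are the ones users supply, for example through `spark.driver.extraJavaOptions` as in the test fixture above. A hypothetical sketch (not part of this patch) of setting that option through the public `SparkLauncher` API:

    import org.apache.spark.launcher.SparkAppHandle;
    import org.apache.spark.launcher.SparkLauncher;

    public class LaunchSketch {
      public static void main(String[] args) throws Exception {
        // Only user-supplied options end up on the driver command line now;
        // the launcher no longer appends a MaxPermSize default.
        SparkAppHandle handle = new SparkLauncher()
            .setAppResource("/path/to/app.jar")   // hypothetical path
            .setMainClass("com.example.Main")     // hypothetical class
            .setConf(SparkLauncher.DRIVER_EXTRA_JAVA_OPTIONS, "-Ddriver")
            .startApplication();
        System.out.println("State: " + handle.getState());
      }
    }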
984b2859efbe..a40d7a4d5be5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -114,7 +114,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-    <java.version>1.7</java.version>
+    <java.version>1.8</java.version>
     <maven.version>3.3.9</maven.version>
     <sbt.project.name>spark</sbt.project.name>
     <mesos.version>0.21.1</mesos.version>
@@ -209,9 +209,6 @@
       things breaking. -->
     <spark.test.home>${session.executionRootDirectory}</spark.test.home>
 
-
-    <PermGen>64m</PermGen>
-    <MaxPermGen>512m</MaxPermGen>
     <CodeCacheSize>512m</CodeCacheSize>
@@ -1863,8 +1860,6 @@
             <jvmArgs>
               <jvmArg>-Xms1024m</jvmArg>
               <jvmArg>-Xmx1024m</jvmArg>
-              <jvmArg>-XX:PermSize=${PermGen}</jvmArg>
-              <jvmArg>-XX:MaxPermSize=${MaxPermGen}</jvmArg>
               <jvmArg>-XX:ReservedCodeCacheSize=${CodeCacheSize}</jvmArg>
             </jvmArgs>
@@ -1915,7 +1910,7 @@
             <include>**/*Suite.java</include>
           </includes>
           <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -Xss4096k -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-Xmx3g -Xss4096k -XX:ReservedCodeCacheSize=512m</argLine>
-          <argLine>-da -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine>
+          <argLine>-da -Xmx3g -XX:ReservedCodeCacheSize=512m</argLine>
diff --git a/external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java b/streaming/src/test/java/org/apache/spark/streaming/Java8APISuite.java
similarity index 100%
rename from external/java8-tests/src/test/java/org/apache/spark/streaming/Java8APISuite.java
rename to streaming/src/test/java/org/apache/spark/streaming/Java8APISuite.java
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 336e29fc6bfd..4b76e46ae55a 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -856,7 +856,6 @@ private[spark] class Client(
     // For log4j configuration to reference
     javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR)
-    YarnCommandBuilderUtils.addPermGenSizeOpt(javaOpts)
 
     val userClass =
       if (isClusterMode) {
diff --git a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
index 7b55d781f86e..5f5316610b2c 100644
--- a/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
+++ b/yarn/src/main/scala/org/apache/spark/deploy/yarn/ExecutorRunnable.scala
@@ -203,7 +203,6 @@ private[yarn] class ExecutorRunnable(
     // For log4j configuration to reference
     javaOpts += ("-Dspark.yarn.app.container.log.dir=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR)
-    YarnCommandBuilderUtils.addPermGenSizeOpt(javaOpts)
 
     val userClassPath = Client.getUserClasspath(sparkConf).flatMap { uri =>
       val absPath =