diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index fab46d5a0f7fc..439d62de921cc 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -51,6 +51,7 @@ import org.apache.spark.internal.config.Tests._ import org.apache.spark.internal.config.UI._ import org.apache.spark.internal.plugin.PluginContainer import org.apache.spark.io.CompressionCodec +import org.apache.spark.launcher.JavaModuleOptions import org.apache.spark.metrics.source.JVMCPUSource import org.apache.spark.partial.{ApproximateEvaluator, PartialResult} import org.apache.spark.rdd._ @@ -399,6 +400,8 @@ class SparkContext(config: SparkConf) extends Logging { // This should be set as early as possible. SparkContext.fillMissingMagicCommitterConfsIfNeeded(_conf) + SparkContext.supplementJavaModuleOptions(_conf) + _driverLogger = DriverLogger(_conf) val resourcesFileOpt = conf.get(DRIVER_RESOURCES_FILE) @@ -3025,6 +3028,22 @@ object SparkContext extends Logging { } } } + + /** + * SPARK-36796: This is a helper function to supplement `--add-opens` options to + * `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions`. 
+ */ + private def supplementJavaModuleOptions(conf: SparkConf): Unit = { + def supplement(key: OptionalConfigEntry[String]): Unit = { + val v = conf.get(key) match { + case Some(opts) => s"${JavaModuleOptions.defaultModuleOptions()} $opts" + case None => JavaModuleOptions.defaultModuleOptions() + } + conf.set(key.key, v) + } + supplement(DRIVER_JAVA_OPTIONS) + supplement(EXECUTOR_JAVA_OPTIONS) + } } /** diff --git a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java new file mode 100644 index 0000000000000..07a62f5233583 --- /dev/null +++ b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.launcher; + +/** + * This helper class is used to place all the `--add-opens` options + * required by Spark when using Java 17. `DEFAULT_MODULE_OPTIONS` has added + * `-XX:+IgnoreUnrecognizedVMOptions` to be compatible with Java 8 and Java 11.
+ */ +public class JavaModuleOptions { + private static final String[] DEFAULT_MODULE_OPTIONS = { + "-XX:+IgnoreUnrecognizedVMOptions", + "--add-opens=java.base/java.lang=ALL-UNNAMED", + "--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + "--add-opens=java.base/java.io=ALL-UNNAMED", + "--add-opens=java.base/java.net=ALL-UNNAMED", + "--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + "--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + "--add-opens=java.base/sun.security.action=ALL-UNNAMED", + "--add-opens=java.base/sun.util.calendar=ALL-UNNAMED"}; + + /** + * Returns the default Java options related to `--add-opens` and + * `-XX:+IgnoreUnrecognizedVMOptions` used by Spark. + */ + public static String defaultModuleOptions() { + return String.join(" ", DEFAULT_MODULE_OPTIONS); + } +} diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java index b2c12973bcabd..25237da47ce90 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java +++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitCommandBuilder.java @@ -294,6 +294,8 @@ private List buildSparkSubmitCommand(Map env) config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH)); } + // SPARK-36796: Always add default `--add-opens` to submit command + addOptionString(cmd, JavaModuleOptions.defaultModuleOptions()); cmd.add("org.apache.spark.deploy.SparkSubmit"); cmd.addAll(buildSparkSubmitArgs()); return cmd; diff --git a/pom.xml b/pom.xml index 5439506326c37..06b974c4425f1 100644 --- a/pom.xml +++ b/pom.xml @@ -283,6 +283,22 @@ 128m yyyy-MM-dd HH:mm:ss z + + + + -XX:+IgnoreUnrecognizedVMOptions + --add-opens=java.base/java.lang=ALL-UNNAMED + --add-opens=java.base/java.lang.invoke=ALL-UNNAMED + 
--add-opens=java.base/java.io=ALL-UNNAMED + --add-opens=java.base/java.net=ALL-UNNAMED + --add-opens=java.base/java.nio=ALL-UNNAMED + --add-opens=java.base/java.util=ALL-UNNAMED + --add-opens=java.base/java.util.concurrent=ALL-UNNAMED + --add-opens=java.base/sun.nio.ch=ALL-UNNAMED + --add-opens=java.base/sun.nio.cs=ALL-UNNAMED + --add-opens=java.base/sun.security.action=ALL-UNNAMED + --add-opens=java.base/sun.util.calendar=ALL-UNNAMED + @@ -2707,7 +2723,7 @@ **/*Suite.java ${project.build.directory}/surefire-reports - -ea -Xmx4g -Xss4m -XX:MaxMetaspaceSize=2g -XX:ReservedCodeCacheSize=${CodeCacheSize} -Dio.netty.tryReflectionSetAccessible=true + -ea -Xmx4g -Xss4m -XX:MaxMetaspaceSize=2g -XX:ReservedCodeCacheSize=${CodeCacheSize} ${extraJavaTestArgs} -Dio.netty.tryReflectionSetAccessible=true