diff --git a/bin/run-tests b/bin/run-tests deleted file mode 100755 index a514642e317b6..0000000000000 --- a/bin/run-tests +++ /dev/null @@ -1,234 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -# This file tests the various functionalities in bin/utils.sh -# -# By default, this prints only the relevant error output at the end if tests fail. -# For debugging, the user can set SPARK_TESTING_VERBOSE to print more information -# while tests are still running. -# -# This script returns an exit code of 1 on test failure. - -SPARK_HOME="$(cd `dirname $0`/..; pwd)" -PROPERTIES_FILE="$SPARK_HOME/bin/test.conf" - -# Load utility functions -. 
"$SPARK_HOME/bin/utils.sh" - -tests_failed=0 -this_test_failed=0 -error_output_buffer="" -temp_output_buffer="" - -# Echo only if the verbose flag is set -function verbose_echo() { - if [[ -n "$SPARK_TESTING_VERBOSE" ]]; then - echo -e "$1" - fi -} - -# Collect error output for echoing at the end if tests fail -# This also echoes the given string if the verbose flag is set -function log_error() { - verbose_echo "$1" - if [[ -n "$error_output_buffer" ]]; then - error_output_buffer=$(echo -e "$error_output_buffer\n$1") - else - error_output_buffer="$1" - fi -} - -# Collect temporary output for logging -function collect_temp_output() { - if [[ -n "$temp_output_buffer" ]]; then - temp_output_buffer=$(echo -e "$temp_output_buffer\n$1") - else - temp_output_buffer="$1" - fi -} - -# Print the result of an individual test -function echo_test_result() { - if [[ "$this_test_failed" == 1 ]]; then - log_error "$temp_output_buffer" - tests_failed=1 - else - verbose_echo "$temp_output_buffer" - fi -} - -# Test parse_java_property. This takes in three parameters, the name of -# the config, the expected value, and whether or not to ignore whitespace. -function test_parse_java_property() { - key="$1" - expected_value="$2" - ignore_whitespace="$3" - temp_output_buffer="" - this_test_failed=0 - parse_java_property "$key" - actual_value="$JAVA_PROPERTY_VALUE" - collect_temp_output " $key -> $actual_value" - # Ignore whitespace for multi-line arguments - if [[ -n "$ignore_whitespace" ]]; then - expected_value=$(echo "$expected_value" | sed "s/[[:space:]]//g") - actual_value=$(echo "$actual_value" | sed "s/[[:space:]]//g") - fi - if [[ "$actual_value" != "$expected_value" ]]; then - collect_temp_output " XXXXX TEST FAILED XXXXX" - collect_temp_output " expected: $expected_value" - collect_temp_output " actual: $actual_value" - this_test_failed=1 - fi - echo_test_result -} - -# Test split_java_options. 
This takes in three or more parameters, the name of the config, -# the expected number of java options, and values of the java options themselves. -function test_split_java_options() { - key="$1" - expected_size="$2" - expected_values=("${@:3}") - temp_output_buffer="" - this_test_failed=0 - parse_java_property "$key" - collect_temp_output " $JAVA_PROPERTY_VALUE" - split_java_options "$JAVA_PROPERTY_VALUE" - if [[ "$expected_size" != "${#SPLIT_JAVA_OPTS[@]}" ]]; then - collect_temp_output " XXXXX TEST FAILED XXXXX" - collect_temp_output " expected size: $expected_size" - collect_temp_output " actual size: ${#SPLIT_JAVA_OPTS[@]}" - this_test_failed=1 - fi - for i in $(seq 0 $((expected_size - 1))); do - expected_value="${expected_values[$i]}" - actual_value="${SPLIT_JAVA_OPTS[$i]}" - collect_temp_output " -> $actual_value" - if [[ "$expected_value" != "$actual_value" ]]; then - collect_temp_output " XXXXX TEST FAILED (key $key) XXXXX" - collect_temp_output " expected value: $expected_value" - collect_temp_output " actual value: $actual_value" - this_test_failed=1 - break - fi - done - echo_test_result -} - -# Test split_java_options. This takes in three or more parameters, the name of the config, -# the expected number of java options, and values of the java options themselves. 
-function test_quote_java_property() { - key="$1" - expected_size="$2" - expected_values=("${@:3}") - temp_output_buffer="" - this_test_failed=0 - parse_java_property "$key" - split_java_options "$JAVA_PROPERTY_VALUE" - quote_java_property "${SPLIT_JAVA_OPTS[@]}" - collect_temp_output " $JAVA_PROPERTY_VALUE" - for i in $(seq 0 $((expected_size - 1))); do - expected_value="${expected_values[$i]}" - actual_value="${QUOTED_JAVA_OPTS[$i]}" - collect_temp_output " -> $actual_value" - if [[ "$expected_value" != "$actual_value" ]]; then - collect_temp_output " XXXXX TEST FAILED (key $key) XXXXX" - collect_temp_output " expected value: $expected_value" - collect_temp_output " actual value: $actual_value" - this_test_failed=1 - break - fi - done - echo_test_result -} - -# Test parse_java_property. This should read the literal value as written in the conf file. -log_error "--- Testing parse_java_property ---" -delimiters=("space" "equal" "colon") -test_parse_java_property "does.not.exist" "" -for delimiter in "${delimiters[@]}"; do - test_parse_java_property "spark.$delimiter.1" "-Dstraw=berry" - test_parse_java_property "spark.$delimiter.2" "-Dstraw=\"berry\"" - test_parse_java_property "spark.$delimiter.3" "-Dstraw=\"berry again\"" - test_parse_java_property "spark.$delimiter.4" "-Dstraw=\"berry \\\"quote\"" - test_parse_java_property "spark.$delimiter.5" "-Dstraw=\"berry \\\\backslash\"" - test_parse_java_property "spark.$delimiter.6" \ - "-Dstraw=\"berry \\\"quotes\\\" and \\\\backslashes\\\\ \"" - test_parse_java_property "spark.$delimiter.7" \ - "-Dstraw=berry -Dblue=berry -Dblack=berry" - test_parse_java_property "spark.$delimiter.8" \ - "-Dstraw=\"berry space\" -Dblue=\"berry\" -Dblack=berry" - test_parse_java_property "spark.$delimiter.9" \ - "-Dstraw=\"berry space\" -Dblue=\"berry \\\"quotes\\\"\" -Dblack=\"berry \\\\backslashes\\\\ \"" - test_parse_java_property "spark.$delimiter.10" \ - "-Dstraw=\"berry space\" -Dblue=\"berry \\\"quotes\\\"\" -Dblack=\"berry 
\\\\backslashes\\\\ \"" \ - IGNORE_WHITESPACE -done -log_error - -# Test split_java_options. Note that this relies on parse_java_property to work correctly. -log_error "--- Testing split_java_options ---" -if [[ "$tests_failed" == 1 ]]; then - log_error "* WARNING: Tests for parse_java_property failed!" - log_error "This should also fail tests for split_java_options" -fi -test_split_java_options "spark.space.1" 1 "-Dstraw=berry" -test_split_java_options "spark.space.2" 1 "-Dstraw=berry" -test_split_java_options "spark.space.3" 1 "-Dstraw=berry again" -test_split_java_options "spark.space.4" 1 "-Dstraw=berry \"quote" -test_split_java_options "spark.space.5" 1 "-Dstraw=berry \\backslash" -test_split_java_options "spark.space.6" 1 "-Dstraw=berry \"quotes\" and \\backslashes\\ " -test_split_java_options "spark.space.7" 3 "-Dstraw=berry" "-Dblue=berry" "-Dblack=berry" -test_split_java_options "spark.space.8" 3 "-Dstraw=berry space" "-Dblue=berry" "-Dblack=berry" -test_split_java_options "spark.space.9" 3 \ - "-Dstraw=berry space" "-Dblue=berry \"quotes\"" "-Dblack=berry \\backslashes\\ " -test_split_java_options "spark.space.10" 3 \ - "-Dstraw=berry space" "-Dblue=berry \"quotes\"" "-Dblack=berry \\backslashes\\ " -log_error - -# Test quote_java_property. Note that this relies on split_java_options to work correctly. -log_error "--- Testing quote_java_property ---" -if [[ "$tests_failed" == 1 ]]; then - log_error "* WARNING: Tests for split_java_options failed!" 
- log_error "This should also fail tests for quote_java_property" -fi -test_quote_java_property "spark.space.1" 1 "\"-Dstraw=berry\"" -test_quote_java_property "spark.space.2" 1 "\"-Dstraw=berry\"" -test_quote_java_property "spark.space.3" 1 "\"-Dstraw=berry again\"" -test_quote_java_property "spark.space.4" 1 "\"-Dstraw=berry \"quote\"" -test_quote_java_property "spark.space.5" 1 "\"-Dstraw=berry \\backslash\"" -test_quote_java_property "spark.space.6" 1 \ - "\"-Dstraw=berry \"quotes\" and \\backslashes\\ \"" -test_quote_java_property "spark.space.7" 3 \ - "\"-Dstraw=berry\"" "\"-Dblue=berry\"" "\"-Dblack=berry\"" -test_quote_java_property "spark.space.8" 3 \ - "\"-Dstraw=berry space\"" "\"-Dblue=berry\"" "\"-Dblack=berry\"" -test_quote_java_property "spark.space.9" 3 \ - "\"-Dstraw=berry space\"" "\"-Dblue=berry \"quotes\"\"" "\"-Dblack=berry \\backslashes\\ \"" -test_quote_java_property "spark.space.10" 3 \ - "\"-Dstraw=berry space\"" "\"-Dblue=berry \"quotes\"\"" "\"-Dblack=berry \\backslashes\\ \"" -log_error - -# Final test result -if [[ "$tests_failed" == 0 ]]; then - echo "BASH tests passed." -else - echo -e "XXXXX BASH tests failed XXXXX\n" - echo -e "$error_output_buffer" - exit 1 -fi - diff --git a/bin/spark-class b/bin/spark-class index 8390af4492889..7a9203cfce47b 100755 --- a/bin/spark-class +++ b/bin/spark-class @@ -30,9 +30,6 @@ FWDIR="$(cd `dirname $0`/..; pwd)" # Export this as SPARK_HOME export SPARK_HOME="$FWDIR" -# Load utility functions -. "$SPARK_HOME/bin/utils.sh" - . $FWDIR/bin/load-spark-env.sh if [ -z "$1" ]; then @@ -112,10 +109,6 @@ if [ -e "$FWDIR/conf/java-opts" ] ; then JAVA_OPTS="$JAVA_OPTS `cat $FWDIR/conf/java-opts`" fi -# Split JAVA_OPTS properly to handle whitespace, double quotes and backslashes -# This exports the split java options into SPLIT_JAVA_OPTS -split_java_options "$JAVA_OPTS" - # Attention: when changing the way the JAVA_OPTS are assembled, the change must be reflected in CommandUtils.scala! 
TOOLS_DIR="$FWDIR"/tools @@ -156,13 +149,27 @@ if $cygwin; then fi export CLASSPATH -if [ "$SPARK_PRINT_LAUNCH_COMMAND" == "1" ]; then - # Put quotes around system properties in case they contain spaces for readability - # This exports the resulting list of java opts into QUOTED_JAVA_OPTS - quote_java_property "${SPLIT_JAVA_OPTS[@]}" +if [ -n "$SPARK_PRINT_LAUNCH_COMMAND" ]; then echo -n "Spark Command: " 1>&2 - echo "$RUNNER" -cp "$CLASSPATH" "${QUOTED_JAVA_OPTS[@]}" "$@" 1>&2 + echo "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" 1>&2 echo -e "========================================\n" 1>&2 fi -exec "$RUNNER" -cp "$CLASSPATH" "${SPLIT_JAVA_OPTS[@]}" "$@" +# In Spark submit client mode, the driver is launched in the same JVM as Spark submit itself. +# Here we must parse the properties file for relevant "spark.driver.*" configs for launching +# the driver JVM itself. + +if [ -n "$SPARK_SUBMIT_CLIENT_MODE" ]; then + exec "$RUNNER" org.apache.spark.deploy.SparkClassLauncher \ + "$PROPERTIES_FILE" \ + "$RUNNER" \ + "$CLASSPATH" \ + "$SPARK_SUBMIT_LIBRARY_PATH" \ + "$JAVA_OPTS" \ + "$OUR_JAVA_MEM" \ + true \ + "$@" +else + exec "$RUNNER" -cp "$CLASSPATH" $JAVA_OPTS "$@" +fi + diff --git a/bin/spark-submit b/bin/spark-submit index 73555b29835fb..c8a253a3f26ff 100755 --- a/bin/spark-submit +++ b/bin/spark-submit @@ -20,9 +20,6 @@ export SPARK_HOME="$(cd `dirname $0`/..; pwd)" ORIG_ARGS=("$@") -# Load utility functions -. "$SPARK_HOME/bin/utils.sh" - while (($#)); do if [ "$1" = "--deploy-mode" ]; then DEPLOY_MODE=$2 @@ -44,68 +41,17 @@ DEPLOY_MODE=${DEPLOY_MODE:-"client"} DEFAULT_PROPERTIES_FILE="$SPARK_HOME/conf/spark-defaults.conf" PROPERTIES_FILE=${PROPERTIES_FILE:-"$DEFAULT_PROPERTIES_FILE"} -unset DRIVER_EXTRA_JAVA_OPTIONS -unset EXECUTOR_EXTRA_JAVA_OPTIONS +# For client mode, the driver will be launched in the same JVM that launches +# SparkSubmit, so we need to read the properties file for any class paths, library +# paths, java options and memory early on. 
Otherwise, it will be too late by +# the time the JVM has started. -# A few Spark configs must be parsed early on before launching the JVM: -# -# [spark.driver.extra*] -# These configs encode java options, class paths, and library paths -# needed to launch the JVM if we are running Spark in client mode -# -# [spark.*.extraJavaOptions] -# The escaped characters in these configs must be preserved for -# splitting the arguments in Java later. For these configs, we -# export the raw values as environment variables. -# -if [[ -f "$PROPERTIES_FILE" ]]; then - echo "Using properties file $PROPERTIES_FILE." 1>&2 - # Parse the properties file here only if these special configs exist - should_parse=$(grep -e "spark.driver.extra*\|spark.*.extraJavaOptions" "$PROPERTIES_FILE") - if [[ -n "$should_parse" ]]; then - # This exports the value of the given key into JAVA_PROPERTY_VALUE - parse_java_property "spark.driver.memory" - DRIVER_MEMORY_CONF="$JAVA_PROPERTY_VALUE" - parse_java_property "spark.driver.extraLibraryPath" - DRIVER_EXTRA_LIBRARY_PATH="$JAVA_PROPERTY_VALUE" - parse_java_property "spark.driver.extraClassPath" - DRIVER_EXTRA_CLASSPATH="$JAVA_PROPERTY_VALUE" - parse_java_property "spark.driver.extraJavaOptions" - DRIVER_EXTRA_JAVA_OPTS="$JAVA_PROPERTY_VALUE" - parse_java_property "spark.executor.extraJavaOptions" - EXECUTOR_EXTRA_JAVA_OPTS="$JAVA_PROPERTY_VALUE" - # Export these for SparkSubmitArguments.scala to consume - if [[ -n "DRIVER_EXTRA_JAVA_OPTS" ]]; then - export DRIVER_EXTRA_JAVA_OPTS - fi - if [[ -n "EXECUTOR_EXTRA_JAVA_OPTS" ]]; then - export EXECUTOR_EXTRA_JAVA_OPTS - fi - fi -elif [[ "$PROPERTIES_FILE" != "$DEFAULT_PROPERTIES_FILE" ]]; then - echo "Warning: properties file $PROPERTIES_FILE does not exist." 1>&2 -fi - -# For client mode, the driver will be launched in the JVM that launches -# SparkSubmit, so we need to handle the class paths, java options, and -# memory preemptively in bash. 
Otherwise, it will be too late by the -# time the JVM has started. - -if [[ $DEPLOY_MODE == "client" ]]; then - if [[ -n "$DRIVER_EXTRA_JAVA_OPTS" ]]; then - export SPARK_SUBMIT_OPTS="$SPARK_SUBMIT_OPTS $DRIVER_EXTRA_JAVA_OPTS" - fi - if [[ -n "$DRIVER_EXTRA_CLASSPATH" ]]; then - export SPARK_SUBMIT_CLASSPATH="$SPARK_SUBMIT_CLASSPATH:$DRIVER_EXTRA_CLASSPATH" - fi - if [[ -n "$DRIVER_EXTRA_LIBRARY_PATH" ]]; then - export SPARK_SUBMIT_LIBRARY_PATH="$SPARK_SUBMIT_LIBRARY_PATH:$DRIVER_EXTRA_LIBRARY_PATH" - fi - # Favor command line memory over config memory - DRIVER_MEMORY=${DRIVER_MEMORY:-"$DRIVER_MEMORY_CONF"} - if [[ -n "$DRIVER_MEMORY" ]]; then +if [ "$DEPLOY_MODE" == "client" ]; then + if [ -n "$DRIVER_MEMORY" ]; then export SPARK_DRIVER_MEMORY=$DRIVER_MEMORY fi + export PROPERTIES_FILE + export SPARK_SUBMIT_CLIENT_MODE=1 fi exec $SPARK_HOME/bin/spark-class org.apache.spark.deploy.SparkSubmit "${ORIG_ARGS[@]}" diff --git a/bin/test.conf b/bin/test.conf deleted file mode 100644 index 437d732d75657..0000000000000 --- a/bin/test.conf +++ /dev/null @@ -1,45 +0,0 @@ -# -------------------------------------------------------------------------------------------- -# Spark properties file for testing -# -# The configs are separated into three categories, one for each delimiter that Java supports: -# http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load(java.io.Reader). -# The values of the configs are assumed to be identical across the categories. 
Changes in -# this file must be synced with "bin/test.sh" -# -------------------------------------------------------------------------------------------- - -# Space delimiter -spark.space.1 -Dstraw=berry -spark.space.2 -Dstraw="berry" -spark.space.3 -Dstraw="berry again" -spark.space.4 -Dstraw="berry \"quote" -spark.space.5 -Dstraw="berry \\backslash" -spark.space.6 -Dstraw="berry \"quotes\" and \\backslashes\\ " -spark.space.7 -Dstraw=berry -Dblue=berry -Dblack=berry -spark.space.8 -Dstraw="berry space" -Dblue="berry" -Dblack=berry -spark.space.9 -Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " -spark.space.10 -Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " - -# Equal sign delimiter -spark.equal.1=-Dstraw=berry -spark.equal.2=-Dstraw="berry" -spark.equal.3=-Dstraw="berry again" -spark.equal.4=-Dstraw="berry \"quote" -spark.equal.5=-Dstraw="berry \\backslash" -spark.equal.6=-Dstraw="berry \"quotes\" and \\backslashes\\ " -spark.equal.7=-Dstraw=berry -Dblue=berry -Dblack=berry -spark.equal.8=-Dstraw="berry space" -Dblue="berry" -Dblack=berry -spark.equal.9=-Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " -spark.equal.10 = -Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " - -# Colon delimiter -spark.colon.1:-Dstraw=berry -spark.colon.2:-Dstraw="berry" -spark.colon.3:-Dstraw="berry again" -spark.colon.4:-Dstraw="berry \"quote" -spark.colon.5:-Dstraw="berry \\backslash" -spark.colon.6:-Dstraw="berry \"quotes\" and \\backslashes\\ " -spark.colon.7:-Dstraw=berry -Dblue=berry -Dblack=berry -spark.colon.8:-Dstraw="berry space" -Dblue="berry" -Dblack=berry -spark.colon.9:-Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " -spark.colon.10 : -Dstraw="berry space" -Dblue="berry \"quotes\"" -Dblack="berry \\backslashes\\ " - diff --git a/bin/utils.sh b/bin/utils.sh index c9ed977be1430..cb7aa70dea19e 100755 --- 
a/bin/utils.sh +++ b/bin/utils.sh @@ -20,47 +20,6 @@ # | Utility functions for launching Spark applications | # * ---------------------------------------------------- * -# Parse the value of a config from a java properties file according to the specifications in -# http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load(java.io.Reader), -# with the exception of the support for multi-line arguments. This accepts the name of the -# config as an argument, and expects the path of the property file to be found in -# PROPERTIES_FILE. The value is returned through JAVA_PROPERTY_VALUE. -function parse_java_property() { - JAVA_PROPERTY_VALUE=$(\ - grep "^[[:space:]]*$1" "$PROPERTIES_FILE" | \ - head -n 1 | \ - sed "s/^[[:space:]]*$1//g" | \ - sed "s/^[[:space:]]*[:=]\{0,1\}//g" | \ - sed "s/^[[:space:]]*//g" | \ - sed "s/[[:space:]]*$//g" - ) - export JAVA_PROPERTY_VALUE -} - -# Properly split java options, dealing with whitespace, double quotes and backslashes. -# This accepts a string and returns the resulting list through SPLIT_JAVA_OPTS. -# For security reasons, this is isolated in its own function. -function split_java_options() { - eval set -- "$1" - SPLIT_JAVA_OPTS=("$@") - export SPLIT_JAVA_OPTS -} - -# Put double quotes around each of the given java options that is a system property. 
-# This accepts a list and returns the quoted list through QUOTED_JAVA_OPTS -function quote_java_property() { - QUOTED_JAVA_OPTS=() - for opt in "$@"; do - is_system_property=$(echo "$opt" | grep -e "^-D") - if [[ -n "$is_system_property" ]]; then - QUOTED_JAVA_OPTS+=("\"$opt\"") - else - QUOTED_JAVA_OPTS+=("$opt") - fi - done - export QUOTED_JAVA_OPTS -} - # Gather all all spark-submit options into SUBMISSION_OPTS function gatherSparkSubmitOpts() { if [ -z "$SUBMIT_USAGE_FUNCTION" ]; then diff --git a/conf/spark-defaults.conf.template b/conf/spark-defaults.conf.template index 80b72b0ca2d24..92adb4c9a575a 100644 --- a/conf/spark-defaults.conf.template +++ b/conf/spark-defaults.conf.template @@ -1,10 +1,9 @@ # Default system properties included when running spark-submit. # This is useful for setting default environmental settings. -# Note that properties that span multiple lines are not supported. # Example: # spark.master spark://master:7077 # spark.eventLog.enabled true # spark.eventLog.dir hdfs://namenode:8021/directory # spark.serializer org.apache.spark.serializer.KryoSerializer -# spark.executor.extraJavaOptions -XX:+PrintGCDetail -Dnumbers="one \"two\" three" +# spark.executor.extraJavaOptions -XX:+PrintGCDetail -Dkey=value -Dnumbers="one two three" diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkClassLauncher.scala b/core/src/main/scala/org/apache/spark/deploy/SparkClassLauncher.scala index 8acabc591cb2f..3494577e49331 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkClassLauncher.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkClassLauncher.scala @@ -25,6 +25,8 @@ import org.apache.spark.util.{RedirectThread, Utils} /** * Wrapper of `bin/spark-class` that prepares the launch environment of the child JVM properly. + * This is currently only used for running Spark submit in client mode. The goal moving forward + * is to use this class for all use cases of `bin/spark-class`. 
*/ object SparkClassLauncher { @@ -61,7 +63,7 @@ object SparkClassLauncher { val javaRunner = args(1) val clClassPaths = args(2) val clLibraryPaths = args(3) - val clJavaOpts = args(4) + val clJavaOpts = Utils.splitCommandString(args(4)) val clJavaMemory = args(5) val clientMode = args(6) == "true" val mainClass = args(7) @@ -89,9 +91,8 @@ object SparkClassLauncher { val pathSeparator = sys.props("path.separator") val classPaths = clClassPaths + confClassPaths.map(pathSeparator + _).getOrElse("") val libraryPaths = clLibraryPaths + confLibraryPaths.map(pathSeparator + _).getOrElse("") - val javaOpts = Utils.splitCommandString(clJavaOpts) ++ - confJavaOpts.map(Utils.splitCommandString).getOrElse(Seq.empty) - val filteredJavaOpts = javaOpts.filterNot { opt => + val javaOpts = clJavaOpts ++ confJavaOpts.map(Utils.splitCommandString).getOrElse(Seq.empty) + val filteredJavaOpts = javaOpts.distinct.filterNot { opt => opt.startsWith("-Djava.library.path") || opt.startsWith("-Xms") || opt.startsWith("-Xmx") } diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 28d4e0f65a560..d545f58c5da7e 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -76,15 +76,6 @@ private[spark] class SparkSubmitArguments(args: Seq[String]) { } } } - // For spark.*.extraJavaOptions, we cannot rely on the Java properties loader because it - // un-escapes certain characters (" and \) needed to split the string into java options. - // For these configs, use the equivalent environment variables instead. - sys.env.get("DRIVER_EXTRA_JAVA_OPTS").foreach { opts => - defaultProperties("spark.driver.extraJavaOptions") = opts - } - sys.env.get("EXECUTOR_EXTRA_JAVA_OPTS").foreach { opts => - defaultProperties("spark.executor.extraJavaOptions") = opts - } defaultProperties }