From 714648baf96732d7e5d28fc92df6cfd1daf05815 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Mon, 22 May 2017 17:02:28 +0800 Subject: [PATCH 1/7] Exit if total cores is less than or equal to 0 --- .../scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 0144fd1056bac..5b849ecc3942f 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -27,6 +27,7 @@ import java.util.jar.JarFile import scala.collection.JavaConverters._ import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.io.Source +import scala.util.Try import org.apache.spark.deploy.SparkSubmitAction._ import org.apache.spark.launcher.SparkSubmitArgumentsParser @@ -253,6 +254,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) { SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class") } + if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) { + SparkSubmit.printErrorAndExit("Total executor cores must be a positive number") + } if (pyFiles != null && !isPython) { SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script") } From ec4b61a4fb4d124a80d98b8d52df9cd129c9a391 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Tue, 23 May 2017 16:55:51 +0800 Subject: [PATCH 2/7] Other numerical parameters are validated --- .../spark/deploy/SparkSubmitArguments.scala | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 
5b849ecc3942f..ae8faad3b6c3d 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -32,6 +32,7 @@ import scala.util.Try import org.apache.spark.deploy.SparkSubmitAction._ import org.apache.spark.launcher.SparkSubmitArgumentsParser import org.apache.spark.util.Utils +import org.apache.spark.network.util.JavaUtils /** * Parses and encapsulates arguments from the spark-submit script. @@ -213,7 +214,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S case _ => SparkSubmit.printErrorAndExit( s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " + - "Please specify a class through --class.") + "Please specify a class through --class.") } } @@ -254,9 +255,21 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) { SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class") } + if (driverMemory != null && Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) { + SparkSubmit.printErrorAndExit("Driver Memory must be a positive number") + } + if (executorMemory != null && Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) { + SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number") + } + if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) { + SparkSubmit.printErrorAndExit("Executor cores must be a positive number") + } if (totalExecutorCores != null && Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) { SparkSubmit.printErrorAndExit("Total executor cores must be a positive number") } + if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) { + SparkSubmit.printErrorAndExit("Number of executors must be a positive number") + } if (pyFiles != null && !isPython) { 
SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script") } @@ -625,8 +638,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S // Get the output and discard any unnecessary lines from it. Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines .filter { line => - !line.startsWith("log4j") && !line.startsWith("usage") - } + !line.startsWith("log4j") && !line.startsWith("usage") + } .mkString("\n") } finally { System.setSecurityManager(currentSm) From 128e2d4ccf1e5c95ea971ce52862930e0cfdbe65 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Tue, 23 May 2017 17:01:59 +0800 Subject: [PATCH 3/7] Other numerical parameters are validated --- .../org/apache/spark/deploy/SparkSubmitArguments.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index ae8faad3b6c3d..198d6429a5bae 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -214,7 +214,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S case _ => SparkSubmit.printErrorAndExit( s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " + - "Please specify a class through --class.") + "Please specify a class through --class.") } } @@ -638,8 +638,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S // Get the output and discard any unnecessary lines from it. 
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines .filter { line => - !line.startsWith("log4j") && !line.startsWith("usage") - } + !line.startsWith("log4j") && !line.startsWith("usage") + } .mkString("\n") } finally { System.setSecurityManager(currentSm) From 4c4bfbb9fd2f03a8ab9747dd09aa897eea6b8463 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Tue, 23 May 2017 17:10:29 +0800 Subject: [PATCH 4/7] Other numerical parameters are validated --- .../org/apache/spark/deploy/SparkSubmitArguments.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 198d6429a5bae..b5cfe25af767d 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -214,7 +214,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S case _ => SparkSubmit.printErrorAndExit( s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " + - "Please specify a class through --class.") + "Please specify a class through --class.") } } @@ -638,8 +638,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S // Get the output and discard any unnecessary lines from it. 
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines .filter { line => - !line.startsWith("log4j") && !line.startsWith("usage") - } + !line.startsWith("log4j") && !line.startsWith("usage") + } .mkString("\n") } finally { System.setSecurityManager(currentSm) From 66f8a58871715c89182ca4bec9d225020ccaba4b Mon Sep 17 00:00:00 2001 From: 10129659 Date: Tue, 23 May 2017 18:36:45 +0800 Subject: [PATCH 5/7] Fix the Style problems --- .../apache/spark/deploy/SparkSubmitArguments.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index b5cfe25af767d..e8d13d067c295 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -214,7 +214,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S case _ => SparkSubmit.printErrorAndExit( s"Cannot load main class from JAR $primaryResource with URI $uriScheme. 
" + - "Please specify a class through --class.") + "Please specify a class through --class.") } } @@ -255,10 +255,12 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) { SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class") } - if (driverMemory != null && Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) { + if (driverMemory != null + && Try(JavaUtils.byteStringAsBytes(driverMemory)).getOrElse(-1L) <= 0) { SparkSubmit.printErrorAndExit("Driver Memory must be a positive number") } - if (executorMemory != null && Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) { + if (executorMemory != null + && Try(JavaUtils.byteStringAsBytes(executorMemory)).getOrElse(-1L) <= 0) { SparkSubmit.printErrorAndExit("Executor Memory cores must be a positive number") } if (executorCores != null && Try(executorCores.toInt).getOrElse(-1) <= 0) { @@ -638,8 +640,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S // Get the output and discard any unnecessary lines from it. 
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines .filter { line => - !line.startsWith("log4j") && !line.startsWith("usage") - } + !line.startsWith("log4j") && !line.startsWith("usage") + } .mkString("\n") } finally { System.setSecurityManager(currentSm) From eae0f3d4a22911156b1bf47dd6df0cd0ed31dc28 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Tue, 23 May 2017 19:54:35 +0800 Subject: [PATCH 6/7] Fix the Style problems --- .../org/apache/spark/deploy/SparkSubmitArguments.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index e8d13d067c295..3ac7fca572ecf 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -214,7 +214,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S case _ => SparkSubmit.printErrorAndExit( s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " + - "Please specify a class through --class.") + "Please specify a class through --class.") } } @@ -640,8 +640,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S // Get the output and discard any unnecessary lines from it. 
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines .filter { line => - !line.startsWith("log4j") && !line.startsWith("usage") - } + !line.startsWith("log4j") && !line.startsWith("usage") + } .mkString("\n") } finally { System.setSecurityManager(currentSm) From caacdc04ccbc6bdfcc6ade77559c85c114c9b075 Mon Sep 17 00:00:00 2001 From: 10129659 Date: Fri, 26 May 2017 09:15:19 +0800 Subject: [PATCH 7/7] Modify the import order --- .../scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala index 3ac7fca572ecf..5100a17006e24 100644 --- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala @@ -31,8 +31,9 @@ import scala.util.Try import org.apache.spark.deploy.SparkSubmitAction._ import org.apache.spark.launcher.SparkSubmitArgumentsParser -import org.apache.spark.util.Utils import org.apache.spark.network.util.JavaUtils +import org.apache.spark.util.Utils + /** * Parses and encapsulates arguments from the spark-submit script.