Skip to content

Commit

Permalink
[SPARK-31235] Separates different categories of applications
Browse files Browse the repository at this point in the history
  • Loading branch information
wang-zhun committed Mar 25, 2020
1 parent 0fd4fa7 commit ef705dc
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 1 deletion.
30 changes: 30 additions & 0 deletions core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -332,6 +332,36 @@ class SparkSubmitSuite
sys.props("SPARK_SUBMIT") should be ("true")
}

test("handles spark.yarn.applicationType on yarn mode") {
  // Submit a YARN client-mode app with spark.yarn.applicationType set on the
  // command line and verify prepareSubmitEnvironment carries it through intact.
  val commandLine = Seq(
    "--deploy-mode", "client",
    "--master", "yarn",
    "--executor-memory", "5g",
    "--executor-cores", "5",
    "--class", "org.SomeClass",
    "--jars", "one.jar,two.jar,three.jar",
    "--driver-memory", "4g",
    "--queue", "thequeue",
    "--files", "file1.txt,file2.txt",
    "--archives", "archive1.txt,archive2.txt",
    "--num-executors", "6",
    "--name", "trill",
    "--conf", "spark.yarn.applicationType=SPARK-SQL",
    "--conf", "spark.ui.enabled=false",
    "thejar.jar",
    "arg1", "arg2")
  val parsedArgs = new SparkSubmitArguments(commandLine)
  val (childArgs, classpath, conf, mainClass) = submit.prepareSubmitEnvironment(parsedArgs)
  childArgs.mkString(" ") should be ("arg1 arg2")
  mainClass should be ("org.SomeClass")
  // The primary jar comes first, followed by the --jars entries in order.
  val expectedJars = Seq("thejar.jar", "one.jar", "two.jar", "three.jar")
  classpath should have length (4)
  classpath.zip(expectedJars).foreach { case (entry, jar) =>
    entry should endWith (jar)
  }
  conf.get("spark.yarn.applicationType") should be ("SPARK-SQL")
}

test("handles standalone cluster mode") {
testStandaloneCluster(useRest = true)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -256,7 +256,7 @@ private[spark] class Client(
appContext.setApplicationName(sparkConf.get("spark.app.name", "Spark"))
appContext.setQueue(sparkConf.get(QUEUE_NAME))
appContext.setAMContainerSpec(containerContext)
appContext.setApplicationType("SPARK")
appContext.setApplicationType(sparkConf.get(APPLICATION_TYPE))

sparkConf.get(APPLICATION_TAGS).foreach { tags =>
appContext.setApplicationTags(new java.util.HashSet[String](tags.asJava))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,12 @@ package object config {
.booleanConf
.createWithDefault(false)

// Configures the ApplicationSubmissionContext's application type so the YARN RM
// can distinguish categories of Spark apps; defaults to the historical "SPARK".
private[spark] val APPLICATION_TYPE = ConfigBuilder("spark.yarn.applicationType")
  // NOTE: the two string pieces are concatenated at runtime — the trailing space
  // before the join is required, otherwise the doc reads "application,it".
  .doc("Type of this application, " +
    "it separates different categories of applications like SPARK-SQL and SPARK-STREAMING")
  .stringConf
  .createWithDefault("SPARK")

/* File distribution. */

private[spark] val SPARK_ARCHIVE = ConfigBuilder("spark.yarn.archive")
Expand Down

0 comments on commit ef705dc

Please sign in to comment.