Skip to content

Commit

Permalink
Merge pull request #112 from yaooqinn/KYUUBI-108
Browse files Browse the repository at this point in the history
Fix #108: Kyuubi server-side direct memory OOM
  • Loading branch information
yaooqinn committed Oct 25, 2018
2 parents 896eaaa + 2862325 commit 8bbc3d5
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,6 @@ object KyuubiSparkUtil extends Logging {
private[this] val HIVE_PREFIX = "hive."
private[this] val METASTORE_PREFIX = "metastore."


// ENVIRONMENTS
val SPARK_HOME: String = System.getenv("SPARK_HOME")
val SPARK_JARS_DIR: String = SPARK_HOME + File.separator + "jars"
Expand All @@ -68,6 +67,9 @@ object KyuubiSparkUtil extends Logging {
val DRIVER_CORES: String = SPARK_PREFIX + DRIVER_PREFIX + "cores"
val DRIVER_EXTRA_JAVA_OPTIONS: String = SPARK_PREFIX + DRIVER_PREFIX + "extraJavaOptions"

val GC_INTERVAL: String = SPARK_PREFIX + "cleaner.periodicGC.interval"
val GC_INTERVAL_DEFAULT: String = "3min"

val AM_EXTRA_JAVA_OPTIONS: String = AM_PREFIX + "extraJavaOptions"

val SPARK_UI_PORT: String = SPARK_PREFIX + UI_PREFIX + "port"
Expand Down Expand Up @@ -271,6 +273,7 @@ object KyuubiSparkUtil extends Logging {
KyuubiConf.getAllDefaults.foreach(kv => conf.setIfMissing(kv._1, kv._2))

conf.setIfMissing(SPARK_LOCAL_DIR, conf.get(KyuubiConf.BACKEND_SESSION_LOCAL_DIR.key))
conf.setIfMissing(GC_INTERVAL, GC_INTERVAL_DEFAULT)

if (UserGroupInformation.isSecurityEnabled) {
conf.setIfMissing(HDFS_CLIENT_CACHE, HDFS_CLIENT_CACHE_DEFAULT)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,14 +50,12 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand
throw new IllegalArgumentException(msg)
}

val memory = firstNonEmpty(
config.get(SparkLauncher.DRIVER_MEMORY),
System.getenv("SPARK_DRIVER_MEMORY"),
DEFAULT_MEM)
val memory = firstNonEmpty(config.get(SparkLauncher.DRIVER_MEMORY), DEFAULT_MEM * 4)
cmd.add("-Xmx" + memory)
addOptionString(cmd, driverExtraJavaOptions)
mergeEnvPathList(env, getLibPathEnvName, config.get(SparkLauncher.DRIVER_EXTRA_LIBRARY_PATH))
addPermSize(cmd)
addMaxDirectMemSize(cmd)
cmd.add("org.apache.spark.deploy.KyuubiSubmit")
cmd.addAll(buildSparkSubmitArgs)
cmd
Expand All @@ -80,6 +78,26 @@ class KyuubiSubmitCommandBuilder(args: JList[String]) extends SparkSubmitCommand
return
}
}
cmd.add("-XX:MaxPermSize=256m")
cmd.add("-XX:MaxPermSize=512m")

cmd.asScala.foreach { arg =>
if (arg.contains("-XX:PermSize=")) {
return
}
}
cmd.add("-XX:PermSize=512m")
}

/**
 * Adds the `-XX:MaxDirectMemorySize` option for the Kyuubi server JVM if the user has not
 * already set it, capping direct (off-heap) buffer allocation to avoid the direct-memory
 * OOM described in KYUUBI-108.
 *
 * @param cmd the JVM command-line argument list being built; mutated in place
 */
private def addMaxDirectMemSize(cmd: JList[String]): Unit = {
  // Respect a user-supplied value; `exists` avoids the nonlocal `return`-in-lambda
  // (which is implemented by throwing NonLocalReturnControl).
  if (!cmd.asScala.exists(_.contains("-XX:MaxDirectMemorySize="))) {
    // Original code mistakenly added -XX:MaxPermSize here, which does not limit
    // direct memory at all — the flag must be MaxDirectMemorySize.
    cmd.add("-XX:MaxDirectMemorySize=4096m")
  }
}
}

0 comments on commit 8bbc3d5

Please sign in to comment.