Skip to content

Commit

Permalink
add sgx log level option (apache#6)
Browse files Browse the repository at this point in the history
  • Loading branch information
glorysdj committed Dec 2, 2021
1 parent 5783d82 commit ab17883
Show file tree
Hide file tree
Showing 6 changed files with 8 additions and 4 deletions.
2 changes: 2 additions & 0 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
Original file line number Diff line number Diff line change
Expand Up @@ -655,6 +655,8 @@ private[spark] class SparkSubmit extends Logging {
confKey = SGX_MEM_SIZE.key),
OptionAssigner(args.sgxJvmMem, KUBERNETES, ALL_DEPLOY_MODES,
confKey = SGX_JVM_MEM_SIZE.key),
OptionAssigner(args.sgxLogLevel, KUBERNETES, ALL_DEPLOY_MODES,
confKey = SGX_LOG_LEVEL.key),

// An internal option used only for spark-shell to add user jars to repl's classloader,
// previously it uses "spark.jars" or "spark.yarn.dist.jars" which now may be pointed to
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
sgxJvmMem = Option(sgxJvmMem)
.getOrElse(sparkProperties.get(config.SGX_JVM_MEM_SIZE.key).orNull)
sgxLogLevel = Option(sgxLogLevel)
.getOrElse(sparkProperties.get(config.SGX_JVM_LOG_LEVEL.key).orNull)
.getOrElse(sparkProperties.get(config.SGX_LOG_LEVEL.key).orNull)

// Global defaults. These should be kept to a minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2045,7 +2045,7 @@ package object config {
.bytesConf(ByteUnit.GiB)
.createWithDefaultString("16g")

private[spark] val SGX_JVM_LOG_LEVEL =
private[spark] val SGX_LOG_LEVEL =
ConfigBuilder("spark.kubernetes.sgx.log.level")
.doc("the log level of sgx executors, e.g. all, error, debug")
.version("3.1.2")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -356,7 +356,7 @@ object ResourceProfile extends Logging {
val sgxEnabled = conf.get(SGX_ENABLED)
val sgxMem = conf.get(SGX_MEM_SIZE)
val sgxJvmMem = conf.get(SGX_JVM_MEM_SIZE)
val sgxLogLevel = conf.get(SGX_JVM_LOG_LEVEL)
val sgxLogLevel = conf.get(SGX_LOG_LEVEL)
defaultProfileExecutorResources =
Some(DefaultProfileExecutorResources(cores, memory, offheapMem, pysparkMem,
overheadMem, customResources, sgxEnabled, sgxMem, sgxJvmMem, sgxLogLevel))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,7 @@ private[spark] object Constants {
val ENV_SGX_ENABLED = "SGX_ENABLED"
val ENV_SGX_MEM_SIZE = "SGX_MEM_SIZE"
val ENV_SGX_JVM_MEM_SIZE = "SGX_JVM_MEM_SIZE"
val ENV_SGX_LOG_LEVEL = "SGX_LOG_LEVEL"
// Spark app configs for containers
val SPARK_CONF_VOLUME_DRIVER = "spark-conf-volume-driver"
val SPARK_CONF_VOLUME_EXEC = "spark-conf-volume-exec"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,8 @@ private[spark] class BasicExecutorFeatureStep(
(ENV_RESOURCE_PROFILE_ID, resourceProfile.id.toString),
(ENV_SGX_ENABLED, execResources.sgxEnabled.toString),
(ENV_SGX_MEM_SIZE, execResources.sgxMemGiB + "G"),
(ENV_SGX_JVM_MEM_SIZE, execResources.sgxJvmMemGiB + "G")
(ENV_SGX_JVM_MEM_SIZE, execResources.sgxJvmMemGiB + "G"),
(ENV_SGX_LOG_LEVEL, execResources.sgxLogLevel.toString)
) ++ kubernetesConf.environment).map { case (k, v) =>
new EnvVarBuilder()
.withName(k)
Expand Down

0 comments on commit ab17883

Please sign in to comment.