diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index a2a4d9a65729c..b138e8cc56b52 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -50,8 +50,8 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, actorSystem: A
   val conf = scheduler.sc.conf
   private val timeout = AkkaUtils.askTimeout(conf)
   private val akkaFrameSize = AkkaUtils.maxFrameSizeBytes(conf)
-  var minRegisteredRatio = conf.getDouble("spark.scheduler.minRegisteredRatio", 0)
-  val maxRegisteredWaitingTime = conf.getInt("spark.scheduler.maxRegisteredWaitingTime", 10000)
+  var minRegisteredRatio = conf.getDouble("spark.scheduler.minRegisteredExecutorsRatio", 0)
+  val maxRegisteredWaitingTime = conf.getInt("spark.scheduler.maxRegisteredExecutorsWaitingTime", 30000)
   val createTime = System.currentTimeMillis()
   var ready = if (minRegisteredRatio <= 0) true else false
 
diff --git a/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterSchedulerBackend.scala b/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterSchedulerBackend.scala
index 3165b456c1663..0b6ca69a241b3 100644
--- a/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterSchedulerBackend.scala
+++ b/yarn/common/src/main/scala/org/apache/spark/scheduler/cluster/YarnClusterSchedulerBackend.scala
@@ -17,20 +17,19 @@
 package org.apache.spark.scheduler.cluster
 
+import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.{Logging, SparkContext}
 import org.apache.spark.deploy.yarn.ApplicationMasterArguments
 import org.apache.spark.scheduler.TaskSchedulerImpl
 
-import scala.collection.mutable.ArrayBuffer
-
 private[spark] class YarnClusterSchedulerBackend(
     scheduler: TaskSchedulerImpl,
     sc: SparkContext)
   extends CoarseGrainedSchedulerBackend(scheduler, sc.env.actorSystem)
   with Logging {
 
-  private[spark] def addArg(optionName: String, envVar: String, sysProp: String,
+  private def addArg(optionName: String, envVar: String, sysProp: String,
       arrayBuf: ArrayBuffer[String]) {
     if (System.getenv(envVar) != null) {
       arrayBuf += (optionName, System.getenv(envVar))