From f749a5f79e33881235afb4b6e6abb43630eb23cf Mon Sep 17 00:00:00 2001
From: Lianhui Wang
Date: Tue, 2 Feb 2016 14:51:56 +0800
Subject: [PATCH] update with master

---
 .../spark/status/api/v1/ExecutorListResource.scala      |  5 ++---
 core/src/main/scala/org/apache/spark/ui/SparkUI.scala   | 10 +++++-----
 .../scala/org/apache/spark/ui/exec/ExecutorsPage.scala  |  4 +---
 3 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
index 97be498871a57..6ca59c2f3caeb 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/ExecutorListResource.scala
@@ -31,9 +31,8 @@ private[v1] class ExecutorListResource(ui: SparkUI) {
     listener.synchronized {
       // The follow codes should be protected by `listener` to make sure no executors will be
       // removed before we query their status. See SPARK-12784.
-      val activeStorageStatusList = listener.activeStorageStatusList
-      val deadStorageStatusList = listener.deadStorageStatusList
-      (0 until activeStorageStatusList.size).map { statusId =>
+      val storageStatusList = listener.activeStorageStatusList
+      (0 until storageStatusList.size).map { statusId =>
         ExecutorsPage.getExecInfo(listener, statusId, isActive = true)
       }
     }
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 6b0ef87c0a7d7..ffb047efc1dae 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -21,18 +21,18 @@ import java.util.{Date, ServiceLoader}
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationAttemptInfo, ApplicationInfo,
-  UIRoot}
-import org.apache.spark.util.Utils
 import org.apache.spark.{Logging, SecurityManager, SparkConf, SparkContext}
 import org.apache.spark.scheduler._
+import org.apache.spark.status.api.v1.{ApiRootResource, ApplicationAttemptInfo, ApplicationInfo,
+UIRoot}
 import org.apache.spark.storage.StorageStatusListener
 import org.apache.spark.ui.JettyUtils._
 import org.apache.spark.ui.env.{EnvironmentListener, EnvironmentTab}
 import org.apache.spark.ui.exec.{ExecutorsListener, ExecutorsTab}
-import org.apache.spark.ui.jobs.{JobsTab, JobProgressListener, StagesTab}
-import org.apache.spark.ui.storage.{StorageListener, StorageTab}
+import org.apache.spark.ui.jobs.{JobProgressListener, JobsTab, StagesTab}
 import org.apache.spark.ui.scope.RDDOperationGraphListener
+import org.apache.spark.ui.storage.{StorageListener, StorageTab}
+import org.apache.spark.util.Utils
 
 /**
  * Top level user interface for a Spark application.
diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
index 1f2b56d864630..786258c982f87 100644
--- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala
@@ -61,7 +61,6 @@ private[ui] class ExecutorsPage(
         for (statusId <- 0 until listener.activeStorageStatusList.size)
           yield ExecutorsPage.getExecInfo(listener, statusId, isActive = true)
       }
-      val deadExecutorInfo = listener.deadStorageStatusList
       val _deadExecutorInfo = {
         for (statusId <- 0 until listener.deadStorageStatusList.size)
           yield ExecutorsPage.getExecInfo(listener, statusId, isActive = false)
@@ -298,7 +297,7 @@ private[ui] class ExecutorsPage(
         } else {
           ""
         }
-      }>{failedTasks}</td>
+      }>{failedTasks}</td>
       <td>{completedTasks}</td>
       <td>{totalTasks}</td>