
Commit: address CodingCat's comments

lianhuiwang committed Dec 1, 2015
1 parent 16e175d commit 7b244ff
Showing 2 changed files with 13 additions and 6 deletions.

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsPage.scala (6 additions, 6 deletions)

@@ -69,15 +69,15 @@ private[ui] class ExecutorsPage(
   }
 
   private def listingExecTable(storageStatusList: Seq[StorageStatus], isActive: Boolean)
-      : Seq[Node] = {
+    : Seq[Node] = {
     val maxMem = storageStatusList.map(_.maxMem).sum
     val memUsed = storageStatusList.map(_.memUsed).sum
     val diskUsed = storageStatusList.map(_.diskUsed).sum
     val execInfo = for (statusId <- 0 until storageStatusList.size) yield
       ExecutorsPage.getExecInfo(listener, statusId, isActive)
     val execInfoSorted = execInfo.sortBy(_.id)
     val logsExist = execInfo.filter(_.executorLogs.nonEmpty).nonEmpty
-    val isShowThreadDump = threadDumpEnabled && isActive
+    val shouldShowThreadDump = threadDumpEnabled && isActive
 
     // scalastyle:off
     <div class="row-fluid">
@@ -115,10 +115,10 @@ private[ui] class ExecutorsPage(
           </span>
         </th>
         {if (logsExist) <th class="sorttable_nosort">Logs</th> else Seq.empty}
-        {if (isShowThreadDump) <th class="sorttable_nosort">Thread Dump</th> else Seq.empty}
+        {if (shouldShowThreadDump) <th class="sorttable_nosort">Thread Dump</th> else Seq.empty}
       </thead>
       <tbody>
-        {execInfoSorted.map(execRow(_, logsExist, isShowThreadDump))}
+        {execInfoSorted.map(execRow(_, logsExist, shouldShowThreadDump))}
       </tbody>
     </table>
   </div>
@@ -127,8 +127,8 @@ private[ui] class ExecutorsPage(
   }
 
   /** Render an HTML row representing an executor */
-  private def execRow(info: ExecutorSummary, logsExist: Boolean, isShowThreadDump: Boolean)
-      : Seq[Node] = {
+  private def execRow(info: ExecutorSummary, logsExist: Boolean, shouldShowThreadDump: Boolean)
+    : Seq[Node] = {
     val maximumMemory = info.maxMemory
     val memoryUsed = info.memoryUsed
     val diskUsed = info.diskUsed
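
Aside (not part of the commit): a minimal, self-contained sketch of the pattern the diff above relies on, where a should* boolean such as shouldShowThreadDump decides whether an optional column is emitted from a Scala XML literal. It assumes a Scala version where XML literals are available (e.g. 2.11, as used by Spark at the time); the object and method names are illustrative only.

import scala.xml.Node

object OptionalColumnSketch {
  // Render a table header; the Thread Dump column is included only when the flag is set.
  def header(shouldShowThreadDump: Boolean): Seq[Node] = {
    <thead>
      <th>Executor ID</th>
      {if (shouldShowThreadDump) <th class="sorttable_nosort">Thread Dump</th> else Seq.empty}
    </thead>
  }

  def main(args: Array[String]): Unit = {
    println(header(shouldShowThreadDump = true))   // includes the Thread Dump column
    println(header(shouldShowThreadDump = false))  // omits it
  }
}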

docs/configuration.md (7 additions, 0 deletions)

@@ -565,6 +565,13 @@ Apart from these, the following properties are also available, and may be useful
     How many finished batches the Spark UI and status APIs remember before garbage collecting.
   </td>
 </tr>
+<tr>
+  <td><code>spark.ui.retainedDeadExecutors</code></td>
+  <td>100</td>
+  <td>
+    How many dead executors the Spark UI and status APIs remember before garbage collecting.
+  </td>
+</tr>
 </table>
 
 #### Compression and Serialization
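
Aside (not part of the commit): a minimal sketch of setting the newly documented spark.ui.retainedDeadExecutors option from application code, assuming a standard SparkConf / SparkContext setup; the app name, master URL, and the value 50 are illustrative.

import org.apache.spark.{SparkConf, SparkContext}

object RetainedDeadExecutorsExample {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("retained-dead-executors-example")
      .setMaster("local[*]")
      // Keep metadata for at most 50 dead executors in the UI and status APIs,
      // instead of the documented default of 100.
      .set("spark.ui.retainedDeadExecutors", "50")

    val sc = new SparkContext(conf)
    try {
      // Run a trivial job so the application actually starts.
      println(sc.parallelize(1 to 10).sum())
    } finally {
      sc.stop()
    }
  }
}

The same setting can also be supplied at launch time, e.g. spark-submit --conf spark.ui.retainedDeadExecutors=50.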

0 comments on commit 7b244ff
