Skip to content

Commit

Permalink
Moved utils functions to UIUtils.
Browse files Browse the repository at this point in the history
  • Loading branch information
tdas committed Apr 11, 2014
1 parent 6de06b0 commit 914b8ff
Show file tree
Hide file tree
Showing 6 changed files with 197 additions and 155 deletions.
108 changes: 99 additions & 9 deletions core/src/main/scala/org/apache/spark/ui/UIUtils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,13 @@
package org.apache.spark.ui

import java.text.SimpleDateFormat
import java.util.Date
import java.util.{Locale, Date}

import scala.xml.Node
import org.apache.spark.Logging

/** Utility functions for generating XML pages with spark content. */
private[spark] object UIUtils {
private[spark] object UIUtils extends Logging {

// SimpleDateFormat is not thread-safe. Don't expose it to avoid improper use.
private val dateFormat = new ThreadLocal[SimpleDateFormat]() {
Expand All @@ -49,6 +50,80 @@ private[spark] object UIUtils {
"%.1f h".format(hours)
}

/** Generate a verbose human-readable string representing a duration such as "5 second 35 ms" */
def formatDurationVerbose(ms: Long): String = {
  try {
    val second = 1000L
    val minute = 60 * second
    val hour = 60 * minute
    val day = 24 * hour
    val week = 7 * day
    val year = 365 * day

    // Pluralize a count of `unit`; a zero count renders as "" so it drops out of the output.
    def toString(num: Long, unit: String): String = {
      if (num == 0) {
        ""
      } else if (num == 1) {
        s"$num $unit"
      } else {
        s"$num ${unit}s"
      }
    }

    // Milliseconds are omitted when the duration is an exact number of seconds.
    val millisecondsString = if (ms >= second && ms % second == 0) "" else s"${ms % second} ms"
    val secondString = toString((ms % minute) / second, "second")
    val minuteString = toString((ms % hour) / minute, "minute")
    val hourString = toString((ms % day) / hour, "hour")
    val dayString = toString((ms % week) / day, "day")
    val weekString = toString((ms % year) / week, "week")
    val yearString = toString(ms / year, "year")

    // Each entry pairs an upper bound with the string to use when the duration is below it.
    // Only the two or three most significant units are shown at each granularity.
    val formatsByLimit = Seq(
      second -> millisecondsString,
      minute -> s"$secondString $millisecondsString",
      hour -> s"$minuteString $secondString",
      day -> s"$hourString $minuteString $secondString",
      week -> s"$dayString $hourString $minuteString",
      year -> s"$weekString $dayString $hourString"
    )

    // Pick the first granularity whose limit exceeds the duration; fall back to the
    // year-based format for durations of a year or more. Using collectFirst avoids the
    // nonlocal `return` from inside a lambda (which is implemented by throwing
    // NonLocalReturnControl and is deprecated in Scala 3).
    formatsByLimit.collectFirst {
      case (durationLimit, durationString) if ms < durationLimit => durationString
    }.getOrElse(s"$yearString $weekString $dayString")
  } catch {
    case e: Exception =>
      logError("Error converting time to string", e)
      // On any failure, degrade to a blank string rather than break the page rendering.
      ""
  }
}

/** Generate a human-readable string representing a number (e.g. 100 K) */
def formatNumber(records: Double): String = {
  // Scale thresholds, largest first. A unit is chosen once the value reaches TWICE the
  // unit's magnitude, so e.g. 1,500,000 renders as "1500.0 K" rather than "1.5 M".
  val unitsByScale = Seq(1e12 -> " T", 1e9 -> " B", 1e6 -> " M", 1e3 -> " K")

  val (value, unit) = unitsByScale
    .find { case (scale, _) => records >= 2 * scale }
    .map { case (scale, suffix) => (records / scale, suffix) }
    .getOrElse((records, ""))

  // Format with a US locale so the decimal separator is stable regardless of JVM default.
  "%.1f%s".formatLocal(Locale.US, value, unit)
}

// Yarn has to go through a proxy so the base uri is provided and has to be on all links
val uiRoot : String = Option(System.getenv("APPLICATION_WEB_PROXY_BASE")).getOrElse("")

Expand Down Expand Up @@ -146,21 +221,36 @@ private[spark] object UIUtils {
/** Returns an HTML table constructed by generating a row for each object in a sequence. */
def listingTable[T](
    headers: Seq[String],
    generateDataRow: T => Seq[Node],
    data: Seq[T],
    fixedWidth: Boolean = false): Seq[Node] = {

  var tableClass = "table table-bordered table-striped table-condensed sortable"
  if (fixedWidth) {
    tableClass += " table-fixed"
  }

  // With fixedWidth, give every column an equal percentage width; otherwise let the
  // browser size columns automatically.
  val colWidth = 100.toDouble / headers.size
  val colWidthAttr = if (fixedWidth) colWidth + "%" else ""

  val headerRow: Seq[Node] = {
    // if none of the headers have "\n" in them
    if (headers.forall(!_.contains("\n"))) {
      // represent header as simple text
      headers.map(h => <th width={colWidthAttr}>{h}</th>)
    } else {
      // represent header text as list while respecting "\n"
      headers.map { case h =>
        <th width={colWidthAttr}>
          <ul class="unstyled">
            { h.split("\n").map { case t => <li> {t} </li> } }
          </ul>
        </th>
      }
    }
  }

  <table class={tableClass}>
    <thead>{headerRow}</thead>
    <tbody>
      {data.map(r => generateDataRow(r))}
    </tbody>
  </table>
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -157,8 +157,7 @@ class StreamingContext private[streaming] (

private[streaming] val waiter = new ContextWaiter

private[streaming] val ui = new StreamingTab(this)
ui.start()
private[streaming] val uiTab = new StreamingTab(this)

/** Enumeration to identify current state of the StreamingContext */
private[streaming] object StreamingContextState extends Enumeration {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ import org.apache.spark.streaming.scheduler.StreamingListenerBatchSubmitted
import org.apache.spark.util.Distribution


private[ui] class StreamingProgressListener(ssc: StreamingContext) extends StreamingListener {
private[ui] class StreamingJobProgressListener(ssc: StreamingContext) extends StreamingListener {

private val waitingBatchInfos = new HashMap[Time, BatchInfo]
private val runningBatchInfos = new HashMap[Time, BatchInfo]
Expand Down
Loading

0 comments on commit 914b8ff

Please sign in to comment.