-
Notifications
You must be signed in to change notification settings - Fork 26
/
SparkMetrics.scala
178 lines (159 loc) · 7.01 KB
/
SparkMetrics.scala
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.utils.instrumentation
import com.netflix.servo.tag.Tag
import com.netflix.servo.tag.Tags.newTag
import java.io.PrintWriter
import org.bdgenomics.utils.instrumentation.InstrumentationFunctions.{ formatNanos, renderTable }
import org.bdgenomics.utils.instrumentation.ServoTimer._
import org.bdgenomics.utils.instrumentation.SparkMetrics._
import org.bdgenomics.utils.instrumentation.ValueExtractor._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
/**
 * Allows metrics for Spark to be captured and rendered in tabular form.
 *
 * Subclasses register [[TaskTimer]]s via the `taskTimer` factory method; timings can then
 * be rendered with `print`, broken down overall, by host, and by stage.
 */
@deprecated("to be removed in version 0.3.0")
abstract class SparkMetrics {
  // All timers created through the `taskTimer` factory method
  private val taskTimers = new mutable.ArrayBuffer[TaskTimer]()
  // Maps a stage ID to its human-readable name, populated via `mapStageIdToName`
  private val stageIdToName = new mutable.HashMap[Int, String]()
  // Maps the stage ID and name to the duration of the stage in nanoseconds
  val stageTimes = new mutable.ArrayBuffer[StageTiming]()
  /**
   * Renders three tables to the passed-in writer: overall task timings (sorted by
   * descending total time), timings by host, and timings by stage. The host and stage
   * tables reuse the sort order established by the overall table.
   */
  def print(out: PrintWriter): Unit = {
    val overallMonitors = taskTimers.map(_.getOverallTimings).sortBy(-_.getTotalTime)
    val ordering = getOrdering(overallMonitors)
    val monitorsByHost = taskTimers.flatMap(_.getHostTimings).sorted(ordering)
    val monitorsByStageName = taskTimers.flatMap(_.getStageTimings).map(addStageName).sorted(ordering)
    out.print(renderTable("Task Timings", overallMonitors, createTaskHeader()))
    out.println()
    out.print(renderTable("Task Timings By Host", monitorsByHost,
      createHeaderWith(TableHeader(name = "Host",
        valueExtractor = forTagValueWithKey(HostTagKey),
        alignment = Alignment.Left), 1)))
    out.println()
    out.print(renderTable("Task Timings By Stage", monitorsByStageName,
      createHeaderWith(TableHeader(name = "Stage ID & Name",
        valueExtractor = forTagValueWithKey(StageNameTagKey),
        alignment = Alignment.Left), 1)))
  }
  /**
   * Associates a stage ID with a stage name, for rendering in the by-stage table.
   * Note: was written with procedure syntax (deprecated in 2.13, removed in Scala 3);
   * now uses an explicit `: Unit =`.
   */
  def mapStageIdToName(stageId: Int, stageName: String): Unit = {
    stageIdToName.put(stageId, stageName)
  }
  /**
   * Records the duration of a completed stage. Declared `: Unit` explicitly so the
   * internal mutable buffer is no longer leaked as an accidental return value.
   */
  def recordStageDuration(stageId: Int, stageName: Option[String], duration: Duration): Unit = {
    stageTimes += StageTiming(stageId, stageName, duration)
  }
  /**
   * Subclasses should call this method to create a new [[TaskTimer]] and to register it
   */
  protected def taskTimer(name: String): TaskTimer = {
    val timer = new TaskTimer(name)
    taskTimers += timer
    timer
  }
  /**
   * Uses the sort order from the names of the passed-in timers to create an [[Ordering]].
   * Timers whose names are not in the passed-in sequence (or that tie) fall back to
   * descending total time.
   */
  private def getOrdering(timers: Seq[ServoTimer]): Ordering[ServoTimer] = {
    val sortOrderMap = getSortOrder(timers)
    object TimerOrdering extends Ordering[ServoTimer] {
      def compare(a: ServoTimer, b: ServoTimer): Int = {
        val sortOrderA = sortOrderMap.get(a.getName)
        val sortOrderB = sortOrderMap.get(b.getName)
        (sortOrderA, sortOrderB) match {
          // `compare` instead of subtraction/negation: immune to Int overflow
          case (Some(oA), Some(oB)) if oA != oB => oA compare oB
          case _                                => b.getTotalTime compare a.getTotalTime
        }
      }
    }
    TimerOrdering
  }
  /**
   * Gets a map of the timer name to its (1-based) order in the passed-in list
   */
  private def getSortOrder(timers: Seq[ServoTimer]): Map[String, Int] = {
    // zipWithIndex replaces the original mutable `var` counter; order values stay 1-based
    timers.zipWithIndex.map { case (timer, index) => (timer.getName, index + 1) }.toMap
  }
  /**
   * Tags the timer with a "stageId: stageName" label so the by-stage table can render it.
   */
  private def addStageName(stageIdAndTimer: (Int, ServoTimer)): ServoTimer = {
    val stageIdAndName = formatStageIdAndName(stageIdAndTimer._1, stageIdToName.get(stageIdAndTimer._1))
    stageIdAndTimer._2.addTag(newTag(StageNameTagKey, stageIdAndName))
    stageIdAndTimer._2
  }
  /** Formats a stage label as "<id>: <name>", using "unknown" when the name is absent. */
  private def formatStageIdAndName(stageId: Int, stageName: Option[String]): String = {
    s"$stageId: ${stageName.getOrElse("unknown")}"
  }
  /** Returns the standard task header with an extra column inserted at `position`. */
  private def createHeaderWith(header: TableHeader, position: Int): ArrayBuffer[TableHeader] = {
    val baseHeader = createTaskHeader()
    baseHeader.insert(position, header)
    baseHeader
  }
  /** The standard column set shared by all three tables: metric name plus timing stats. */
  private def createTaskHeader(): ArrayBuffer[TableHeader] = {
    ArrayBuffer(
      TableHeader(name = "Metric", valueExtractor = forTagValueWithKey(NameTagKey), alignment = Alignment.Left),
      TableHeader(name = "Total Time", valueExtractor = forMonitorMatchingTag(TotalTimeTag), formatFunction = Some(formatNanos)),
      TableHeader(name = "Count", valueExtractor = forMonitorMatchingTag(CountTag)),
      TableHeader(name = "Mean", valueExtractor = forMonitorMatchingTag(MeanTag), formatFunction = Some(formatNanos)),
      TableHeader(name = "Min", valueExtractor = forMonitorMatchingTag(MinTag), formatFunction = Some(formatNanos)),
      TableHeader(name = "Max", valueExtractor = forMonitorMatchingTag(MaxTag), formatFunction = Some(formatNanos)))
  }
}
/**
 * Tag keys used to label [[ServoTimer]]s so table columns can be extracted by key.
 */
@deprecated("to be removed in version 0.3.0")
protected object SparkMetrics {
  // Tag key for the executor/host a timing was recorded on (see TaskTimer.+=)
  final val HostTagKey = "host"
  // Tag key for the "<stageId>: <stageName>" label attached in addStageName
  final val StageNameTagKey = "stageName"
  // Tag key for the numeric stage ID, stored as a string (see TaskTimer.+=)
  final val StageIdTagKey = "stageId"
}
/**
 * Accumulates timings (in milliseconds) for one named operation, tracking an
 * overall total plus per-host and per-stage breakdowns keyed from the implicit
 * [[SparkTaskContext]] supplied to `+=`.
 */
@deprecated("to be removed in version 0.3.0")
class TaskTimer(name: String) {
  // Aggregate across all hosts and stages
  val overallTimings = buildTimer(name)
  // Executor ID -> timer, created lazily on first recording for that host
  val timingsByHost = mutable.HashMap.empty[String, ServoTimer]
  // Stage ID -> timer, created lazily on first recording for that stage
  val timingsByStageId = mutable.HashMap.empty[Int, ServoTimer]

  /** Records one timing against the overall, per-host, and per-stage timers. */
  def +=(millisecondTiming: Long)(implicit taskContext: SparkTaskContext) = {
    val host = taskContext.executorId
    val stage = taskContext.stageId
    recordMillis(overallTimings, millisecondTiming)
    val hostTimer = timingsByHost.getOrElseUpdate(host, buildTimer(name, newTag(HostTagKey, host)))
    recordMillis(hostTimer, millisecondTiming)
    val stageTimer = timingsByStageId.getOrElseUpdate(stage, buildTimer(name, newTag(StageIdTagKey, stage.toString)))
    recordMillis(stageTimer, millisecondTiming)
  }

  /** The aggregate timer covering every recorded timing. */
  def getOverallTimings: ServoTimer = overallTimings

  /** One timer per host that has recorded at least one timing. */
  def getHostTimings: Iterable[ServoTimer] = timingsByHost.values

  /** (stageId, timer) pairs for every stage that has recorded at least one timing. */
  def getStageTimings: Seq[(Int, ServoTimer)] = timingsByStageId.toSeq

  // Single place to record a millisecond value into a timer
  private def recordMillis(timer: ServoTimer, millis: Long) = timer.recordMillis(millis)

  // Timer factories: untagged, and tagged (host or stage)
  private def buildTimer(timerName: String): ServoTimer = new ServoTimer(timerName)
  private def buildTimer(timerName: String, tag: Tag): ServoTimer = new ServoTimer(timerName, tag)
}
/**
 * Identifies where a task timing was recorded: the executor it ran on and the
 * Spark stage it belongs to. Supplied implicitly to [[TaskTimer]]'s `+=`.
 */
@deprecated("to be removed in version 0.3.0")
case class SparkTaskContext(executorId: String, stageId: Int)
/**
 * The wall-clock duration of one completed Spark stage, with its ID and
 * optional name. Collected in `SparkMetrics.recordStageDuration`.
 */
@deprecated("to be removed in version 0.3.0")
case class StageTiming(stageId: Int, stageName: Option[String], duration: Duration)