Rename the function removedCount to calculateNumberToRemove.
wangyum committed Jan 21, 2017
1 parent 89721cd commit 8e4954f
Showing 2 changed files with 7 additions and 7 deletions.
@@ -142,7 +142,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
  /** If stages is too large, remove and garbage collect old stages */
  private def trimStagesIfNecessary(stages: ListBuffer[StageInfo]) = synchronized {
    if (stages.size > retainedStages) {
-      val toRemove = removedCount(stages.size, retainedStages)
+      val toRemove = calculateNumberToRemove(stages.size, retainedStages)
      stages.take(toRemove).foreach { s =>
        stageIdToData.remove((s.stageId, s.attemptId))
        stageIdToInfo.remove(s.stageId)
@@ -154,7 +154,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
  /** If jobs is too large, remove and garbage collect old jobs */
  private def trimJobsIfNecessary(jobs: ListBuffer[JobUIData]) = synchronized {
    if (jobs.size > retainedJobs) {
-      val toRemove = removedCount(jobs.size, retainedJobs)
+      val toRemove = calculateNumberToRemove(jobs.size, retainedJobs)
      jobs.take(toRemove).foreach { job =>
        // Remove the job's UI data, if it exists
        jobIdToData.remove(job.jobId).foreach { removedJob =>
@@ -410,7 +410,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
      // If Tasks is too large, remove and garbage collect old tasks
      if (stageData.taskData.size > retainedTasks) {
        stageData.taskData = stageData.taskData.drop(
-          removedCount(stageData.taskData.size, retainedTasks))
+          calculateNumberToRemove(stageData.taskData.size, retainedTasks))
      }

      for (
@@ -434,7 +434,7 @@ class JobProgressListener(conf: SparkConf) extends SparkListener with Logging {
  /**
   * Remove at least (maxRetained / 10) items to reduce friction.
   */
-  def removedCount(dataSize: Int, retainedSize: Int): Int = {
+  private def calculateNumberToRemove(dataSize: Int, retainedSize: Int): Int = {
    math.max(retainedSize / 10, dataSize - retainedSize)
  }

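For reference, the renamed helper removes either the exact overflow or a tenth of the retained limit, whichever is larger, so a buffer that has just crossed its limit is not trimmed again on every subsequent insertion. Below is a minimal standalone Scala sketch, not part of this commit and with a made-up object name, that reproduces the arithmetic:

object TrimMathSketch {
  // Same formula as calculateNumberToRemove above: drop at least a tenth of
  // the retained limit, and at least the amount by which the buffer exceeds it.
  def calculateNumberToRemove(dataSize: Int, retainedSize: Int): Int =
    math.max(retainedSize / 10, dataSize - retainedSize)

  def main(args: Array[String]): Unit = {
    // 1001 buffered stages with retainedStages = 1000: the overflow is only 1,
    // but 100 items are removed, so the next ~100 insertions skip trimming.
    println(calculateNumberToRemove(1001, 1000)) // 100
    // 1500 buffered stages: the overflow (500) exceeds a tenth of the limit,
    // so exactly the excess is removed.
    println(calculateNumberToRemove(1500, 1000)) // 500
  }
}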
6 changes: 3 additions & 3 deletions docs/configuration.md
@@ -671,23 +671,23 @@ Apart from these, the following properties are also available, and may be useful
  <td>1000</td>
  <td>
    How many jobs the Spark UI and status APIs remember before garbage collecting.
-    This is a target maximum and that fewer elements may be retained in some circumstances.
+    This is a target maximum, and fewer elements may be retained in some circumstances.
  </td>
</tr>
<tr>
  <td><code>spark.ui.retainedStages</code></td>
  <td>1000</td>
  <td>
    How many stages the Spark UI and status APIs remember before garbage collecting.
-    This is a target maximum and that fewer elements may be retained in some circumstances.
+    This is a target maximum, and fewer elements may be retained in some circumstances.
  </td>
</tr>
<tr>
  <td><code>spark.ui.retainedTasks</code></td>
  <td>100000</td>
  <td>
    How many tasks the Spark UI and status APIs remember before garbage collecting.
-    This is a target maximum and that fewer elements may be retained in some circumstances.
+    This is a target maximum, and fewer elements may be retained in some circumstances.
  </td>
</tr>
<tr>

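These three properties are the ones touched by the doc change above; since they are target maximums, an application that wants a lighter-weight UI can lower them up front. A hedged Scala sketch, not part of the commit, with an illustrative object name, app name, and values:

import org.apache.spark.SparkConf

object UiRetentionConfSketch {
  // Lower the UI retention targets; fewer elements may still be retained in
  // some circumstances, since these are target maximums rather than hard limits.
  val conf: SparkConf = new SparkConf()
    .setAppName("ui-retention-example")
    .set("spark.ui.retainedJobs", "500")
    .set("spark.ui.retainedStages", "500")
    .set("spark.ui.retainedTasks", "50000")
}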