forked from apache/spark
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Created a monotask to serialize macrotask results.
Created a new ComputeMonotask called ResultSerializationMonotask that serializes the macrotask result generated by an ExecutionMonotask. There is exactly one ResultSerializationMonotask per macrotask, and it depends on all of the "leaves" of the DAG of monotasks (this means that it is always the only sink in the DAG of monotasks, and that it will be the last monotask to run). The reason for this change is that we need to refrain from calling TaskMetrics.setMetricsOnTaskCompletion() until all of a macrotask's main compute/disk/network monotasks have finished executing, otherwise the TaskMetrics will not accurately reflect the resource usage over the entire time that the macrotask was executing. See issue apache#22.
- Loading branch information
Christopher Canel
committed
Jun 8, 2015
1 parent
cb7d5ea
commit 44357d7
Showing
14 changed files
with
237 additions
and
205 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
76 changes: 76 additions & 0 deletions
76
core/src/main/scala/org/apache/spark/monotasks/compute/ResultSerializationMonotask.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
/* | ||
* Copyright 2014 The Regents of the University of California | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.monotasks.compute | ||
|
||
import java.nio.ByteBuffer | ||
|
||
import org.apache.spark.{Accumulators, Logging, TaskContextImpl} | ||
import org.apache.spark.scheduler.{DirectTaskResult, IndirectTaskResult} | ||
import org.apache.spark.storage.{BlockId, StorageLevel, TaskResultBlockId} | ||
|
||
/** | ||
* ResultSerializationMonotasks are responsible for serializing the result of a macrotask and the | ||
* associated metrics. The DAG for a macrotask always contains exactly one | ||
* ResultSerializationMonotask, and it is run after all of the macrotask's other monotasks have | ||
* completed (because otherwise the metrics computed by ResultSerializationMonotask would not be | ||
* complete). | ||
*/ | ||
class ResultSerializationMonotask(context: TaskContextImpl, resultBlockId: BlockId)
  extends ComputeMonotask(context) with Logging {

  /**
   * Serializes the macrotask result stored in the BlockManager under `resultBlockId`, along with
   * the macrotask's metrics and accumulator updates, and returns the bytes that should be sent to
   * the driver. If the serialized result fits within `context.maximumResultSizeBytes`, a
   * serialized DirectTaskResult is returned; otherwise the result is cached in the BlockManager
   * and a serialized IndirectTaskResult referencing that block is returned instead.
   *
   * @throws IllegalStateException if no block for `resultBlockId` is stored in the BlockManager.
   */
  override def execute(): Option[ByteBuffer] = {
    val blockManager = context.localDagScheduler.blockManager
    val result = blockManager.getSingle(resultBlockId).getOrElse {
      throw new IllegalStateException(s"Deserialized result for macrotask " +
        s"${context.taskAttemptId} could not be found in the BlockManager " +
        s"using blockId $resultBlockId.")
    }
    // The in-memory copy is no longer needed once the result has been fetched here.
    // NOTE(review): second argument is an unnamed boolean flag — confirm its meaning against
    // BlockManager.removeBlockFromMemory before changing it.
    blockManager.removeBlockFromMemory(resultBlockId, false)
    context.markTaskCompleted()

    // The mysterious choice of which serializer to use when is written to be consistent with
    // Spark.
    val closureSerializer = context.env.closureSerializer.newInstance()
    val resultSer = context.env.serializer.newInstance()

    val serializationStartTime = System.currentTimeMillis()
    val valueBytes = resultSer.serialize(result)
    context.taskMetrics.setResultSerializationTime(
      System.currentTimeMillis() - serializationStartTime)
    accountForComputeTime()

    // This monotask is the sink of the macrotask's DAG, so all other monotasks have finished and
    // the metrics now reflect the macrotask's complete resource usage.
    context.taskMetrics.setMetricsOnTaskCompletion()
    val accumulatorValues = Accumulators.getValues
    val directResult = new DirectTaskResult(valueBytes, accumulatorValues, context.taskMetrics)
    val serializedDirectResult = closureSerializer.serialize(directResult)
    val resultSize = serializedDirectResult.limit

    val serializedResult =
      if (context.maximumResultSizeBytes > 0 && resultSize > context.maximumResultSizeBytes) {
        // The result is too large to ship directly, so cache it in the BlockManager and send the
        // driver a reference to the cached block instead.
        val blockId = TaskResultBlockId(context.taskAttemptId)
        blockManager.cacheBytes(
          blockId, serializedDirectResult, StorageLevel.MEMORY_AND_DISK_SER)
        logInfo(s"Finished TID ${context.taskAttemptId}. $resultSize bytes result will be sent " +
          "via the BlockManager.")
        closureSerializer.serialize(new IndirectTaskResult[Any](blockId, resultSize))
      } else {
        logInfo(s"Finished TID ${context.taskAttemptId}. $resultSize bytes result will be sent " +
          "directly to driver.")
        serializedDirectResult
      }
    Some(serializedResult)
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.