Commit

Small code style changes
massie committed Jun 8, 2015
1 parent b70c945 commit 208b7a5
Showing 2 changed files with 3 additions and 3 deletions.
HashShuffleReader.scala

@@ -38,7 +38,7 @@ private[spark] class HashShuffleReader[K, C](
   /** Read the combined key-values for this reduce task */
   override def read(): Iterator[Product2[K, C]] = {
     val blockStreams = BlockStoreShuffleFetcher.fetchBlockStreams(
-      handle.shuffleId, startPartition, context)
+      handle.shuffleId, startPartition, context)
 
     // Wrap the streams for compression based on configuration
     val wrappedStreams = blockStreams.map { case (blockId, inputStream) =>

(The removed and added lines differ only in leading whitespace, which this extraction does not preserve; consistent with the commit message, this appears to be an indentation-only fix.)
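Aside from the diff itself, the "Wrap the streams for compression" context line points at a step worth illustrating: each fetched block stream is wrapped with Spark's configured compression codec before deserialization. The sketch below shows the general idea using Spark's public CompressionCodec API; it is not the code from this commit, and the wrapForCompression helper name is made up for illustration:

import java.io.InputStream

import org.apache.spark.SparkConf
import org.apache.spark.io.CompressionCodec

object CompressionWrapSketch {
  // Illustrative only: wrap a fetched shuffle stream with the configured
  // codec when shuffle compression is enabled (it is on by default).
  def wrapForCompression(conf: SparkConf, in: InputStream): InputStream = {
    if (conf.getBoolean("spark.shuffle.compress", true)) {
      CompressionCodec.createCodec(conf).compressedInputStream(in)
    } else {
      in
    }
  }
}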
ShuffleBlockFetcherIterator.scala

@@ -21,7 +21,7 @@ import java.io.InputStream
 import java.util.concurrent.LinkedBlockingQueue
 
 import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
+import scala.collection.mutable.{ArrayBuffer, HashSet}
 import scala.util.{Failure, Try}
 
 import org.apache.spark.network.buffer.ManagedBuffer
@@ -78,7 +78,7 @@ final class ShuffleBlockFetcherIterator(
   private[this] val localBlocks = new ArrayBuffer[BlockId]()
 
   /** Remote blocks to fetch, excluding zero-sized blocks. */
-  private[this] val remoteBlocks = new mutable.HashSet[BlockId]()
+  private[this] val remoteBlocks = new HashSet[BlockId]()
 
   /**
    * A queue to hold our results. This turns the asynchronous model provided by
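Two notes on the ShuffleBlockFetcherIterator changes. First, the doc comment cut off at the bottom of the last hunk describes the class's central trick: asynchronous fetch callbacks deposit results into the LinkedBlockingQueue imported above, while next() blocks on take(), so callers see a plain synchronous iterator. A minimal sketch of that pattern with made-up names (this shows the general technique, not Spark's actual code):

import java.util.concurrent.LinkedBlockingQueue

// Illustrative only: results arrive from fetcher callback threads via
// offerResult; the iterator converts them back into a pull model.
class AsyncToSyncIterator[A](expectedResults: Int) extends Iterator[A] {
  private val results = new LinkedBlockingQueue[A]()
  private var returned = 0

  // Called from asynchronous callback threads as results complete.
  def offerResult(a: A): Unit = results.put(a)

  override def hasNext: Boolean = returned < expectedResults

  override def next(): A = {
    returned += 1
    results.take() // blocks until the next asynchronous result lands
  }
}

Second, the import change is what enables the second hunk: with HashSet imported by name, the remoteBlocks declaration drops the mutable. qualifier, matching how ArrayBuffer was already used for localBlocks three lines above. The two spellings are equivalent, as this standalone snippet shows (String stands in for BlockId to keep it self-contained):

import scala.collection.mutable
import scala.collection.mutable.HashSet

object ImportStyleDemo {
  val qualified = new mutable.HashSet[String]() // style before this commit
  val direct = new HashSet[String]()            // style after this commit

  def main(args: Array[String]): Unit = {
    qualified += "shuffle_0_1_2"
    direct += "shuffle_0_1_2"
    println(qualified == direct) // true: identical behavior either way
  }
}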
