
Make reduce logging less verbose
mateiz committed Oct 7, 2012
1 parent 7eae2d1 commit dbf1f3d
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions core/src/main/scala/spark/SimpleShuffleFetcher.scala
@@ -37,7 +37,7 @@ class SimpleShuffleFetcher extends ShuffleFetcher with Logging {
             new FastBufferedInputStream(new URL(url).openStream()))
           try {
             totalRecords = inputStream.readObject().asInstanceOf[Int]
-            logInfo("Total records to read from " + url + ": " + totalRecords)
+            logDebug("Total records to read from " + url + ": " + totalRecords)
             while (true) {
               val pair = inputStream.readObject().asInstanceOf[(K, V)]
               if (recordsRead <= recordsProcessed) {
@@ -51,10 +51,11 @@ class SimpleShuffleFetcher extends ShuffleFetcher with Logging {
             }
           } catch {
             case e: EOFException => {
-              logInfo("Reduce %s got %s records from map %s before EOF".format(
+              logDebug("Reduce %s got %s records from map %s before EOF".format(
                 reduceId, recordsRead, i))
               if (recordsRead < totalRecords) {
-                logInfo("Retrying because we needed " + totalRecords + " in total!")
+                logInfo("Reduce %s only got %s/%s records from map %s before EOF; retrying".format(
+                  reduceId, recordsRead, totalRecords, i))
               }
             }
             case other: Exception => {
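In short, the commit demotes the routine per-fetch messages (total record count, records read before EOF) from info to debug, and keeps a single info-level message for the case that actually matters: an incomplete read that will trigger a retry. Below is a minimal, self-contained sketch of that logging pattern. The logDebug/logInfo helpers and the reportFetchEnd method are hypothetical stand-ins for illustration, not part of SimpleShuffleFetcher or Spark's Logging trait.

object FetchLogLevelsSketch {
  // Stand-in loggers; in SimpleShuffleFetcher the real ones come from Spark's Logging trait.
  def logDebug(msg: String): Unit = println("DEBUG " + msg)
  def logInfo(msg: String): Unit  = println("INFO  " + msg)

  // Mirrors the commit's choice of log levels after one fetch attempt ends in EOF.
  def reportFetchEnd(reduceId: Int, mapId: Int, recordsRead: Int, totalRecords: Int): Unit = {
    // Every fetch, including a complete one, ends in EOF, so this is routine detail: debug only.
    logDebug("Reduce %s got %s records from map %s before EOF".format(
      reduceId, recordsRead, mapId))
    if (recordsRead < totalRecords) {
      // The stream ended early and the fetch will be retried: worth surfacing at info.
      logInfo("Reduce %s only got %s/%s records from map %s before EOF; retrying".format(
        reduceId, recordsRead, totalRecords, mapId))
    }
  }

  def main(args: Array[String]): Unit = {
    reportFetchEnd(reduceId = 0, mapId = 3, recordsRead = 100, totalRecords = 100) // debug line only
    reportFetchEnd(reduceId = 0, mapId = 4, recordsRead = 42, totalRecords = 100)  // debug + info retry
  }
}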
