diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 8a1771c6a5f38..73092993ca531 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -788,9 +788,9 @@ abstract class RDD[T: ClassTag](
   }
 
   /**
-   * Take all of the elements in this RDD, and print the first num elements of the RDD.
+   * Process all of the elements in this RDD, and take the first num elements of the RDD.
    */
-  def printTop (num: Int): Array[T] = {
+  def processAllAndTake (num: Int): Array[T] = {
     val buf = new ArrayBuffer[T]
     val results = sc.runJob(this, (iter: Iterator[T]) => iter.toArray)
     for (partition <- results; data <- partition if buf.size <= num){
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
index 7c8f3a68014a6..86e0de504de11 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala
@@ -621,9 +621,9 @@ abstract class DStream[T: ClassTag] (
   /**
    * Print the first specified number elements of each RDD in this DStream.
    */
-  def printTop(num: Int) {
+  def processAllAndPrintFirst(num: Int) {
     def foreachFunc = (rdd: RDD[T], time: Time) => {
-      val first11 = rdd.printTop(num)
+      val first11 = rdd.processAllAndTake(num)
       println ("-------------------------------------------")
       println ("Time: " + time)
       println ("-------------------------------------------")