
address comments
cloud-fan committed Oct 26, 2017
1 parent cecea8c commit dfcdb23
Showing 1 changed file with 5 additions and 0 deletions.
sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala (5 additions, 0 deletions)
@@ -2657,6 +2657,9 @@ class Dataset[T] private[sql](
    */
   def toLocalIterator(): java.util.Iterator[T] = {
     withAction("toLocalIterator", queryExecution) { plan =>
+      // This projection writes output to an `InternalRow`, which means applying this projection is
+      // not thread-safe. Here we create the projection inside this method to make `Dataset`
+      // thread-safe.
       val objProj = GenerateSafeProjection.generate(deserializer :: Nil)
       plan.executeToIterator().map { row =>
         // The row returned by SafeProjection is `SpecificInternalRow`, which ignores the data type
@@ -3103,6 +3106,8 @@ class Dataset[T] private[sql](
    * Collect all elements from a spark plan.
    */
   private def collectFromPlan(plan: SparkPlan): Array[T] = {
+    // This projection writes output to an `InternalRow`, which means applying this projection is not
+    // thread-safe. Here we create the projection inside this method to make `Dataset` thread-safe.
     val objProj = GenerateSafeProjection.generate(deserializer :: Nil)
     plan.executeCollect().map { row =>
       // The row returned by SafeProjection is `SpecificInternalRow`, which ignores the data type
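The new comments document why the deserializing projection is created inside each method rather than cached on the `Dataset`: the generated projection reuses a single mutable `InternalRow` for its output, so two threads applying one shared projection could overwrite each other's results. Below is a minimal, self-contained sketch of that pattern in plain Scala. It is not Spark's actual `GenerateSafeProjection` machinery; `ReusedRowProjection` and `deserializeAll` are made-up names used only to illustrate the per-call-construction fix.

```scala
// Sketch only: illustrates the thread-safety hazard described in the commit's comments.
object ProjectionThreadSafetySketch {

  // Stands in for a generated safe projection: every result is written into the same
  // mutable array, which plays the role of the reused `InternalRow`.
  final class ReusedRowProjection {
    private val row = new Array[Any](1)
    def apply(value: Any): Array[Any] = {
      row(0) = value // concurrent callers sharing this instance would overwrite each other here
      row
    }
  }

  // Mirrors the fix in `toLocalIterator` / `collectFromPlan`: the projection is built inside
  // the method, so every invocation (and therefore every thread) works with its own buffer.
  def deserializeAll(values: Seq[Int]): Seq[Int] = {
    val proj = new ReusedRowProjection            // per-call instance, never shared across threads
    values.map(v => proj(v)(0).asInstanceOf[Int]) // read the slot before the next write reuses it
  }

  def main(args: Array[String]): Unit =
    println(deserializeAll(Seq(1, 2, 3))) // List(1, 2, 3)
}
```

Constructing the projection per invocation adds a little work to every `collect()` / `toLocalIterator()` call, but it keeps the public `Dataset` API safe to use from multiple threads, which appears to be the trade-off the commit opts for.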
