Skip to content

Commit

Permalink
Remove old writeToStream
Browse files Browse the repository at this point in the history
  • Loading branch information
MLnick committed Feb 14, 2014
1 parent 2beeedb commit 97ef708
Showing 1 changed file with 3 additions and 18 deletions.
21 changes: 3 additions & 18 deletions core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala
Expand Up @@ -255,6 +255,9 @@ private[spark] object PythonRDD extends Logging {
case other =>
throw new SparkException("Unexpected element type " + first.getClass)
}
}
}

// PySpark / Hadoop InputFormat

/** Create an RDD from a path using [[org.apache.hadoop.mapred.SequenceFileInputFormat]] */
Expand Down Expand Up @@ -396,23 +399,6 @@ private[spark] object PythonRDD extends Logging {
rdd
}

/**
 * Writes a single element to the given output stream in PySpark's framing:
 * byte arrays are length-prefixed (4-byte big-endian int, then the raw bytes),
 * a pair of byte arrays is written as two such frames back to back, and
 * strings are written with [[java.io.DataOutputStream#writeUTF]]
 * (2-byte length prefix, modified UTF-8).
 *
 * @throws SparkException if the element is not a byte array, a pair of byte
 *                        arrays, or a string
 */
def writeToStream(elem: Any, dataOut: DataOutputStream) {
  // Shared framing for raw byte arrays: int length prefix, then the payload.
  def writeBytes(bytes: Array[Byte]): Unit = {
    dataOut.writeInt(bytes.length)
    dataOut.write(bytes)
  }
  elem match {
    case bytes: Array[Byte] =>
      writeBytes(bytes)
    case (key: Array[Byte], value: Array[Byte]) =>
      // Key/value pair: two consecutive length-prefixed frames.
      writeBytes(key)
      writeBytes(value)
    case str: String =>
      // NOTE: writeUTF uses modified UTF-8 with a 16-bit length prefix,
      // which differs from the int-prefixed framing used for byte arrays.
      dataOut.writeUTF(str)
    case other =>
      throw new SparkException("Unexpected element type " + other.getClass)
  }
}

def writeUTF(str: String, dataOut: DataOutputStream) {
val bytes = str.getBytes("UTF-8")
dataOut.writeInt(bytes.length)
Expand All @@ -429,7 +415,6 @@ private[spark] object PythonRDD extends Logging {
writeIteratorToStream(items, file)
file.close()
}

}

private class BytesToString extends org.apache.spark.api.java.function.Function[Array[Byte], String] {
Expand Down

0 comments on commit 97ef708

Please sign in to comment.