
Integer -> java.lang.Integer

commit 11d0c2b66b35614d8730c96033eccc67d5eeb466 (1 parent: 737819a), committed by @ScrapCodes on Feb 21, 2014
core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala (2 changes)
@@ -74,7 +74,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
    * of the original partition.
    */
   def mapPartitionsWithIndex[R: ClassTag](
-      f: JFunction2[Integer, java.util.Iterator[T], java.util.Iterator[R]],
+      f: JFunction2[java.lang.Integer, java.util.Iterator[T], java.util.Iterator[R]],
       preservesPartitioning: Boolean = false): JavaRDD[R] =
     new JavaRDD(rdd.mapPartitionsWithIndex(((a,b) => f(a,asJavaIterator(b))),
       preservesPartitioning))
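
The wrapper in the last two lines passes the Scala `Int` partition index `a` to the Java-style function `f`, relying on Scala's implicit boxing to `java.lang.Integer`. A minimal, self-contained sketch of that adapter pattern, with a hypothetical `JFunction2` trait and `call` method standing in for Spark's Java function interface:

```scala
// Sketch only: a Java-style function over a boxed java.lang.Integer index,
// adapted to a Scala (Int, Iterator) call site. Names here are hypothetical.
import java.{util => ju}
import scala.collection.JavaConverters._

trait JFunction2[A, B, R] { def call(a: A, b: B): R }

object WrapSketch {
  def mapWithIndex[T, R](
      index: Int,
      data: Iterator[T],
      f: JFunction2[java.lang.Integer, ju.Iterator[T], ju.Iterator[R]]): Iterator[R] =
    // The Int `index` is auto-boxed to java.lang.Integer at this call,
    // mirroring `f(a, asJavaIterator(b))` in the diff above.
    f.call(index, data.asJava).asScala

  def main(args: Array[String]): Unit = {
    val tag = new JFunction2[java.lang.Integer, ju.Iterator[String], ju.Iterator[String]] {
      def call(i: java.lang.Integer, it: ju.Iterator[String]): ju.Iterator[String] =
        it.asScala.map(s => s"partition $i: $s").asJava
    }
    mapWithIndex(0, Iterator("a", "b"), tag).foreach(println)
  }
}
```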
core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala (4 changes)
@@ -106,10 +106,10 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround
   def startTime: java.lang.Long = sc.startTime

   /** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
-  def defaultParallelism: Integer = sc.defaultParallelism
+  def defaultParallelism: java.lang.Integer = sc.defaultParallelism

   /** Default min number of partitions for Hadoop RDDs when not given by user */
-  def defaultMinSplits: Integer = sc.defaultMinSplits
+  def defaultMinSplits: java.lang.Integer = sc.defaultMinSplits

   /** Distribute a local Scala collection to form an RDD. */
   def parallelize[T](list: java.util.List[T], numSlices: Int): JavaRDD[T] = {
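
A side note on the change itself: Scala auto-imports `java.lang._`, so the unqualified `Integer` already resolved to `java.lang.Integer`; writing the fully qualified name makes the boxed Java type explicit in these Java-facing signatures. A standalone sketch (hypothetical object, not Spark code) of how a Scala `Int` such as `sc.defaultParallelism` boxes at a return like the ones above:

```scala
// Sketch only: scala.Predef boxes an Int to java.lang.Integer (int2Integer)
// when the declared result type is the Java class, and unboxes it back
// (Integer2int) on assignment to an Int.
object BoxingSketch {
  private val parallelism: Int = 8

  // Mirrors `def defaultParallelism: java.lang.Integer = sc.defaultParallelism`
  def defaultParallelism: java.lang.Integer = parallelism  // auto-boxed here

  def main(args: Array[String]): Unit = {
    val boxed: java.lang.Integer = defaultParallelism
    val unboxed: Int = boxed   // implicit unboxing back to a Scala Int
    println(s"boxed=$boxed unboxed=$unboxed")  // prints: boxed=8 unboxed=8
  }
}
```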
