SPARK-1095 add explicit return types to APIs.

commit 737819a16627517c6d6f42a8c681e434a685f292 (1 parent: 3ddc8bb), committed by @ScrapCodes on Feb 17, 2014
core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala (2 changed lines)
@@ -126,7 +126,7 @@ class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T])
def subtract(other: JavaRDD[T], p: Partitioner): JavaRDD[T] =
wrapRDD(rdd.subtract(other, p))
- def generator = rdd.generator
+ def generator: String = rdd.generator
override def toString = rdd.toString
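
The pattern in each hunk is the same: an inferred result type is replaced with an explicit annotation, so that a later change to the method body cannot silently change the public signature. A minimal, self-contained sketch (toy class, not Spark's) of the hazard the annotations guard against:

```scala
// Toy illustration: with an inferred result type, the public signature
// tracks whatever the body happens to return; an explicit annotation
// pins it down and turns an accidental change into a compile error.
class Example {
  def inferred = Seq(1, 2, 3)            // inferred signature: Seq[Int]
  def explicit: Seq[Int] = Seq(1, 2, 3)  // declared signature: Seq[Int]

  // If both bodies later become List(1, 2, 3), `inferred` silently
  // narrows to List[Int], while `explicit` keeps Seq[Int].
}
```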
core/src/main/scala/org/apache/spark/api/java/JavaRDDLike.scala (2 changed lines)
@@ -74,7 +74,7 @@ trait JavaRDDLike[T, This <: JavaRDDLike[T, This]] extends Serializable {
* of the original partition.
*/
def mapPartitionsWithIndex[R: ClassTag](
- f: JFunction2[Int, java.util.Iterator[T], java.util.Iterator[R]],
+ f: JFunction2[Integer, java.util.Iterator[T], java.util.Iterator[R]],
preservesPartitioning: Boolean = false): JavaRDD[R] =
new JavaRDD(rdd.mapPartitionsWithIndex(((a,b) => f(a,asJavaIterator(b))),
preservesPartitioning))
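
Here the index parameter moves from scala.Int to java.lang.Integer. A plausible reason (the commit itself doesn't state one): when scala.Int appears as a type argument, the Java-visible generic signature erases it to Object, while java.lang.Integer survives intact for Java implementers. A self-contained sketch with a toy function interface:

```scala
// Toy stand-in for a Java-friendly function interface (not Spark's JFunction2).
trait JFn2[T1, T2, R] { def call(t1: T1, t2: T2): R }

object Signatures {
  // Java sees: String withScalaInt(JFn2<Object, String, String> f)
  def withScalaInt(f: JFn2[Int, String, String]): String = f.call(0, "x")

  // Java sees: String withBoxedInt(JFn2<Integer, String, String> f)
  def withBoxedInt(f: JFn2[Integer, String, String]): String = f.call(0, "x")
}
```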
core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala (17 changed lines)
@@ -17,6 +17,7 @@
package org.apache.spark.api.java
+import java.util
import java.util.{Map => JMap}
import scala.collection.JavaConversions
@@ -92,23 +93,23 @@ class JavaSparkContext(val sc: SparkContext) extends JavaSparkContextVarargsWorkaround
private[spark] val env = sc.env
- def isLocal = sc.isLocal
+ def isLocal: java.lang.Boolean = sc.isLocal
- def sparkUser = sc.sparkUser
+ def sparkUser: String = sc.sparkUser
- def master = sc.master
+ def master: String = sc.master
- def appName = sc.appName
+ def appName: String = sc.appName
- def jars = JavaConversions.seqAsJavaList(sc.jars)
+ def jars: util.List[String] = sc.jars
- def startTime = sc.startTime
+ def startTime: java.lang.Long = sc.startTime
/** Default level of parallelism to use when not given by user (e.g. parallelize and makeRDD). */
- def defaultParallelism = sc.defaultParallelism
+ def defaultParallelism: Integer = sc.defaultParallelism
/** Default min number of partitions for Hadoop RDDs when not given by user */
- def defaultMinSplits = sc.defaultMinSplits
+ def defaultMinSplits: Integer = sc.defaultMinSplits
/** Distribute a local Scala collection to form an RDD. */
def parallelize[T](list: java.util.List[T], numSlices: Int): JavaRDD[T] = {
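
The scalar accessors follow the same idea with boxing added: an inferred `def isLocal = sc.isLocal` would surface in Java as the primitive `boolean`, whereas the explicit `java.lang.Boolean`, `java.lang.Long`, and `Integer` annotations keep the boxed, Java-friendly types (relying on Scala's implicit boxing conversions in Predef). A minimal sketch of the difference, using a toy wrapper:

```scala
// Toy wrapper (not Spark's): the annotation decides whether Java callers
// get a primitive or a boxed value.
class Wrapper(flag: Boolean, count: Long) {
  def primitiveFlag = flag                  // Java sees: boolean primitiveFlag()
  def boxedFlag: java.lang.Boolean = flag   // Java sees: java.lang.Boolean boxedFlag()
  def boxedCount: java.lang.Long = count    // Java sees: java.lang.Long boxedCount()
}
```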
