From 217ecc0e49285ffee7d9c951793cf2e5024efe9a Mon Sep 17 00:00:00 2001
From: Josh Rosen
Date: Tue, 20 Jan 2015 14:20:09 -0800
Subject: [PATCH] Revert "Add addSparkListener to JavaSparkContext"

This reverts commit 25988f377fc0cbeadbddd630f9564697b07ef877.
---
 .../org/apache/spark/api/java/JavaSparkContext.scala | 11 +----------
 1 file changed, 1 insertion(+), 10 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
index 2a55edd2db0d4..4cbc624ad9cc0 100644
--- a/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/api/java/JavaSparkContext.scala
@@ -34,7 +34,7 @@ import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat}
 
 import org.apache.spark._
 import org.apache.spark.AccumulatorParam._
-import org.apache.spark.annotation.{DeveloperApi, Experimental}
+import org.apache.spark.annotation.Experimental
 import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
 import org.apache.spark.broadcast.Broadcast
 import org.apache.spark.rdd.{EmptyRDD, HadoopRDD, NewHadoopRDD, RDD}
@@ -688,15 +688,6 @@ class JavaSparkContext(val sc: SparkContext)
     sc.clearFiles()
   }
 
-  /**
-   * :: DeveloperApi ::
-   * Register a listener to receive up-calls from events that happen during execution.
-   */
-  @DeveloperApi
-  def addSparkListener(listener: SparkListener): Unit = {
-    sc.addSparkListener(listener)
-  }
-
   /**
    * Returns the Hadoop configuration used for the Hadoop code (e.g. file systems) we reuse.
    */