diff --git a/core/src/main/java/org/apache/spark/network/netty/FileClient.java b/core/src/main/java/org/apache/spark/network/netty/FileClient.java index d2d778b7567..0d31894d6ec 100644 --- a/core/src/main/java/org/apache/spark/network/netty/FileClient.java +++ b/core/src/main/java/org/apache/spark/network/netty/FileClient.java @@ -17,6 +17,8 @@ package org.apache.spark.network.netty; +import java.util.concurrent.TimeUnit; + import io.netty.bootstrap.Bootstrap; import io.netty.channel.Channel; import io.netty.channel.ChannelOption; @@ -27,8 +29,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.concurrent.TimeUnit; - class FileClient { private static final Logger LOG = LoggerFactory.getLogger(FileClient.class.getName()); diff --git a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java index 3ac045f9444..c0133e19c7f 100644 --- a/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java +++ b/core/src/main/java/org/apache/spark/network/netty/FileServerHandler.java @@ -23,11 +23,11 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.channel.DefaultFileRegion; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.spark.storage.BlockId; import org.apache.spark.storage.FileSegment; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; class FileServerHandler extends SimpleChannelInboundHandler { diff --git a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala index 32429f01aca..1fca5729c60 100644 --- a/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala +++ b/core/src/main/scala/org/apache/hadoop/mapreduce/SparkHadoopMapReduceUtil.scala @@ -17,7 +17,8 @@ package org.apache.hadoop.mapreduce -import java.lang.{Integer => JInteger, Boolean => JBoolean} +import java.lang.{Boolean => JBoolean, Integer => JInteger} + import org.apache.hadoop.conf.Configuration private[apache] diff --git a/core/src/main/scala/org/apache/spark/Accumulators.scala b/core/src/main/scala/org/apache/spark/Accumulators.scala index df01b2e9421..73dd471ab1d 100644 --- a/core/src/main/scala/org/apache/spark/Accumulators.scala +++ b/core/src/main/scala/org/apache/spark/Accumulators.scala @@ -19,8 +19,9 @@ package org.apache.spark import java.io.{ObjectInputStream, Serializable} -import scala.collection.mutable.Map import scala.collection.generic.Growable +import scala.collection.mutable.Map + import org.apache.spark.serializer.JavaSerializer /** diff --git a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala index d9ed572da6d..754b46a4c7d 100644 --- a/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala +++ b/core/src/main/scala/org/apache/spark/BlockStoreShuffleFetcher.scala @@ -20,12 +20,11 @@ package org.apache.spark import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.HashMap -import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics} +import org.apache.spark.executor.ShuffleReadMetrics import org.apache.spark.serializer.Serializer import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId} import org.apache.spark.util.CompletionIterator - private[spark] class BlockStoreShuffleFetcher extends ShuffleFetcher with Logging 
{ override def fetch[T]( diff --git a/core/src/main/scala/org/apache/spark/CacheManager.scala b/core/src/main/scala/org/apache/spark/CacheManager.scala index b38af2497d3..1daabecf232 100644 --- a/core/src/main/scala/org/apache/spark/CacheManager.scala +++ b/core/src/main/scala/org/apache/spark/CacheManager.scala @@ -18,9 +18,9 @@ package org.apache.spark import scala.collection.mutable.{ArrayBuffer, HashSet} -import org.apache.spark.storage.{BlockId, BlockManager, StorageLevel, RDDBlockId} -import org.apache.spark.rdd.RDD +import org.apache.spark.rdd.RDD +import org.apache.spark.storage.{BlockManager, RDDBlockId, StorageLevel} /** Spark class responsible for passing RDDs split contents to the BlockManager and making sure a node doesn't load two copies of an RDD at once. diff --git a/core/src/main/scala/org/apache/spark/FutureAction.scala b/core/src/main/scala/org/apache/spark/FutureAction.scala index d7d10285dad..f2decd14ef6 100644 --- a/core/src/main/scala/org/apache/spark/FutureAction.scala +++ b/core/src/main/scala/org/apache/spark/FutureAction.scala @@ -21,10 +21,8 @@ import scala.concurrent._ import scala.concurrent.duration.Duration import scala.util.Try -import org.apache.spark.scheduler.{JobSucceeded, JobWaiter} -import org.apache.spark.scheduler.JobFailed import org.apache.spark.rdd.RDD - +import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter} /** * A future for the result of an action to support cancellation. This is an extension of the diff --git a/core/src/main/scala/org/apache/spark/HttpFileServer.scala b/core/src/main/scala/org/apache/spark/HttpFileServer.scala index a885898ad48..d3264a4bb3c 100644 --- a/core/src/main/scala/org/apache/spark/HttpFileServer.scala +++ b/core/src/main/scala/org/apache/spark/HttpFileServer.scala @@ -17,8 +17,10 @@ package org.apache.spark -import java.io.{File} +import java.io.File + import com.google.common.io.Files + import org.apache.spark.util.Utils private[spark] class HttpFileServer extends Logging { diff --git a/core/src/main/scala/org/apache/spark/HttpServer.scala b/core/src/main/scala/org/apache/spark/HttpServer.scala index 69a738dc444..759e68ee0cc 100644 --- a/core/src/main/scala/org/apache/spark/HttpServer.scala +++ b/core/src/main/scala/org/apache/spark/HttpServer.scala @@ -18,7 +18,6 @@ package org.apache.spark import java.io.File -import java.net.InetAddress import org.eclipse.jetty.server.Server import org.eclipse.jetty.server.bio.SocketConnector @@ -26,6 +25,7 @@ import org.eclipse.jetty.server.handler.DefaultHandler import org.eclipse.jetty.server.handler.HandlerList import org.eclipse.jetty.server.handler.ResourceHandler import org.eclipse.jetty.util.thread.QueuedThreadPool + import org.apache.spark.util.Utils /** diff --git a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala index 8d6db0fca23..59689731329 100644 --- a/core/src/main/scala/org/apache/spark/MapOutputTracker.scala +++ b/core/src/main/scala/org/apache/spark/MapOutputTracker.scala @@ -22,7 +22,6 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream} import scala.collection.mutable.HashSet import scala.concurrent.Await -import scala.concurrent.duration._ import akka.actor._ import akka.pattern.ask diff --git a/core/src/main/scala/org/apache/spark/SerializableWritable.scala b/core/src/main/scala/org/apache/spark/SerializableWritable.scala index fdd4c24e234..dff665cae6c 100644 --- a/core/src/main/scala/org/apache/spark/SerializableWritable.scala +++ 
b/core/src/main/scala/org/apache/spark/SerializableWritable.scala @@ -19,9 +19,9 @@ package org.apache.spark import java.io._ +import org.apache.hadoop.conf.Configuration import org.apache.hadoop.io.ObjectWritable import org.apache.hadoop.io.Writable -import org.apache.hadoop.conf.Configuration class SerializableWritable[T <: Writable](@transient var t: T) extends Serializable { def value = t diff --git a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala index a85aa50a9b9..e8f756c4088 100644 --- a/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala +++ b/core/src/main/scala/org/apache/spark/ShuffleFetcher.scala @@ -17,10 +17,8 @@ package org.apache.spark -import org.apache.spark.executor.TaskMetrics import org.apache.spark.serializer.Serializer - private[spark] abstract class ShuffleFetcher { /** diff --git a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/main/scala/org/apache/spark/SparkConf.scala index 45d19bcbfa6..b947feb891e 100644 --- a/core/src/main/scala/org/apache/spark/SparkConf.scala +++ b/core/src/main/scala/org/apache/spark/SparkConf.scala @@ -20,8 +20,6 @@ package org.apache.spark import scala.collection.JavaConverters._ import scala.collection.mutable.HashMap -import java.io.{ObjectInputStream, ObjectOutputStream, IOException} - /** * Configuration for a Spark application. Used to set various Spark parameters as key-value pairs. * diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala index 5a6d06b66e8..a24f07e9a6e 100644 --- a/core/src/main/scala/org/apache/spark/SparkContext.scala +++ b/core/src/main/scala/org/apache/spark/SparkContext.scala @@ -19,21 +19,18 @@ package org.apache.spark import java.io._ import java.net.URI -import java.util.{UUID, Properties} +import java.util.{Properties, UUID} import java.util.concurrent.atomic.AtomicInteger import scala.collection.{Map, Set} import scala.collection.generic.Growable - import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.reflect.{ClassTag, classTag} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path -import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, - FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable} -import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, - TextInputFormat} +import org.apache.hadoop.io.{ArrayWritable, BooleanWritable, BytesWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable, NullWritable, Text, Writable} +import org.apache.hadoop.mapred.{FileInputFormat, InputFormat, JobConf, SequenceFileInputFormat, TextInputFormat} import org.apache.hadoop.mapreduce.{InputFormat => NewInputFormat, Job => NewHadoopJob} import org.apache.hadoop.mapreduce.lib.input.{FileInputFormat => NewFileInputFormat} import org.apache.mesos.MesosNativeLibrary @@ -42,14 +39,12 @@ import org.apache.spark.deploy.{LocalSparkCluster, SparkHadoopUtil} import org.apache.spark.partial.{ApproximateEvaluator, PartialResult} import org.apache.spark.rdd._ import org.apache.spark.scheduler._ -import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, - SparkDeploySchedulerBackend, SimrSchedulerBackend} +import org.apache.spark.scheduler.cluster.{CoarseGrainedSchedulerBackend, SparkDeploySchedulerBackend, SimrSchedulerBackend} import org.apache.spark.scheduler.cluster.mesos.{CoarseMesosSchedulerBackend, 
MesosSchedulerBackend} import org.apache.spark.scheduler.local.LocalBackend import org.apache.spark.storage.{BlockManagerSource, RDDInfo, StorageStatus, StorageUtils} import org.apache.spark.ui.SparkUI -import org.apache.spark.util.{Utils, TimeStampedHashMap, MetadataCleaner, MetadataCleanerType, - ClosureCleaner} +import org.apache.spark.util.{ClosureCleaner, MetadataCleaner, MetadataCleanerType, TimeStampedHashMap, Utils} /** * Main entry point for Spark functionality. A SparkContext represents the connection to a Spark diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala index 6ae020f6a21..7ac65828f67 100644 --- a/core/src/main/scala/org/apache/spark/SparkEnv.scala +++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala @@ -21,16 +21,15 @@ import scala.collection.mutable import scala.concurrent.Await import akka.actor._ +import com.google.common.collect.MapMaker +import org.apache.spark.api.python.PythonWorkerFactory import org.apache.spark.broadcast.BroadcastManager import org.apache.spark.metrics.MetricsSystem -import org.apache.spark.storage.{BlockManagerMasterActor, BlockManager, BlockManagerMaster} +import org.apache.spark.storage.{BlockManager, BlockManagerMaster, BlockManagerMasterActor} import org.apache.spark.network.ConnectionManager import org.apache.spark.serializer.{Serializer, SerializerManager} -import org.apache.spark.util.{Utils, AkkaUtils} -import org.apache.spark.api.python.PythonWorkerFactory - -import com.google.common.collect.MapMaker +import org.apache.spark.util.{AkkaUtils, Utils} /** * Holds all the runtime environment objects for a running Spark instance (either master or worker), diff --git a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala index 4e63117a513..d404459a8eb 100644 --- a/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala +++ b/core/src/main/scala/org/apache/spark/SparkHadoopWriter.scala @@ -18,8 +18,8 @@ package org.apache.hadoop.mapred import java.io.IOException -import java.text.SimpleDateFormat import java.text.NumberFormat +import java.text.SimpleDateFormat import java.util.Date import org.apache.hadoop.fs.FileSystem diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala index 33737e1960a..071044463d9 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaDoubleRDD.scala @@ -17,27 +17,25 @@ package org.apache.spark.api.java +import java.lang.{Double => JDouble} + import scala.reflect.ClassTag -import org.apache.spark.rdd.RDD +import org.apache.spark.Partitioner import org.apache.spark.SparkContext.doubleRDDToDoubleRDDFunctions import org.apache.spark.api.java.function.{Function => JFunction} -import org.apache.spark.util.StatCounter import org.apache.spark.partial.{BoundedDouble, PartialResult} +import org.apache.spark.rdd.RDD import org.apache.spark.storage.StorageLevel +import org.apache.spark.util.StatCounter -import java.lang.Double -import org.apache.spark.Partitioner - -import scala.collection.JavaConverters._ - -class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, JavaDoubleRDD] { +class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[JDouble, JavaDoubleRDD] { - override val classTag: ClassTag[Double] = implicitly[ClassTag[Double]] + override val classTag: ClassTag[JDouble] = 
implicitly[ClassTag[JDouble]] - override val rdd: RDD[Double] = srdd.map(x => Double.valueOf(x)) + override val rdd: RDD[JDouble] = srdd.map(x => JDouble.valueOf(x)) - override def wrapRDD(rdd: RDD[Double]): JavaDoubleRDD = + override def wrapRDD(rdd: RDD[JDouble]): JavaDoubleRDD = new JavaDoubleRDD(rdd.map(_.doubleValue)) // Common RDD functions @@ -67,7 +65,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav def unpersist(blocking: Boolean): JavaDoubleRDD = fromRDD(srdd.unpersist(blocking)) // first() has to be overriden here in order for its return type to be Double instead of Object. - override def first(): Double = srdd.first() + override def first(): JDouble = srdd.first() // Transformations (return a new RDD) @@ -84,7 +82,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav /** * Return a new RDD containing only the elements that satisfy a predicate. */ - def filter(f: JFunction[Double, java.lang.Boolean]): JavaDoubleRDD = + def filter(f: JFunction[JDouble, java.lang.Boolean]): JavaDoubleRDD = fromRDD(srdd.filter(x => f(x).booleanValue())) /** @@ -133,7 +131,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav /** * Return a sampled subset of this RDD. */ - def sample(withReplacement: Boolean, fraction: Double, seed: Int): JavaDoubleRDD = + def sample(withReplacement: Boolean, fraction: JDouble, seed: Int): JavaDoubleRDD = fromRDD(srdd.sample(withReplacement, fraction, seed)) /** @@ -145,7 +143,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav // Double RDD functions /** Add up the elements in this RDD. */ - def sum(): Double = srdd.sum() + def sum(): JDouble = srdd.sum() /** * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and @@ -154,35 +152,35 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav def stats(): StatCounter = srdd.stats() /** Compute the mean of this RDD's elements. */ - def mean(): Double = srdd.mean() + def mean(): JDouble = srdd.mean() /** Compute the variance of this RDD's elements. */ - def variance(): Double = srdd.variance() + def variance(): JDouble = srdd.variance() /** Compute the standard deviation of this RDD's elements. */ - def stdev(): Double = srdd.stdev() + def stdev(): JDouble = srdd.stdev() /** * Compute the sample standard deviation of this RDD's elements (which corrects for bias in * estimating the standard deviation by dividing by N-1 instead of N). */ - def sampleStdev(): Double = srdd.sampleStdev() + def sampleStdev(): JDouble = srdd.sampleStdev() /** * Compute the sample variance of this RDD's elements (which corrects for bias in * estimating the standard variance by dividing by N-1 instead of N). */ - def sampleVariance(): Double = srdd.sampleVariance() + def sampleVariance(): JDouble = srdd.sampleVariance() /** Return the approximate mean of the elements in this RDD. */ - def meanApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] = + def meanApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] = srdd.meanApprox(timeout, confidence) /** (Experimental) Approximate operation to return the mean within a timeout. */ def meanApprox(timeout: Long): PartialResult[BoundedDouble] = srdd.meanApprox(timeout) /** (Experimental) Approximate operation to return the sum within a timeout. 
*/ - def sumApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] = + def sumApprox(timeout: Long, confidence: JDouble): PartialResult[BoundedDouble] = srdd.sumApprox(timeout, confidence) /** (Experimental) Approximate operation to return the sum within a timeout. */ @@ -222,7 +220,7 @@ class JavaDoubleRDD(val srdd: RDD[scala.Double]) extends JavaRDDLike[Double, Jav srdd.histogram(buckets, false) } - def histogram(buckets: Array[Double], evenBuckets: Boolean): Array[Long] = { + def histogram(buckets: Array[JDouble], evenBuckets: Boolean): Array[Long] = { srdd.histogram(buckets.map(_.toDouble), evenBuckets) } diff --git a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala index 7b730579535..0055c98844d 100644 --- a/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/java/JavaRDD.scala @@ -20,8 +20,8 @@ package org.apache.spark.api.java import scala.reflect.ClassTag import org.apache.spark._ -import org.apache.spark.rdd.RDD import org.apache.spark.api.java.function.{Function => JFunction} +import org.apache.spark.rdd.RDD import org.apache.spark.storage.StorageLevel class JavaRDD[T](val rdd: RDD[T])(implicit val classTag: ClassTag[T]) diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java index 3e85052cd03..30e6a524742 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFlatMapFunction.java @@ -17,7 +17,6 @@ package org.apache.spark.api.java.function; - import java.io.Serializable; /** diff --git a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java index 5e9b8c48b89..490da255bc9 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/DoubleFunction.java @@ -17,7 +17,6 @@ package org.apache.spark.api.java.function; - import java.io.Serializable; /** diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function.java b/core/src/main/scala/org/apache/spark/api/java/function/Function.java index 537439ef538..e0fcd460c84 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/Function.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/Function.java @@ -17,11 +17,10 @@ package org.apache.spark.api.java.function; -import scala.reflect.ClassTag; -import scala.reflect.ClassTag$; - import java.io.Serializable; +import scala.reflect.ClassTag; +import scala.reflect.ClassTag$; /** * Base class for functions whose return types do not create special RDDs. PairFunction and diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java index a2d1214fb46..16d7379462e 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/Function2.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/Function2.java @@ -17,11 +17,11 @@ package org.apache.spark.api.java.function; +import java.io.Serializable; + import scala.reflect.ClassTag; import scala.reflect.ClassTag$; -import java.io.Serializable; - /** * A two-argument function that takes arguments of type T1 and T2 and returns an R. 
*/ diff --git a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java index fb1deceab5b..096eb71f95c 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/Function3.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/Function3.java @@ -17,11 +17,10 @@ package org.apache.spark.api.java.function; +import java.io.Serializable; + import scala.reflect.ClassTag; import scala.reflect.ClassTag$; -import scala.runtime.AbstractFunction2; - -import java.io.Serializable; /** * A three-argument function that takes arguments of type T1, T2 and T3 and returns an R. diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java index ca485b3cc2d..c72b98c28a0 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFlatMapFunction.java @@ -17,12 +17,12 @@ package org.apache.spark.api.java.function; +import java.io.Serializable; + import scala.Tuple2; import scala.reflect.ClassTag; import scala.reflect.ClassTag$; -import java.io.Serializable; - /** * A function that returns zero or more key-value pair records from each input record. The * key-value pairs are represented as scala.Tuple2 objects. diff --git a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java index cbe2306026a..84b9136d98a 100644 --- a/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java +++ b/core/src/main/scala/org/apache/spark/api/java/function/PairFunction.java @@ -17,12 +17,12 @@ package org.apache.spark.api.java.function; +import java.io.Serializable; + import scala.Tuple2; import scala.reflect.ClassTag; import scala.reflect.ClassTag$; -import java.io.Serializable; - /** * A function that returns key-value pairs (Tuple2), and can be used to construct PairRDDs. 
*/ diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala index 35eca62ecd5..95bec5030bf 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonPartitioner.scala @@ -17,8 +17,6 @@ package org.apache.spark.api.python -import java.util.Arrays - import org.apache.spark.Partitioner import org.apache.spark.util.Utils diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala index 33667a998ed..e4d0285710e 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala @@ -24,9 +24,9 @@ import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, Collectio import scala.collection.JavaConversions._ import scala.reflect.ClassTag +import org.apache.spark._ import org.apache.spark.api.java.{JavaSparkContext, JavaPairRDD, JavaRDD} import org.apache.spark.broadcast.Broadcast -import org.apache.spark._ import org.apache.spark.rdd.RDD import org.apache.spark.util.Utils diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala index f291266fcf1..a5f0f3d5e7e 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala @@ -17,8 +17,8 @@ package org.apache.spark.api.python -import java.io.{OutputStreamWriter, File, DataInputStream, IOException} -import java.net.{ServerSocket, Socket, SocketException, InetAddress} +import java.io.{DataInputStream, File, IOException, OutputStreamWriter} +import java.net.{InetAddress, ServerSocket, Socket, SocketException} import scala.collection.JavaConversions._ diff --git a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala index 39ee0dbb928..20207c26132 100644 --- a/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala +++ b/core/src/main/scala/org/apache/spark/broadcast/HttpBroadcast.scala @@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit import it.unimi.dsi.fastutil.io.FastBufferedInputStream import it.unimi.dsi.fastutil.io.FastBufferedOutputStream -import org.apache.spark.{SparkConf, HttpServer, Logging, SparkEnv} +import org.apache.spark.{HttpServer, Logging, SparkConf, SparkEnv} import org.apache.spark.io.CompressionCodec import org.apache.spark.storage.{BroadcastBlockId, StorageLevel} import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashSet, Utils} diff --git a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala index ec997255d59..22d783c8590 100644 --- a/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala +++ b/core/src/main/scala/org/apache/spark/broadcast/TorrentBroadcast.scala @@ -26,7 +26,6 @@ import org.apache.spark._ import org.apache.spark.storage.{BroadcastBlockId, BroadcastHelperBlockId, StorageLevel} import org.apache.spark.util.Utils - private[spark] class TorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean, id: Long) extends Broadcast[T](id) with Logging with Serializable { diff --git 
a/core/src/main/scala/org/apache/spark/deploy/Client.scala b/core/src/main/scala/org/apache/spark/deploy/Client.scala index 9987e2300ce..eb5676b51d8 100644 --- a/core/src/main/scala/org/apache/spark/deploy/Client.scala +++ b/core/src/main/scala/org/apache/spark/deploy/Client.scala @@ -23,13 +23,13 @@ import scala.concurrent._ import akka.actor._ import akka.pattern.ask +import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent} import org.apache.log4j.{Level, Logger} import org.apache.spark.{Logging, SparkConf} import org.apache.spark.deploy.DeployMessages._ import org.apache.spark.deploy.master.{DriverState, Master} import org.apache.spark.util.{AkkaUtils, Utils} -import akka.remote.{AssociationErrorEvent, DisassociatedEvent, RemotingLifecycleEvent} /** * Proxy that relays messages to the driver. diff --git a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala index 7de7c4864ee..190b331cfe7 100644 --- a/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala +++ b/core/src/main/scala/org/apache/spark/deploy/FaultToleranceTest.scala @@ -21,10 +21,10 @@ import java.io._ import java.net.URL import java.util.concurrent.TimeoutException +import scala.collection.mutable.ListBuffer import scala.concurrent.{Await, future, promise} -import scala.concurrent.duration._ import scala.concurrent.ExecutionContext.Implicits.global -import scala.collection.mutable.ListBuffer +import scala.concurrent.duration._ import scala.sys.process._ import net.liftweb.json.JsonParser diff --git a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala index 33e69371b83..318beb5db52 100644 --- a/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala +++ b/core/src/main/scala/org/apache/spark/deploy/JsonProtocol.scala @@ -20,10 +20,9 @@ package org.apache.spark.deploy import net.liftweb.json.JsonDSL._ import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse} -import org.apache.spark.deploy.master.{ApplicationInfo, WorkerInfo, DriverInfo} +import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo} import org.apache.spark.deploy.worker.ExecutorRunner - private[spark] object JsonProtocol { def writeWorkerInfo(obj: WorkerInfo) = { ("id" -> obj.id) ~ diff --git a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala index 488843a32c1..a73b459c3ce 100644 --- a/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala +++ b/core/src/main/scala/org/apache/spark/deploy/LocalSparkCluster.scala @@ -17,14 +17,14 @@ package org.apache.spark.deploy +import scala.collection.mutable.ArrayBuffer + import akka.actor.ActorSystem +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.deploy.worker.Worker import org.apache.spark.deploy.master.Master import org.apache.spark.util.Utils -import org.apache.spark.{SparkConf, Logging} - -import scala.collection.mutable.ArrayBuffer /** * Testing class that creates a Spark standalone process in-cluster (that is, running the diff --git a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala index 80179320325..1550c3eb428 100644 --- a/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala +++ 
b/core/src/main/scala/org/apache/spark/deploy/client/TestClient.scala @@ -17,9 +17,9 @@ package org.apache.spark.deploy.client -import org.apache.spark.util.{Utils, AkkaUtils} -import org.apache.spark.{SparkConf, SparkContext, Logging} -import org.apache.spark.deploy.{Command, ApplicationDescription} +import org.apache.spark.{Logging, SparkConf} +import org.apache.spark.deploy.{ApplicationDescription, Command} +import org.apache.spark.util.{AkkaUtils, Utils} private[spark] object TestClient { diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala index 3e263791668..e8867bc1691 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ApplicationInfo.scala @@ -17,11 +17,14 @@ package org.apache.spark.deploy.master -import org.apache.spark.deploy.ApplicationDescription import java.util.Date -import akka.actor.ActorRef + import scala.collection.mutable +import akka.actor.ActorRef + +import org.apache.spark.deploy.ApplicationDescription + private[spark] class ApplicationInfo( val startTime: Long, val id: String, diff --git a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala index 74bb9ebf1db..aa85aa060d9 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/FileSystemPersistenceEngine.scala @@ -20,6 +20,7 @@ package org.apache.spark.deploy.master import java.io._ import akka.serialization.Serialization + import org.apache.spark.Logging /** diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala index e44f90c1412..51794ce40cb 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala @@ -30,15 +30,14 @@ import akka.pattern.ask import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent} import akka.serialization.SerializationExtension - -import org.apache.spark.{SparkConf, Logging, SparkException} +import org.apache.spark.{Logging, SparkConf, SparkException} import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState} import org.apache.spark.deploy.DeployMessages._ +import org.apache.spark.deploy.master.DriverState.DriverState import org.apache.spark.deploy.master.MasterMessages._ import org.apache.spark.deploy.master.ui.MasterWebUI import org.apache.spark.metrics.MetricsSystem import org.apache.spark.util.{AkkaUtils, Utils} -import org.apache.spark.deploy.master.DriverState.DriverState private[spark] class Master(host: String, port: Int, webUiPort: Int) extends Actor with Logging { import context.dispatcher // to use Akka's scheduler.schedule() diff --git a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala index e7f3224091c..a87781fb938 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala @@ -17,8 +17,8 @@ package org.apache.spark.deploy.master -import org.apache.spark.util.{Utils, IntParam} import org.apache.spark.SparkConf +import org.apache.spark.util.{IntParam, Utils} /** * 
Command-line parser for the master. diff --git a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala index 999090ad746..57758055b19 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/SparkZooKeeperSession.scala @@ -23,7 +23,7 @@ import org.apache.zookeeper._ import org.apache.zookeeper.Watcher.Event.KeeperState import org.apache.zookeeper.data.Stat -import org.apache.spark.{SparkConf, Logging} +import org.apache.spark.{Logging, SparkConf} /** * Provides a Scala-side interface to the standard ZooKeeper client, with the addition of retry diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala index 77c23fb9fbe..47b8f67f8a4 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperLeaderElectionAgent.scala @@ -21,7 +21,7 @@ import akka.actor.ActorRef import org.apache.zookeeper._ import org.apache.zookeeper.Watcher.Event.EventType -import org.apache.spark.{SparkConf, Logging} +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.deploy.master.MasterMessages._ private[spark] class ZooKeeperLeaderElectionAgent(val masterActor: ActorRef, diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala index 10816a1f43f..48b2fc06a9d 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ZooKeeperPersistenceEngine.scala @@ -17,10 +17,10 @@ package org.apache.spark.deploy.master -import org.apache.spark.{SparkConf, Logging} +import akka.serialization.Serialization import org.apache.zookeeper._ -import akka.serialization.Serialization +import org.apache.spark.{Logging, SparkConf} class ZooKeeperPersistenceEngine(serialization: Serialization, conf: SparkConf) extends PersistenceEngine diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala index f29a6ad2e7b..5cc4adbe448 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/ApplicationPage.scala @@ -17,11 +17,12 @@ package org.apache.spark.deploy.master.ui +import javax.servlet.http.HttpServletRequest + import scala.concurrent.Await import scala.xml.Node import akka.pattern.ask -import javax.servlet.http.HttpServletRequest import net.liftweb.json.JsonAST.JValue import org.apache.spark.deploy.JsonProtocol diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala index 04f9a22a25a..01c8f9065e5 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/IndexPage.scala @@ -17,12 +17,12 @@ package org.apache.spark.deploy.master.ui +import javax.servlet.http.HttpServletRequest + import scala.concurrent.Await -import scala.concurrent.duration._ import scala.xml.Node import akka.pattern.ask -import 
javax.servlet.http.HttpServletRequest import net.liftweb.json.JsonAST.JValue import org.apache.spark.deploy.{DeployWebUI, JsonProtocol} diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala index 05c4df891ed..5ab13e7aa6b 100644 --- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala +++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala @@ -18,6 +18,7 @@ package org.apache.spark.deploy.master.ui import javax.servlet.http.HttpServletRequest + import org.eclipse.jetty.server.{Handler, Server} import org.apache.spark.Logging diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala index 2ceccc703d2..0c761dfc93a 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala @@ -17,7 +17,7 @@ package org.apache.spark.deploy.worker -import java.io.{File, FileOutputStream, IOException, InputStream} +import java.io.{File, FileOutputStream, InputStream, IOException} import java.lang.System._ import org.apache.spark.Logging diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala index 18885d7ca6d..2edd9210668 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala @@ -20,12 +20,11 @@ package org.apache.spark.deploy.worker import java.io._ import akka.actor.ActorRef - import com.google.common.base.Charsets import com.google.common.io.Files import org.apache.spark.Logging -import org.apache.spark.deploy.{ExecutorState, ApplicationDescription, Command} +import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState} import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged /** diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala index f4ee0e23438..7b0b7861b76 100755 --- a/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala @@ -31,7 +31,6 @@ import org.apache.spark.{Logging, SparkConf, SparkException} import org.apache.spark.deploy.{ExecutorDescription, ExecutorState} import org.apache.spark.deploy.DeployMessages._ import org.apache.spark.deploy.master.{DriverState, Master} -import org.apache.spark.deploy.master.DriverState.DriverState import org.apache.spark.deploy.worker.ui.WorkerWebUI import org.apache.spark.metrics.MetricsSystem import org.apache.spark.util.{AkkaUtils, Utils} diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala index 3ed528e6b37..d35d5be73ff 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/WorkerArguments.scala @@ -17,9 +17,10 @@ package org.apache.spark.deploy.worker -import org.apache.spark.util.{Utils, IntParam, MemoryParam} import java.lang.management.ManagementFactory +import org.apache.spark.util.{IntParam, MemoryParam, Utils} + /** * Command-line parser for the master. 
*/ diff --git a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala index 86688e44242..bdf126f93ab 100644 --- a/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala +++ b/core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala @@ -18,11 +18,11 @@ package org.apache.spark.deploy.worker.ui import java.io.File - import javax.servlet.http.HttpServletRequest + import org.eclipse.jetty.server.{Handler, Server} -import org.apache.spark.{Logging, SparkConf} +import org.apache.spark.Logging import org.apache.spark.deploy.worker.Worker import org.apache.spark.ui.{JettyUtils, UIUtils} import org.apache.spark.ui.JettyUtils._ diff --git a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala index 45b43b403dd..0aae569b172 100644 --- a/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala +++ b/core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala @@ -22,7 +22,7 @@ import java.nio.ByteBuffer import akka.actor._ import akka.remote._ -import org.apache.spark.{SparkConf, SparkContext, Logging} +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.TaskState.TaskState import org.apache.spark.deploy.worker.WorkerWatcher import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._ diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala index ad7dd34c769..3d34960653f 100644 --- a/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala +++ b/core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala @@ -18,6 +18,7 @@ package org.apache.spark.executor import java.nio.ByteBuffer + import org.apache.spark.TaskState.TaskState /** diff --git a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala index c2e973e1738..127f5e90f3e 100644 --- a/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala +++ b/core/src/main/scala/org/apache/spark/executor/ExecutorSource.scala @@ -17,12 +17,11 @@ package org.apache.spark.executor -import com.codahale.metrics.{Gauge, MetricRegistry} +import scala.collection.JavaConversions._ +import com.codahale.metrics.{Gauge, MetricRegistry} import org.apache.hadoop.fs.FileSystem -import scala.collection.JavaConversions._ - import org.apache.spark.metrics.source.Source class ExecutorSource(val executor: Executor, executorId: String) extends Source { diff --git a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala index b56d8c99124..6fc702fdb15 100644 --- a/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala +++ b/core/src/main/scala/org/apache/spark/executor/MesosExecutorBackend.scala @@ -20,8 +20,7 @@ package org.apache.spark.executor import java.nio.ByteBuffer import com.google.protobuf.ByteString - -import org.apache.mesos.{Executor => MesosExecutor, MesosExecutorDriver, MesosNativeLibrary, ExecutorDriver} +import org.apache.mesos.{Executor => MesosExecutor, ExecutorDriver, MesosExecutorDriver, MesosNativeLibrary} import org.apache.mesos.Protos.{TaskStatus => MesosTaskStatus, _} import org.apache.spark.Logging @@ -29,7 +28,6 @@ import 
org.apache.spark.TaskState import org.apache.spark.TaskState.TaskState import org.apache.spark.util.Utils - private[spark] class MesosExecutorBackend extends MesosExecutor with ExecutorBackend diff --git a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala index 59801773205..848b5c439bb 100644 --- a/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala +++ b/core/src/main/scala/org/apache/spark/io/CompressionCodec.scala @@ -20,10 +20,9 @@ package org.apache.spark.io import java.io.{InputStream, OutputStream} import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream} - import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream} -import org.apache.spark.{SparkEnv, SparkConf} +import org.apache.spark.SparkConf /** * CompressionCodec allows the customization of choosing different compression implementations diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala index e54ac0b3320..6883a544945 100644 --- a/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala +++ b/core/src/main/scala/org/apache/spark/metrics/MetricsConfig.scala @@ -17,8 +17,8 @@ package org.apache.spark.metrics +import java.io.{FileInputStream, InputStream} import java.util.Properties -import java.io.{File, FileInputStream, InputStream, IOException} import scala.collection.mutable import scala.util.matching.Regex diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala index de233e416a9..966c0921242 100644 --- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala +++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala @@ -17,14 +17,14 @@ package org.apache.spark.metrics -import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry} - import java.util.Properties import java.util.concurrent.TimeUnit import scala.collection.mutable -import org.apache.spark.{SparkConf, Logging} +import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry} + +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.metrics.sink.{MetricsServlet, Sink} import org.apache.spark.metrics.source.Source diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala index bce257d6e6f..98fa1dbd7c6 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/ConsoleSink.scala @@ -17,11 +17,11 @@ package org.apache.spark.metrics.sink -import com.codahale.metrics.{ConsoleReporter, MetricRegistry} - import java.util.Properties import java.util.concurrent.TimeUnit +import com.codahale.metrics.{ConsoleReporter, MetricRegistry} + import org.apache.spark.metrics.MetricsSystem class ConsoleSink(val property: Properties, val registry: MetricRegistry) extends Sink { diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala index 3d1a06a395a..40f64768e68 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/CsvSink.scala @@ -17,12 +17,12 @@ package org.apache.spark.metrics.sink -import com.codahale.metrics.{CsvReporter, MetricRegistry} - import java.io.File import java.util.{Locale, Properties} import 
java.util.concurrent.TimeUnit +import com.codahale.metrics.{CsvReporter, MetricRegistry} + import org.apache.spark.metrics.MetricsSystem class CsvSink(val property: Properties, val registry: MetricRegistry) extends Sink { diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala index b924907070e..410ca0704b5 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/GangliaSink.scala @@ -20,8 +20,8 @@ package org.apache.spark.metrics.sink import java.util.Properties import java.util.concurrent.TimeUnit -import com.codahale.metrics.ganglia.GangliaReporter import com.codahale.metrics.MetricRegistry +import com.codahale.metrics.ganglia.GangliaReporter import info.ganglia.gmetric4j.gmetric.GMetric import org.apache.spark.metrics.MetricsSystem diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala index cdcfec8ca78..e09be001421 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/GraphiteSink.scala @@ -17,12 +17,12 @@ package org.apache.spark.metrics.sink +import java.net.InetSocketAddress import java.util.Properties import java.util.concurrent.TimeUnit -import java.net.InetSocketAddress import com.codahale.metrics.MetricRegistry -import com.codahale.metrics.graphite.{GraphiteReporter, Graphite} +import com.codahale.metrics.graphite.{Graphite, GraphiteReporter} import org.apache.spark.metrics.MetricsSystem diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala index 621d086d415..b5cf210af21 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/JmxSink.scala @@ -17,10 +17,10 @@ package org.apache.spark.metrics.sink -import com.codahale.metrics.{JmxReporter, MetricRegistry} - import java.util.Properties +import com.codahale.metrics.{JmxReporter, MetricRegistry} + class JmxSink(val property: Properties, val registry: MetricRegistry) extends Sink { val reporter: JmxReporter = JmxReporter.forRegistry(registry).build() diff --git a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala index 99357fede6d..3cdfe26d40f 100644 --- a/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala +++ b/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala @@ -17,15 +17,13 @@ package org.apache.spark.metrics.sink -import com.codahale.metrics.MetricRegistry -import com.codahale.metrics.json.MetricsModule - -import com.fasterxml.jackson.databind.ObjectMapper - import java.util.Properties import java.util.concurrent.TimeUnit import javax.servlet.http.HttpServletRequest +import com.codahale.metrics.MetricRegistry +import com.codahale.metrics.json.MetricsModule +import com.fasterxml.jackson.databind.ObjectMapper import org.eclipse.jetty.server.Handler import org.apache.spark.ui.JettyUtils diff --git a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala index fb4c65909a9..d3c09b16063 100644 --- a/core/src/main/scala/org/apache/spark/network/BufferMessage.scala +++ 
b/core/src/main/scala/org/apache/spark/network/BufferMessage.scala @@ -23,7 +23,6 @@ import scala.collection.mutable.ArrayBuffer import org.apache.spark.storage.BlockManager - private[spark] class BufferMessage(id_ : Int, val buffers: ArrayBuffer[ByteBuffer], var ackId: Int) extends Message(Message.BUFFER_MESSAGE, id_) { diff --git a/core/src/main/scala/org/apache/spark/network/Connection.scala b/core/src/main/scala/org/apache/spark/network/Connection.scala index ae2007e41b7..f2e3c1a14ec 100644 --- a/core/src/main/scala/org/apache/spark/network/Connection.scala +++ b/core/src/main/scala/org/apache/spark/network/Connection.scala @@ -17,16 +17,13 @@ package org.apache.spark.network -import org.apache.spark._ - -import scala.collection.mutable.{HashMap, Queue, ArrayBuffer} - -import java.io._ +import java.net._ import java.nio._ import java.nio.channels._ -import java.nio.channels.spi._ -import java.net._ +import scala.collection.mutable.{ArrayBuffer, HashMap, Queue} + +import org.apache.spark._ private[spark] abstract class Connection(val channel: SocketChannel, val selector: Selector, diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala index a78d6ac70f8..3dd82bee0b5 100644 --- a/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala +++ b/core/src/main/scala/org/apache/spark/network/ConnectionManager.scala @@ -17,24 +17,21 @@ package org.apache.spark.network -import org.apache.spark._ - +import java.net._ import java.nio._ import java.nio.channels._ import java.nio.channels.spi._ -import java.net._ import java.util.concurrent.{LinkedBlockingDeque, TimeUnit, ThreadPoolExecutor} -import scala.collection.mutable.HashSet +import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.HashMap +import scala.collection.mutable.HashSet import scala.collection.mutable.SynchronizedMap import scala.collection.mutable.SynchronizedQueue -import scala.collection.mutable.ArrayBuffer - -import scala.concurrent.{Await, Promise, ExecutionContext, Future} -import scala.concurrent.duration.Duration +import scala.concurrent.{Await, ExecutionContext, Future, Promise} import scala.concurrent.duration._ +import org.apache.spark._ import org.apache.spark.util.Utils private[spark] class ConnectionManager(port: Int, conf: SparkConf) extends Logging { diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala index 50dd9bc2d10..b82edb6850d 100644 --- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala +++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerId.scala @@ -21,7 +21,6 @@ import java.net.InetSocketAddress import org.apache.spark.util.Utils - private[spark] case class ConnectionManagerId(host: String, port: Int) { // DEBUG code Utils.checkHost(host) diff --git a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala index 8e5c5296cb8..35f64134b07 100644 --- a/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala +++ b/core/src/main/scala/org/apache/spark/network/ConnectionManagerTest.scala @@ -17,16 +17,13 @@ package org.apache.spark.network -import org.apache.spark._ -import org.apache.spark.SparkContext._ - -import scala.io.Source - import java.nio.ByteBuffer -import java.net.InetAddress import scala.concurrent.Await import 
scala.concurrent.duration._ +import scala.io.Source + +import org.apache.spark._ private[spark] object ConnectionManagerTest extends Logging{ def main(args: Array[String]) { diff --git a/core/src/main/scala/org/apache/spark/network/Message.scala b/core/src/main/scala/org/apache/spark/network/Message.scala index 2612884bdbe..20fe6766184 100644 --- a/core/src/main/scala/org/apache/spark/network/Message.scala +++ b/core/src/main/scala/org/apache/spark/network/Message.scala @@ -17,12 +17,11 @@ package org.apache.spark.network -import java.nio.ByteBuffer import java.net.InetSocketAddress +import java.nio.ByteBuffer import scala.collection.mutable.ArrayBuffer - private[spark] abstract class Message(val typ: Long, val id: Int) { var senderAddress: InetSocketAddress = null var started = false diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala index e0fe57b80d5..d0f986a12bf 100644 --- a/core/src/main/scala/org/apache/spark/network/MessageChunk.scala +++ b/core/src/main/scala/org/apache/spark/network/MessageChunk.scala @@ -21,7 +21,6 @@ import java.nio.ByteBuffer import scala.collection.mutable.ArrayBuffer - private[network] class MessageChunk(val header: MessageChunkHeader, val buffer: ByteBuffer) { diff --git a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala index 235fbc39b3b..9bcbc6141a5 100644 --- a/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala +++ b/core/src/main/scala/org/apache/spark/network/MessageChunkHeader.scala @@ -21,7 +21,6 @@ import java.net.InetAddress import java.net.InetSocketAddress import java.nio.ByteBuffer - private[spark] class MessageChunkHeader( val typ: Long, val id: Int, diff --git a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala index 1c9d6030d68..9976255c7e2 100644 --- a/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala +++ b/core/src/main/scala/org/apache/spark/network/ReceiverTest.scala @@ -18,7 +18,7 @@ package org.apache.spark.network import java.nio.ByteBuffer -import java.net.InetAddress + import org.apache.spark.SparkConf private[spark] object ReceiverTest { diff --git a/core/src/main/scala/org/apache/spark/network/SenderTest.scala b/core/src/main/scala/org/apache/spark/network/SenderTest.scala index 162d49bf616..646f8425d95 100644 --- a/core/src/main/scala/org/apache/spark/network/SenderTest.scala +++ b/core/src/main/scala/org/apache/spark/network/SenderTest.scala @@ -18,7 +18,7 @@ package org.apache.spark.network import java.nio.ByteBuffer -import java.net.InetAddress + import org.apache.spark.SparkConf private[spark] object SenderTest { diff --git a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala index 1b9fa1e53a9..f9082ffb914 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/FileHeader.scala @@ -20,7 +20,7 @@ package org.apache.spark.network.netty import io.netty.buffer._ import org.apache.spark.Logging -import org.apache.spark.storage.{TestBlockId, BlockId} +import org.apache.spark.storage.{BlockId, TestBlockId} private[spark] class FileHeader ( val fileLen: Int, diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala 
b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala index d87157e12c4..e7b2855e1ec 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleCopier.scala @@ -19,17 +19,16 @@ package org.apache.spark.network.netty import java.util.concurrent.Executors +import scala.collection.JavaConverters._ + import io.netty.buffer.ByteBuf import io.netty.channel.ChannelHandlerContext import io.netty.util.CharsetUtil -import org.apache.spark.{SparkContext, SparkConf, Logging} +import org.apache.spark.{Logging, SparkConf} import org.apache.spark.network.ConnectionManagerId - -import scala.collection.JavaConverters._ import org.apache.spark.storage.BlockId - private[spark] class ShuffleCopier(conf: SparkConf) extends Logging { def getBlock(host: String, port: Int, blockId: BlockId, diff --git a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala index 44204a8c465..7ef7aecc6a9 100644 --- a/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala +++ b/core/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala @@ -23,7 +23,6 @@ import org.apache.spark.Logging import org.apache.spark.util.Utils import org.apache.spark.storage.{BlockId, FileSegment} - private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging { val server = new FileServer(pResolver, portIn) diff --git a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala index 423ff67a5fd..d25452daf76 100644 --- a/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala +++ b/core/src/main/scala/org/apache/spark/partial/ApproximateActionListener.scala @@ -18,8 +18,8 @@ package org.apache.spark.partial import org.apache.spark._ -import org.apache.spark.scheduler.JobListener import org.apache.spark.rdd.RDD +import org.apache.spark.scheduler.JobListener /** * A JobListener for an approximate single-result action, such as count() or non-parallel reduce(). 
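Besides reordering, a number of these hunks simply delete imports that nothing in the file still references (the ReceiverTest and SenderTest hunks dropping java.net.InetAddress are the clearest cases, and several scheduler and storage files further down get the same treatment). A minimal sketch of that kind of cleanup, using a hypothetical object rather than any file in this patch:

```scala
package org.apache.spark.network

// Hypothetical file, before the cleanup: InetAddress is imported but never used.
import java.net.InetAddress
import java.nio.ByteBuffer

private[spark] object ExampleReceiver {
  def main(args: Array[String]) {
    // Only ByteBuffer is referenced, so the InetAddress import above is dead
    // and is simply deleted, as in the ReceiverTest and SenderTest hunks.
    val buffer = ByteBuffer.allocate(1024)
    println("allocated " + buffer.capacity() + " bytes")
  }
}
```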
diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala index e519e3a5484..40b70baabca 100644 --- a/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala +++ b/core/src/main/scala/org/apache/spark/partial/GroupedCountEvaluator.scala @@ -18,14 +18,12 @@ package org.apache.spark.partial import java.util.{HashMap => JHashMap} -import java.util.{Map => JMap} +import scala.collection.JavaConversions.mapAsScalaMap import scala.collection.Map import scala.collection.mutable.HashMap -import scala.collection.JavaConversions.mapAsScalaMap import cern.jet.stat.Probability - import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap} /** diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala index cf8a5680b66..b5111891ed3 100644 --- a/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala +++ b/core/src/main/scala/org/apache/spark/partial/GroupedMeanEvaluator.scala @@ -18,11 +18,10 @@ package org.apache.spark.partial import java.util.{HashMap => JHashMap} -import java.util.{Map => JMap} -import scala.collection.mutable.HashMap -import scala.collection.Map import scala.collection.JavaConversions.mapAsScalaMap +import scala.collection.Map +import scala.collection.mutable.HashMap import org.apache.spark.util.StatCounter diff --git a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala index 8225a5d933c..442fb86227d 100644 --- a/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala +++ b/core/src/main/scala/org/apache/spark/partial/GroupedSumEvaluator.scala @@ -18,11 +18,10 @@ package org.apache.spark.partial import java.util.{HashMap => JHashMap} -import java.util.{Map => JMap} -import scala.collection.mutable.HashMap -import scala.collection.Map import scala.collection.JavaConversions.mapAsScalaMap +import scala.collection.Map +import scala.collection.mutable.HashMap import org.apache.spark.util.StatCounter diff --git a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala index 424354ae165..e6c4a6d3794 100644 --- a/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/BlockRDD.scala @@ -19,7 +19,7 @@ package org.apache.spark.rdd import scala.reflect.ClassTag -import org.apache.spark.{SparkContext, SparkEnv, Partition, TaskContext} +import org.apache.spark.{Partition, SparkContext, SparkEnv, TaskContext} import org.apache.spark.storage.{BlockId, BlockManager} private[spark] class BlockRDDPartition(val blockId: BlockId, idx: Int) extends Partition { diff --git a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala index 87b950ba431..4908711d17d 100644 --- a/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/CartesianRDD.scala @@ -17,10 +17,11 @@ package org.apache.spark.rdd -import java.io.{ObjectOutputStream, IOException} +import java.io.{IOException, ObjectOutputStream} + import scala.reflect.ClassTag -import org.apache.spark._ +import org.apache.spark._ private[spark] class CartesianPartition( diff --git a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala 
b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala index 8f9d1d5a84c..888af541cf9 100644 --- a/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala @@ -18,12 +18,15 @@ package org.apache.spark.rdd import java.io.IOException + import scala.reflect.ClassTag + +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.Path + import org.apache.spark._ import org.apache.spark.broadcast.Broadcast import org.apache.spark.deploy.SparkHadoopUtil -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.fs.Path private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {} diff --git a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala index 0e47f2e0226..699a10c96c2 100644 --- a/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala @@ -17,7 +17,7 @@ package org.apache.spark.rdd -import java.io.{ObjectOutputStream, IOException} +import java.io.{IOException, ObjectOutputStream} import scala.collection.mutable.ArrayBuffer diff --git a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala index dc345b2df07..4e82b51313b 100644 --- a/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/CoalescedRDD.scala @@ -17,13 +17,14 @@ package org.apache.spark.rdd -import org.apache.spark._ -import java.io.{ObjectOutputStream, IOException} +import java.io.{IOException, ObjectOutputStream} + import scala.collection.mutable -import scala.Some import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag +import org.apache.spark._ + /** * Class that captures a coalesced RDD by essentially keeping track of parent partitions * @param index of this coalesced partition diff --git a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala index 20713b4249b..a7b6b3b5146 100644 --- a/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala @@ -17,14 +17,12 @@ package org.apache.spark.rdd +import org.apache.spark.{TaskContext, Logging} import org.apache.spark.partial.BoundedDouble import org.apache.spark.partial.MeanEvaluator import org.apache.spark.partial.PartialResult import org.apache.spark.partial.SumEvaluator import org.apache.spark.util.StatCounter -import org.apache.spark.{TaskContext, Logging} - -import scala.collection.immutable.NumericRange /** * Extra functions available on RDDs of Doubles through an implicit conversion. 
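Taken together, these hunks converge on one layout for the import section of each Scala file: java/javax first, then scala.*, then third-party libraries, then org.apache.spark, with a blank line between groups and alphabetical order inside each group. The sketch below shows that target layout on a hypothetical helper file; the class body and the "example.dirs" key are illustrative only, not part of the patch.

```scala
package org.apache.spark.rdd

// Hypothetical file showing the import grouping these hunks apply:
// 1) java/javax, 2) scala.*, 3) third-party, 4) org.apache.spark.
import java.io.File

import scala.collection.mutable.ArrayBuffer

import org.apache.hadoop.conf.Configuration

import org.apache.spark.Logging
import org.apache.spark.util.Utils

private[spark] object ExampleImportLayout extends Logging {
  // The body exists only so every imported name is used; "example.dirs" is a
  // made-up configuration key.
  def configuredDirs(conf: Configuration): ArrayBuffer[File] = {
    val dirs = new ArrayBuffer[File]()
    conf.get("example.dirs", "/tmp").split(",").foreach(path => dirs += new File(path))
    logInfo("Found " + dirs.size + " dirs on " + Utils.localHostName())
    dirs
  }
}
```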
diff --git a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala index e74c83b90ba..9e41b3d1e2d 100644 --- a/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/FilteredRDD.scala @@ -17,9 +17,10 @@ package org.apache.spark.rdd -import org.apache.spark.{OneToOneDependency, Partition, TaskContext} import scala.reflect.ClassTag +import org.apache.spark.{Partition, TaskContext} + private[spark] class FilteredRDD[T: ClassTag]( prev: RDD[T], f: T => Boolean) diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala index 4d1878fc142..d8f87d4e369 100644 --- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedRDD.scala @@ -17,9 +17,9 @@ package org.apache.spark.rdd -import org.apache.spark.{Partition, TaskContext} import scala.reflect.ClassTag +import org.apache.spark.{Partition, TaskContext} private[spark] class FlatMappedRDD[U: ClassTag, T: ClassTag]( diff --git a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala index 82000bac092..7c9023f62d3 100644 --- a/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/FlatMappedValuesRDD.scala @@ -17,8 +17,7 @@ package org.apache.spark.rdd -import org.apache.spark.{TaskContext, Partition} - +import org.apache.spark.{Partition, TaskContext} private[spark] class FlatMappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => TraversableOnce[U]) diff --git a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala index 1a694475f69..f6463fa715a 100644 --- a/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/GlommedRDD.scala @@ -17,9 +17,10 @@ package org.apache.spark.rdd -import org.apache.spark.{Partition, TaskContext} import scala.reflect.ClassTag +import org.apache.spark.{Partition, TaskContext} + private[spark] class GlommedRDD[T: ClassTag](prev: RDD[T]) extends RDD[Array[T]](prev) { diff --git a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala index ad74d4636fb..a374fc4a871 100644 --- a/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala @@ -19,7 +19,7 @@ package org.apache.spark.rdd import java.io.EOFException -import org.apache.hadoop.conf.{Configuration, Configurable} +import org.apache.hadoop.conf.{Configurable, Configuration} import org.apache.hadoop.mapred.InputFormat import org.apache.hadoop.mapred.InputSplit import org.apache.hadoop.mapred.JobConf @@ -32,7 +32,6 @@ import org.apache.spark.broadcast.Broadcast import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.util.NextIterator - /** * A Spark split class that wraps around a Hadoop InputSplit. 
*/ diff --git a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala index db15baf503a..4883fb82881 100644 --- a/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/MapPartitionsRDD.scala @@ -17,9 +17,10 @@ package org.apache.spark.rdd -import org.apache.spark.{Partition, TaskContext} import scala.reflect.ClassTag +import org.apache.spark.{Partition, TaskContext} + private[spark] class MapPartitionsRDD[U: ClassTag, T: ClassTag]( prev: RDD[T], f: (TaskContext, Int, Iterator[T]) => Iterator[U], // (TaskContext, partition index, iterator) diff --git a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala index d33c1af5815..2bc47eb9fcd 100644 --- a/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/MappedValuesRDD.scala @@ -17,8 +17,7 @@ package org.apache.spark.rdd - -import org.apache.spark.{TaskContext, Partition} +import org.apache.spark.{Partition, TaskContext} private[spark] class MappedValuesRDD[K, V, U](prev: RDD[_ <: Product2[K, V]], f: V => U) diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala index 10d519e6971..15bec39659e 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala @@ -22,12 +22,13 @@ import java.text.SimpleDateFormat import java.util.Date import java.util.{HashMap => JHashMap} +import scala.collection.JavaConversions._ import scala.collection.Map import scala.collection.mutable import scala.collection.mutable.ArrayBuffer -import scala.collection.JavaConversions._ -import scala.reflect.{ClassTag, classTag} +import scala.reflect.ClassTag +import com.clearspring.analytics.stream.cardinality.HyperLogLog import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path import org.apache.hadoop.io.SequenceFile.CompressionType @@ -38,15 +39,14 @@ import org.apache.hadoop.mapreduce.{Job => NewAPIHadoopJob} import org.apache.hadoop.mapreduce.{RecordWriter => NewRecordWriter} import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutputFormat} -import com.clearspring.analytics.stream.cardinality.HyperLogLog - // SparkHadoopWriter and SparkHadoopMapReduceUtil are actually source files defined in Spark. 
import org.apache.hadoop.mapred.SparkHadoopWriter import org.apache.hadoop.mapreduce.SparkHadoopMapReduceUtil + import org.apache.spark._ +import org.apache.spark.Partitioner.defaultPartitioner import org.apache.spark.SparkContext._ import org.apache.spark.partial.{BoundedDouble, PartialResult} -import org.apache.spark.Partitioner.defaultPartitioner import org.apache.spark.util.SerializableHyperLogLog /** diff --git a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala index f270c1ac217..5f03d7d650a 100644 --- a/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/ParallelCollectionRDD.scala @@ -17,14 +17,15 @@ package org.apache.spark.rdd +import java.io._ + +import scala.Serializable +import scala.collection.Map import scala.collection.immutable.NumericRange import scala.collection.mutable.ArrayBuffer -import scala.collection.Map import scala.reflect.ClassTag import org.apache.spark._ -import java.io._ -import scala.Serializable import org.apache.spark.serializer.JavaSerializer import org.apache.spark.util.Utils diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala index ea8885b36e3..b0440ca7f32 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala @@ -19,8 +19,7 @@ package org.apache.spark.rdd import scala.reflect.ClassTag -import org.apache.spark.{NarrowDependency, SparkEnv, Partition, TaskContext} - +import org.apache.spark.{NarrowDependency, Partition, TaskContext} class PartitionPruningRDDPartition(idx: Int, val parentSplit: Partition) extends Partition { override val index = idx diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala index f4364329a3a..a84357b3841 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PartitionerAwareUnionRDD.scala @@ -17,10 +17,11 @@ package org.apache.spark.rdd +import java.io.{IOException, ObjectOutputStream} + import scala.reflect.ClassTag -import java.io.{ObjectOutputStream, IOException} -import org.apache.spark.{TaskContext, OneToOneDependency, SparkContext, Partition} +import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext} /** * Class representing partitions of PartitionerAwareUnionRDD, which maintains the list of diff --git a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala index a74309d8613..ce4c0d382ba 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PartitionwiseSampledRDD.scala @@ -21,7 +21,7 @@ import java.util.Random import scala.reflect.ClassTag -import org.apache.spark.{TaskContext, Partition} +import org.apache.spark.{Partition, TaskContext} import org.apache.spark.util.random.RandomSampler private[spark] diff --git a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala index 8ef919c4b58..abd4414e81f 100644 --- a/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/PipedRDD.scala @@ -20,14 +20,13 @@ package 
org.apache.spark.rdd import java.io.PrintWriter import java.util.StringTokenizer -import scala.collection.Map import scala.collection.JavaConversions._ +import scala.collection.Map import scala.collection.mutable.ArrayBuffer import scala.io.Source import scala.reflect.ClassTag -import org.apache.spark.{SparkEnv, Partition, TaskContext} - +import org.apache.spark.{Partition, SparkEnv, TaskContext} /** * An RDD that pipes the contents of each parent partition through an external command diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala index d4fc28f5513..50320f40350 100644 --- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala @@ -22,30 +22,27 @@ import java.util.Random import scala.collection.Map import scala.collection.JavaConversions.mapAsScalaMap import scala.collection.mutable.ArrayBuffer - import scala.reflect.{classTag, ClassTag} +import com.clearspring.analytics.stream.cardinality.HyperLogLog +import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap} import org.apache.hadoop.io.BytesWritable import org.apache.hadoop.io.compress.CompressionCodec import org.apache.hadoop.io.NullWritable import org.apache.hadoop.io.Text import org.apache.hadoop.mapred.TextOutputFormat -import it.unimi.dsi.fastutil.objects.{Object2LongOpenHashMap => OLMap} -import com.clearspring.analytics.stream.cardinality.HyperLogLog - +import org.apache.spark._ import org.apache.spark.Partitioner._ +import org.apache.spark.SparkContext._ import org.apache.spark.api.java.JavaRDD import org.apache.spark.partial.BoundedDouble import org.apache.spark.partial.CountEvaluator import org.apache.spark.partial.GroupedCountEvaluator import org.apache.spark.partial.PartialResult import org.apache.spark.storage.StorageLevel -import org.apache.spark.util.{Utils, BoundedPriorityQueue, SerializableHyperLogLog} - -import org.apache.spark.SparkContext._ -import org.apache.spark._ -import org.apache.spark.util.random.{PoissonSampler, BernoulliSampler} +import org.apache.spark.util.{BoundedPriorityQueue, SerializableHyperLogLog, Utils} +import org.apache.spark.util.random.{BernoulliSampler, PoissonSampler} /** * A Resilient Distributed Dataset (RDD), the basic abstraction in Spark. 
Represents an immutable, diff --git a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala index 73e8769c098..953f0555e57 100644 --- a/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala +++ b/core/src/main/scala/org/apache/spark/rdd/RDDCheckpointData.scala @@ -20,9 +20,8 @@ package org.apache.spark.rdd import scala.reflect.ClassTag import org.apache.hadoop.fs.Path -import org.apache.hadoop.conf.Configuration -import org.apache.spark.{SerializableWritable, Partition, SparkException, Logging} +import org.apache.spark.{Logging, Partition, SerializableWritable, SparkException} import org.apache.spark.scheduler.{ResultTask, ShuffleMapTask} /** diff --git a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala index 08534b6f1db..b50307cfa49 100644 --- a/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/SampledRDD.scala @@ -17,9 +17,10 @@ package org.apache.spark.rdd -import scala.reflect.ClassTag import java.util.Random +import scala.reflect.ClassTag + import cern.jet.random.Poisson import cern.jet.random.engine.DRand diff --git a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala index c9b4c768a98..7df9a2960d8 100644 --- a/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala +++ b/core/src/main/scala/org/apache/spark/rdd/SequenceFileRDDFunctions.scala @@ -16,15 +16,15 @@ */ package org.apache.spark.rdd -import scala.reflect.{ ClassTag, classTag} +import scala.reflect.{ClassTag, classTag} +import org.apache.hadoop.io.Writable +import org.apache.hadoop.io.compress.CompressionCodec import org.apache.hadoop.mapred.JobConf import org.apache.hadoop.mapred.SequenceFileOutputFormat -import org.apache.hadoop.io.compress.CompressionCodec -import org.apache.hadoop.io.Writable -import org.apache.spark.SparkContext._ import org.apache.spark.Logging +import org.apache.spark.SparkContext._ /** * Extra functions available on RDDs of (key, value) pairs to create a Hadoop SequenceFile, diff --git a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala index 0ccb309d0d9..0bbda25a905 100644 --- a/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala @@ -19,8 +19,7 @@ package org.apache.spark.rdd import scala.reflect.ClassTag -import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency, - SparkEnv, TaskContext} +import org.apache.spark.{Dependency, Partition, Partitioner, ShuffleDependency, SparkEnv, TaskContext} private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition { override val index = idx diff --git a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala index 4f90c7d3d68..5fe9f363db4 100644 --- a/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/SubtractedRDD.scala @@ -23,14 +23,13 @@ import scala.collection.JavaConversions._ import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag -import org.apache.spark.Partitioner import org.apache.spark.Dependency -import org.apache.spark.TaskContext +import org.apache.spark.OneToOneDependency import org.apache.spark.Partition -import 
org.apache.spark.SparkEnv +import org.apache.spark.Partitioner import org.apache.spark.ShuffleDependency -import org.apache.spark.OneToOneDependency - +import org.apache.spark.SparkEnv +import org.apache.spark.TaskContext /** * An optimized version of cogroup for set difference/subtraction. diff --git a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala index 08a41ac5583..a4470307520 100644 --- a/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/UnionRDD.scala @@ -17,12 +17,12 @@ package org.apache.spark.rdd +import java.io.{IOException, ObjectOutputStream} + import scala.collection.mutable.ArrayBuffer import scala.reflect.ClassTag -import org.apache.spark.{Dependency, RangeDependency, SparkContext, Partition, TaskContext} - -import java.io.{ObjectOutputStream, IOException} +import org.apache.spark.{Dependency, Partition, RangeDependency, SparkContext, TaskContext} private[spark] class UnionPartition[T: ClassTag](idx: Int, rdd: RDD[T], splitIndex: Int) extends Partition { diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala index 83be3c6eb40..b56643444aa 100644 --- a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala @@ -17,10 +17,12 @@ package org.apache.spark.rdd -import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext} -import java.io.{ObjectOutputStream, IOException} +import java.io.{IOException, ObjectOutputStream} + import scala.reflect.ClassTag +import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext} + private[spark] class ZippedPartitionsPartition( idx: Int, @transient rdds: Seq[RDD[_]], diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala index fb5b070c18d..2119e76f0e0 100644 --- a/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/ZippedRDD.scala @@ -17,12 +17,12 @@ package org.apache.spark.rdd -import org.apache.spark.{OneToOneDependency, SparkContext, Partition, TaskContext} - -import java.io.{ObjectOutputStream, IOException} +import java.io.{IOException, ObjectOutputStream} import scala.reflect.ClassTag +import org.apache.spark.{OneToOneDependency, Partition, SparkContext, TaskContext} + private[spark] class ZippedPartition[T: ClassTag, U: ClassTag]( idx: Int, @transient rdd1: RDD[T], diff --git a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala index 38dc114d808..e2c301603b4 100644 --- a/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala +++ b/core/src/main/scala/org/apache/spark/rdd/ZippedWithIndexRDD.scala @@ -19,7 +19,7 @@ package org.apache.spark.rdd import scala.reflect.ClassTag -import org.apache.spark.{TaskContext, Partition} +import org.apache.spark.{Partition, TaskContext} import org.apache.spark.util.Utils private[spark] diff --git a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala index 0b04607d019..9257f48559c 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/ActiveJob.scala @@ -17,10 +17,10 @@ package org.apache.spark.scheduler -import 
org.apache.spark.TaskContext - import java.util.Properties +import org.apache.spark.TaskContext + /** * Tracks information about an active job in the DAGScheduler. */ diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala index 80211541a6a..729f518b89c 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala @@ -28,9 +28,9 @@ import scala.reflect.ClassTag import akka.actor._ import org.apache.spark._ -import org.apache.spark.rdd.RDD import org.apache.spark.executor.TaskMetrics import org.apache.spark.partial.{ApproximateActionListener, ApproximateEvaluator, PartialResult} +import org.apache.spark.rdd.RDD import org.apache.spark.storage.{BlockId, BlockManager, BlockManagerMaster, RDDBlockId} import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap} diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala index add11876130..39cd98e2d74 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerEvent.scala @@ -22,8 +22,8 @@ import java.util.Properties import scala.collection.mutable.Map import org.apache.spark._ -import org.apache.spark.rdd.RDD import org.apache.spark.executor.TaskMetrics +import org.apache.spark.rdd.RDD /** * Types of events that can be handled by the DAGScheduler. The DAGScheduler uses an event queue diff --git a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala index 7b5c0e29ad8..b52fe2410ab 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/DAGSchedulerSource.scala @@ -19,8 +19,8 @@ package org.apache.spark.scheduler import com.codahale.metrics.{Gauge,MetricRegistry} -import org.apache.spark.metrics.source.Source import org.apache.spark.SparkContext +import org.apache.spark.metrics.source.Source private[spark] class DAGSchedulerSource(val dagScheduler: DAGScheduler, sc: SparkContext) extends Source { diff --git a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala index 23447f1bbf8..5555585c8b4 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/InputFormatInfo.scala @@ -17,17 +17,17 @@ package org.apache.spark.scheduler -import org.apache.spark.{Logging, SparkEnv} -import org.apache.spark.deploy.SparkHadoopUtil +import scala.collection.JavaConversions._ import scala.collection.immutable.Set +import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} + +import org.apache.hadoop.conf.Configuration import org.apache.hadoop.mapred.{FileInputFormat, JobConf} -import org.apache.hadoop.security.UserGroupInformation -import org.apache.hadoop.util.ReflectionUtils import org.apache.hadoop.mapreduce.Job -import org.apache.hadoop.conf.Configuration -import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} -import scala.collection.JavaConversions._ +import org.apache.hadoop.util.ReflectionUtils +import org.apache.spark.Logging +import org.apache.spark.deploy.SparkHadoopUtil /** * Parses and holds information about inputFormat 
(and files) specified as a parameter. diff --git a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala index b909b66a5de..9d75d7c4ad6 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/JobLogger.scala @@ -17,7 +17,7 @@ package org.apache.spark.scheduler -import java.io.{IOException, File, FileNotFoundException, PrintWriter} +import java.io.{File, FileNotFoundException, IOException, PrintWriter} import java.text.SimpleDateFormat import java.util.{Date, Properties} import java.util.concurrent.LinkedBlockingQueue @@ -25,8 +25,8 @@ import java.util.concurrent.LinkedBlockingQueue import scala.collection.mutable.{HashMap, HashSet, ListBuffer} import org.apache.spark._ -import org.apache.spark.rdd.RDD import org.apache.spark.executor.TaskMetrics +import org.apache.spark.rdd.RDD import org.apache.spark.storage.StorageLevel /** diff --git a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala index 1c61687f280..d3f63ff92ac 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/MapStatus.scala @@ -17,8 +17,9 @@ package org.apache.spark.scheduler +import java.io.{Externalizable, ObjectInput, ObjectOutput} + import org.apache.spark.storage.BlockManagerId -import java.io.{ObjectOutput, ObjectInput, Externalizable} /** * Result returned by a ShuffleMapTask to a scheduler. Includes the block manager address that the diff --git a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala index 77b1682b3e4..3fc6cc9850f 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/ResultTask.scala @@ -23,7 +23,7 @@ import java.util.zip.{GZIPInputStream, GZIPOutputStream} import org.apache.spark._ import org.apache.spark.rdd.RDD import org.apache.spark.rdd.RDDCheckpointData -import org.apache.spark.util.{MetadataCleanerType, MetadataCleaner, TimeStampedHashMap} +import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap} private[spark] object ResultTask { diff --git a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala index d573e125a33..ed24eb6a549 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/Schedulable.scala @@ -17,9 +17,10 @@ package org.apache.spark.scheduler +import scala.collection.mutable.ArrayBuffer + import org.apache.spark.scheduler.SchedulingMode.SchedulingMode -import scala.collection.mutable.ArrayBuffer /** * An interface for schedulable entities. 
* there are two type of Schedulable entities(Pools and TaskSetManagers) diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala index a546193d5b4..e4eced383c3 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulableBuilder.scala @@ -20,10 +20,10 @@ package org.apache.spark.scheduler import java.io.{FileInputStream, InputStream} import java.util.{NoSuchElementException, Properties} -import org.apache.spark.{SparkConf, Logging} - import scala.xml.XML +import org.apache.spark.{Logging, SparkConf} + /** * An interface to build Schedulable tree * buildPools: build the tree nodes(pools) diff --git a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala index 02bdbba8257..eefc8c232b5 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/SchedulerBackend.scala @@ -17,8 +17,6 @@ package org.apache.spark.scheduler -import org.apache.spark.SparkContext - /** * A backend interface for scheduling systems that allows plugging in different ones under * ClusterScheduler. We assume a Mesos-like model where the application gets resource offers as diff --git a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala index a37ead56327..77789031f46 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/ShuffleMapTask.scala @@ -24,11 +24,10 @@ import scala.collection.mutable.HashMap import org.apache.spark._ import org.apache.spark.executor.ShuffleWriteMetrics -import org.apache.spark.storage._ -import org.apache.spark.util.{MetadataCleanerType, TimeStampedHashMap, MetadataCleaner} import org.apache.spark.rdd.RDD import org.apache.spark.rdd.RDDCheckpointData - +import org.apache.spark.storage._ +import org.apache.spark.util.{MetadataCleaner, MetadataCleanerType, TimeStampedHashMap} private[spark] object ShuffleMapTask { diff --git a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala index 129153c732d..9590c03f106 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala @@ -18,9 +18,10 @@ package org.apache.spark.scheduler import java.util.Properties -import org.apache.spark.util.{Utils, Distribution} + import org.apache.spark.{Logging, TaskEndReason} import org.apache.spark.executor.TaskMetrics +import org.apache.spark.util.{Distribution, Utils} sealed trait SparkListenerEvents diff --git a/core/src/main/scala/org/apache/spark/scheduler/Task.scala b/core/src/main/scala/org/apache/spark/scheduler/Task.scala index 69b42e86eae..b85b4a50cd9 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/Task.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/Task.scala @@ -29,7 +29,6 @@ import org.apache.spark.executor.TaskMetrics import org.apache.spark.serializer.SerializerInstance import org.apache.spark.util.ByteBufferInputStream - /** * A unit of execution. 
We have two kinds of Task's in Spark: * - [[org.apache.spark.scheduler.ShuffleMapTask]] diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala index 5190d234d4e..1481d70db42 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskDescription.scala @@ -18,6 +18,7 @@ package org.apache.spark.scheduler import java.nio.ByteBuffer + import org.apache.spark.util.SerializableBuffer private[spark] class TaskDescription( diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala index 91c27d7b8e9..6183b125def 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskInfo.scala @@ -17,8 +17,6 @@ package org.apache.spark.scheduler -import org.apache.spark.util.Utils - /** * Information about a running task attempt inside a TaskSet. */ diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala index 35de13c3851..ea3229b75be 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskLocality.scala @@ -17,7 +17,6 @@ package org.apache.spark.scheduler - private[spark] object TaskLocality extends Enumeration { // process local is expected to be used ONLY within tasksetmanager for now. val PROCESS_LOCAL, NODE_LOCAL, RACK_LOCAL, ANY = Value diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala index 5724ec9d1b4..d49d8fb8870 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResult.scala @@ -18,13 +18,14 @@ package org.apache.spark.scheduler import java.io._ +import java.nio.ByteBuffer import scala.collection.mutable.Map + +import org.apache.spark.SparkEnv import org.apache.spark.executor.TaskMetrics -import org.apache.spark.{SparkEnv} -import java.nio.ByteBuffer -import org.apache.spark.util.Utils import org.apache.spark.storage.BlockId +import org.apache.spark.util.Utils // Task result. Also contains updates to accumulator variables. 
private[spark] sealed trait TaskResult[T] diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala index bdec08e968a..cb4ad4ae935 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskResultGetter.scala @@ -18,7 +18,6 @@ package org.apache.spark.scheduler import java.nio.ByteBuffer -import java.util.concurrent.{LinkedBlockingDeque, ThreadFactory, ThreadPoolExecutor, TimeUnit} import org.apache.spark._ import org.apache.spark.TaskState.TaskState diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala index 5b525155e9f..8df37c247d0 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala @@ -18,13 +18,13 @@ package org.apache.spark.scheduler import java.nio.ByteBuffer -import java.util.concurrent.atomic.AtomicLong import java.util.{TimerTask, Timer} +import java.util.concurrent.atomic.AtomicLong +import scala.concurrent.duration._ import scala.collection.mutable.ArrayBuffer import scala.collection.mutable.HashMap import scala.collection.mutable.HashSet -import scala.concurrent.duration._ import org.apache.spark._ import org.apache.spark.TaskState.TaskState diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala index 21b2ff1682b..1a4b7e599c0 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSetManager.scala @@ -26,13 +26,11 @@ import scala.collection.mutable.HashSet import scala.math.max import scala.math.min -import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted, - SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState} +import org.apache.spark.{ExceptionFailure, ExecutorLostFailure, FetchFailed, Logging, Resubmitted, SparkEnv, Success, TaskEndReason, TaskKilled, TaskResultLost, TaskState} import org.apache.spark.TaskState.TaskState import org.apache.spark.executor.TaskMetrics import org.apache.spark.util.{Clock, SystemClock} - /** * Schedules the tasks within a single TaskSet in the ClusterScheduler. 
This class keeps track of * each task, retries tasks if they fail (up to a limited number of times), and diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala index 53316dae2a6..4a9a1659d82 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedClusterMessage.scala @@ -21,8 +21,7 @@ import java.nio.ByteBuffer import org.apache.spark.TaskState.TaskState import org.apache.spark.scheduler.TaskDescription -import org.apache.spark.util.{Utils, SerializableBuffer} - +import org.apache.spark.util.{SerializableBuffer, Utils} private[spark] sealed trait CoarseGrainedClusterMessage extends Serializable diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala index 78204103a9b..379e02eb9a4 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala @@ -27,10 +27,8 @@ import akka.actor._ import akka.pattern.ask import akka.remote.{DisassociatedEvent, RemotingLifecycleEvent} -import org.apache.spark.{SparkException, Logging, TaskState} import org.apache.spark.{Logging, SparkException, TaskState} -import org.apache.spark.scheduler.{TaskSchedulerImpl, SchedulerBackend, SlaveLost, TaskDescription, - WorkerOffer} +import org.apache.spark.scheduler.{SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer} import org.apache.spark.scheduler.cluster.CoarseGrainedClusterMessages._ import org.apache.spark.util.{AkkaUtils, Utils} diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala index 04f35cca082..ee4b65e312a 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala @@ -17,11 +17,9 @@ package org.apache.spark.scheduler.cluster -import scala.collection.mutable.HashMap - import org.apache.spark.{Logging, SparkContext} -import org.apache.spark.deploy.client.{AppClient, AppClientListener} import org.apache.spark.deploy.{Command, ApplicationDescription} +import org.apache.spark.deploy.client.{AppClient, AppClientListener} import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SlaveLost, TaskSchedulerImpl} import org.apache.spark.util.Utils diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala index 4401f6df474..28b019d9fd4 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/CoarseMesosSchedulerBackend.scala @@ -18,18 +18,17 @@ package org.apache.spark.scheduler.cluster.mesos import java.io.File -import java.util.{ArrayList => JArrayList, List => JList} +import java.util.{List => JList} import java.util.Collections -import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} import 
scala.collection.JavaConversions._ +import scala.collection.mutable.{HashMap, HashSet} -import com.google.protobuf.ByteString import org.apache.mesos.{Scheduler => MScheduler} import org.apache.mesos._ import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _} -import org.apache.spark.{SparkException, Logging, SparkContext, TaskState} +import org.apache.spark.{Logging, SparkContext, SparkException} import org.apache.spark.scheduler.TaskSchedulerImpl import org.apache.spark.scheduler.cluster.CoarseGrainedSchedulerBackend diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala index fef291eea02..c576beb0c0d 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala @@ -21,17 +21,16 @@ import java.io.File import java.util.{ArrayList => JArrayList, List => JList} import java.util.Collections -import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} import scala.collection.JavaConversions._ +import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet} import com.google.protobuf.ByteString import org.apache.mesos.{Scheduler => MScheduler} import org.apache.mesos._ import org.apache.mesos.Protos.{TaskInfo => MesosTaskInfo, TaskState => MesosTaskState, _} -import org.apache.spark.{Logging, SparkException, SparkContext, TaskState} -import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost, - TaskDescription, TaskSchedulerImpl, WorkerOffer} +import org.apache.spark.{Logging, SparkContext, SparkException, TaskState} +import org.apache.spark.scheduler.{ExecutorExited, ExecutorLossReason, SchedulerBackend, SlaveLost, TaskDescription, TaskSchedulerImpl, WorkerOffer} import org.apache.spark.util.Utils /** diff --git a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala index 897d47a9ad9..50f7e79e97d 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/local/LocalBackend.scala @@ -21,7 +21,7 @@ import java.nio.ByteBuffer import akka.actor.{Actor, ActorRef, Props} -import org.apache.spark.{Logging, SparkContext, SparkEnv, TaskState} +import org.apache.spark.{Logging, SparkEnv, TaskState} import org.apache.spark.TaskState.TaskState import org.apache.spark.executor.{Executor, ExecutorBackend} import org.apache.spark.scheduler.{SchedulerBackend, TaskSchedulerImpl, WorkerOffer} diff --git a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala index 5d3d43623d9..33c1705ad7c 100644 --- a/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala +++ b/core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala @@ -20,8 +20,8 @@ package org.apache.spark.serializer import java.io._ import java.nio.ByteBuffer -import org.apache.spark.util.ByteBufferInputStream import org.apache.spark.SparkConf +import org.apache.spark.util.ByteBufferInputStream private[spark] class JavaSerializationStream(out: OutputStream) extends SerializationStream { val objOut = new ObjectOutputStream(out) diff --git a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala 
b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala index 2d0b2553850..920490f9d0d 100644 --- a/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala +++ b/core/src/main/scala/org/apache/spark/serializer/KryoSerializer.scala @@ -17,13 +17,13 @@ package org.apache.spark.serializer -import java.nio.ByteBuffer import java.io.{EOFException, InputStream, OutputStream} +import java.nio.ByteBuffer -import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer} -import com.esotericsoftware.kryo.{KryoException, Kryo} +import com.esotericsoftware.kryo.{Kryo, KryoException} import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput} -import com.twitter.chill.{EmptyScalaKryoInstantiator, AllScalaRegistrar} +import com.esotericsoftware.kryo.serializers.{JavaSerializer => KryoJavaSerializer} +import com.twitter.chill.{AllScalaRegistrar, EmptyScalaKryoInstantiator} import org.apache.spark._ import org.apache.spark.broadcast.HttpBroadcast diff --git a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala index a38a2b59dbc..16677ab54be 100644 --- a/core/src/main/scala/org/apache/spark/serializer/Serializer.scala +++ b/core/src/main/scala/org/apache/spark/serializer/Serializer.scala @@ -22,8 +22,7 @@ import java.nio.ByteBuffer import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream -import org.apache.spark.util.{NextIterator, ByteBufferInputStream} - +import org.apache.spark.util.{ByteBufferInputStream, NextIterator} /** * A serializer. Because some serialization libraries are not thread safe, this class is used to diff --git a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala index 36a37af4f82..65ac0155f45 100644 --- a/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala +++ b/core/src/main/scala/org/apache/spark/serializer/SerializerManager.scala @@ -18,8 +18,8 @@ package org.apache.spark.serializer import java.util.concurrent.ConcurrentHashMap -import org.apache.spark.SparkConf +import org.apache.spark.SparkConf /** * A service that returns a serializer object given the serializer's class name. If a previous diff --git a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala index aa62ab5aba1..925022e7fe6 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockFetcherIterator.scala @@ -17,7 +17,6 @@ package org.apache.spark.storage -import java.nio.ByteBuffer import java.util.concurrent.LinkedBlockingQueue import scala.collection.mutable.ArrayBuffer @@ -26,15 +25,13 @@ import scala.collection.mutable.Queue import io.netty.buffer.ByteBuf -import org.apache.spark.Logging -import org.apache.spark.SparkException +import org.apache.spark.{Logging, SparkException} import org.apache.spark.network.BufferMessage import org.apache.spark.network.ConnectionManagerId import org.apache.spark.network.netty.ShuffleCopier import org.apache.spark.serializer.Serializer import org.apache.spark.util.Utils - /** * A block fetcher iterator interface. 
There are two implementations: * diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala index 780a3a15dd1..a734ddc1ef7 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManager.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManager.scala @@ -20,24 +20,21 @@ package org.apache.spark.storage import java.io.{File, InputStream, OutputStream} import java.nio.{ByteBuffer, MappedByteBuffer} -import scala.collection.mutable.{HashMap, ArrayBuffer} -import scala.util.Random - -import akka.actor.{ActorSystem, Cancellable, Props} +import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.concurrent.{Await, Future} -import scala.concurrent.duration.Duration import scala.concurrent.duration._ +import scala.util.Random +import akka.actor.{ActorSystem, Cancellable, Props} import it.unimi.dsi.fastutil.io.{FastBufferedOutputStream, FastByteArrayOutputStream} +import sun.nio.ch.DirectBuffer -import org.apache.spark.{SparkConf, Logging, SparkEnv, SparkException} +import org.apache.spark.{Logging, SparkConf, SparkEnv, SparkException} import org.apache.spark.io.CompressionCodec import org.apache.spark.network._ import org.apache.spark.serializer.Serializer import org.apache.spark.util._ -import sun.nio.ch.DirectBuffer - private[spark] class BlockManager( executorId: String, actorSystem: ActorSystem, diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala index 74207f59af1..98cd6e68fa7 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerId.scala @@ -19,6 +19,7 @@ package org.apache.spark.storage import java.io.{Externalizable, IOException, ObjectInput, ObjectOutput} import java.util.concurrent.ConcurrentHashMap + import org.apache.spark.util.Utils /** diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala index c54e4f26647..e531467cccb 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMaster.scala @@ -23,7 +23,7 @@ import scala.concurrent.ExecutionContext.Implicits.global import akka.actor._ import akka.pattern.ask -import org.apache.spark.{SparkConf, Logging, SparkException} +import org.apache.spark.{Logging, SparkConf, SparkException} import org.apache.spark.storage.BlockManagerMessages._ import org.apache.spark.util.AkkaUtils diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala index 893418fb8ca..a999d76a326 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMasterActor.scala @@ -27,7 +27,7 @@ import scala.concurrent.duration._ import akka.actor.{Actor, ActorRef, Cancellable} import akka.pattern.ask -import org.apache.spark.{SparkConf, Logging, SparkException} +import org.apache.spark.{Logging, SparkConf, SparkException} import org.apache.spark.storage.BlockManagerMessages._ import org.apache.spark.util.{AkkaUtils, Utils} diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala index 45f51da2885..bbb9529b5a0 
100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala @@ -21,7 +21,6 @@ import java.io.{Externalizable, ObjectInput, ObjectOutput} import akka.actor.ActorRef - private[storage] object BlockManagerMessages { ////////////////////////////////////////////////////////////////////////////////// // Messages from the master to slaves. diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala index 3a65e557338..bcfb82d3c73 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSlaveActor.scala @@ -21,7 +21,6 @@ import akka.actor.Actor import org.apache.spark.storage.BlockManagerMessages._ - /** * An actor to take commands from the master to execute options. For example, * this is used to remove blocks from the slave's BlockManager. diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala index 7cf754fb204..687586490ab 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerSource.scala @@ -19,9 +19,8 @@ package org.apache.spark.storage import com.codahale.metrics.{Gauge,MetricRegistry} -import org.apache.spark.metrics.source.Source import org.apache.spark.SparkContext - +import org.apache.spark.metrics.source.Source private[spark] class BlockManagerSource(val blockManager: BlockManager, sc: SparkContext) extends Source { diff --git a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala index 3efe738a08f..c7766a3a656 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockManagerWorker.scala @@ -19,7 +19,7 @@ package org.apache.spark.storage import java.nio.ByteBuffer -import org.apache.spark.{Logging} +import org.apache.spark.Logging import org.apache.spark.network._ import org.apache.spark.util.Utils diff --git a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala index fbafcf79d28..7168ae18c26 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockMessage.scala @@ -19,8 +19,8 @@ package org.apache.spark.storage import java.nio.ByteBuffer -import scala.collection.mutable.StringBuilder import scala.collection.mutable.ArrayBuffer +import scala.collection.mutable.StringBuilder import org.apache.spark.network._ diff --git a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala index ea426562402..b047644b88f 100644 --- a/core/src/main/scala/org/apache/spark/storage/BlockStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/BlockStore.scala @@ -18,6 +18,7 @@ package org.apache.spark.storage import java.nio.ByteBuffer + import scala.collection.mutable.ArrayBuffer import org.apache.spark.Logging diff --git a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala index 5a1e7b44440..d1f07ddb24b 100644 --- 
a/core/src/main/scala/org/apache/spark/storage/DiskStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/DiskStore.scala @@ -27,7 +27,6 @@ import org.apache.spark.Logging import org.apache.spark.serializer.Serializer import org.apache.spark.util.Utils - /** * Stores BlockManager blocks on disk. */ diff --git a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala index eb5a1852168..18141756518 100644 --- a/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala +++ b/core/src/main/scala/org/apache/spark/storage/MemoryStore.scala @@ -17,10 +17,11 @@ package org.apache.spark.storage -import java.util.LinkedHashMap -import java.util.concurrent.ArrayBlockingQueue import java.nio.ByteBuffer -import collection.mutable.ArrayBuffer +import java.util.LinkedHashMap + +import scala.collection.mutable.ArrayBuffer + import org.apache.spark.util.{SizeEstimator, Utils} /** diff --git a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala index 40734aab49f..8cea302eb14 100644 --- a/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala +++ b/core/src/main/scala/org/apache/spark/storage/StoragePerfTester.scala @@ -17,11 +17,11 @@ package org.apache.spark.storage -import java.util.concurrent.atomic.AtomicLong import java.util.concurrent.{CountDownLatch, Executors} +import java.util.concurrent.atomic.AtomicLong -import org.apache.spark.serializer.KryoSerializer import org.apache.spark.SparkContext +import org.apache.spark.serializer.KryoSerializer import org.apache.spark.util.Utils /** diff --git a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala index 50a0cdb3095..2d88a40fbb3 100644 --- a/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala +++ b/core/src/main/scala/org/apache/spark/storage/StorageUtils.scala @@ -17,8 +17,8 @@ package org.apache.spark.storage -import org.apache.spark.{SparkContext} -import BlockManagerMasterActor.BlockStatus +import org.apache.spark.SparkContext +import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus import org.apache.spark.util.Utils private[spark] diff --git a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala index 729ba2c550a..1d81d006c0b 100644 --- a/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala +++ b/core/src/main/scala/org/apache/spark/storage/ThreadingTest.scala @@ -17,12 +17,13 @@ package org.apache.spark.storage -import akka.actor._ - import java.util.concurrent.ArrayBlockingQueue + +import akka.actor._ import util.Random + +import org.apache.spark.SparkConf import org.apache.spark.serializer.KryoSerializer -import org.apache.spark.{SparkConf, SparkContext} /** * This class tests the BlockManager and MemoryStore for thread safety and diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala index ade8ba1323b..1f048a84cdf 100644 --- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala +++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala @@ -17,21 +17,19 @@ package org.apache.spark.ui +import java.net.InetSocketAddress import javax.servlet.http.{HttpServletResponse, HttpServletRequest} import scala.annotation.tailrec -import scala.util.{Try, Success, Failure} +import scala.util.{Failure, 
Success, Try} import scala.xml.Node import net.liftweb.json.{JValue, pretty, render} - -import org.eclipse.jetty.server.{Server, Request, Handler} -import org.eclipse.jetty.server.handler.{ResourceHandler, HandlerList, ContextHandler, AbstractHandler} +import org.eclipse.jetty.server.{Handler, Request, Server} +import org.eclipse.jetty.server.handler.{AbstractHandler, ContextHandler, HandlerList, ResourceHandler} import org.eclipse.jetty.util.thread.QueuedThreadPool import org.apache.spark.Logging -import java.net.InetSocketAddress - /** Utilities for launching a web server using Jetty's HTTP Server class */ private[spark] object JettyUtils extends Logging { diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala index 0196f43d743..af6b65860e0 100644 --- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala @@ -17,16 +17,14 @@ package org.apache.spark.ui -import javax.servlet.http.HttpServletRequest - import org.eclipse.jetty.server.{Handler, Server} import org.apache.spark.{Logging, SparkContext, SparkEnv} +import org.apache.spark.ui.JettyUtils._ import org.apache.spark.ui.env.EnvironmentUI import org.apache.spark.ui.exec.ExecutorsUI -import org.apache.spark.ui.storage.BlockManagerUI import org.apache.spark.ui.jobs.JobProgressUI -import org.apache.spark.ui.JettyUtils._ +import org.apache.spark.ui.storage.BlockManagerUI import org.apache.spark.util.Utils /** Top level user interface for Spark */ diff --git a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala index f913ee461b2..18d2b5075aa 100644 --- a/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala +++ b/core/src/main/scala/org/apache/spark/ui/UIWorkloadGenerator.scala @@ -23,7 +23,6 @@ import org.apache.spark.{SparkConf, SparkContext} import org.apache.spark.SparkContext._ import org.apache.spark.scheduler.SchedulingMode - /** * Continuously generates jobs that expose various features of the WebUI (internal testing tool). 
* diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala index 88f41be8d3d..9e7cdc88162 100644 --- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentUI.scala @@ -25,11 +25,10 @@ import scala.xml.Node import org.eclipse.jetty.server.Handler +import org.apache.spark.SparkContext import org.apache.spark.ui.JettyUtils._ -import org.apache.spark.ui.UIUtils import org.apache.spark.ui.Page.Environment -import org.apache.spark.SparkContext - +import org.apache.spark.ui.UIUtils private[spark] class EnvironmentUI(sc: SparkContext) { diff --git a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala index 4e41acf0230..1f3b7a4c231 100644 --- a/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/exec/ExecutorsUI.scala @@ -26,14 +26,13 @@ import org.eclipse.jetty.server.Handler import org.apache.spark.{ExceptionFailure, Logging, SparkContext} import org.apache.spark.executor.TaskMetrics -import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListenerTaskEnd, SparkListener} +import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd, SparkListenerTaskStart} import org.apache.spark.scheduler.TaskInfo import org.apache.spark.ui.JettyUtils._ import org.apache.spark.ui.Page.Executors import org.apache.spark.ui.UIUtils import org.apache.spark.util.Utils - private[spark] class ExecutorsUI(val sc: SparkContext) { private var _listener: Option[ExecutorsListener] = None diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala index ab03eb5ce1a..d012ba4dbb3 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/ExecutorTable.scala @@ -17,11 +17,11 @@ package org.apache.spark.ui.jobs +import scala.collection.mutable import scala.xml.Node import org.apache.spark.scheduler.SchedulingMode import org.apache.spark.util.Utils -import scala.collection.mutable /** Page showing executor summary */ private[spark] class ExecutorTable(val parent: JobProgressUI, val stageId: Int) { diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala index 6289f8744f2..81713edcf5d 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala @@ -25,7 +25,6 @@ import org.apache.spark.scheduler.SchedulingMode import org.apache.spark.ui.Page._ import org.apache.spark.ui.UIUtils._ - /** Page showing list of all ongoing and recently finished stages and pools*/ private[spark] class IndexPage(parent: JobProgressUI) { def listener = parent.listener diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala index 858a10ce750..07a08f5277d 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressListener.scala @@ -17,7 +17,6 @@ package org.apache.spark.ui.jobs -import scala.Seq import scala.collection.mutable.{ListBuffer, HashMap, HashSet} import org.apache.spark.{ExceptionFailure, SparkContext, Success} diff --git 
a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala index c1ee2f3d00d..557bce6b663 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/JobProgressUI.scala @@ -17,23 +17,15 @@ package org.apache.spark.ui.jobs -import scala.concurrent.duration._ - import java.text.SimpleDateFormat - import javax.servlet.http.HttpServletRequest -import org.eclipse.jetty.server.Handler - import scala.Seq -import scala.collection.mutable.{HashSet, ListBuffer, HashMap, ArrayBuffer} +import org.eclipse.jetty.server.Handler + +import org.apache.spark.SparkContext import org.apache.spark.ui.JettyUtils._ -import org.apache.spark.{ExceptionFailure, SparkContext, Success} -import org.apache.spark.scheduler._ -import collection.mutable -import org.apache.spark.scheduler.SchedulingMode -import org.apache.spark.scheduler.SchedulingMode.SchedulingMode import org.apache.spark.util.Utils /** Web UI showing progress status of all jobs in the given SparkContext. */ diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala index 89fffcb80d0..eb7518a0208 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala @@ -19,12 +19,10 @@ package org.apache.spark.ui.jobs import javax.servlet.http.HttpServletRequest -import scala.xml.{NodeSeq, Node} -import scala.collection.mutable.HashSet +import scala.xml.Node -import org.apache.spark.scheduler.Stage -import org.apache.spark.ui.UIUtils._ import org.apache.spark.ui.Page._ +import org.apache.spark.ui.UIUtils._ /** Page showing specific pool details */ private[spark] class PoolPage(parent: JobProgressUI) { diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala index b6e98942ab8..ddc687a45a0 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala @@ -18,17 +18,16 @@ package org.apache.spark.ui.jobs import java.util.Date - import javax.servlet.http.HttpServletRequest import scala.xml.Node -import org.apache.spark.{ExceptionFailure} +import org.apache.spark.ExceptionFailure import org.apache.spark.executor.TaskMetrics +import org.apache.spark.scheduler.TaskInfo import org.apache.spark.ui.UIUtils._ import org.apache.spark.ui.Page._ import org.apache.spark.util.{Utils, Distribution} -import org.apache.spark.scheduler.TaskInfo /** Page showing statistics and task list for a given stage */ private[spark] class StagePage(parent: JobProgressUI) { diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala index 999a94fc2d0..c5fd3ae16dc 100644 --- a/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala +++ b/core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala @@ -19,14 +19,13 @@ package org.apache.spark.ui.jobs import java.util.Date -import scala.xml.Node import scala.collection.mutable.HashSet +import scala.xml.Node import org.apache.spark.scheduler.{SchedulingMode, StageInfo, TaskInfo} import org.apache.spark.ui.UIUtils import org.apache.spark.util.Utils - /** Page showing list of all ongoing and recently finished stages */ private[spark] class StageTable(val stages: Seq[StageInfo], val parent: JobProgressUI) { diff --git 
a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala index 39f422dd6b9..dc18eab74e0 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/BlockManagerUI.scala @@ -17,8 +17,6 @@ package org.apache.spark.ui.storage -import scala.concurrent.duration._ - import javax.servlet.http.HttpServletRequest import org.eclipse.jetty.server.Handler diff --git a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala index 109a7d4094c..6a3c41fb115 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala @@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest import scala.xml.Node import org.apache.spark.storage.{RDDInfo, StorageUtils} -import org.apache.spark.ui.UIUtils._ import org.apache.spark.ui.Page._ +import org.apache.spark.ui.UIUtils._ import org.apache.spark.util.Utils /** Page showing list of RDD's currently stored in the cluster */ diff --git a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala index b83cd54f3c3..78b149b14b1 100644 --- a/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala +++ b/core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala @@ -23,11 +23,10 @@ import scala.xml.Node import org.apache.spark.storage.{BlockId, StorageStatus, StorageUtils} import org.apache.spark.storage.BlockManagerMasterActor.BlockStatus -import org.apache.spark.ui.UIUtils._ import org.apache.spark.ui.Page._ +import org.apache.spark.ui.UIUtils._ import org.apache.spark.util.Utils - /** Page showing storage details for a given RDD */ private[spark] class RDDPage(parent: BlockManagerUI) { val sc = parent.sc diff --git a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala index 761d378c7fd..f26ed47e580 100644 --- a/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala +++ b/core/src/main/scala/org/apache/spark/util/AkkaUtils.scala @@ -22,8 +22,8 @@ import scala.concurrent.duration.{Duration, FiniteDuration} import akka.actor.{ActorSystem, ExtendedActorSystem, IndestructibleActorSystem} import com.typesafe.config.ConfigFactory - import org.apache.log4j.{Level, Logger} + import org.apache.spark.SparkConf /** diff --git a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala index a38329df037..c3692f2fd92 100644 --- a/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala +++ b/core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala @@ -19,8 +19,9 @@ package org.apache.spark.util import java.io.Serializable import java.util.{PriorityQueue => JPriorityQueue} -import scala.collection.generic.Growable + import scala.collection.JavaConverters._ +import scala.collection.generic.Growable /** * Bounded priority queue. 
This class wraps the original PriorityQueue diff --git a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala index e214d2a519a..54de4d4ee8c 100644 --- a/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala +++ b/core/src/main/scala/org/apache/spark/util/ByteBufferInputStream.scala @@ -19,6 +19,7 @@ package org.apache.spark.util import java.io.InputStream import java.nio.ByteBuffer + import org.apache.spark.storage.BlockManager /** diff --git a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala index c0c057be8de..681d0a30cb3 100644 --- a/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala +++ b/core/src/main/scala/org/apache/spark/util/ClosureCleaner.scala @@ -17,14 +17,14 @@ package org.apache.spark.util -import java.lang.reflect.Field +import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import scala.collection.mutable.Map import scala.collection.mutable.Set import org.objectweb.asm.{ClassReader, ClassVisitor, MethodVisitor, Type} import org.objectweb.asm.Opcodes._ -import java.io.{ByteArrayOutputStream, ByteArrayInputStream} + import org.apache.spark.Logging private[spark] object ClosureCleaner extends Logging { diff --git a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala index 3868ab36312..0448919e091 100644 --- a/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala +++ b/core/src/main/scala/org/apache/spark/util/MetadataCleaner.scala @@ -17,9 +17,9 @@ package org.apache.spark.util -import java.util.{TimerTask, Timer} -import org.apache.spark.{SparkConf, Logging} +import java.util.{Timer, TimerTask} +import org.apache.spark.{Logging, SparkConf} /** * Runs a timer task to periodically clean up metadata (e.g. old files or hashtable entries) diff --git a/core/src/main/scala/org/apache/spark/util/MutablePair.scala b/core/src/main/scala/org/apache/spark/util/MutablePair.scala index 34f1f6606fc..b053266f127 100644 --- a/core/src/main/scala/org/apache/spark/util/MutablePair.scala +++ b/core/src/main/scala/org/apache/spark/util/MutablePair.scala @@ -17,7 +17,6 @@ package org.apache.spark.util - /** * A tuple of 2 elements. This can be used as an alternative to Scala's Tuple2 when we want to * minimize object allocation. 
diff --git a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala index f2b1ad7d0e9..2b452ad33b0 100644 --- a/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala +++ b/core/src/main/scala/org/apache/spark/util/SerializableBuffer.scala @@ -17,8 +17,8 @@ package org.apache.spark.util +import java.io.{EOFException, IOException, ObjectInputStream, ObjectOutputStream} import java.nio.ByteBuffer -import java.io.{IOException, ObjectOutputStream, EOFException, ObjectInputStream} import java.nio.channels.Channels /** diff --git a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala index 2110b3596ee..21a88eea3bb 100644 --- a/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala +++ b/core/src/main/scala/org/apache/spark/util/SerializableHyperLogLog.scala @@ -17,8 +17,9 @@ package org.apache.spark.util -import java.io.{Externalizable, ObjectOutput, ObjectInput} -import com.clearspring.analytics.stream.cardinality.{ICardinality, HyperLogLog} +import java.io.{Externalizable, ObjectInput, ObjectOutput} + +import com.clearspring.analytics.stream.cardinality.{HyperLogLog, ICardinality} /** * A wrapper around [[com.clearspring.analytics.stream.cardinality.HyperLogLog]] that is diff --git a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala index 17c6481c184..b955612ca77 100644 --- a/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala +++ b/core/src/main/scala/org/apache/spark/util/SizeEstimator.scala @@ -17,20 +17,19 @@ package org.apache.spark.util +import java.lang.management.ManagementFactory +import java.lang.reflect.{Array => JArray} import java.lang.reflect.Field import java.lang.reflect.Modifier -import java.lang.reflect.{Array => JArray} import java.util.IdentityHashMap -import java.util.concurrent.ConcurrentHashMap import java.util.Random - -import javax.management.MBeanServer -import java.lang.management.ManagementFactory +import java.util.concurrent.ConcurrentHashMap import scala.collection.mutable.ArrayBuffer import it.unimi.dsi.fastutil.ints.IntOpenHashSet -import org.apache.spark.{SparkEnv, SparkConf, SparkContext, Logging} + +import org.apache.spark.Logging /** * Estimates the sizes of Java objects (number of bytes of memory they occupy), for use in diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala index 8e07a0f29ad..ddbd084ed7f 100644 --- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala +++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala @@ -18,10 +18,11 @@ package org.apache.spark.util import java.util.concurrent.ConcurrentHashMap + import scala.collection.JavaConversions -import scala.collection.mutable.Map import scala.collection.immutable -import org.apache.spark.scheduler.MapStatus +import scala.collection.mutable.Map + import org.apache.spark.Logging /** diff --git a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala index 26983138ff0..19bece86b36 100644 --- a/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala +++ b/core/src/main/scala/org/apache/spark/util/TimeStampedHashSet.scala @@ -17,10 +17,10 @@ package org.apache.spark.util -import 
scala.collection.mutable.Set -import scala.collection.JavaConversions import java.util.concurrent.ConcurrentHashMap +import scala.collection.JavaConversions +import scala.collection.mutable.Set class TimeStampedHashSet[A] extends Set[A] { val internalMap = new ConcurrentHashMap[A, Long]() diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala index 8749ab7875b..8e69f1d3351 100644 --- a/core/src/main/scala/org/apache/spark/util/Utils.scala +++ b/core/src/main/scala/org/apache/spark/util/Utils.scala @@ -18,7 +18,8 @@ package org.apache.spark.util import java.io._ -import java.net.{InetAddress, URL, URI, NetworkInterface, Inet4Address} +import java.net.{InetAddress, Inet4Address, NetworkInterface, URI, URL} +import java.nio.ByteBuffer import java.util.{Locale, Random, UUID} import java.util.concurrent.{ConcurrentHashMap, Executors, ThreadPoolExecutor} @@ -30,16 +31,11 @@ import scala.reflect.ClassTag import com.google.common.io.Files import com.google.common.util.concurrent.ThreadFactoryBuilder +import org.apache.hadoop.fs.{FileSystem, FileUtil, Path} -import org.apache.hadoop.conf.Configuration -import org.apache.hadoop.fs.{Path, FileSystem, FileUtil} -import org.apache.hadoop.io._ - +import org.apache.spark.{Logging, SparkConf, SparkException} import org.apache.spark.serializer.{DeserializationStream, SerializationStream, SerializerInstance} import org.apache.spark.deploy.SparkHadoopUtil -import java.nio.ByteBuffer -import org.apache.spark.{SparkConf, SparkException, Logging} - /** * Various utility methods used by Spark. diff --git a/core/src/main/scala/org/apache/spark/util/Vector.scala b/core/src/main/scala/org/apache/spark/util/Vector.scala index 96da93d8545..d437c055f33 100644 --- a/core/src/main/scala/org/apache/spark/util/Vector.scala +++ b/core/src/main/scala/org/apache/spark/util/Vector.scala @@ -18,6 +18,7 @@ package org.apache.spark.util import scala.util.Random + import org.apache.spark.util.random.XORShiftRandom class Vector(val elements: Array[Double]) extends Serializable { diff --git a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala index c9cf5128438..d3153d2cac4 100644 --- a/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala +++ b/core/src/main/scala/org/apache/spark/util/collection/BitSet.scala @@ -17,7 +17,6 @@ package org.apache.spark.util.collection - /** * A simple, fixed-size bit set implementation. This implementation is fast because it avoids * safety/bound checking. 
diff --git a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala index 59ba1e457ce..856d092ab34 100644 --- a/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala +++ b/core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala @@ -23,8 +23,8 @@ import java.util.Comparator import scala.collection.mutable import scala.collection.mutable.ArrayBuffer -import it.unimi.dsi.fastutil.io.FastBufferedInputStream import com.google.common.io.ByteStreams +import it.unimi.dsi.fastutil.io.FastBufferedInputStream import org.apache.spark.{Logging, SparkEnv} import org.apache.spark.serializer.Serializer diff --git a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala index 6b66d547519..0f1fca4813b 100644 --- a/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala +++ b/core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala @@ -18,6 +18,7 @@ package org.apache.spark.util.random import java.util.Random + import cern.jet.random.Poisson import cern.jet.random.engine.DRand diff --git a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala index 20d32d01b5e..ca611b67ed9 100644 --- a/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala +++ b/core/src/main/scala/org/apache/spark/util/random/XORShiftRandom.scala @@ -18,6 +18,7 @@ package org.apache.spark.util.random import java.util.{Random => JavaRandom} + import org.apache.spark.util.Utils.timeIt /** diff --git a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala index c443c5266e4..6c73ea6949d 100644 --- a/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala +++ b/core/src/test/scala/org/apache/spark/AccumulatorSuite.scala @@ -17,12 +17,11 @@ package org.apache.spark +import scala.collection.mutable + import org.scalatest.FunSuite import org.scalatest.matchers.ShouldMatchers -import collection.mutable -import java.util.Random -import scala.math.exp -import scala.math.signum + import org.apache.spark.SparkContext._ class AccumulatorSuite extends FunSuite with ShouldMatchers with LocalSparkContext { diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala index ec13b329b25..d2e29f20f0b 100644 --- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala +++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala @@ -17,11 +17,14 @@ package org.apache.spark +import java.io.File + import scala.reflect.ClassTag + import org.scalatest.FunSuite -import java.io.File -import org.apache.spark.rdd._ + import org.apache.spark.SparkContext._ +import org.apache.spark.rdd._ import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId} import org.apache.spark.util.Utils diff --git a/core/src/test/scala/org/apache/spark/DistributedSuite.scala b/core/src/test/scala/org/apache/spark/DistributedSuite.scala index 8de7a328d1c..14ddd6f1ec0 100644 --- a/core/src/test/scala/org/apache/spark/DistributedSuite.scala +++ b/core/src/test/scala/org/apache/spark/DistributedSuite.scala @@ -17,17 +17,16 @@ package org.apache.spark -import network.ConnectionManagerId import org.scalatest.BeforeAndAfter -import org.scalatest.concurrent.Timeouts._ import 
org.scalatest.FunSuite +import org.scalatest.concurrent.Timeouts._ import org.scalatest.matchers.ShouldMatchers -import org.scalatest.time.{Span, Millis} +import org.scalatest.time.{Millis, Span} -import SparkContext._ +import org.apache.spark.SparkContext._ +import org.apache.spark.network.ConnectionManagerId import org.apache.spark.storage.{BlockManagerWorker, GetBlock, RDDBlockId, StorageLevel} - class NotSerializableClass class NotSerializableExn(val notSer: NotSerializableClass) extends Throwable() {} diff --git a/core/src/test/scala/org/apache/spark/DriverSuite.scala b/core/src/test/scala/org/apache/spark/DriverSuite.scala index fb895372585..e0e80112786 100644 --- a/core/src/test/scala/org/apache/spark/DriverSuite.scala +++ b/core/src/test/scala/org/apache/spark/DriverSuite.scala @@ -26,6 +26,7 @@ import org.scalatest.FunSuite import org.scalatest.concurrent.Timeouts import org.scalatest.prop.TableDrivenPropertyChecks._ import org.scalatest.time.SpanSugar._ + import org.apache.spark.util.Utils class DriverSuite extends FunSuite with Timeouts { diff --git a/core/src/test/scala/org/apache/spark/FailureSuite.scala b/core/src/test/scala/org/apache/spark/FailureSuite.scala index befdc1589f0..ac3c86778d5 100644 --- a/core/src/test/scala/org/apache/spark/FailureSuite.scala +++ b/core/src/test/scala/org/apache/spark/FailureSuite.scala @@ -19,7 +19,7 @@ package org.apache.spark import org.scalatest.FunSuite -import SparkContext._ +import org.apache.spark.SparkContext._ import org.apache.spark.util.NonSerializable // Common state shared by FailureSuite-launched tasks. We use a global object diff --git a/core/src/test/scala/org/apache/spark/FileServerSuite.scala b/core/src/test/scala/org/apache/spark/FileServerSuite.scala index a2eb9a4e846..9be67b3c95a 100644 --- a/core/src/test/scala/org/apache/spark/FileServerSuite.scala +++ b/core/src/test/scala/org/apache/spark/FileServerSuite.scala @@ -20,10 +20,11 @@ package org.apache.spark import java.io._ import java.util.jar.{JarEntry, JarOutputStream} -import SparkContext._ import com.google.common.io.Files import org.scalatest.FunSuite +import org.apache.spark.SparkContext._ + class FileServerSuite extends FunSuite with LocalSparkContext { @transient var tmpFile: File = _ diff --git a/core/src/test/scala/org/apache/spark/FileSuite.scala b/core/src/test/scala/org/apache/spark/FileSuite.scala index 7b82a4cdd9c..8ff02aef67a 100644 --- a/core/src/test/scala/org/apache/spark/FileSuite.scala +++ b/core/src/test/scala/org/apache/spark/FileSuite.scala @@ -17,17 +17,16 @@ package org.apache.spark -import java.io.{FileWriter, PrintWriter, File} +import java.io.{File, FileWriter} import scala.io.Source import com.google.common.io.Files -import org.scalatest.FunSuite import org.apache.hadoop.io._ -import org.apache.hadoop.io.compress.{DefaultCodec, CompressionCodec, GzipCodec} - +import org.apache.hadoop.io.compress.DefaultCodec +import org.scalatest.FunSuite -import SparkContext._ +import org.apache.spark.SparkContext._ class FileSuite extends FunSuite with LocalSparkContext { diff --git a/core/src/test/scala/org/apache/spark/JavaAPISuite.java b/core/src/test/scala/org/apache/spark/JavaAPISuite.java index 8c573ac0d65..20232e9fbb8 100644 --- a/core/src/test/scala/org/apache/spark/JavaAPISuite.java +++ b/core/src/test/scala/org/apache/spark/JavaAPISuite.java @@ -22,14 +22,14 @@ import java.io.Serializable; import java.util.*; -import com.google.common.base.Optional; import scala.Tuple2; +import com.google.common.base.Optional; import 
com.google.common.base.Charsets; -import org.apache.hadoop.io.compress.DefaultCodec; import com.google.common.io.Files; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; +import org.apache.hadoop.io.compress.DefaultCodec; import org.apache.hadoop.mapred.SequenceFileInputFormat; import org.apache.hadoop.mapred.SequenceFileOutputFormat; import org.apache.hadoop.mapreduce.Job; @@ -48,7 +48,6 @@ import org.apache.spark.storage.StorageLevel; import org.apache.spark.util.StatCounter; - // The test suite itself is Serializable so that anonymous Function implementations can be // serialized, as an alternative to converting these anonymous classes to static inner classes; // see http://stackoverflow.com/questions/758570/. diff --git a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala index 1121e06e2e6..20c503d30c8 100644 --- a/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala +++ b/core/src/test/scala/org/apache/spark/JobCancellationSuite.scala @@ -20,9 +20,9 @@ package org.apache.spark import java.util.concurrent.Semaphore import scala.concurrent.Await +import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration.Duration import scala.concurrent.future -import scala.concurrent.ExecutionContext.Implicits.global import org.scalatest.{BeforeAndAfter, FunSuite} import org.scalatest.matchers.ShouldMatchers @@ -30,7 +30,6 @@ import org.scalatest.matchers.ShouldMatchers import org.apache.spark.SparkContext._ import org.apache.spark.scheduler.{SparkListenerTaskStart, SparkListener} - /** * Test suite for cancelling running jobs. We run the cancellation tasks for single job action * (e.g. count) as well as multi-job action (e.g. take). We test the local and cluster schedulers diff --git a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala index 3ac706110e2..4b972f88a95 100644 --- a/core/src/test/scala/org/apache/spark/LocalSparkContext.scala +++ b/core/src/test/scala/org/apache/spark/LocalSparkContext.scala @@ -17,12 +17,11 @@ package org.apache.spark -import org.scalatest.Suite -import org.scalatest.BeforeAndAfterEach -import org.scalatest.BeforeAndAfterAll - import org.jboss.netty.logging.InternalLoggerFactory import org.jboss.netty.logging.Slf4JLoggerFactory +import org.scalatest.BeforeAndAfterAll +import org.scalatest.BeforeAndAfterEach +import org.scalatest.Suite /** Manages a local `sc` {@link SparkContext} variable, correctly stopping it after each test. 
*/ trait LocalSparkContext extends BeforeAndAfterEach with BeforeAndAfterAll { self: Suite => diff --git a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala index 930c2523caf..6c1e325f6f3 100644 --- a/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala +++ b/core/src/test/scala/org/apache/spark/MapOutputTrackerSuite.scala @@ -17,13 +17,14 @@ package org.apache.spark -import org.scalatest.FunSuite +import scala.concurrent.Await import akka.actor._ +import org.scalatest.FunSuite + import org.apache.spark.scheduler.MapStatus import org.apache.spark.storage.BlockManagerId import org.apache.spark.util.AkkaUtils -import scala.concurrent.Await class MapOutputTrackerSuite extends FunSuite with LocalSparkContext { private val conf = new SparkConf diff --git a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala index 1c5d5ea4364..4305686d3a6 100644 --- a/core/src/test/scala/org/apache/spark/PartitioningSuite.scala +++ b/core/src/test/scala/org/apache/spark/PartitioningSuite.scala @@ -18,13 +18,12 @@ package org.apache.spark import scala.math.abs -import scala.collection.mutable.ArrayBuffer import org.scalatest.{FunSuite, PrivateMethodTester} import org.apache.spark.SparkContext._ -import org.apache.spark.util.StatCounter import org.apache.spark.rdd.RDD +import org.apache.spark.util.StatCounter class PartitioningSuite extends FunSuite with SharedSparkContext with PrivateMethodTester { diff --git a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala index 2e851d892dd..3a0385a1b0b 100644 --- a/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/PipedRDDSuite.scala @@ -18,7 +18,6 @@ package org.apache.spark import org.scalatest.FunSuite -import SparkContext._ class PipedRDDSuite extends FunSuite with SharedSparkContext { diff --git a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala index c650ef4ed58..0b6511a80df 100644 --- a/core/src/test/scala/org/apache/spark/SharedSparkContext.scala +++ b/core/src/test/scala/org/apache/spark/SharedSparkContext.scala @@ -17,8 +17,8 @@ package org.apache.spark -import org.scalatest.Suite import org.scalatest.BeforeAndAfterAll +import org.scalatest.Suite /** Shares a local `SparkContext` between all tests in a suite and closes it at the end */ trait SharedSparkContext extends BeforeAndAfterAll { self: Suite => diff --git a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala index e121b162ad9..29d428aa7dc 100644 --- a/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala +++ b/core/src/test/scala/org/apache/spark/ShuffleNettySuite.scala @@ -19,7 +19,6 @@ package org.apache.spark import org.scalatest.BeforeAndAfterAll - class ShuffleNettySuite extends ShuffleSuite with BeforeAndAfterAll { // This test suite should run all tests in ShuffleSuite with Netty shuffle mode. 
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala index db717865db5..abea36f7c83 100644 --- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala +++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala @@ -22,10 +22,9 @@ import org.scalatest.matchers.ShouldMatchers import org.apache.spark.SparkContext._ import org.apache.spark.ShuffleSuite.NonJavaSerializableClass -import org.apache.spark.rdd.{RDD, SubtractedRDD, CoGroupedRDD, OrderedRDDFunctions, ShuffledRDD} -import org.apache.spark.util.MutablePair +import org.apache.spark.rdd.{CoGroupedRDD, OrderedRDDFunctions, RDD, ShuffledRDD, SubtractedRDD} import org.apache.spark.serializer.KryoSerializer - +import org.apache.spark.util.MutablePair class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext { test("groupByKey without compression") { diff --git a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala index 939fe518012..5cb49d9a7fc 100644 --- a/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala +++ b/core/src/test/scala/org/apache/spark/SparkContextInfoSuite.scala @@ -18,7 +18,6 @@ package org.apache.spark import org.scalatest.FunSuite -import org.apache.spark.SparkContext._ class SparkContextInfoSuite extends FunSuite with LocalSparkContext { test("getPersistentRDDs only returns RDDs that are marked as cached") { diff --git a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala index 75d6493e338..b5383d553ad 100644 --- a/core/src/test/scala/org/apache/spark/ThreadingSuite.scala +++ b/core/src/test/scala/org/apache/spark/ThreadingSuite.scala @@ -22,9 +22,6 @@ import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.atomic.AtomicInteger import org.scalatest.FunSuite -import org.scalatest.BeforeAndAfter - -import SparkContext._ /** * Holds state shared across task threads in some ThreadingSuite tests. 
diff --git a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala index 768ca3850e7..42ff059e018 100644 --- a/core/src/test/scala/org/apache/spark/UnpersistSuite.scala +++ b/core/src/test/scala/org/apache/spark/UnpersistSuite.scala @@ -19,8 +19,7 @@ package org.apache.spark import org.scalatest.FunSuite import org.scalatest.concurrent.Timeouts._ -import org.scalatest.time.{Span, Millis} -import org.apache.spark.SparkContext._ +import org.scalatest.time.{Millis, Span} class UnpersistSuite extends FunSuite with LocalSparkContext { test("unpersist RDD") { diff --git a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala index 618b9c113b8..4f87fd8654c 100644 --- a/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala +++ b/core/src/test/scala/org/apache/spark/ZippedPartitionsSuite.scala @@ -17,16 +17,7 @@ package org.apache.spark -import scala.collection.immutable.NumericRange - import org.scalatest.FunSuite -import org.scalatest.prop.Checkers -import org.scalacheck.Arbitrary._ -import org.scalacheck.Gen -import org.scalacheck.Prop._ - -import SparkContext._ - object ZippedPartitionsSuite { def procZippedData(i: Iterator[Int], s: Iterator[String], d: Iterator[Double]) : Iterator[Int] = { diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala index 5bcebabc9a4..7b866f08a0e 100644 --- a/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/api/python/PythonRDDSuite.scala @@ -17,11 +17,10 @@ package org.apache.spark.api.python -import org.scalatest.FunSuite -import org.scalatest.matchers.ShouldMatchers - import java.io.{ByteArrayOutputStream, DataOutputStream} +import org.scalatest.FunSuite + class PythonRDDSuite extends FunSuite { test("Writing large strings to the worker") { diff --git a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala index 6445db0063f..de866ed7ffe 100644 --- a/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/JsonProtocolSuite.scala @@ -27,7 +27,7 @@ import org.scalatest.FunSuite import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, WorkerStateResponse} import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, RecoveryState, WorkerInfo} -import org.apache.spark.deploy.worker.{ExecutorRunner, DriverRunner} +import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner} class JsonProtocolSuite extends FunSuite { diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala index 0c502612647..a2c131b0c97 100644 --- a/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala +++ b/core/src/test/scala/org/apache/spark/deploy/worker/DriverRunnerTest.scala @@ -19,15 +19,13 @@ package org.apache.spark.deploy.worker import java.io.File -import scala.collection.JavaConversions._ - import org.mockito.Mockito._ import org.mockito.Matchers._ +import org.mockito.invocation.InvocationOnMock +import org.mockito.stubbing.Answer import org.scalatest.FunSuite import org.apache.spark.deploy.{Command, DriverDescription} -import org.mockito.stubbing.Answer -import 
org.mockito.invocation.InvocationOnMock class DriverRunnerTest extends FunSuite { private def createDriverRunner() = { diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala index 4baa65659f0..3cab8e7b379 100644 --- a/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala +++ b/core/src/test/scala/org/apache/spark/deploy/worker/ExecutorRunnerTest.scala @@ -21,7 +21,7 @@ import java.io.File import org.scalatest.FunSuite -import org.apache.spark.deploy.{ExecutorState, Command, ApplicationDescription} +import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState} class ExecutorRunnerTest extends FunSuite { test("command includes appId") { diff --git a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala index 1f1d8d13800..0b5ed6d7703 100644 --- a/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/worker/WorkerWatcherSuite.scala @@ -17,11 +17,10 @@ package org.apache.spark.deploy.worker - +import akka.actor.{ActorSystem, AddressFromURIString, Props} import akka.testkit.TestActorRef -import org.scalatest.FunSuite import akka.remote.DisassociatedEvent -import akka.actor.{ActorSystem, AddressFromURIString, Props} +import org.scalatest.FunSuite class WorkerWatcherSuite extends FunSuite { test("WorkerWatcher shuts down on valid disassociation") { diff --git a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala index 8d7546085f4..68a0ea36aa5 100644 --- a/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala +++ b/core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala @@ -20,8 +20,8 @@ package org.apache.spark.io import java.io.{ByteArrayInputStream, ByteArrayOutputStream} import org.scalatest.FunSuite -import org.apache.spark.SparkConf +import org.apache.spark.SparkConf class CompressionCodecSuite extends FunSuite { val conf = new SparkConf(false) diff --git a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala index 71a2c6c498e..c1e8b295dfe 100644 --- a/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala +++ b/core/src/test/scala/org/apache/spark/metrics/MetricsSystemSuite.scala @@ -18,8 +18,9 @@ package org.apache.spark.metrics import org.scalatest.{BeforeAndAfter, FunSuite} -import org.apache.spark.deploy.master.MasterSource + import org.apache.spark.SparkConf +import org.apache.spark.deploy.master.MasterSource class MetricsSystemSuite extends FunSuite with BeforeAndAfter { var filePath: String = _ diff --git a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala index 0d4c10db8ef..3b833f2e418 100644 --- a/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/AsyncRDDActionsSuite.scala @@ -30,7 +30,6 @@ import org.scalatest.time.SpanSugar._ import org.apache.spark.SparkContext._ import org.apache.spark.{SparkContext, SparkException, LocalSparkContext} - class AsyncRDDActionsSuite extends FunSuite with BeforeAndAfterAll with Timeouts { @transient private var sc: SparkContext = _ diff --git 
a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala index 7f50a5a47c2..a822bd18bfd 100644 --- a/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/DoubleRDDSuite.scala @@ -17,14 +17,10 @@ package org.apache.spark.rdd -import scala.math.abs -import scala.collection.mutable.ArrayBuffer - import org.scalatest.FunSuite -import org.apache.spark.SparkContext._ -import org.apache.spark.rdd._ import org.apache.spark._ +import org.apache.spark.SparkContext._ class DoubleRDDSuite extends FunSuite with SharedSparkContext { // Verify tests on the histogram functionality. We test with both evenly diff --git a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala index 3d39a31252e..7c7f69b261a 100644 --- a/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/JdbcRDDSuite.scala @@ -17,11 +17,12 @@ package org.apache.spark -import org.scalatest.{ BeforeAndAfter, FunSuite } -import org.apache.spark.SparkContext._ -import org.apache.spark.rdd.JdbcRDD import java.sql._ +import org.scalatest.{BeforeAndAfter, FunSuite} + +import org.apache.spark.rdd.JdbcRDD + class JdbcRDDSuite extends FunSuite with BeforeAndAfter with LocalSparkContext { before { diff --git a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala index 5da538a1ddf..fa5c9b10fe0 100644 --- a/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/PairRDDFunctionsSuite.scala @@ -22,12 +22,11 @@ import scala.collection.mutable.HashSet import scala.util.Random import org.scalatest.FunSuite - import com.google.common.io.Files + import org.apache.spark.SparkContext._ import org.apache.spark.{Partitioner, SharedSparkContext} - class PairRDDFunctionsSuite extends FunSuite with SharedSparkContext { test("groupByKey") { val pairs = sc.parallelize(Array((1, 1), (1, 2), (1, 3), (2, 1))) diff --git a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala index a80afdee7e7..a4381a8b974 100644 --- a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala @@ -19,11 +19,11 @@ package org.apache.spark.rdd import scala.collection.immutable.NumericRange -import org.scalatest.FunSuite -import org.scalatest.prop.Checkers import org.scalacheck.Arbitrary._ import org.scalacheck.Gen import org.scalacheck.Prop._ +import org.scalatest.FunSuite +import org.scalatest.prop.Checkers class ParallelCollectionSplitSuite extends FunSuite with Checkers { test("one element per slice") { diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala index 53a7b7c44df..956c2b9cbd3 100644 --- a/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala +++ b/core/src/test/scala/org/apache/spark/rdd/PartitionPruningRDDSuite.scala @@ -18,8 +18,8 @@ package org.apache.spark.rdd import org.scalatest.FunSuite -import org.apache.spark.{TaskContext, Partition, SharedSparkContext} +import org.apache.spark.{Partition, SharedSparkContext, TaskContext} class PartitionPruningRDDSuite extends 
FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
index cfe96fb3f7b..00c273df63b 100644
--- a/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/PartitionwiseSampledRDDSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
+
 import org.apache.spark.SharedSparkContext
 import org.apache.spark.util.random.RandomSampler
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 308c7cc8c3b..60bcada5524 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -18,13 +18,15 @@
 package org.apache.spark.rdd
 
 import scala.collection.mutable.HashMap
+import scala.collection.parallel.mutable
+
 import org.scalatest.FunSuite
 import org.scalatest.concurrent.Timeouts._
-import org.scalatest.time.{Span, Millis}
+import org.scalatest.time.{Millis, Span}
+
+import org.apache.spark._
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd._
-import scala.collection.parallel.mutable
-import org.apache.spark._
 
 class RDDSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
index e8361199421..d0619559bb4 100644
--- a/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/SortingSuite.scala
@@ -18,7 +18,6 @@
 package org.apache.spark.rdd
 
 import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
 import org.scalatest.matchers.ShouldMatchers
 
 import org.apache.spark.{Logging, SharedSparkContext}
diff --git a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
index 98ea4cb5612..85e929925e3 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ClusterSchedulerSuite.scala
@@ -17,13 +17,13 @@
 package org.apache.spark.scheduler
 
-import org.scalatest.FunSuite
-import org.scalatest.BeforeAndAfter
+import java.util.Properties
 
-import org.apache.spark._
 import scala.collection.mutable.ArrayBuffer
 
-import java.util.Properties
+import org.scalatest.FunSuite
+
+import org.apache.spark._
 
 class FakeTaskSetManager(
     initPriority: Int,
diff --git a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
index f0236ef1e97..ad890b4e4d0 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala
@@ -20,11 +20,12 @@
 import scala.Tuple2
 import scala.collection.mutable.{HashMap, Map}
 
+import org.scalatest.{BeforeAndAfter, FunSuite}
+
 import org.apache.spark._
 import org.apache.spark.rdd.RDD
 import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
 import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
-import org.scalatest.{BeforeAndAfter, FunSuite}
 
 /**
  * Tests for DAGScheduler. These tests directly call the event processing functions in DAGScheduler
diff --git a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
index 29102913c71..25fe63c2655 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/JobLoggerSuite.scala
@@ -17,11 +17,6 @@
 package org.apache.spark.scheduler
 
-import java.util.Properties
-import java.util.concurrent.LinkedBlockingQueue
-
-import scala.collection.mutable
-
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
@@ -29,7 +24,6 @@
 import org.apache.spark._
 import org.apache.spark.SparkContext._
 import org.apache.spark.rdd.RDD
 
-
 class JobLoggerSuite extends FunSuite with LocalSparkContext with ShouldMatchers {
   val WAIT_TIMEOUT_MILLIS = 10000
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
index e31a116a75b..8bb5317cd28 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskContextSuite.scala
@@ -19,11 +19,12 @@
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.LocalSparkContext
+import org.apache.spark.Partition
+import org.apache.spark.SparkContext
 import org.apache.spark.TaskContext
 import org.apache.spark.rdd.RDD
-import org.apache.spark.SparkContext
-import org.apache.spark.Partition
-import org.apache.spark.LocalSparkContext
 
 class TaskContextSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
index 4b52d9651eb..ac07f60e284 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskResultGetterSuite.scala
@@ -21,7 +21,7 @@
 import java.nio.ByteBuffer
 
 import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSuite}
 
-import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv}
+import org.apache.spark.{LocalSparkContext, SparkContext, SparkEnv}
 import org.apache.spark.storage.TaskResultBlockId
 
 /**
diff --git a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
index de321c45b54..34a7d8cefee 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/TaskSetManagerSuite.scala
@@ -24,8 +24,7 @@
 import org.scalatest.FunSuite
 
 import org.apache.spark._
 import org.apache.spark.executor.TaskMetrics
-import java.nio.ByteBuffer
-import org.apache.spark.util.{Utils, FakeClock}
+import org.apache.spark.util.FakeClock
 
 class FakeDAGScheduler(taskScheduler: FakeClusterScheduler) extends DAGScheduler(taskScheduler) {
   override def taskStarted(task: Task[_], taskInfo: TaskInfo) {
diff --git a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
index 38985832755..5d4673aebe9 100644
--- a/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/serializer/KryoSerializerSuite.scala
@@ -20,9 +20,9 @@
 package org.apache.spark.serializer
 
 import scala.collection.mutable
 
 import com.esotericsoftware.kryo.Kryo
-
 import org.scalatest.FunSuite
-import org.apache.spark.{SparkConf, SharedSparkContext}
+
+import org.apache.spark.SharedSparkContext
 import org.apache.spark.serializer.KryoTest._
 
 class KryoSerializerSuite extends FunSuite with SharedSparkContext {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
index 85011c64517..9f011d9c8d1 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerSuite.scala
@@ -20,18 +20,17 @@
 package org.apache.spark.storage
 
 import java.nio.ByteBuffer
 
 import akka.actor._
-
-import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfter
+import org.scalatest.FunSuite
 import org.scalatest.PrivateMethodTester
 import org.scalatest.concurrent.Eventually._
 import org.scalatest.concurrent.Timeouts._
 import org.scalatest.matchers.ShouldMatchers._
 import org.scalatest.time.SpanSugar._
 
-import org.apache.spark.util.{SizeEstimator, Utils, AkkaUtils, ByteBufferInputStream}
-import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
 import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.serializer.{JavaSerializer, KryoSerializer}
+import org.apache.spark.util.{AkkaUtils, ByteBufferInputStream, SizeEstimator, Utils}
 
 class BlockManagerSuite extends FunSuite with BeforeAndAfter with PrivateMethodTester {
   private val conf = new SparkConf(false)
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
index 829f389460f..62f9b3cc7b2 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskBlockManagerSuite.scala
@@ -22,9 +22,10 @@
 import java.io.{File, FileWriter}
 
 import scala.collection.mutable
 
 import com.google.common.io.Files
-import org.apache.spark.SparkConf
 import org.scalatest.{BeforeAndAfterEach, FunSuite}
 
+import org.apache.spark.SparkConf
+
 class DiskBlockManagerSuite extends FunSuite with BeforeAndAfterEach {
   private val testConf = new SparkConf(false)
   val rootDir0 = Files.createTempDir()
diff --git a/core/src/test/scala/org/apache/spark/ui/UISuite.scala b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
index c17bbfe7d35..20ebb1897e6 100644
--- a/core/src/test/scala/org/apache/spark/ui/UISuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/UISuite.scala
@@ -17,10 +17,12 @@
 package org.apache.spark.ui
 
-import scala.util.{Failure, Success, Try}
 import java.net.ServerSocket
-import org.scalatest.FunSuite
+
+import scala.util.{Failure, Success, Try}
+
 import org.eclipse.jetty.server.Server
+import org.scalatest.FunSuite
 
 class UISuite extends FunSuite {
   test("jetty port increases under contention") {
diff --git a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
index 67a57a0e7f9..8ca863e8b36 100644
--- a/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ui/jobs/JobProgressListenerSuite.scala
@@ -18,10 +18,10 @@
 package org.apache.spark.ui.jobs
 
 import org.scalatest.FunSuite
-import org.apache.spark.scheduler._
+
 import org.apache.spark.{LocalSparkContext, SparkContext, Success}
-import org.apache.spark.scheduler.SparkListenerTaskStart
 import org.apache.spark.executor.{ShuffleReadMetrics, TaskMetrics}
+import org.apache.spark.scheduler._
 
 class JobProgressListenerSuite extends FunSuite with LocalSparkContext {
   test("test executor id to summary") {
diff --git a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
index de4871d0433..439e5644e20 100644
--- a/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/ClosureCleanerSuite.scala
@@ -17,12 +17,10 @@
 package org.apache.spark.util
 
-import java.io.NotSerializableException
-
 import org.scalatest.FunSuite
 
-import org.apache.spark.SparkContext
 import org.apache.spark.LocalSparkContext._
+import org.apache.spark.SparkContext
 
 class ClosureCleanerSuite extends FunSuite {
   test("closures inside an object") {
diff --git a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
index 45867463a51..e1446cbc90b 100644
--- a/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/NextIteratorSuite.scala
@@ -17,10 +17,12 @@
 package org.apache.spark.util
 
+import java.util.NoSuchElementException
+
+import scala.collection.mutable.Buffer
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
-import scala.collection.mutable.Buffer
-import java.util.NoSuchElementException
 
 class NextIteratorSuite extends FunSuite with ShouldMatchers {
   test("one iteration") {
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 11ebdc352bd..b583a8bd46a 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -17,10 +17,9 @@
 package org.apache.spark.util
 
-import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfterAll
+import org.scalatest.FunSuite
 import org.scalatest.PrivateMethodTester
-import org.apache.spark.SparkContext
 
 class DummyClass1 {}
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7030ba4858b..8f55b2372c9 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -17,13 +17,15 @@
 package org.apache.spark.util
 
+import scala.util.Random
+
+import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream}
+import java.nio.{ByteBuffer, ByteOrder}
+
 import com.google.common.base.Charsets
 import com.google.common.io.Files
-import java.io.{ByteArrayOutputStream, ByteArrayInputStream, FileOutputStream, File}
-import java.nio.{ByteBuffer, ByteOrder}
-import org.scalatest.FunSuite
 import org.apache.commons.io.FileUtils
-import scala.util.Random
+import org.scalatest.FunSuite
 
 class UtilsSuite extends FunSuite {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
index f44442f1a53..52c7288e18b 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/AppendOnlyMapSuite.scala
@@ -17,10 +17,11 @@
 package org.apache.spark.util.collection
 
+import java.util.Comparator
+
 import scala.collection.mutable.HashSet
 
 import org.scalatest.FunSuite
-import java.util.Comparator
 
 class AppendOnlyMapSuite extends FunSuite {
   test("initialization") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
index 0f1ab3d20ee..c32183c134f 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/BitSetSuite.scala
@@ -19,7 +19,6 @@
 package org.apache.spark.util.collection
 
 import org.scalatest.FunSuite
 
-
 class BitSetSuite extends FunSuite {
   test("basic set and get") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
index e9b62ea70db..b024c89d94d 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashMapSuite.scala
@@ -18,8 +18,10 @@
 package org.apache.spark.util.collection
 
 import scala.collection.mutable.HashSet
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.SizeEstimator
 
 class OpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
index 1b24f8f287c..ff4a98f5dcd 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/OpenHashSetSuite.scala
@@ -22,7 +22,6 @@
 import org.scalatest.matchers.ShouldMatchers
 
 import org.apache.spark.util.SizeEstimator
 
-
 class OpenHashSetSuite extends FunSuite with ShouldMatchers {
   test("size for specialized, primitive int") {
diff --git a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
index 3b60decee9a..e3fca173908 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMapSuite.scala
@@ -18,8 +18,10 @@
 package org.apache.spark.util.collection
 
 import scala.collection.mutable.HashSet
+
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.SizeEstimator
 
 class PrimitiveKeyOpenHashMapSuite extends FunSuite with ShouldMatchers {
diff --git a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
index 0f4792cd3bd..7576c9a51f3 100644
--- a/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/RandomSamplerSuite.scala
@@ -17,11 +17,11 @@
 package org.apache.spark.util.random
 
-import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.scalatest.mock.EasyMockSugar
-
 import java.util.Random
+
 import cern.jet.random.Poisson
+import org.scalatest.{BeforeAndAfter, FunSuite}
+import org.scalatest.mock.EasyMockSugar
 
 class RandomSamplerSuite extends FunSuite with BeforeAndAfter with EasyMockSugar {
diff --git a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
index 352aa94219c..c51d12bfe0b 100644
--- a/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/random/XORShiftRandomSuite.scala
@@ -19,6 +19,7 @@
 package org.apache.spark.util.random
 
 import org.scalatest.FunSuite
 import org.scalatest.matchers.ShouldMatchers
+
 import org.apache.spark.util.Utils.times
 
 class XORShiftRandomSuite extends FunSuite with ShouldMatchers {
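Taken together, these hunks apply one convention to the touched test sources: imports grouped as java, then scala, then third-party libraries, then org.apache.spark, with a blank line between groups and alphabetical order within each group. The sketch below is only an illustration of that grouping; the package name and class are hypothetical and do not appear in the patch itself.

    package org.apache.spark.example   // hypothetical package, for illustration only

    import java.util.Properties

    import scala.collection.mutable.ArrayBuffer

    import org.scalatest.FunSuite

    import org.apache.spark._

    // A trivial suite so the header above compiles as a complete file.
    class ExampleSuite extends FunSuite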