Don't use floats
pwendell committed Apr 5, 2014
1 parent 542a736 commit 2f706f1
Showing 23 changed files with 54 additions and 54 deletions.
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/Aggregator.scala
@@ -20,7 +20,7 @@ package org.apache.spark
import org.apache.spark.util.collection.{AppendOnlyMap, ExternalAppendOnlyMap}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A set of functions used to aggregate data.
*
10 changes: 5 additions & 5 deletions core/src/main/scala/org/apache/spark/Dependency.scala
@@ -21,15 +21,15 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.serializer.Serializer

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Base class for dependencies.
*/
abstract class Dependency[T](val rdd: RDD[T]) extends Serializable


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Base class for dependencies where each partition of the parent RDD is used by at most one
* partition of the child RDD. Narrow dependencies allow for pipelined execution.
@@ -45,7 +45,7 @@ abstract class NarrowDependency[T](rdd: RDD[T]) extends Dependency(rdd) {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Represents a dependency on the output of a shuffle stage.
* @param rdd the parent RDD
@@ -65,7 +65,7 @@ class ShuffleDependency[K, V](


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Represents a one-to-one dependency between partitions of the parent and child RDDs.
*/
@@ -75,7 +75,7 @@ class OneToOneDependency[T](rdd: RDD[T]) extends NarrowDependency[T](rdd) {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Represents a one-to-one dependency between ranges of partitions in the parent and child RDDs.
* @param rdd the parent RDD
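For orientation, a minimal sketch of where these dependency classes surface: narrow transformations such as `mapValues` keep a `OneToOneDependency` on the parent, while `groupByKey` introduces a `ShuffleDependency`. The local-mode setup and RDD contents are assumptions for illustration only.

```scala
import org.apache.spark.{OneToOneDependency, ShuffleDependency, SparkContext}
import org.apache.spark.SparkContext._  // pair-RDD functions

val sc = new SparkContext("local", "dependency-demo")

val pairs   = sc.parallelize(Seq((1, "a"), (2, "b")))
val mapped  = pairs.mapValues(_.toUpperCase)  // narrow: pipelined with its parent
val grouped = mapped.groupByKey()             // wide: reads the output of a shuffle

mapped.dependencies.foreach {
  case _: OneToOneDependency[_]   => println("narrow dependency")
  case other                      => println(s"other: $other")
}
grouped.dependencies.foreach {
  case _: ShuffleDependency[_, _] => println("shuffle dependency")
  case other                      => println(s"other: $other")
}
```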
4 changes: 2 additions & 2 deletions core/src/main/scala/org/apache/spark/FutureAction.scala
@@ -25,7 +25,7 @@ import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.{JobFailed, JobSucceeded, JobWaiter}

/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
* <span class="badge badge-red">EXPERIMENTAL API</span>
*
* A future for the result of an action to support cancellation. This is an extension of the
* Scala Future interface to support cancellation.
@@ -150,7 +150,7 @@ class SimpleFutureAction[T] private[spark](jobWaiter: JobWaiter[_], resultFunc:


/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
* <span class="badge badge-red">EXPERIMENTAL API</span>
*
* A [[FutureAction]] for actions that could trigger multiple Spark jobs. Examples include take,
* takeSample. Cancellation works by setting the cancelled flag to true and interrupting the
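The cancellation these classes describe can be sketched as follows, assuming the async actions that appear later in this diff and a local-mode SparkContext:

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._  // implicit conversion to the async actions

val sc = new SparkContext("local", "future-demo")

// countAsync returns a FutureAction[Long]; cancel() sets the cancelled flag
// and interrupts the running job, as described above.
val action = sc.parallelize(1 to 10000000).countAsync()
action.cancel()
```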
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/TaskContext.scala
@@ -22,7 +22,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.spark.executor.TaskMetrics

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Contextual information about a task which can be read or mutated during execution.
*/
core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
@@ -20,7 +20,7 @@ package org.apache.spark.executor
import org.apache.spark.storage.{BlockId, BlockStatus}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Metrics tracked during the execution of a task.
*/
@@ -88,7 +88,7 @@ object TaskMetrics {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Metrics pertaining to shuffle data read in a given task.
*/
@@ -127,7 +127,7 @@ class ShuffleReadMetrics extends Serializable {
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Metrics pertaining to shuffle data written in a given task.
*/
core/src/main/scala/org/apache/spark/io/CompressionCodec.scala
@@ -25,7 +25,7 @@ import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}
import org.apache.spark.SparkConf

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* CompressionCodec allows the customization of choosing different compression implementations
* to be used in block storage.
@@ -58,7 +58,7 @@ private[spark] object CompressionCodec {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* LZF implementation of [[org.apache.spark.io.CompressionCodec]].
*
@@ -77,7 +77,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
* Block size can be configured by spark.io.compression.snappy.block.size.
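A small configuration sketch for the codecs above: `spark.io.compression.snappy.block.size` is quoted from the doc comment, while the `spark.io.compression.codec` key and its class-name value are assumptions based on this era's conventions.

```scala
import org.apache.spark.SparkConf

val conf = new SparkConf()
  // Choose the Snappy implementation for block-storage compression (assumed key).
  .set("spark.io.compression.codec", "org.apache.spark.io.SnappyCompressionCodec")
  // Snappy block size in bytes, per the doc comment above.
  .set("spark.io.compression.snappy.block.size", "32768")
```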
core/src/main/scala/org/apache/spark/partial/BoundedDouble.scala
@@ -18,7 +18,7 @@
package org.apache.spark.partial

/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
* <span class="badge badge-red">EXPERIMENTAL API</span>
*
* A Double value with error bars and associated confidence.
*/
core/src/main/scala/org/apache/spark/partial/PartialResult.scala
@@ -18,7 +18,7 @@
package org.apache.spark.partial

/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
* <span class="badge badge-red">EXPERIMENTAL API</span>
*/
class PartialResult[R](initialVal: R, isFinal: Boolean) {
private var finalValue: Option[R] = if (isFinal) Some(initialVal) else None
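BoundedDouble and PartialResult meet in the approximate actions. A minimal sketch, assuming `RDD.countApprox(timeout, confidence)` from this era's API:

```scala
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "partial-demo")

// countApprox returns a PartialResult[BoundedDouble].
val partial = sc.parallelize(1 to 100000).countApprox(1000L, 0.95)
val bounded = partial.getFinalValue()  // blocks until the final value is ready
println(s"count ~ ${bounded.mean}, in [${bounded.low}, ${bounded.high}] at 95% confidence")
```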
core/src/main/scala/org/apache/spark/rdd/AsyncRDDActions.scala
@@ -26,7 +26,7 @@ import scala.reflect.ClassTag
import org.apache.spark.{ComplexFutureAction, FutureAction, Logging}

/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL API</span>
* <span class="badge badge-red">EXPERIMENTAL API</span>
*
* A set of asynchronous RDD actions available through an implicit conversion.
* Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
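A usage sketch for the implicit conversion named in the doc comment. FutureAction extends the standard Scala Future, so the usual combinators apply; the ExecutionContext import is what Scala 2.10's `onComplete` requires.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}

val sc = new SparkContext("local", "async-demo")

val future = sc.parallelize(1 to 100).collectAsync()
future.onComplete {
  case Success(values) => println(s"collected ${values.length} elements")
  case Failure(error)  => println(s"job failed: $error")
}
```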
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
@@ -70,7 +70,7 @@ private[spark] class HadoopPartition(rddId: Int, idx: Int, @transient s: InputSp
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS,
* sources in HBase, or S3), using the older MapReduce API (`org.apache.hadoop.mapred`).
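For context, a hedged sketch of how a HadoopRDD usually comes into being: through `SparkContext.hadoopFile` with an old-API InputFormat. The input path is a placeholder.

```scala
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.hadoop.mapred.TextInputFormat
import org.apache.spark.SparkContext

val sc = new SparkContext("local", "hadoop-demo")

// (byte offset, line) records read through the older mapred API.
val lines = sc
  .hadoopFile[LongWritable, Text, TextInputFormat]("hdfs:///tmp/input.txt")
  .map { case (_, text) => text.toString }
println(lines.count())
```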
core/src/main/scala/org/apache/spark/rdd/NewHadoopRDD.scala
@@ -36,7 +36,7 @@ class NewHadoopPartition(rddId: Int, val index: Int, @transient rawSplit: InputS
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* An RDD that provides core functionality for reading data stored in Hadoop (e.g., files in HDFS,
* sources in HBase, or S3), using the new MapReduce API (`org.apache.hadoop.mapreduce`).
core/src/main/scala/org/apache/spark/rdd/PartitionPruningRDD.scala
@@ -46,7 +46,7 @@ private[spark] class PruneDependency[T](rdd: RDD[T], @transient partitionFilterF


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A RDD used to prune RDD partitions/partitions so we can avoid launching tasks on
* all partitions. An example use case: If we know the RDD is partitioned by range,
@@ -67,7 +67,7 @@ class PartitionPruningRDD[T: ClassTag](


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*/
object PartitionPruningRDD {

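The pruning use case in the doc comment, sketched with the `create` factory from the companion object above; the partition count and predicate are illustrative assumptions.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.rdd.PartitionPruningRDD

val sc = new SparkContext("local", "prune-demo")

val data = sc.parallelize(1 to 100, 10)  // 10 partitions
// Launch tasks on the first three partitions only.
val pruned = PartitionPruningRDD.create(data, partitionId => partitionId < 3)
println(pruned.partitions.length)  // 3
```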
2 changes: 1 addition & 1 deletion core/src/main/scala/org/apache/spark/rdd/ShuffledRDD.scala
@@ -28,7 +28,7 @@ private[spark] class ShuffledRDDPartition(val idx: Int) extends Partition {
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* The resulting RDD from a shuffle (e.g. repartitioning of data).
* @param prev the parent RDD.
28 changes: 14 additions & 14 deletions core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
@@ -27,23 +27,23 @@ import org.apache.spark.executor.TaskMetrics
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.{Distribution, Utils}

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
sealed trait SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerStageSubmitted(stageInfo: StageInfo, properties: Properties = null)
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerStageCompleted(stageInfo: StageInfo) extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerTaskStart(stageId: Int, taskInfo: TaskInfo) extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerTaskGettingResult(taskInfo: TaskInfo) extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerTaskEnd(
stageId: Int,
taskType: String,
@@ -52,34 +52,34 @@ case class SparkListenerTaskEnd(
taskMetrics: TaskMetrics)
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerJobStart(jobId: Int, stageIds: Seq[Int], properties: Properties = null)
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerJobEnd(jobId: Int, jobResult: JobResult) extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerEnvironmentUpdate(environmentDetails: Map[String, Seq[(String, String)]])
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerBlockManagerAdded(blockManagerId: BlockManagerId, maxMem: Long)
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerBlockManagerRemoved(blockManagerId: BlockManagerId)
extends SparkListenerEvent

/** <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span> */
/** <span class="badge badge-red">DEVELOPER API - UNSTABLE</span> */
case class SparkListenerUnpersistRDD(rddId: Int) extends SparkListenerEvent

/** An event used in the listener to shutdown the listener daemon thread. */
private[spark] case object SparkListenerShutdown extends SparkListenerEvent


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Interface for listening to events from the Spark scheduler. Note that this is an internal
* interface which might change in different Spark releases.
@@ -143,7 +143,7 @@ trait SparkListener {
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Simple SparkListener that logs a few summary statistics when each stage completes
*/
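To make the listener interface concrete, a minimal sketch of a custom listener, overriding only the callbacks of interest; registration through `SparkContext.addSparkListener` follows this era's API.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerStageCompleted, SparkListenerTaskEnd}

class StageLogger extends SparkListener {
  override def onStageCompleted(stage: SparkListenerStageCompleted) {
    println(s"stage ${stage.stageInfo.stageId} completed")
  }
  override def onTaskEnd(task: SparkListenerTaskEnd) {
    // taskMetrics may be null for failed tasks, so guard the access.
    Option(task.taskMetrics).foreach(m => println(s"task ran ${m.executorRunTime} ms"))
  }
}

val sc = new SparkContext("local", "listener-demo")
sc.addSparkListener(new StageLogger)
sc.parallelize(1 to 100).count()
```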
core/src/main/scala/org/apache/spark/serializer/JavaSerializer.scala
@@ -94,7 +94,7 @@ private[spark] class JavaSerializerInstance(counterReset: Int) extends Serialize
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A Spark serializer that uses Java's built-in serialization.
*
core/src/main/scala/org/apache/spark/serializer/Serializer.scala
@@ -26,7 +26,7 @@ import org.apache.spark.util.{ByteBufferInputStream, NextIterator}
import org.apache.spark.SparkEnv

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A serializer. Because some serialization libraries are not thread safe, this class is used to
* create [[org.apache.spark.serializer.SerializerInstance]] objects that do the actual
@@ -55,7 +55,7 @@ object Serializer {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* An instance of a serializer, for use by one thread at a time.
*/
@@ -89,7 +89,7 @@ trait SerializerInstance {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A stream for writing serialized objects.
*/
@@ -108,7 +108,7 @@ trait SerializationStream {


/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A stream for reading serialized objects.
*/
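A round-trip sketch tying these traits together, using the JavaSerializer from earlier in this diff; the SparkConf-taking constructor is an assumption based on the surrounding code.

```scala
import org.apache.spark.SparkConf
import org.apache.spark.serializer.JavaSerializer

val serializer = new JavaSerializer(new SparkConf())
val instance   = serializer.newInstance()  // one instance per thread, per the doc

val bytes = instance.serialize("hello")          // java.nio.ByteBuffer
val back  = instance.deserialize[String](bytes)
assert(back == "hello")
```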
core/src/main/scala/org/apache/spark/util/BoundedPriorityQueue.scala
@@ -24,7 +24,7 @@ import scala.collection.JavaConverters._
import scala.collection.generic.Growable

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* Bounded priority queue. This class wraps the original PriorityQueue
* class and modifies it such that only the top K elements are retained.
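A sketch of the top-K behavior described above; the `(maxSize)(implicit Ordering)` constructor shape is an assumption.

```scala
import org.apache.spark.util.BoundedPriorityQueue

val topK = new BoundedPriorityQueue[Int](3)
topK ++= Seq(5, 1, 9, 7, 3)
// Only the 3 largest elements survive insertion.
println(topK.toList.sorted)  // List(5, 7, 9)
```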
core/src/main/scala/org/apache/spark/util/MutablePair.scala
@@ -18,7 +18,7 @@
package org.apache.spark.util

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A tuple of 2 elements. This can be used as an alternative to Scala's Tuple2 when we want to
* minimize object allocation.
core/src/main/scala/org/apache/spark/util/random/Pseudorandom.scala
@@ -18,7 +18,7 @@
package org.apache.spark.util.random

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A class with pseudorandom behavior.
*/
core/src/main/scala/org/apache/spark/util/random/RandomSampler.scala
@@ -23,7 +23,7 @@ import cern.jet.random.Poisson
import cern.jet.random.engine.DRand

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A pseudorandom sampler. It is possible to change the sampled item type. For example, we might
* want to add weights for stratified sampling or importance sampling. Should only use
@@ -42,7 +42,7 @@ trait RandomSampler[T, U] extends Pseudorandom with Cloneable with Serializable
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A sampler based on Bernoulli trials.
*
@@ -71,7 +71,7 @@ class BernoulliSampler[T](lb: Double, ub: Double, complement: Boolean = false)
}

/**
* <span class="badge badge-red" style="float: right;">DEVELOPER API - UNSTABLE</span>
* <span class="badge badge-red">DEVELOPER API - UNSTABLE</span>
*
* A sampler based on values drawn from Poisson distribution.
*
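A sketch of the Bernoulli sampler in the (lb, ub) range form shown above: an item is kept when its uniform draw lands in [lb, ub), so the width of the range is the sampling fraction.

```scala
import org.apache.spark.util.random.BernoulliSampler

val sampler = new BernoulliSampler[Int](0.0, 0.1)  // keep about 10%
sampler.setSeed(42L)

val kept = sampler.sample(Iterator.range(1, 1001)).toSeq
println(s"kept ${kept.size} of 1000 items")
```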
graphx/src/main/scala/org/apache/spark/graphx/package.scala
@@ -20,7 +20,7 @@ package org.apache.spark
import org.apache.spark.util.collection.OpenHashSet

/**
* <span class="badge badge-red" style="float: right;">ALPHA COMPONENT</span>
* <span class="badge badge-red">ALPHA COMPONENT</span>
*
* GraphX is a graph processing framework built on top of Spark. */
package object graphx {
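Finally, a construction sketch for the GraphX entry point this badge annotates, assuming the `Graph` factory of this era; the vertex and edge data are illustrative.

```scala
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Edge, Graph}

val sc = new SparkContext("local", "graphx-demo")

val vertices = sc.parallelize(Seq((1L, "alice"), (2L, "bob")))
val edges    = sc.parallelize(Seq(Edge(1L, 2L, "follows")))

val graph = Graph(vertices, edges)
println(graph.triplets.count())  // 1
```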