Correct semantics for TimeStampedWeakValueHashMap + add tests
This largely accounts for the case in which the referent of a WeakReference is no longer strongly
reachable: the map should then return None for all get() operations on that key, and
should skip the entry in all listing operations.
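
For illustration, a minimal sketch of the semantics this commit enforces (not part of the diff; it assumes code living under the org.apache.spark package, since the class is private[spark], and it relies on System.gc() actually clearing the weak reference, which the JVM does not guarantee):

import org.apache.spark.util.TimeStampedWeakValueHashMap

val map = new TimeStampedWeakValueHashMap[String, AnyRef]
var value: AnyRef = new Object
map("key") = value

// While the value is strongly reachable, the entry behaves normally.
assert(map.get("key").isDefined)

// Drop the only strong reference and hint the JVM to collect the referent.
value = null
System.gc()

// Once the referent has been collected, get() returns None and listing
// operations (iterator, foreach, toMap) skip the entry.
assert(map.get("key").isEmpty)
assert(!map.iterator.exists(_._1 == "key"))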
andrewor14 committed Apr 2, 2014
1 parent 5016375 commit f0aabb1
Showing 4 changed files with 350 additions and 37 deletions.
@@ -78,7 +78,7 @@ abstract class Broadcast[T](val id: Long) extends Serializable {
*/
protected def assertValid() {
if (!_isValid) {
throw new SparkException("Attempted to use %s when is no longer valid!".format(toString))
throw new SparkException("Attempted to use %s after it has been destroyed!".format(toString))
}
}

43 changes: 27 additions & 16 deletions core/src/main/scala/org/apache/spark/util/TimeStampedHashMap.scala
@@ -21,7 +21,7 @@ import java.util.Set
import java.util.Map.Entry
import java.util.concurrent.ConcurrentHashMap

import scala.collection.{immutable, JavaConversions, mutable}
import scala.collection.{JavaConversions, mutable}

import org.apache.spark.Logging

@@ -50,11 +50,11 @@ private[spark] class TimeStampedHashMap[A, B](updateTimeStampOnGet: Boolean = fa
}

def iterator: Iterator[(A, B)] = {
val jIterator = getEntrySet.iterator()
val jIterator = getEntrySet.iterator
JavaConversions.asScalaIterator(jIterator).map(kv => (kv.getKey, kv.getValue.value))
}

def getEntrySet: Set[Entry[A, TimeStampedValue[B]]] = internalMap.entrySet()
def getEntrySet: Set[Entry[A, TimeStampedValue[B]]] = internalMap.entrySet

override def + [B1 >: B](kv: (A, B1)): mutable.Map[A, B1] = {
val newMap = new TimeStampedHashMap[A, B1]
@@ -86,8 +86,7 @@ private[spark] class TimeStampedHashMap[A, B](updateTimeStampOnGet: Boolean = fa
}

override def apply(key: A): B = {
val value = internalMap.get(key)
Option(value).map(_.value).getOrElse { throw new NoSuchElementException() }
get(key).getOrElse { throw new NoSuchElementException() }
}

override def filter(p: ((A, B)) => Boolean): mutable.Map[A, B] = {
@@ -101,9 +100,9 @@ private[spark] class TimeStampedHashMap[A, B](updateTimeStampOnGet: Boolean = fa
override def size: Int = internalMap.size

override def foreach[U](f: ((A, B)) => U) {
val iterator = getEntrySet.iterator()
while(iterator.hasNext) {
val entry = iterator.next()
val it = getEntrySet.iterator
while(it.hasNext) {
val entry = it.next()
val kv = (entry.getKey, entry.getValue.value)
f(kv)
}
@@ -115,27 +114,39 @@ private[spark] class TimeStampedHashMap[A, B](updateTimeStampOnGet: Boolean = fa
Option(prev).map(_.value)
}

def toMap: immutable.Map[A, B] = iterator.toMap
def putAll(map: Map[A, B]) {
map.foreach { case (k, v) => update(k, v) }
}

def toMap: Map[A, B] = iterator.toMap

def clearOldValues(threshTime: Long, f: (A, B) => Unit) {
val iterator = getEntrySet.iterator()
while (iterator.hasNext) {
val entry = iterator.next()
val it = getEntrySet.iterator
while (it.hasNext) {
val entry = it.next()
if (entry.getValue.timestamp < threshTime) {
f(entry.getKey, entry.getValue.value)
logDebug("Removing key " + entry.getKey)
iterator.remove()
it.remove()
}
}
}

/**
* Removes old key-value pairs that have timestamp earlier than `threshTime`.
*/
/** Removes old key-value pairs that have timestamp earlier than `threshTime`. */
def clearOldValues(threshTime: Long) {
clearOldValues(threshTime, (_, _) => ())
}

private def currentTime: Long = System.currentTimeMillis

// For testing

def getTimeStampedValue(key: A): Option[TimeStampedValue[B]] = {
Option(internalMap.get(key))
}

def getTimestamp(key: A): Option[Long] = {
getTimeStampedValue(key).map(_.timestamp)
}

}
core/src/main/scala/org/apache/spark/util/TimeStampedWeakValueHashMap.scala
@@ -18,47 +18,61 @@
package org.apache.spark.util

import java.lang.ref.WeakReference
import java.util.concurrent.atomic.AtomicInteger

import scala.collection.{immutable, mutable}
import scala.collection.mutable

import org.apache.spark.Logging

/**
* A wrapper of TimeStampedHashMap that ensures the values are weakly referenced and timestamped.
*
* If the value is garbage collected and the weak reference is null, get() operation returns
* a non-existent value. However, the corresponding key is actually not removed in the current
* implementation. Key-value pairs whose timestamps are older than a particular threshold time
* can then be removed using the clearOldValues method. It exposes a scala.collection.mutable.Map
* interface to allow it to be a drop-in replacement for Scala HashMaps.
* If the value is garbage collected and the weak reference is null, get() will return a
* non-existent value. These entries are removed from the map periodically (every N inserts), as
* their values are no longer strongly reachable. Further, key-value pairs whose timestamps are
* older than a particular threshold can be removed using the clearOldValues method.
*
* Internally, it uses a Java ConcurrentHashMap, so all operations on this HashMap are thread-safe.
* TimeStampedWeakValueHashMap exposes a scala.collection.mutable.Map interface, which allows it
* to be a drop-in replacement for Scala HashMaps. Internally, it uses a Java ConcurrentHashMap,
* so all operations on this HashMap are thread-safe.
*
* @param updateTimeStampOnGet Whether timestamp of a pair will be updated when it is accessed.
*/
private[spark] class TimeStampedWeakValueHashMap[A, B](updateTimeStampOnGet: Boolean = false)
extends mutable.Map[A, B]() {
extends mutable.Map[A, B]() with Logging {

import TimeStampedWeakValueHashMap._

private val internalMap = new TimeStampedHashMap[A, WeakReference[B]](updateTimeStampOnGet)
private val insertCount = new AtomicInteger(0)

/** Return a map consisting only of entries whose values are still strongly reachable. */
private def nonNullReferenceMap = internalMap.filter { case (_, ref) => ref.get != null }

def get(key: A): Option[B] = internalMap.get(key)

def iterator: Iterator[(A, B)] = internalMap.iterator
def iterator: Iterator[(A, B)] = nonNullReferenceMap.iterator

override def + [B1 >: B](kv: (A, B1)): mutable.Map[A, B1] = {
val newMap = new TimeStampedWeakValueHashMap[A, B1]
val oldMap = nonNullReferenceMap.asInstanceOf[mutable.Map[A, WeakReference[B1]]]
newMap.internalMap.putAll(oldMap.toMap)
newMap.internalMap += kv
newMap
}

override def - (key: A): mutable.Map[A, B] = {
val newMap = new TimeStampedWeakValueHashMap[A, B]
newMap.internalMap.putAll(nonNullReferenceMap.toMap)
newMap.internalMap -= key
newMap
}

override def += (kv: (A, B)): this.type = {
internalMap += kv
if (insertCount.incrementAndGet() % CLEAR_NULL_VALUES_INTERVAL == 0) {
clearNullValues()
}
this
}

@@ -71,31 +85,53 @@ private[spark] class TimeStampedWeakValueHashMap[A, B](updateTimeStampOnGet: Boo

override def apply(key: A): B = internalMap.apply(key)

override def filter(p: ((A, B)) => Boolean): mutable.Map[A, B] = internalMap.filter(p)
override def filter(p: ((A, B)) => Boolean): mutable.Map[A, B] = nonNullReferenceMap.filter(p)

override def empty: mutable.Map[A, B] = new TimeStampedWeakValueHashMap[A, B]()

override def size: Int = internalMap.size

override def foreach[U](f: ((A, B)) => U) = internalMap.foreach(f)
override def foreach[U](f: ((A, B)) => U) = nonNullReferenceMap.foreach(f)

def putIfAbsent(key: A, value: B): Option[B] = internalMap.putIfAbsent(key, value)

def toMap: immutable.Map[A, B] = iterator.toMap
def toMap: Map[A, B] = iterator.toMap

/**
* Remove old key-value pairs that have timestamp earlier than `threshTime`.
*/
/** Remove old key-value pairs with timestamps earlier than `threshTime`. */
def clearOldValues(threshTime: Long) = internalMap.clearOldValues(threshTime)

/** Remove entries with values that are no longer strongly reachable. */
def clearNullValues() {
val it = internalMap.getEntrySet.iterator
while (it.hasNext) {
val entry = it.next()
if (entry.getValue.value.get == null) {
logDebug("Removing key " + entry.getKey + " because it is no longer strongly reachable.")
it.remove()
}
}
}

// For testing

def getTimestamp(key: A): Option[Long] = {
internalMap.getTimeStampedValue(key).map(_.timestamp)
}

def getReference(key: A): Option[WeakReference[B]] = {
internalMap.getTimeStampedValue(key).map(_.value)
}
}

/**
* Helper methods for converting to and from WeakReferences.
*/
private[spark] object TimeStampedWeakValueHashMap {
private object TimeStampedWeakValueHashMap {

/* Implicit conversion methods to WeakReferences */
// Number of inserts after which entries with null references are removed
val CLEAR_NULL_VALUES_INTERVAL = 100

/* Implicit conversion methods to WeakReferences. */

implicit def toWeakReference[V](v: V): WeakReference[V] = new WeakReference[V](v)

@@ -107,12 +143,15 @@ private[spark] object TimeStampedWeakValueHashMap {
(kv: (K, WeakReference[V])) => p(kv)
}

/* Implicit conversion methods from WeakReferences */
/* Implicit conversion methods from WeakReferences. */

implicit def fromWeakReference[V](ref: WeakReference[V]): V = ref.get

implicit def fromWeakReferenceOption[V](v: Option[WeakReference[V]]): Option[V] = {
v.map(fromWeakReference)
v match {
case Some(ref) => Option(fromWeakReference(ref))
case None => None
}
}

implicit def fromWeakReferenceTuple[K, V](kv: (K, WeakReference[V])): (K, V) = {
@@ -128,5 +167,4 @@
map: mutable.Map[K, WeakReference[V]]) : mutable.Map[K, V] = {
mutable.Map(map.mapValues(fromWeakReference).toSeq: _*)
}

}
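
As an aside (illustrative only; these conversions are private to the companion object), the new fromWeakReferenceOption is what turns a cleared reference into None instead of Some(null):

import java.lang.ref.WeakReference

// Simulate a WeakReference whose referent has already been collected.
val cleared = new WeakReference[String](null)

// Old behavior: v.map(fromWeakReference) produced Some(null).
// New behavior: Option(ref.get) collapses a null referent to None.
val result: Option[String] = Some(cleared) match {
  case Some(ref) => Option(ref.get)
  case None => None
}
assert(result.isEmpty)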