diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 68496f445bc3..9a428bcbfa22 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -246,7 +246,7 @@ object ListMap extends MapFactory[ListMap] { // by directly iterating through LinkedHashMap entries, we save creating intermediate tuples for each // key-value pair var current: ListMap[K, V] = empty[K, V] - var firstEntry = lhm._firstEntry + var firstEntry = lhm._firstNode while (firstEntry ne null) { current = new Node(firstEntry.key, firstEntry.value, current) firstEntry = firstEntry.later @@ -349,7 +349,7 @@ private[immutable] final class ListMapBuilder[K, V] extends mutable.ReusableBuil } else xs match { case lhm: collection.mutable.LinkedHashMap[K, V] => // special-casing LinkedHashMap avoids creating of Iterator and tuples for each key-value - var firstEntry = lhm._firstEntry + var firstEntry = lhm._firstNode while (firstEntry ne null) { underlying = new ListMap.Node(firstEntry.key, firstEntry.value, underlying) firstEntry = firstEntry.later diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index 22e9b4c53a1d..2ab038183724 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -14,35 +14,10 @@ package scala package collection package mutable -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable +import scala.annotation.{nowarn, tailrec} +import scala.util.hashing.MurmurHash3 +import scala.collection.generic.DefaultSerializationProxy -/** $factoryInfo - * @define Coll `LinkedHashMap` - * @define coll linked hash map - */ -@SerialVersionUID(3L) -object LinkedHashMap extends MapFactory[LinkedHashMap] { - - def empty[K, V] = new LinkedHashMap[K, V] - - def from[K, V](it: collection.IterableOnce[(K, V)]) = - it match { - case lhm: LinkedHashMap[K, V] => lhm - case _ => Growable.from(empty[K, V], it) - } - - def newBuilder[K, V] = new GrowableBuilder(empty[K, V]) - - /** Class for the linked hash map entry, used internally. - */ - private[mutable] final class LinkedEntry[K, V](val key: K, var value: V) - extends HashEntry[K, LinkedEntry[K, V]] { - var earlier: LinkedEntry[K, V] = null - var later: LinkedEntry[K, V] = null - } - -} /** This class implements mutable maps using a hashtable. * The iterator and all traversal methods of this class visit elements in the order they were inserted. 
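For context on the first two hunks: the ListMap builder already special-cases LinkedHashMap so it can walk entries without allocating a tuple per key-value pair; the rename from `_firstEntry` to `_firstNode` only tracks the new node type introduced below. A minimal, self-contained sketch of that traversal pattern (`DemoNode` and `foreachInInsertionOrder` are illustrative names, not part of the patch):

```scala
// Illustrative sketch of the "walk the insertion-order chain without tuples" pattern.
final class DemoNode[K, V](val key: K, val value: V) {
  var later: DemoNode[K, V] = null // next node in insertion order
}

object DemoTraversal {
  // Hand key and value to `f` directly: no (K, V) tuple is created per entry.
  def foreachInInsertionOrder[K, V, U](first: DemoNode[K, V])(f: (K, V) => U): Unit = {
    var cur = first
    while (cur ne null) {
      f(cur.key, cur.value)
      cur = cur.later
    }
  }
}
```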
@@ -57,115 +32,143 @@ object LinkedHashMap extends MapFactory[LinkedHashMap] { * @define orderDependent * @define orderDependentFold */ -class LinkedHashMap[K, V] +class LinkedHashMap[K, V](initialCapacity: Int, loadFactor: Double) extends AbstractMap[K, V] with SeqMap[K, V] with MapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] with StrictOptimizedIterableOps[(K, V), Iterable, LinkedHashMap[K, V]] with StrictOptimizedMapOps[K, V, LinkedHashMap, LinkedHashMap[K, V]] with MapFactoryDefaults[K, V, LinkedHashMap, Iterable] - with DefaultSerializable { + with Serializable { + // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper + // would not return the elements in insertion order + def this() = this(LinkedHashMap.defaultinitialSize, LinkedHashMap.defaultLoadFactor) override def mapFactory: MapFactory[LinkedHashMap] = LinkedHashMap - // stepper / keyStepper / valueStepper are not overridden to use XTableStepper because that stepper - // would not return the elements in insertion order + private[collection] type Node = LinkedHashMap.LinkedNode[K, V] + private[collection] def _firstNode: Node = firstNode - private[collection] type Entry = LinkedHashMap.LinkedEntry[K, V] - private[collection] def _firstEntry: Entry = firstEntry + @transient protected var firstNode: Node = null - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - @transient private[this] var table: HashTable[K, V, Entry] = newHashTable + @transient protected var lastNode: Node = null - // Used by scala-java8-compat (private[mutable] erases to public, so Java code can access it) - private[mutable] def getTable: HashTable[K, V, Entry] = table + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. */ + @transient private[this] var table = new Array[Node](tableSizeFor(initialCapacity)) - private def newHashTable = - new HashTable[K, V, Entry] { - def createNewEntry(key: K, value: V): Entry = { - val e = new Entry(key, value) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) - override def foreachEntry[U](f: Entry => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } + private[this] var contentSize = 0 - } + override def size: Int = contentSize override def last: (K, V) = - if (size > 0) (lastEntry.key, lastEntry.value) + if (size > 0) (lastNode.key, lastNode.value) else throw new NoSuchElementException("Cannot call .last on empty LinkedHashMap") override def lastOption: Option[(K, V)] = - if (size > 0) Some((lastEntry.key, lastEntry.value)) + if (size > 0) Some((lastNode.key, lastNode.value)) else None override def head: (K, V) = - if (size > 0) (firstEntry.key, firstEntry.value) + if (size > 0) (firstNode.key, firstNode.value) else throw new NoSuchElementException("Cannot call .head on empty LinkedHashMap") override def headOption: Option[(K, V)] = - if (size > 0) Some((firstEntry.key, firstEntry.value)) + if (size > 0) Some((firstNode.key, firstNode.value)) else None - override def size = table.tableSize override def knownSize: Int = size - override def isEmpty: Boolean = table.tableSize == 0 + override def sizeHint(size: Int): Unit = { + val target = tableSizeFor(((size + 1).toDouble / loadFactor).toInt) + if(target > table.length) growTable(target) + } + override def isEmpty: Boolean = size == 0 def get(key: K): Option[V] = { - val e = table.findEntry(key) + val e = findNode(key) if (e == null) None else Some(e.value) } override def contains(key: K): Boolean = { if (getClass eq classOf[LinkedHashMap[_, _]]) - table.findEntry(key) != null + findNode(key) != null else super.contains(key) // A subclass might override `get`, use the default implementation `contains`. } override def put(key: K, value: V): Option[V] = { - val e = table.findOrAddEntry(key, value) - if (e eq null) None - else { val v = e.value; e.value = value; Some(v) } + put0(key, value, true) match { + case null => None + case sm => sm + } } override def update(key: K, value: V): Unit = { - val e = table.findOrAddEntry(key, value) - if (e ne null) e.value = value + put0(key, value, false) } - override def remove(key: K): Option[V] = { - val e = table.removeEntry(key) - if (e eq null) None - else Some(remove0(e)) + override def addAll(xs: IterableOnce[(K, V)]): this.type = { + sizeHint(xs.knownSize) + + xs match { + case hm: immutable.HashMap[K, V] => + hm.foreachWithHash((k, v, h) => put0(k, v, improveHash(h), getOld = false)) + this + case hm: mutable.HashMap[K, V] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + put0(next.key, next.value, next.hash, getOld = false) + } + this + case thatMap: Map[K, V] => + thatMap.foreachEntry { (key: K, value: V) => + put0(key, value, improveHash(key.##), getOld = false) + } + this + case _ => + super.addAll(xs) + } } - private[this] def remove0(e: Entry): V = { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - e.value + override def foreach[U](f: ((K, V)) => U): Unit = { + var cur = firstNode + while (cur ne null) { + f((cur.key, cur.value)) + cur = cur.later + } + } + + override def foreachEntry[U](f: (K, V) => U): Unit = { + var cur = firstNode + while (cur ne null) { + f(cur.key, cur.value) + cur = cur.later + } + } + + override def remove(key: K): Option[V] = { + remove0(key) match { + case null => None + case nd => 
Some(nd.value) + } } + + def addOne(kv: (K, V)): this.type = { put(kv._1, kv._2); this } def subtractOne(key: K): this.type = { remove(key); this } - def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { - private[this] var cur = firstEntry + override def iterator: Iterator[(K, V)] = new AbstractIterator[(K, V)] { + private[this] var cur = firstNode def hasNext = cur ne null def next() = if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res } @@ -178,87 +181,371 @@ class LinkedHashMap[K, V] override def keySet: collection.Set[K] = new LinkedKeySet - override def keysIterator: Iterator[K] = new AbstractIterator[K] { - private[this] var cur = firstEntry - def hasNext = cur ne null - def next() = - if (hasNext) { val res = cur.key; cur = cur.later; res } - else Iterator.empty.next() - } - // Override updateWith for performance, so we can do the update while hashing // the input key only once and performing one lookup into the hash table override def updateWith(key: K)(remappingFunction: Option[V] => Option[V]): Option[V] = { - val keyIndex = table.index(table.elemHashCode(key)) - val entry = table.findEntry0(key, keyIndex) + val hash = computeHash(key) + val indexedHash = index(hash) + + var foundNode: Node = null.asInstanceOf[Node] + var previousNode: Node = null.asInstanceOf[Node] + table(indexedHash) match { + case null => + case nd => + @tailrec + def findNode(prev: Node, nd: Node, k: K, h: Int): Unit = { + if (h == nd.hash && k == nd.key) { + previousNode = prev + foundNode = nd + } + else if ((nd.next eq null) || (nd.hash > h)) () + else findNode(nd, nd.next, k, h) + } + + findNode(null, nd, key, hash) + } - val previousValue = - if (entry == null) None - else Some(entry.value) + val previousValue = foundNode match { + case null => None + case nd => Some(nd.value) + } - val nextValue = remappingFunction(previousValue) + val nextValue = remappingFunction(previousValue) - (previousValue, nextValue) match { - case (None, None) => // do nothing - case (Some(_), None) => - remove0(entry) - table.removeEntry0(key, keyIndex) + (previousValue, nextValue) match { + case (None, None) => // do nothing - case (None, Some(value)) => - table.addEntry0(table.createNewEntry(key, value), keyIndex) + case (Some(_), None) => + if (previousNode != null) previousNode.next = foundNode.next + else table(indexedHash) = foundNode.next + deleteNode(foundNode) + contentSize -= 1 - case (Some(_), Some(value)) => - entry.value = value - } + case (None, Some(value)) => + val newIndexedHash = + if (contentSize + 1 >= threshold) { + growTable(table.length * 2) + index(hash) + } else indexedHash + put0(key, value, false, hash, newIndexedHash) + + case (Some(_), Some(newValue)) => foundNode.value = newValue + } + nextValue - nextValue } + override def keysIterator: Iterator[K] = new AbstractIterator[K] { + private[this] var cur = firstNode + + def hasNext = cur ne null + + def next() = + if (hasNext) { + val res = cur.key; cur = cur.later; res + } + else Iterator.empty.next() + } override def valuesIterator: Iterator[V] = new AbstractIterator[V] { - private[this] var cur = firstEntry + private[this] var cur = firstNode def hasNext = cur ne null def next() = if (hasNext) { val res = cur.value; cur = cur.later; res } else Iterator.empty.next() } - override def foreach[U](f: ((K, V)) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f((cur.key, cur.value)) - cur = cur.later - } + private[this] abstract class hashcodeIterator[A] extends AbstractIterator[A] { + private[this] var cur = 
firstNode + protected[this] def extract(nd: Node): A + override def hasNext: Boolean = cur ne null + override def next(): A = + if (hasNext) {val res = extract(cur); cur = cur.later; res} + else Iterator.empty.next() } - override def foreachEntry[U](f: (K, V) => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur.key, cur.value) - cur = cur.later + + override def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstNode = null + lastNode = null + } + + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[this] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + originalHash ^ (originalHash >>> 16) + } + + /** Computes the improved hash of this key */ + @`inline` private[this] def computeHash(o: K): Int = improveHash(o.##) + + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + @`inline` private[this] def findNode(key: K): Node = { + val hash = computeHash(key) + table(index(hash)) match { + case null => null + case nd => nd.findNode(key, hash) } } - override def clear(): Unit = { - table.clearTable() - firstEntry = null - lastEntry = null + /** Adds a key-value pair to this map + * + * @param key the key to add + * @param value the value to add + * @param hash the **improved** hashcode of `key` (see computeHash) + * @param getOld if true, then the previous value for `key` will be returned, otherwise, false + */ + private[this] def put0(key: K, value: V, hash: Int, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val idx = index(hash) + put0(key, value, getOld, hash, idx) } - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject() - table.serializeTo(out, { entry => - out.writeObject(entry.key) - out.writeObject(entry.value) - }) + private[this] def put0(key: K, value: V, getOld: Boolean): Some[V] = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + val hash = computeHash(key) + val idx = index(hash) + put0(key, value, getOld, hash, idx) } - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject() - table = newHashTable - table.init(in, table.createNewEntry(in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V])) + private[this] def put0(key: K, value: V, getOld: Boolean, hash: Int, idx: Int): Some[V] = { + table(idx) match { + case null => + val nnode = createNewNode(key, hash, value) + nnode.next = null + table(idx) = nnode + case old => + var prev: Node = null + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && key == n.key) { + val old = n.value + n.value = value + return if(getOld) Some(old) else null + } + prev = n + n = n.next + } + val nnode = createNewNode(key, hash, value) + if(prev eq null) { + table(idx) = nnode + nnode.next = old + } + else { + nnode.next = prev.next + prev.next = nnode} + } + contentSize += 1 + null + } + + private def remove0(elem: K) : Node = remove0(elem, computeHash(elem)) + + /** Removes a key from this map if it exists + * + * @param elem the element to remove + * @param hash the **improved** hashcode of `element` (see computeHash) + * @return the node that contained element if it was present, otherwise null + */ + private[this] def remove0(elem: K, hash: Int) : 
Node = { + val idx = index(hash) + table(idx) match { + case null => null + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteNode(nd) + contentSize -= 1 + nd + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while ((next ne null) && next.hash <= hash) { + if (next.hash == hash && next.key == elem) { + prev.next = next.next + deleteNode(next) + contentSize -= 1 + return next + } + prev = next + next = next.next + } + null + } + } + + /*create a new node. If table is empty(firstNode is null), then the + * new node will be the firstNode. If not, just set the new node to + * be the lastNode. + * */ + private[this] def createNewNode(key: K, hash: Int, value: V): Node = + { + val e = new Node(key, hash, value,null, null, null) + if (firstNode eq null) firstNode = e + else { lastNode.later = e; e.earlier = lastNode } + lastNode = e + e + } + + /*delete the node from the linkedhashmap. set its earlier node's later node + * and later node's earlier node correctly.and set its earlier and later to + * be null.*/ + private[this] def deleteNode(e: Node): Unit = { + if (e.earlier eq null) firstNode = e.later + else e.earlier.later = e.later + if (e.later eq null) lastNode = e.earlier + else e.later.earlier = e.earlier + e.earlier = null // Null references to prevent nepotism + e.later = null } + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + private[this] def growTable(newlen: Int): Unit = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Node(null.asInstanceOf[K],0, null.asInstanceOf[V], null, null, null) + val preHigh = new Node(null.asInstanceOf[K], 0, null.asInstanceOf[V], null, null, null) + // Split buckets until the new length has been reached. This could be done more + // efficiently when growing an already filled table to more than double the size. 
+ while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "LinkedHashMap" + + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new LinkedHashMap.DeserializationFactory[K, V](table.length, loadFactor), this) + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new hashcodeIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node): Any = { + hash = MurmurHash3.tuple2Hash(unimproveHash(nd.hash), nd.value.##) + this + } + } + MurmurHash3.orderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + + override protected[this] def className = "LinkedHashMap" +} +@SerialVersionUID(3L) +object LinkedHashMap extends MapFactory[LinkedHashMap] { + def empty[K, V] = new LinkedHashMap[K, V]() + + def from[K, V](it: collection.IterableOnce[(K, V)]) = { + val k = it.knownSize + val cap = if (k > 0) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultinitialSize + new LinkedHashMap[K, V](cap, defaultLoadFactor).addAll(it) + } + + def newBuilder[K, V]: Builder[(K, V), LinkedHashMap[K, V]] = newBuilder(defaultinitialSize, defaultLoadFactor) + + def newBuilder[K, V](initialCapacity: Int, loadFactor: Double): Builder[(K, V), LinkedHashMap[K, V]] = + new GrowableBuilder[(K, V), LinkedHashMap[K, V]](new LinkedHashMap[K, V](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) + } + + /** The load factor for the hash table (in 0.001 step). 
*/ + private[collection] final def defaultLoadFactor: Double = 0.75 // corresponds to 75% + /** The default initial capacity for the hash table */ + private[collection] final def defaultinitialSize: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[K, V](val tableLength: Int, val loadFactor: Double) extends Factory[(K, V), LinkedHashMap[K, V]] with Serializable { + def fromSpecific(it: IterableOnce[(K, V)]): LinkedHashMap[K, V] = new LinkedHashMap[K, V](tableLength, loadFactor).addAll(it) + def newBuilder: Builder[(K, V), LinkedHashMap[K, V]] = LinkedHashMap.newBuilder(tableLength, loadFactor) + } + + private[collection] class LinkedNode[K, V](_key: K, _hash: Int, private[this] var _value: V, private[this] var _next: LinkedNode[K, V], + private[this] var _earlier: LinkedNode[K, V], private[this] var _later: LinkedNode[K, V]) { + def key: K = _key + + def hash: Int = _hash + + def value: V = _value + + def value_=(v: V): Unit = _value = v + + def next: LinkedNode[K, V] = _next + + def next_=(n: LinkedNode[K, V]): Unit = _next = n + + def earlier: LinkedNode[K, V] = _earlier + + def earlier_=(ln: LinkedNode[K, V]): Unit = _earlier = ln + + def later: LinkedNode[K, V] = _later + + def later_=(ln: LinkedNode[K, V]) = _later = ln + + @tailrec + final def findNode(k: K, h: Int): LinkedNode[K, V] = + if (h == _hash && k == _key) this + else if ((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + final def foreach[U](f: ((K, V)) => U): Unit = { + f((_key, _value)) + if (_next ne null) _next.foreach(f) + } + + @tailrec + final def foreachEntry[U](f: (K, V) => U): Unit = { + f(_key, _value) + if (_next ne null) _next.foreachEntry(f) + } + + override def toString = s"Node($key, $value, $hash) -> $later" + } } diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index 3c190a141dd6..f9bb3a4f7e7c 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -14,8 +14,9 @@ package scala package collection package mutable -import scala.annotation.nowarn -import scala.collection.generic.DefaultSerializable +import scala.annotation.{nowarn, tailrec} +import scala.collection.generic.DefaultSerializationProxy +import scala.util.hashing.MurmurHash3 /** This class implements mutable sets using a hashtable. * The iterator and all traversal methods of this class visit elements in the order they were inserted. 
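Both rewritten collections keep two structures side by side: a power-of-two array of buckets, where each bucket is a singly linked chain sorted by improved hash, and a doubly linked `earlier`/`later` chain that records insertion order. The helpers below are a stand-alone sketch of the hash arithmetic the map and the set share; they are written as free functions here purely for illustration, whereas in the patch they are private methods.

```scala
object HashArithmeticSketch {
  // Fold the high bits into the low bits, since only the low bits pick the bucket.
  def improveHash(originalHash: Int): Int = originalHash ^ (originalHash >>> 16)

  // Table length is a power of two, so masking replaces a modulus.
  def index(improvedHash: Int, tableLength: Int): Int = improvedHash & (tableLength - 1)

  // Smallest power of two >= capacity (at least 8, at most 2^30), as in the patch.
  def tableSizeFor(capacity: Int): Int =
    (Integer.highestOneBit((capacity - 1).max(4)) * 2).min(1 << 30)

  // Resize once contentSize reaches tableLength * loadFactor.
  def newThreshold(tableLength: Int, loadFactor: Double): Int =
    (tableLength.toDouble * loadFactor).toInt
}
```

Because the table length doubles on growth, each old bucket splits cleanly into a "low" and a "high" bucket using `hash & oldlen`, which is what the `growTable` loops in both files do.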
@@ -29,123 +30,324 @@ import scala.collection.generic.DefaultSerializable * @define orderDependent * @define orderDependentFold */ -class LinkedHashSet[A] +class LinkedHashSet[A](initialCapacity: Int, loadFactor: Double) extends AbstractSet[A] with SetOps[A, LinkedHashSet, LinkedHashSet[A]] with StrictOptimizedIterableOps[A, LinkedHashSet, LinkedHashSet[A]] with IterableFactoryDefaults[A, LinkedHashSet] - with DefaultSerializable { + with Serializable { + def this() = this(LinkedHashSet.defaultInitialCapacity, LinkedHashSet.defaultLoadFactor) override def iterableFactory: IterableFactory[LinkedHashSet] = LinkedHashSet // stepper is not overridden to use XTableStepper because that stepper would not return the // elements in insertion order - type Entry = LinkedHashSet.Entry[A] + type Node = LinkedHashSet.LinkedSNode[A] - @transient protected var firstEntry: Entry = null - @transient protected var lastEntry: Entry = null - @transient private[this] var table: HashTable[A, AnyRef, Entry] = newHashTable + @transient protected var firstNode: Node = null - // Used by scala-java8-compat (private[mutable] erases to public, so Java code can access it) - private[mutable] def getTable: HashTable[A, AnyRef, Entry] = table + @transient protected var lastNode: Node = null - private def newHashTable = - new HashTable[A, AnyRef, Entry] { - def createNewEntry(key: A, value: AnyRef) = { - val e = new Entry(key) - if (firstEntry eq null) firstEntry = e - else { lastEntry.later = e; e.earlier = lastEntry } - lastEntry = e - e - } - override def foreachEntry[U](f: Entry => U): Unit = { - var cur = firstEntry - while (cur ne null) { - f(cur) - cur = cur.later - } - } - } + /* The Hashset class holds the following invariant: + * - For each i between 0 and table.length, the bucket at table(i) only contains elements whose hash-index is i. + * - Every bucket is sorted in ascendant hash order + * - The sum of the lengths of all buckets is equal to contentSize. + */ + /** The actual hash table. */ + @transient private[this] var table = new Array[Node](tableSizeFor(initialCapacity)) + + private[this] var contentSize = 0 + /** The next size value at which to resize (capacity * load factor). 
*/ + private[this] var threshold: Int = newThreshold(table.length) override def last: A = - if (size > 0) lastEntry.key + if (size > 0) lastNode.key else throw new NoSuchElementException("Cannot call .last on empty LinkedHashSet") override def lastOption: Option[A] = - if (size > 0) Some(lastEntry.key) + if (size > 0) Some(lastNode.key) else None override def head: A = - if (size > 0) firstEntry.key + if (size > 0) firstNode.key else throw new NoSuchElementException("Cannot call .head on empty LinkedHashSet") override def headOption: Option[A] = - if (size > 0) Some(firstEntry.key) + if (size > 0) Some(firstNode.key) else None - override def size: Int = table.tableSize + override def size: Int = contentSize override def knownSize: Int = size override def isEmpty: Boolean = size == 0 - def contains(elem: A): Boolean = table.findEntry(elem) ne null - - def addOne(elem: A): this.type = { - table.findOrAddEntry(elem, null) - this - } - - def subtractOne(elem: A): this.type = { - remove(elem) - this + def contains(elem: A): Boolean = { + findNode(elem) ne null } - - override def remove(elem: A): Boolean = { - val e = table.removeEntry(elem) - if (e eq null) false - else { - if (e.earlier eq null) firstEntry = e.later - else e.earlier.later = e.later - if (e.later eq null) lastEntry = e.earlier - else e.later.earlier = e.earlier - e.earlier = null // Null references to prevent nepotism - e.later = null - true + @`inline` private[this] def findNode(elem: A): Node = { + val hash = computeHash(elem) + table(index(hash)) match { + case null => null + case nd => nd.findNode(elem, hash) } } def iterator: Iterator[A] = new AbstractIterator[A] { - private[this] var cur = firstEntry + private[this] var cur = firstNode def hasNext = cur ne null def next() = if (hasNext) { val res = cur.key; cur = cur.later; res } else Iterator.empty.next() } + private[this] abstract class hashcodeIterator[B] extends AbstractIterator[B] { + private[this] var cur = firstNode + protected[this] def extract(nd: Node): B + override def hasNext: Boolean = cur ne null + override def next(): B = + if (hasNext) {val res = extract(cur); cur = cur.later; res} + else Iterator.empty.next() + } override def foreach[U](f: A => U): Unit = { - var cur = firstEntry + var cur = firstNode while (cur ne null) { f(cur.key) cur = cur.later } } - override def clear(): Unit = { - table.clearTable() - firstEntry = null - lastEntry = null + /** Performs the inverse operation of improveHash. In this case, it happens to be identical to improveHash*/ + @`inline` private[collection] def unimproveHash(improvedHash: Int): Int = improveHash(improvedHash) + + /** Computes the improved hash of an original (`any.##`) hash. */ + @`inline` private[this] def improveHash(originalHash: Int): Int = { + // Improve the hash by xoring the high 16 bits into the low 16 bits just in case entropy is skewed towards the + // high-value bits. We only use the lowest bits to determine the hash bucket. This is the same improvement + // algorithm as in java.util.HashMap. 
+ originalHash ^ (originalHash >>> 16) } - private def writeObject(out: java.io.ObjectOutputStream): Unit = { - out.defaultWriteObject() - table.serializeTo(out, { e => out.writeObject(e.key) }) + /** Computes the improved hash of this element */ + @`inline` private[this] def computeHash(o: A): Int = improveHash(o.##) + @`inline` private[this] def index(hash: Int) = hash & (table.length - 1) + + override def add(elem: A) : Boolean = { + if(contentSize + 1 >= threshold) growTable(table.length * 2) + addElem(elem, computeHash(elem)) + } + + override def addAll(xs: IterableOnce[A]): this.type = { + sizeHint(xs.knownSize) + xs match { + case hm: immutable.HashSet[A] => + hm.foreachWithHash((k, h) => addElem(k, improveHash(h))) + this + case hm: mutable.HashSet[A] => + val iter = hm.nodeIterator + while (iter.hasNext) { + val next = iter.next() + addElem(next.key, next.hash) + } + this + case _ => super.addAll(xs) + } + } + + override def subtractAll(xs: IterableOnce[A]): this.type = { + if (size == 0) { + return this + } + + xs match { + case hs: immutable.HashSet[A] => + hs.foreachWithHashWhile { (k, h) => + remove(k, improveHash(h)) + size > 0 + } + this + case hs: mutable.HashSet[A] => + val iter = hs.nodeIterator + while (iter.hasNext) { + val next = iter.next() + remove(next.key, next.hash) + if (size == 0) return this + } + this + case _ => super.subtractAll(xs) + } } - private def readObject(in: java.io.ObjectInputStream): Unit = { - in.defaultReadObject() - table = newHashTable - table.init(in, table.createNewEntry(in.readObject().asInstanceOf[A], null)) + /** Adds an element to this set + * @param elem element to add + * @param hash the **improved** hash of `elem` (see computeHash) + */ + private[this] def addElem(elem: A, hash: Int) : Boolean = { + val idx = index(hash) + table(idx) match { + case null => + val nnode = createNewNode(elem, hash) + table(idx) = nnode + nnode.next = null + case old => + var prev = null.asInstanceOf[Node] + var n = old + while((n ne null) && n.hash <= hash) { + if(n.hash == hash && elem == n.key) return false + prev = n + n = n.next + } + val nnode = createNewNode(elem, hash) + if(prev eq null) { + nnode.next = old + table(idx) = nnode + + } else { + nnode.next = prev.next + prev.next = nnode + } + } + contentSize += 1 + true } + private[this] def remove(elem: A, hash: Int): Boolean = { + val idx = index(hash) + table(idx) match { + case null => false + case nd if nd.hash == hash && nd.key == elem => + // first element matches + table(idx) = nd.next + deleteNode(nd) + nd.next = null + contentSize -= 1 + true + case nd => + // find an element that matches + var prev = nd + var next = nd.next + while((next ne null) && next.hash <= hash) { + if(next.hash == hash && next.key == elem) { + prev.next = next.next + deleteNode(next) + next.next = null + contentSize -= 1 + return true + } + prev = next + next = next.next + } + false + } + } + + override def remove(elem: A) : Boolean = remove(elem, computeHash(elem)) + + + + + private[this] def growTable(newlen: Int) = { + if (newlen < 0) + throw new RuntimeException(s"new hash table size $newlen exceeds maximum") + var oldlen = table.length + threshold = newThreshold(newlen) + if(size == 0) table = new Array(newlen) + else { + table = java.util.Arrays.copyOf(table, newlen) + val preLow = new Node(null.asInstanceOf[A], 0, null, null, null) + val preHigh = new Node(null.asInstanceOf[A], 0, null, null, null) + // Split buckets until the new length has been reached. 
This could be done more + // efficiently when growing an already filled table to more than double the size. + while(oldlen < newlen) { + var i = 0 + while (i < oldlen) { + val old = table(i) + if(old ne null) { + preLow.next = null + preHigh.next = null + var lastLow = preLow + var lastHigh = preHigh + var n = old + while(n ne null) { + val next = n.next + if((n.hash & oldlen) == 0) { // keep low + lastLow.next = n + lastLow = n + } else { // move to high + lastHigh.next = n + lastHigh = n + } + n = next + } + lastLow.next = null + if(old ne preLow.next) table(i) = preLow.next + if(preHigh.next ne null) { + table(i + oldlen) = preHigh.next + lastHigh.next = null + } + } + i += 1 + } + oldlen *= 2 + } + } + } + + private[this] def tableSizeFor(capacity: Int) = + (Integer.highestOneBit((capacity-1).max(4))*2).min(1 << 30) + + private[this] def newThreshold(size: Int) = (size.toDouble * loadFactor).toInt + + def clear(): Unit = { + java.util.Arrays.fill(table.asInstanceOf[Array[AnyRef]], null) + contentSize = 0 + firstNode = null + lastNode = null + } + @`inline` def addOne(elem: A): this.type = { add(elem); this } + + @`inline` def subtractOne(elem: A): this.type = { remove(elem); this } + + /*create a new node. If table is empty(firstNode is null), then the + * new node will be the firstNode. If not, just set the new node to + * be the lastNode. + * */ + private[this] def createNewNode(key: A, hash: Int): Node = + { + val e = new Node(key, hash, null, null, null) + if (firstNode eq null) firstNode = e + else { lastNode.later = e; e.earlier = lastNode } + lastNode = e + e + } + /*delete the node from the linkedhashset. set its earlier node's later node + * and later node's earlier node correctly.and then set its earlier and later + * to be null.*/ + private[this] def deleteNode(e: Node): Unit = + { + if (e.earlier eq null) firstNode = e.later + else e.earlier.later = e.later + if (e.later eq null) lastNode = e.earlier + else e.later.earlier = e.earlier + e.earlier = null // Null references to prevent nepotism + e.later = null + } + + override def hashCode: Int = { + if (isEmpty) MurmurHash3.emptyMapHash + else { + val tupleHashIterator = new hashcodeIterator[Any] { + var hash: Int = 0 + override def hashCode: Int = hash + override protected[this] def extract(nd: Node): Any = { + hash =unimproveHash(nd.hash) + this + } + } + MurmurHash3.orderedHash(tupleHashIterator, MurmurHash3.mapSeed) + } + } + protected[this] def writeReplace(): AnyRef = new DefaultSerializationProxy(new LinkedHashSet.DeserializationFactory[A](table.length, loadFactor), this) + + override protected[this] def className = "LinkedHashSet" @nowarn("""cat=deprecation&origin=scala\.collection\.Iterable\.stringPrefix""") override protected[this] def stringPrefix = "LinkedHashSet" } @@ -156,22 +358,60 @@ class LinkedHashSet[A] */ @SerialVersionUID(3L) object LinkedHashSet extends IterableFactory[LinkedHashSet] { + def from[B](it: scala.collection.IterableOnce[B]): LinkedHashSet[B] = { + val k = it.knownSize + val cap = if(k > defaultInitialCapacity) ((k + 1).toDouble / defaultLoadFactor).toInt else defaultInitialCapacity + new LinkedHashSet[B](cap, defaultLoadFactor).addAll(it) + } + + def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] - override def empty[A]: LinkedHashSet[A] = new LinkedHashSet[A] + def newBuilder[A]: Builder[A, LinkedHashSet[A]] = newBuilder(defaultInitialCapacity, defaultLoadFactor) - def from[E](it: collection.IterableOnce[E]) = - it match { - case lhs: LinkedHashSet[E] => lhs - case _ => 
Growable.from(empty[E], it) + def newBuilder[A](initialCapacity: Int, loadFactor: Double): Builder[A, LinkedHashSet[A]] = + new GrowableBuilder[A, LinkedHashSet[A]](new LinkedHashSet[A](initialCapacity, loadFactor)) { + override def sizeHint(size: Int) = elems.sizeHint(size) } - def newBuilder[A] = new GrowableBuilder(empty[A]) + /** The default load factor for the hash table */ + final def defaultLoadFactor: Double = 0.75 - /** Class for the linked hash set entry, used internally. - */ - private[mutable] final class Entry[A](val key: A) extends HashEntry[A, Entry[A]] { - var earlier: Entry[A] = null - var later: Entry[A] = null + /** The default initial capacity for the hash table */ + final def defaultInitialCapacity: Int = 16 + + @SerialVersionUID(3L) + private final class DeserializationFactory[A](val tableLength: Int, val loadFactor: Double) extends Factory[A, LinkedHashSet[A]] with Serializable { + def fromSpecific(it: IterableOnce[A]): LinkedHashSet[A] = new LinkedHashSet[A](tableLength, loadFactor) ++= it + def newBuilder: Builder[A, LinkedHashSet[A]] = LinkedHashSet.newBuilder(tableLength, loadFactor) + } + + private[collection] class LinkedSNode[K](_key: K, _hash: Int, private[this] var _next: LinkedSNode[K], + private[this] var _earlier: LinkedSNode[K], private[this] var _later: LinkedSNode[K]) { + def key: K = _key + def hash: Int = _hash + def next: LinkedSNode[K] = _next + def next_=(n: LinkedSNode[K]): Unit = _next = n + def earlier: LinkedSNode[K] = _earlier + def earlier_=(ln: LinkedSNode[K]): Unit = _earlier = ln + def later: LinkedSNode[K] = _later + def later_=(ln: LinkedSNode[K]) = _later = ln + @tailrec + final def findNode(k: K, h: Int): LinkedSNode[K] = + if (h == _hash && k == _key) this + else if ((_next eq null) || (_hash > h)) null + else _next.findNode(k, h) + + @tailrec + final def foreach[U](f: K => U): Unit = { + f(_key) + if (_next ne null) _next.foreach(f) + } + @tailrec + final def foreachEntry[U](f: K => U): Unit = { + f(_key) + if (_next ne null) _next.foreachEntry(f) + } + override def toString = s"Node($key, $hash) -> $later" } } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala new file mode 100644 index 000000000000..6725141f3b41 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/LinkedHashMapBenchmark2.scala @@ -0,0 +1,216 @@ +package scala.collection.mutable + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra._ +import org.openjdk.jmh.runner.IterationType +import benchmark._ +import java.util.concurrent.TimeUnit +import java.util.{ LinkedHashMap => JLHashMap, LinkedHashSet => JLHashSet } + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class LinkedHashMapBenchmark2 { + @Param(Array(/*"0", "1",*/ "10", "100", "1000", "10000")) + var size: Int = _ + @Param(Array("true")) + var stringsOnly = false + + class Collider(val x: Any, val h: Int) { + override def hashCode: Int = h + override def equals(o: Any): Boolean = o match { + case o: Collider => x == o.x + case _ => false + } + } + + var existingKeys: Array[Any] = _ + var existingKVs: ArrayBuffer[(Any, Any)] = _ + var missingKeys: Array[Any] = _ + var s1: LinkedHashSet[Any] = _ + var m1: LinkedHashMap[Any, Any] = _ + var j1: JLHashMap[Any, Any] = new JLHashMap[Any, Any] + var j2: 
JLHashSet[Any] = new JLHashSet[Any] + var colliders: Array[Collider] = _ + + @Setup(Level.Trial) def init: Unit = { + existingKeys = (0 until size).map(i => (i % 4) match { + case _ if stringsOnly => i.toString + case 0 => i.toString + case 1 => i.toChar + case 2 => i.toDouble + case 3 => i.toInt + }).toArray + existingKVs = ArrayBuffer.from(existingKeys.iterator.map(k => (k, k))) + missingKeys = (size until (2 * size.max(100))).toArray.map(_.toString) + s1 = LinkedHashSet.from(existingKeys) + m1 = LinkedHashMap.from(existingKVs) + m1.foreach { case (k, v) => j1.put(k, v) } + s1.foreach({case k => j2.add(k)}) + colliders = existingKeys.map(k => new Collider(k, k.hashCode & 0x1111)) + } + + @Benchmark def lhsFillRegular(bh: Blackhole): Unit = { + val h = new LinkedHashSet[Any] + existingKeys.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def lhsFillColliding(bh: Blackhole): Unit = { + val h = new LinkedHashSet[Any] + colliders.foreach(k => h.addOne(k)) + bh.consume(h) + } + + @Benchmark def lhsBuild(bh: Blackhole): Unit = + bh.consume(LinkedHashSet.from(existingKeys)) + + @Benchmark def lhsIterate(bh: Blackhole): Unit = { + val it = s1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(s1.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def lhsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(s1.contains(missingKeys(i))) + i += 1 + } + } + + @Benchmark def lhmFillRegular(bh: Blackhole): Unit = { + val h = new LinkedHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def lhmFillColliding(bh: Blackhole): Unit = { + val h = new LinkedHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def lhmBuild(bh: Blackhole): Unit = + bh.consume(LinkedHashMap.from(existingKVs)) + + @Benchmark def lhmIterateKeys(bh: Blackhole): Unit = { + val it = m1.keysIterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhmIterateEntries(bh: Blackhole): Unit = { + val it = m1.iterator + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def lhmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(m1.apply(existingKeys(i))) + i += 1 + } + } + + @Benchmark def lhmGetNone(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(m1.get(missingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhmFillRegular(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any] + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javalhmFillColliding(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any] + colliders.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javalhmBuild(bh: Blackhole): Unit = { + val h = new JLHashMap[Any, Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.put(k, k)) + bh.consume(h) + } + + @Benchmark def javalhmIterateKeys(bh: Blackhole): Unit = { + val it = j1.keySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javalhmIterateEntries(bh: Blackhole): Unit = { + val it = j1.entrySet().iterator() + while(it.hasNext) bh.consume(it.next()) + } + + @Benchmark def javalhmGetExisting(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j1.get(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhmGetNone(bh: Blackhole): Unit = { + var i = 0 + while 
(i < size.max(100)) { + bh.consume(j1.get(missingKeys(i))) + i += 1 + } + } + @Benchmark def javalhsFillRegular(bh: Blackhole): Unit = { + val h = new JLHashSet[Any] + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsFillColliding(bh: Blackhole): Unit = { + val h = new JLHashSet[Any] + colliders.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsBuild(bh: Blackhole): Unit = { + val h = new JLHashSet[Any](((existingKeys.length+1).toDouble/0.75).toInt, 0.75f) + existingKeys.foreach(k => h.add(k)) + bh.consume(h) + } + + @Benchmark def javalhsIterate(bh: Blackhole): Unit = { + val it = j2.iterator() + while(it.hasNext) bh.consume(it.next()) + } + + + @Benchmark def javalhsContainsTrue(bh: Blackhole): Unit = { + var i = 0 + while (i < size) { + bh.consume(j2.contains(existingKeys(i))) + i += 1 + } + } + + @Benchmark def javalhsContainsFalse(bh: Blackhole): Unit = { + var i = 0 + while (i < size.max(100)) { + bh.consume(j2.contains(missingKeys(i))) + i += 1 + } + } +} diff --git a/test/junit/scala/SerializationStabilityTest.scala b/test/junit/scala/SerializationStabilityTest.scala index 1e0e16ff16ef..6499c0689775 100644 --- a/test/junit/scala/SerializationStabilityTest.scala +++ b/test/junit/scala/SerializationStabilityTest.scala @@ -248,8 +248,8 @@ object SerializationStability { check(g)(mutable.TreeSet[Int]())( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAyc2NhbGEuY29sbGVjdGlvbi5FdmlkZW5jZUl0ZXJhYmxlRmFjdG9yeSRUb0ZhY3RvcnkAAAAAAAAAAwIAAkwAC2V2aWRlbmNlJDE1dAASTGphdmEvbGFuZy9PYmplY3Q7TAAHZmFjdG9yeXQAKkxzY2FsYS9jb2xsZWN0aW9uL0V2aWRlbmNlSXRlcmFibGVGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkiz96UGruasMCAAB4cHNxAH4AB3ZyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCQAAAAAAAAAAwIAAHhwdwQAAAAAeA==") check(g)(mutable.TreeSet(1, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAyc2NhbGEuY29sbGVjdGlvbi5FdmlkZW5jZUl0ZXJhYmxlRmFjdG9yeSRUb0ZhY3RvcnkAAAAAAAAAAwIAAkwAC2V2aWRlbmNlJDE1dAASTGphdmEvbGFuZy9PYmplY3Q7TAAHZmFjdG9yeXQAKkxzY2FsYS9jb2xsZWN0aW9uL0V2aWRlbmNlSXRlcmFibGVGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkiz96UGruasMCAAB4cHNxAH4AB3ZyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCQAAAAAAAAAAwIAAHhwdwQAAAACc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AA8AAAADeA==") check(g)(mutable.TreeSet(1, 3).range(1, 2))( 
"rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAyc2NhbGEuY29sbGVjdGlvbi5FdmlkZW5jZUl0ZXJhYmxlRmFjdG9yeSRUb0ZhY3RvcnkAAAAAAAAAAwIAAkwAC2V2aWRlbmNlJDE1dAASTGphdmEvbGFuZy9PYmplY3Q7TAAHZmFjdG9yeXQAKkxzY2FsYS9jb2xsZWN0aW9uL0V2aWRlbmNlSXRlcmFibGVGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkiz96UGruasMCAAB4cHNxAH4AB3ZyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCQAAAAAAAAAAwIAAHhwdwT/////c3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAd2cgAmc2NhbGEuY29sbGVjdGlvbi5nZW5lcmljLlNlcmlhbGl6ZUVuZCQAAAAAAAAAAwIAAHhweA==") - check(g)(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAlc2NhbGEuY29sbGVjdGlvbi5NYXBGYWN0b3J5JFRvRmFjdG9yeQAAAAAAAAADAgABTAAHZmFjdG9yeXQAHUxzY2FsYS9jb2xsZWN0aW9uL01hcEZhY3Rvcnk7eHBzcgAmc2NhbGEucnVudGltZS5Nb2R1bGVTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAQIAAUwAC21vZHVsZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzczt4cHZyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcCQAAAAAAAAAAwIAAHhwdwQAAAABc3IADHNjYWxhLlR1cGxlMgH73c0i5zR6AgACTAACXzF0ABJMamF2YS9sYW5nL09iamVjdDtMAAJfMnEAfgAMeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ADgAAAAJ4") - check(g)(mutable.LinkedHashSet(1, 2, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAqc2NhbGEuY29sbGVjdGlvbi5JdGVyYWJsZUZhY3RvcnkkVG9GYWN0b3J5AAAAAAAAAAMCAAFMAAdmYWN0b3J5dAAiTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmFibGVGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAnc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZEhhc2hTZXQkAAAAAAAAAAMCAAB4cHcEAAAAA3NyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgALAAAAAnNxAH4ACwAAAAN4") + check(g)(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgA9c2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZEhhc2hNYXAkRGVzZXJpYWxpemF0aW9uRmFjdG9yeQAAAAAAAAADAgACRAAKbG9hZEZhY3RvckkAC3RhYmxlTGVuZ3RoeHA/6AAAAAAAAAAAAAh3BAAAAAFzcgAMc2NhbGEuVHVwbGUyAfvdzSLnNHoCAAJMAAJfMXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wAAl8ycQB+AAZ4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAIAAAAAng=") + check(g)(mutable.LinkedHashSet(1, 2, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgA9c2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZEhhc2hTZXQkRGVzZXJpYWxpemF0aW9uRmFjdG9yeQAAAAAAAAADAgACRAAKbG9hZEZhY3RvckkAC3RhYmxlTGVuZ3RoeHA/6AAAAAAAAAAAABB3BAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAADeA==") check(g)(mutable.ListBuffer(1, 2, 3))( 
"rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmdlbmVyaWMuRGVmYXVsdFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAADAwABTAAHZmFjdG9yeXQAGkxzY2FsYS9jb2xsZWN0aW9uL0ZhY3Rvcnk7eHBzcgAqc2NhbGEuY29sbGVjdGlvbi5JdGVyYWJsZUZhY3RvcnkkVG9GYWN0b3J5AAAAAAAAAAMCAAFMAAdmYWN0b3J5dAAiTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmFibGVGYWN0b3J5O3hwc3IAJnNjYWxhLnJ1bnRpbWUuTW9kdWxlU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAECAAFMAAttb2R1bGVDbGFzc3QAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAkc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpc3RCdWZmZXIkAAAAAAAAAAMCAAB4cHcEAAAAA3NyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgALAAAAAnNxAH4ACwAAAAN4") check(g)(new mutable.StringBuilder(new java.lang.StringBuilder("123")))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuU3RyaW5nQnVpbGRlcgAAAAAAAAADAgABTAAKdW5kZXJseWluZ3QAGUxqYXZhL2xhbmcvU3RyaW5nQnVpbGRlcjt4cHNyABdqYXZhLmxhbmcuU3RyaW5nQnVpbGRlcjzV+xRaTGrLAwAAeHB3BAAAAAN1cgACW0OwJmaw4l2ErAIAAHhwAAAAEwAxADIAMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeA==") diff --git a/test/junit/scala/collection/mutable/LinkedHashMapTest.scala b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala index 5b76a43cb427..adf4338a76ed 100644 --- a/test/junit/scala/collection/mutable/LinkedHashMapTest.scala +++ b/test/junit/scala/collection/mutable/LinkedHashMapTest.scala @@ -11,7 +11,7 @@ import scala.collection.mutable @RunWith(classOf[JUnit4]) class LinkedHashMapTest { class TestClass extends mutable.LinkedHashMap[String, Int] { - def lastItemRef = lastEntry + def lastItemRef = lastNode } @Test diff --git a/test/junit/scala/collection/mutable/LinkedHashSetTest.scala b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala index 92a5de20745d..cd931a2e9dd6 100644 --- a/test/junit/scala/collection/mutable/LinkedHashSetTest.scala +++ b/test/junit/scala/collection/mutable/LinkedHashSetTest.scala @@ -10,7 +10,7 @@ import scala.collection.mutable @RunWith(classOf[JUnit4]) class LinkedHashSetTest { class TestClass extends mutable.LinkedHashSet[String] { - def lastItemRef = lastEntry + def lastItemRef = lastNode } @Test
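The test updates follow from the representation change: `lastEntry` becomes `lastNode`, and serialization now goes through `writeReplace` with a `DeserializationFactory` carrying the table length and load factor, which is why the SerializationStabilityTest blobs were regenerated. A hedged round-trip sketch (assumes the patched library is on the classpath; `RoundTripDemo` and `roundTrip` are illustrative names):

```scala
import java.io._
import scala.collection.mutable.LinkedHashMap

object RoundTripDemo {
  // Plain java.io serialization; DefaultSerializationProxy is what actually gets written.
  def roundTrip[A <: Serializable](a: A): A = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(a)
    oos.close()
    new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray)).readObject().asInstanceOf[A]
  }

  def main(args: Array[String]): Unit = {
    val m = LinkedHashMap("b" -> 2, "a" -> 1, "c" -> 3)
    assert(roundTrip(m).toList == List("b" -> 2, "a" -> 1, "c" -> 3)) // insertion order survives
  }
}
```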