import high scale project

Root commit 5180b95135217bc1580eb72114c4ef9b3a84d672, committed by Cliff Moon on Aug 30, 2010
Showing with 17,703 additions and 0 deletions.
  1. +5 −0 .gitignore
  2. +57 −0 README
  3. +8 −0 Testing/CAT_Tester/AtomicCounter.java
  4. +5 −0 Testing/CAT_Tester/BUILD.sh
  5. +10 −0 Testing/CAT_Tester/CATCounter.java
  6. +11 −0 Testing/CAT_Tester/Counter.java
  7. +212 −0 Testing/CAT_Tester/Harness.java
  8. +16 −0 Testing/CAT_Tester/LockCounter.java
  9. +12 −0 Testing/CAT_Tester/README.txt
  10. +6 −0 Testing/CAT_Tester/RaceyCounter.java
  11. +33 −0 Testing/CAT_Tester/StripeLockCounter.java
  12. +21 −0 Testing/CAT_Tester/StripeUnsafeCounter.java
  13. +7 −0 Testing/CAT_Tester/SyncCounter.java
  14. +46 −0 Testing/CAT_Tester/UnsafeCounter.java
  15. +5 −0 Testing/CAT_Tester/build.bat
  16. +390 −0 Testing/CAT_Tester/hash_test.java
  17. +14 −0 Testing/JUnitSuite.java
  18. +549 −0 Testing/NBHM_Tester/NBHMID_Tester2.java
  19. +510 −0 Testing/NBHM_Tester/NBHML_Tester2.java
  20. +667 −0 Testing/NBHM_Tester/NBHM_Tester.java
  21. +652 −0 Testing/NBHM_Tester/NBHM_Tester2.java
  22. +440 −0 Testing/NBHM_Tester/perf_hash_test.java
  23. +361 −0 Testing/NBHM_Tester/perf_hashlong_test.java
  24. +136 −0 Testing/NBHS_Tester/nbhs_tester.java
  25. +6 −0 Testing/NBHS_Tester/nbhs_tester.log
  26. +191 −0 Testing/NBHS_Tester/nbsi_tester.java
  27. +6 −0 Testing/NBHS_Tester/nbsi_tester.log
  28. +388 −0 Testing/NBHS_Tester/perf_set_test.java
  29. +16 −0 build.xml
  30. +107 −0 contrib/Testing/NBQ_Tester/NBQ_Tester.java
  31. +6 −0 contrib/Testing/NBQ_Tester/NBQ_Tester.log
  32. +49 −0 doc/allclasses-frame.html
  33. +49 −0 doc/allclasses-noframe.html
  34. +142 −0 doc/constant-values.html
  35. +142 −0 doc/deprecated-list.html
  36. +209 −0 doc/help-doc.html
  37. +758 −0 doc/index-all.html
  38. +36 −0 doc/index.html
  39. +446 −0 doc/org/cliffc/high_scale_lib/AbstractEntry.html
  40. +526 −0 doc/org/cliffc/high_scale_lib/ConcurrentAutoTable.html
  41. +243 −0 doc/org/cliffc/high_scale_lib/Counter.html
  42. +1,085 −0 doc/org/cliffc/high_scale_lib/NonBlockingHashMap.html
  43. +387 −0 doc/org/cliffc/high_scale_lib/NonBlockingHashMapLong.IteratorLong.html
  44. +1,191 −0 doc/org/cliffc/high_scale_lib/NonBlockingHashMapLong.html
  45. +459 −0 doc/org/cliffc/high_scale_lib/NonBlockingHashSet.html
  46. +1,066 −0 doc/org/cliffc/high_scale_lib/NonBlockingHashtable.html
  47. +1,085 −0 doc/org/cliffc/high_scale_lib/NonBlockingIdentityHashMap.html
  48. +554 −0 doc/org/cliffc/high_scale_lib/NonBlockingSetInt.html
  49. +225 −0 doc/org/cliffc/high_scale_lib/UtilUnsafe.html
  50. +50 −0 doc/org/cliffc/high_scale_lib/package-frame.html
  51. +197 −0 doc/org/cliffc/high_scale_lib/package-summary.html
  52. +168 −0 doc/org/cliffc/high_scale_lib/package-tree.html
  53. +170 −0 doc/overview-tree.html
  54. +1 −0 doc/package-list
  55. BIN doc/resources/inherit.gif
  56. +479 −0 doc/serialized-form.html
  57. +29 −0 doc/stylesheet.css
  58. 0 docs
  59. +92 −0 src/java/util/Hashtable.java
  60. +34 −0 src/java/util/concurrent/ConcurrentHashMap.java
  61. +52 −0 src/org/cliffc/high_scale_lib/AbstractEntry.java
  62. +275 −0 src/org/cliffc/high_scale_lib/ConcurrentAutoTable.java
  63. +39 −0 src/org/cliffc/high_scale_lib/Counter.java
  64. +1,291 −0 src/org/cliffc/high_scale_lib/NonBlockingHashMap.java
  65. +1,196 −0 src/org/cliffc/high_scale_lib/NonBlockingHashMapLong.java
  66. +85 −0 src/org/cliffc/high_scale_lib/NonBlockingHashSet.java
Sorry, we could not display the entire diff because it was too big.
5 .gitignore
@@ -0,0 +1,5 @@
+.classpath
+.cvsignore
+.project
+bin/*
+lib/*
57 README
@@ -0,0 +1,57 @@
+
+IF YOU ARE LOOKING for the drop-in replacement for java.util.Hashtable, it's
+in the lib directory, lib/java_util_hashtable.jar. It needs to be in your
+bootclasspath. Example:
+
+ java -Xbootclasspath/p:lib/java_util_hashtable.jar my_java_app_goes_here
+
+
+---
+
+A collection of Concurrent and Highly Scalable Utilities. These are intended
+as direct replacements for the java.util.* or java.util.concurrent.*
+collections but with better performance when many CPUs are using the
+collection concurrently. Single-threaded performance may be slightly lower.
+
+The direct replacements match the API - but not all behaviors are covered by
+the API, and so they may not work for your program. In particular, the
+replacement for java.util.Hashtable is NOT synchronized (that is the point!),
+although it is thread-safe. If you rely on the undocumented
+synchronization behavior of the JDK Hashtable, your program may not work.
+Similarly, the iteration order differs between this version and the JDK
+version. (This exact issue broke the SpecJBB benchmark when the iteration
+order changed slightly, via a slightly different hash function, between JDK
+revisions.)
+
+If you want to drop-in the non-blocking versions of Hashtable, HashMap or
+ConcurrentHashMap, you'll need to alter your bootclasspath - these classes
+come directly from your JDK and so are found via the System loader before any
+class-path hacks can be done.
+
+To replace the JDK implementation of Hashtable with a non-blocking version of
+Hashtable, add java_util_hashtable.jar to your java launch line:
+
+ java -Xbootclasspath/p:lib/java_util_hashtable.jar my_app_goes_here
+
+Similarly for ConcurrentHashMap, add java_util_concurrent_chm.jar:
+
+ java -Xbootclasspath/p:lib/java_util_concurrent_chm.jar my_app_goes_here
+
+
+The other utilities do not have direct JDK replacements; you need to call them
+out directly and place high_scale_lib.jar in your classpath:
+
+- NonBlockingHashMap - Fast, concurrent, lock-free HashMap. Linear scaling to 768 CPUs.
+- NonBlockingHashMapLong - Same as above, but using primitive 'long' keys
+- NonBlockingHashSet - A Set version of NBHM
+- NonBlockingSetInt - A fast, fully concurrent BitVector
+- Counter - A simple counter that scales linearly even when extremely hot.
+  Most simple counters are either unsynchronized (hence they drop counts,
+  often quite badly beyond 2 CPUs), or locked (hence they bottleneck in the
+  5-10 CPU range), or use Atomics (hence they bottleneck in the 25-50 CPU
+  range). This version scales linearly to 768 CPUs.
+
+
+
+Cliff Click
+
8 Testing/CAT_Tester/AtomicCounter.java
@@ -0,0 +1,8 @@
+
+import java.util.concurrent.atomic.*;
+public final class AtomicCounter extends Counter {
+ public String name() { return "Atomic"; }
+ private final AtomicLong _cnt = new AtomicLong();
+ public long get(){ return _cnt.get(); }
+ public void add( long x ) { _cnt.getAndAdd(x); }
+}
5 Testing/CAT_Tester/BUILD.sh
@@ -0,0 +1,5 @@
+# Simple build line
+#
+JAVA_HOME=/usr/local/j2sdk1.5.0_06
+javac -classpath $JAVA_HOME/jre/lib/rt.jar:. Harness.java org/cliffc/high_scale_lib/*.java ../org/cliffc/high_scale_lib/*.java
+
10 Testing/CAT_Tester/CATCounter.java
@@ -0,0 +1,10 @@
+
+import org.cliffc.high_scale_lib.*;
+public final class CATCounter extends Counter {
+ public String name() { return "CAT"; }
+ private final ConcurrentAutoTable _tab = new ConcurrentAutoTable();
+ public long get(){ return _tab.get(); }
+ public void add( long x ) { _tab.add(x); }
+ public void print() { _tab.print(); }
+ public int internal_size() { return _tab.internal_size(); }
+}
11 Testing/CAT_Tester/Counter.java
@@ -0,0 +1,11 @@
+//package org.cliffc.high_scale_lib;
+public abstract class Counter {
+ public abstract String name();
+ public abstract long get();
+ public abstract void add( long x );
+ public long pre_add ( long x ) { long l = get(); add(x); return l; }
+ public long post_add( long x ) { add(x); long l = get(); return l; }
+ public long post_inc() { return post_add( 1); }
+ public long pre_dec() { return pre_add(-1); }
+}
+
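As a quick illustration of the pre_/post_ semantics defined above, using the
AtomicCounter subclass (values assume a single thread; under concurrency the
separate get() and add() calls inside pre_add/post_add are not atomic
together):

    Counter c = new AtomicCounter();
    c.add(5);                // count is now 5
    long a = c.pre_add(2);   // returns the old value 5; count becomes 7
    long b = c.post_inc();   // count becomes 8; returns the new value 8
    long d = c.pre_dec();    // returns the old value 8; count becomes 7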
212 Testing/CAT_Tester/Harness.java
@@ -0,0 +1,212 @@
+/*
+ * Written by Cliff Click and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+public class Harness extends Thread {
+ static int _thread_min, _thread_max, _thread_incr;
+ static int _ctr_impl;
+
+ static Counter make_ctr( final int impl ) {
+ switch( impl ) {
+ case 1: return new RaceyCounter();
+ case 2: return new SyncCounter();
+ case 3: return new LockCounter();
+ case 4: return new AtomicCounter();
+ case 5: return new UnsafeCounter();
+ case 6: return new StripeLockCounter( 8);
+ case 7: return new StripeUnsafeCounter( 8);
+ case 8: return new StripeLockCounter( 64);
+ case 9: return new StripeUnsafeCounter( 64);
+ case 10: return new StripeLockCounter(256);
+ case 11: return new StripeUnsafeCounter(256);
+ case 12: return new CATCounter();
+ default:
+ throw new Error("Bad impl");
+ }
+ }
+
+ static volatile boolean _start;
+ static volatile boolean _stop;
+ static final int NUM_CPUS = Runtime.getRuntime().availableProcessors();
+
+ static int check( String arg, String msg, int lower, int upper ) {
+ return check( Integer.parseInt(arg), msg, lower, upper );
+ }
+ static int check( int x, String msg, int lower, int upper ) {
+ if( x < lower || x > upper )
+ throw new Error(msg+" must be from "+lower+" to "+upper);
+ return x;
+ }
+
+ public static void main( String args[] ) {
+ // Parse args
+ try {
+ _thread_min = check( args[0], "thread_min", 1, 100000 );
+ _thread_max = check( args[1], "thread_max", 1, 100000 );
+ _thread_incr = check( args[2], "thread_incr", 1, 100000 );
+ _ctr_impl = check( args[3], "implementation", -1, 13 );
+
+ int trips = (_thread_max - _thread_min)/_thread_incr;
+ _thread_max = trips*_thread_incr + _thread_min;
+
+ } catch( Error e ) {
+ System.out.println("Usage: harness thread-min thread-max thread-incr impl[All=0]");
+ throw e;
+ }
+ String name = _ctr_impl == 0 ? "ALL" : (_ctr_impl==-1 ? "Best" : make_ctr(_ctr_impl).name());
+ System.out.println("===== "+name+" =====");
+ System.out.println("Threads from "+_thread_min+" to "+_thread_max+" by "+_thread_incr);
+
+ // Do some warmup
+ System.out.println("==== Warmup -variance: ");
+ run_till_stable(Math.min(_thread_min,2),1);
+
+ // Now do the real thing
+ int num_trials = 7; // Number of Trials
+ System.out.print("==== Counter Threads Trial:");
+ for( int i=0; i<num_trials; i++ )
+ System.out.printf(" %3d ",i);
+ System.out.println(" Average");
+ for( int i=_thread_min; i<=_thread_max; i += _thread_incr )
+ run_till_stable( i, num_trials );
+ }
+
+ static void run_till_stable( int num_threads, int num_trials ) {
+ if( _ctr_impl > 0 ) {
+ run_till_stable(num_threads,num_trials,_ctr_impl);
+ } else if( _ctr_impl == 0 ) {
+ for( int impl=1;impl<13; impl++ )
+ run_till_stable(num_threads,num_trials,impl);
+ System.out.println();
+ } else {
+ run_till_stable(num_threads,num_trials,11); // big striping Unsafe
+ run_till_stable(num_threads,num_trials,12); // CAT
+ }
+ }
+
+ static void run_till_stable( int num_threads, int num_trials, int impl ) {
+
+ Counter C = make_ctr(impl);
+ System.out.printf("=== %10.10s %3d cnts/sec=",C.name(),num_threads);
+ long[] trials = new long[num_trials]; // Number of trials
+ long total_ops = 0; // Total ops altogether
+ long total_ops_sec = 0; // Sum of ops/sec for each run
+
+ // Run some trials
+ for( int j=0; j<trials.length; j++ ) {
+ long[] ops = new long[num_threads];
+ long millis = run_once(num_threads,C,ops);
+ long sum = 0;
+ for( int i=0; i<num_threads; i++ )
+ sum += ops[i];
+ total_ops += sum;
+ sum = sum*1000L/millis;
+ trials[j] = sum;
+ total_ops_sec += sum;
+ System.out.printf(" %10d",sum);
+ }
+
+ // Compute nice trial results
+ if( trials.length > 2 ) {
+ // Toss out low & high
+ int lo=0;
+ int hi=0;
+ for( int j=1; j<trials.length; j++ ) {
+ if( trials[lo] < trials[j] ) lo=j;
+ if( trials[hi] > trials[j] ) hi=j;
+ }
+ long total2 = total_ops_sec - (trials[lo]+trials[hi]);
+ trials[lo] = trials[trials.length-1];
+ trials[hi] = trials[trials.length-2];
+ // Print avg,stddev
+ long avg = total2/(trials.length-2);
+ long stddev = compute_stddev(trials,trials.length-2);
+ long p = stddev*100/avg; // std-dev as a percent
+
+ System.out.printf(" %10d",avg);
+ System.out.printf(" (+/-%2d%%)",p);
+ }
+
+ long loss = total_ops - C.get();
+ if( loss != 0 ) {
+ System.out.print(" Lossage=");
+ int loss_per = (int)(loss*100/total_ops);
+ System.out.print(loss_per == 0 ? (""+loss) : (""+loss_per+"%"));
+ }
+
+ if( C instanceof CATCounter ) {
+ CATCounter cat = (CATCounter)C;
+ System.out.print(" autotable="+ cat.internal_size());
+ if( loss != 0 ) cat.print();
+ }
+
+ System.out.println();
+ }
+
+ static long compute_stddev(long[] trials, int len) {
+ double sum = 0;
+ double squ = 0.0;
+ for( int i=0; i<len; i++ ) {
+ double d = (double)trials[i];
+ sum += d;
+ squ += d*d;
+ }
+ double x = squ - sum*sum/len;
+ double stddev = Math.sqrt(x/(len-1));
+ return (long)stddev;
+ }
+
+ final int _tnum;
+ final Counter _C;
+ final long[] _ops;
+ Harness( int tnum, Counter C, long[] ops ) { _tnum = tnum; _C = C; _ops = ops; }
+
+ static long run_once( int num_threads, Counter C, long[] ops ) {
+ _start = false;
+ _stop = false;
+
+ // Launch threads
+ Harness thrs[] = new Harness[num_threads];
+ for( int i=0; i<num_threads; i++ ) {
+ thrs[i] = new Harness(i, C, ops);
+ //int h1 = System.identityHashCode(thrs[i]);
+ //int h2 = h1;
+ //h2 ^= (h2>>>20) ^ (h2>>>12);
+ //h2 ^= (h2>>> 7) ^ (h2>>> 4);
+ //System.out.printf("%x ",h1&0xfff);
+ }
+ //System.out.println("");
+ for( int i=0; i<num_threads; i++ )
+ thrs[i].start();
+ // Run threads
+ long start = System.currentTimeMillis();
+ _start = true;
+ try { Thread.sleep(2000); } catch( InterruptedException e ){}
+ _stop = true;
+ long stop = System.currentTimeMillis();
+ long millis = stop-start;
+
+ for( int i=0; i<num_threads; i++ ) {
+ try {
+ thrs[i].join();
+ } catch( InterruptedException e ) { }
+ }
+ return millis;
+ }
+
+ // What a worker thread does
+ public void run() {
+ while( !_start ) // Spin till Time To Go
+ try { Thread.sleep(1); } catch( InterruptedException e ){}
+
+ int ops = 0;
+ while( !_stop ) {
+ ops++;
+ _C.add(1);
+ }
+ // We stopped; report results into shared result structure
+ _ops[_tnum] = ops;
+ }
+
+}
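For reference, compute_stddev above implements the usual one-pass sample
standard deviation; in the code's notation (sum accumulates the x_i, squ
accumulates the x_i squared):

    s = \sqrt{ \left( \sum_{i=1}^{n} x_i^2 - \tfrac{1}{n}\Bigl(\sum_{i=1}^{n} x_i\Bigr)^2 \right) / (n-1) }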
16 Testing/CAT_Tester/LockCounter.java
@@ -0,0 +1,16 @@
+import java.util.concurrent.locks.*;
+
+public final class LockCounter extends Counter {
+ public String name() { return "Lock"; }
+ private final ReentrantLock _lock = new ReentrantLock();
+ private long _cnt;
+ public long get(){ return _cnt; }
+ public void add( long x ) {
+ try {
+ _lock.lock();
+ _cnt += x;
+ } finally {
+ _lock.unlock();
+ }
+ }
+}
12 Testing/CAT_Tester/README.txt
@@ -0,0 +1,12 @@
+
+A testing harness for scalable counters.
+
+There are some counter implementations in the local java/util which
+must be in the bootclasspath (they use Unsafe).
+
+The main counter is ConcurrentAutoTable.java, which is kept in the
+top-level java/util directory, so I end up with 2 java/util
+directories: one for the testing harness & strawman counter
+implementations, one for the main implementation.
+
+
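An illustrative Unix analog of build.bat below, sweeping 1 to 8 threads in
steps of 1 across all counter implementations (paths depend on your layout;
impl 0 = run all, per the usage string in Harness.java):

    javac -classpath .:../.. Harness.java
    java -Xbootclasspath/p:.:../.. -classpath . Harness 1 8 1 0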
6 Testing/CAT_Tester/RaceyCounter.java
@@ -0,0 +1,6 @@
+public final class RaceyCounter extends Counter {
+ private long _cnt;
+ public long get(){ return _cnt; }
+ public void add( long x ) { _cnt += x; }
+ public String name() { return "Racey"; }
+}
33 Testing/CAT_Tester/StripeLockCounter.java
@@ -0,0 +1,33 @@
+
+import java.util.concurrent.locks.*;
+
+public final class StripeLockCounter extends Counter {
+ private final int _stripes;
+ private final ReentrantLock[] _locks;
+ private final long _cnts[];
+ StripeLockCounter(int stripes) {
+ _stripes = stripes;
+ _locks = new ReentrantLock[stripes];
+ _cnts = new long[stripes];
+ for( int i=0; i<stripes; i++ )
+ _locks[i] = new ReentrantLock();
+ }
+ public String name() { return "Locks"+_stripes; }
+ public long get() {
+ long sum = 0;
+ for( int i=0; i<_cnts.length; i++ )
+ sum += _cnts[i];
+ return sum;
+ }
+ public void add( long x ) {
+ int hash = System.identityHashCode( Thread.currentThread());
+ int idx = hash & (_locks.length-1);
+ final Lock l = _locks[idx];
+ try {
+ l.lock();
+ _cnts [idx] += x;
+ } finally {
+ l.unlock();
+ }
+ }
+}
21 Testing/CAT_Tester/StripeUnsafeCounter.java
@@ -0,0 +1,21 @@
+
+public final class StripeUnsafeCounter extends Counter {
+ private final UnsafeCounter _cnts[];
+ StripeUnsafeCounter(int stripes) {
+ _cnts = new UnsafeCounter[stripes];
+ for( int i=0; i<stripes; i++ )
+ _cnts[i] = new UnsafeCounter();
+ }
+ public String name() { return "Unsafes"+_cnts.length; }
+ public long get() {
+ long sum = 0;
+ for( int i=0; i<_cnts.length; i++ )
+ sum += _cnts[i].get();
+ return sum;
+ }
+ public void add( long x ) {
+ int hash = System.identityHashCode( Thread.currentThread());
+ int idx = hash & (_cnts.length-1);
+ _cnts[idx].add(x);
+ }
+}
7 Testing/CAT_Tester/SyncCounter.java
@@ -0,0 +1,7 @@
+
+public final class SyncCounter extends Counter {
+ public String name() { return "Synchronized"; }
+ private long _cnt;
+ public long get(){ return _cnt; }
+ public synchronized void add( long x ) { _cnt += x; }
+}
46 Testing/CAT_Tester/UnsafeCounter.java
@@ -0,0 +1,46 @@
+//package org.cliffc.high_scale_lib;
+import sun.misc.Unsafe;
+import java.lang.reflect.*;
+
+public final class UnsafeCounter extends Counter {
+ public String name() { return "Unsafe"; }
+ private static final Unsafe _unsafe = UtilUnsafe.getUnsafe();
+ private static final long CNT_OFFSET;
+ static { { // <clinit>
+ Field f = null;
+ try {
+ f = UnsafeCounter.class.getDeclaredField("_cnt");
+ } catch( java.lang.NoSuchFieldException e ) {
+ throw new Error(e);
+ }
+ CNT_OFFSET = _unsafe.objectFieldOffset(f);
+ }
+ }
+
+ private long _cnt;
+ public long get(){ return _cnt; }
+ public void add( final long x ) {
+ long cnt=0;
+ do {
+ cnt = _cnt;
+ } while( !_unsafe.compareAndSwapLong(this,CNT_OFFSET,cnt,cnt+x) );
+ }
+
+
+ private static class UtilUnsafe {
+ private UtilUnsafe() { } // dummy private constructor
+ public static Unsafe getUnsafe() {
+ // Not on bootclasspath
+ if( UtilUnsafe.class.getClassLoader() == null )
+ return Unsafe.getUnsafe();
+ try {
+ final Field fld = Unsafe.class.getDeclaredField("theUnsafe");
+ fld.setAccessible(true);
+ return (Unsafe) fld.get(UtilUnsafe.class);
+ } catch (Exception e) {
+ throw new RuntimeException("Could not obtain access to sun.misc.Unsafe", e);
+ }
+ }
+ }
+
+}
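For comparison, a minimal sketch of the same read-CAS-retry loop written
against the public java.util.concurrent API instead of sun.misc.Unsafe (the
class name CasCounter is hypothetical; AtomicCounter above gets the same
effect in a single getAndAdd call):

    import java.util.concurrent.atomic.AtomicLong;

    public final class CasCounter extends Counter {
      public String name() { return "CAS"; }
      private final AtomicLong _cnt = new AtomicLong();
      public long get() { return _cnt.get(); }
      public void add( long x ) {
        long cnt;
        do {
          cnt = _cnt.get();                         // read the current value
        } while( !_cnt.compareAndSet(cnt, cnt+x) ); // retry if another thread raced us
      }
    }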
5 Testing/CAT_Tester/build.bat
@@ -0,0 +1,5 @@
+echo
+cd "c:\Documents and Settings\Cliff\Desktop\Highly Scalable Java\high-scale-lib\Testing\CAT_Tester"
+javac -classpath .;..\.. Harness.java CATCounter.java
+cd ..\..
+java -classpath Testing\CAT_Tester;. -Xbootclasspath/p:Testing\CAT_Tester;. Harness 1 3 1 0
390 Testing/CAT_Tester/hash_test.java
@@ -0,0 +1,390 @@
+/*
+ * Written by Cliff Click and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ * Big Chunks of code shamelessly copied from Doug Lea's test harness which is also public domain.
+ */
+
+
+import org.cliffc.high_scale_lib.*;
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.*;
+
+public class hash_test extends Thread {
+ static int _read_ratio, _gr, _pr;
+ static int _thread_min, _thread_max, _thread_incr;
+ static int _table_size;
+ static int _map_impl;
+
+ static Map<String,String> make_map( int impl ) {
+ switch( impl ) {
+ case 1: return new Hashtable<String,String>(0);
+ case 2: return null; // new CliffWrapHerlihy(); // was a non-blocking HashSet implementation from Maurice Herlihy
+ case 3: return new ConcurrentHashMap<String,String>(16,0.75f, 16); // force to 16 striping
+ case 4: return new ConcurrentHashMap<String,String>(16,0.75f, 256); // force to 256 striping
+ case 5: return new ConcurrentHashMap<String,String>(16,0.75f,4096); // force to 4096 striping
+ case 6: return new NonBlockingHashMap<String,String>();
+ default: throw new Error("Bad impl");
+ }
+ }
+ static String names[] = {
+ "ALL",
+ "HashTable",
+ "HerlihyHashSet",
+ "CHM_16",
+ "CHM_256",
+ "CHM_4096",
+ "NBHashMap",
+ };
+
+
+ static String KEYS[];
+ static volatile boolean _start;
+ static volatile boolean _stop;
+ static final int NUM_CPUS = Runtime.getRuntime().availableProcessors();
+
+ static int check( String arg, String msg, int lower, int upper ) throws Exception {
+ return check( Integer.parseInt(arg), msg, lower, upper );
+ }
+ static int check( int x, String msg, int lower, int upper ) throws Exception {
+ if( x < lower || x > upper )
+ throw new Error(msg+" must be from "+lower+" to "+upper);
+ return x;
+ }
+
+ public static void main( String args[] ) throws Exception {
+ // Parse args
+ try {
+ _read_ratio = check( args[0], "read%", 0, 100 );
+ _thread_min = check( args[1], "thread_min", 1, 100000 );
+ _thread_max = check( args[2], "thread_max", 1, 100000 );
+ _thread_incr = check( args[3], "thread_incr", 1, 100000 );
+ _table_size = check( args[4], "table_size", 1, 100000000 );
+ _map_impl = check( args[5], "implementation", -1, names.length );
+
+ _gr = (_read_ratio<<20)/100;
+ _pr = (((1<<20) - _gr)>>1) + _gr;
+
+ int trips = (_thread_max - _thread_min)/_thread_incr;
+ _thread_max = trips*_thread_incr + _thread_min;
+
+ } catch( Exception e ) {
+ System.out.println("Usage: hash_test read%[0=churn test] thread-min thread-max thread-increment hash_table_size impl[All=0,Hashtable=1,HerlihyHashSet=2,CHM_16=3,CHM_256=4,CHM_4096=5,NonBlockingHashMap=6]");
+ throw e;
+ }
+
+ System.out.print( _read_ratio+"% gets, "+
+ ((100-_read_ratio)>>1)+"% inserts, "+
+ ((100-_read_ratio)>>1)+"% removes, " +
+ "table_size="+_table_size);
+ if( _read_ratio==0 )
+ System.out.print(" -- churn");
+ String name = _map_impl == -1 ? "Best" : names[_map_impl];
+ System.out.println(" "+name);
+ System.out.println("Threads from "+_thread_min+" to "+_thread_max+" by "+_thread_incr);
+
+ // Do some warmup
+ int keymax = 1;
+ while( keymax < _table_size ) keymax<<=1;
+ if( _read_ratio == 0 ) keymax = 1024*1024; // The churn test uses a large key set
+ KEYS = new String[keymax];
+ int [] histo = new int[64];
+ for( int i=0; i<KEYS.length; i++ ) {
+ KEYS[i] = String.valueOf(i) + "abc" + String.valueOf(i*17+123);
+ histo[KEYS[i].hashCode() >>>(32-6)]++;
+ }
+ // verify good key spread to help ConcurrentHashMap
+ //for( int i=0; i<histo.length; i++ )
+ // System.out.print(" "+histo[i]);
+
+ System.out.println("Warmup -variance: ");
+ run_till_stable(Math.min(_thread_min,2),1);
+
+ // Now do the real thing
+ System.out.print("==== Counter Threads Trial: ");
+ int num_trials = 7; // Number of Trials
+ for( int i=0; i<num_trials; i++ )
+ System.out.printf(" %3d ",i);
+ System.out.println(" Avg Stddev");
+ for( int i=_thread_min; i<=_thread_max; i += _thread_incr )
+ run_till_stable( i, num_trials );
+ }
+
+ static void run_till_stable( int num_threads, int num_trials ) throws Exception {
+ if( _map_impl > 0 ) {
+ run_till_stable(num_threads,num_trials,_map_impl);
+ } else if( _map_impl == 0 ) {
+ for( int i=1; i<names.length; i++ )
+ run_till_stable(num_threads,num_trials,i);
+ } else {
+ run_till_stable(num_threads,num_trials,5);
+ run_till_stable(num_threads,num_trials,6);
+ }
+ }
+
+ static void run_till_stable( int num_threads, int num_trials, int impl ) throws Exception {
+ Map<String,String> HM = make_map(impl);
+ if( HM == null ) return;
+ String name = names[impl];
+ System.out.printf("=== %10.10s %3d cnts/sec=",name,num_threads);
+
+ // Quicky sanity check
+ for( int i=0; i<100; i++ ) {
+ HM.put(KEYS[i],KEYS[i]);
+ for( int j=0; j<i; j++ ) {
+ if( HM.get(KEYS[j]) != KEYS[j] ) {
+ throw new Error("Broken table, put "+i+" but cannot find #"+j);
+ }
+ }
+ }
+
+ long[] trials = new long[num_trials]; // Number of trials
+ long total = 0;
+
+ for( int j=0; j<trials.length; j++ ) {
+ long[] ops = new long[num_threads];
+ long[] nanos = new long[num_threads];
+ long millis = run_once(num_threads,HM,ops,nanos);
+ long sum = 0;
+ for( int i=0; i<num_threads; i++ )
+ sum += ops[i];
+ long ops_per_sec = (sum*1000L)/millis;
+ trials[j] = ops_per_sec;
+ total += ops_per_sec;
+ System.out.printf(" %10d",ops_per_sec);
+
+
+ //for( int i=0; i<num_threads; i++ ) {
+ // if( nanos[i] < 1980000000 ||
+ // nanos[i] > 2010000000 ||
+ // ops[i] < 100000 )
+ // System.out.printf(" %d",ops[i]);
+ //}
+
+ }
+
+ if( trials.length > 2 ) {
+ // Toss out low & high
+ int lo=0;
+ int hi=0;
+ for( int j=1; j<trials.length; j++ ) {
+ if( trials[lo] < trials[j] ) lo=j;
+ if( trials[hi] > trials[j] ) hi=j;
+ }
+ total -= (trials[lo]+trials[hi]);
+ trials[lo] = trials[trials.length-1];
+ trials[hi] = trials[trials.length-2];
+ // Print avg,stddev
+ long avg = total/(trials.length-2);
+ long stddev = compute_stddev(trials,trials.length-2);
+ long p = stddev*100/avg; // std-dev as a percent
+
+ if( trials.length-2 > 2 ) {
+ // Toss out low & high
+ lo=0;
+ hi=0;
+ for( int j=1; j<trials.length-2; j++ ) {
+ if( trials[lo] < trials[j] ) lo=j;
+ if( trials[hi] > trials[j] ) hi=j;
+ }
+ total -= (trials[lo]+trials[hi]);
+ trials[lo] = trials[trials.length-2-1];
+ trials[hi] = trials[trials.length-2-2];
+ // Print avg,stddev
+ avg = total/(trials.length-2-2);
+ stddev = compute_stddev(trials,trials.length-2-2);
+ p = stddev*100/avg; // std-dev as a percent
+ }
+ System.out.printf(" %10d",avg);
+ System.out.printf(" (+/-%2d%%) %d",p,HM.size());
+ }
+ System.out.println();
+ }
+
+ static long compute_stddev(long[] trials, int len) {
+ double sum = 0;
+ double squ = 0.0;
+ for( int i=0; i<len; i++ ) {
+ double d = (double)trials[i];
+ sum += d;
+ squ += d*d;
+ }
+ double x = squ - sum*sum/len;
+ double stddev = Math.sqrt(x/(len-1));
+ return (long)stddev;
+ }
+
+ // Worker thread fields
+ final int _tnum;
+ final Map<String,String> _hash; // Shared hashtable
+ final long[] _ops;
+ final long[] _nanos;
+ hash_test( int tnum, Map<String,String> HM, long[] ops, long [] nanos ) { _tnum = tnum; _hash = HM; _ops = ops; _nanos = nanos; }
+
+ static long run_once( int num_threads, Map<String,String> HM, long[] ops, long [] nanos ) throws Exception {
+ Random R = new Random();
+ _start = false;
+ _stop = false;
+
+ HM.put("Cliff","Cliff");
+ HM.remove("Cliff");
+
+ int sz = HM.size();
+ while( sz+1024 < _table_size ) {
+ int idx = R.nextInt();
+ for( int i=0; i<1024; i++ ) {
+ String key = KEYS[idx&(KEYS.length-1)];
+ HM.put(key,key);
+ idx++;
+ }
+ sz = HM.size();
+ }
+
+ while( sz < ((_table_size>>1)+(_table_size>>3)) ) {
+ int trip = 0;
+ int idx = R.nextInt();
+ while( true ) {
+ String key = KEYS[idx&(KEYS.length-1)];
+ if( sz < _table_size ) {
+ if( HM.put(key,key) == null ) { sz++; break; }
+ } else {
+ if( HM.remove(key ) != null ) { sz--; break; }
+ }
+ idx++;
+ if( (trip & 15)==15 ) idx = R.nextInt();
+ if( trip++ > 1024*1024 ) {
+ if( trip > 1024*1024+100 )
+ throw new Exception("barf trip "+sz+" "+HM.size()+" numkeys="+KEYS.length);
+ System.out.println(key);
+ }
+ }
+ }
+
+ if( sz != HM.size() ) {
+ throw new Error("size does not match table contents sz="+sz+" size()="+HM.size());
+ }
+
+ // Launch threads
+ //long nanoz = System.nanoTime();
+ //System.out.println(" "+nanoz+" Create-Threads");
+ hash_test thrs[] = new hash_test[num_threads];
+ for( int i=0; i<num_threads; i++ )
+ thrs[i] = new hash_test(i, HM, ops, nanos);
+ for( int i=0; i<num_threads; i++ )
+ thrs[i].start();
+ // Run threads
+ //long nano = System.nanoTime();
+ //System.out.println(" "+nano+" Start");
+ long start = System.currentTimeMillis();
+ _start = true;
+ try { Thread.sleep(2000); } catch( InterruptedException e ){}
+ _stop = true;
+ long stop = System.currentTimeMillis();
+ //long nanox = System.nanoTime();
+ long millis = stop-start;
+ //System.out.println(" "+nanox+" Stop");
+
+ for( int i=0; i<num_threads; i++ )
+ thrs[i].join();
+ //long nanoy = System.nanoTime();
+ //System.out.println(" "+nanoy+" Join-Done");
+ return millis;
+ }
+
+ // What a worker thread does
+ public void run() {
+ if( _read_ratio == 0 ) {
+ run_churn();
+ } else {
+ run_normal();
+ }
+ }
+
+ // Force a large turnover of live keys, while keeping the total live-set
+ // low: 10 keys are kept alive per thread, out of a set of a million or so.
+ // The keys are constantly churned, so we constantly need to 'cleanse' the
+ // table to flush old entries.
+ public void run_churn() {
+ int reprobe = System.identityHashCode(Thread.currentThread());
+ int idx = reprobe;
+
+ while( !_start ) // Spin till Time To Go
+ try { Thread.sleep(1); } catch( Exception e ){}
+
+ long nano1 = System.nanoTime();
+ int get_ops = 0;
+ int put_ops = 0;
+ int del_ops = 0;
+ while( !_stop ) {
+ // Insert a key 10 probes in the future,
+ // remove a key 0 probes in the future,
+ // Net result is the thread keeps 10 random keys in table
+ String key1 = KEYS[(idx+reprobe*10) & (KEYS.length-1)];
+ _hash.put(key1,key1);
+ put_ops++;
+
+ // Remove a key 0 probes in the future
+ String key2 = KEYS[(idx+reprobe* 0) & (KEYS.length-1)];
+ _hash.remove(key2);
+ del_ops++;
+
+ idx += reprobe;
+ }
+
+ // We stopped; report results into shared result structure
+ long nano2 = System.nanoTime();
+ int total = get_ops+put_ops+del_ops;
+ _ops[_tnum] = total;
+ _nanos[_tnum] = (nano2-nano1);
+ }
+
+ public void run_normal() {
+ SimpleRandom R = new SimpleRandom();
+ while( !_start ) // Spin till Time To Go
+ try { Thread.sleep(1); } catch( Exception e ){}
+
+ long nano1 = System.nanoTime();
+ int get_ops = 0;
+ int put_ops = 0;
+ int del_ops = 0;
+ while( !_stop ) {
+ int x = R.nextInt()&((1<<20)-1);
+ String key = KEYS[R.nextInt()&(KEYS.length-1)];
+ if( x < _gr ) {
+ get_ops++;
+ String val = _hash.get(key);
+ if( val != null && val != key ) throw new IllegalArgumentException("Mismatched key="+key+" and val="+val);
+ } else if( x < _pr ) {
+ put_ops++;
+ _hash.put( key, key );
+ } else {
+ del_ops++;
+ _hash.remove( key );
+ }
+ }
+ // We stopped; report results into shared result structure
+ long nano2 = System.nanoTime();
+ int total = get_ops+put_ops+del_ops;
+ _ops[_tnum] = total;
+ _nanos[_tnum] = (nano2-nano1);
+ }
+
+ // Fairly fast random numbers
+ public static final class SimpleRandom {
+ private final static long multiplier = 0x5DEECE66DL;
+ private final static long addend = 0xBL;
+ private final static long mask = (1L << 48) - 1;
+ static final AtomicLong seq = new AtomicLong( -715159705);
+ private long seed;
+ SimpleRandom(long s) { seed = s; }
+ SimpleRandom() { seed = System.nanoTime() + seq.getAndAdd(129); }
+ public void setSeed(long s) { seed = s; }
+ public int nextInt() { return next(); }
+ public int next() {
+ long nextseed = (seed * multiplier + addend) & mask;
+ seed = nextseed;
+ return ((int)(nextseed >>> 17)) & 0x7FFFFFFF;
+ }
+ }
+
+}
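An illustrative invocation, following the usage string printed by main above
(75% reads, sweeping 1 to 8 threads by 1, a 100,000-entry table,
implementation 6 = NonBlockingHashMap):

    java -classpath . hash_test 75 1 8 1 100000 6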
14 Testing/JUnitSuite.java
@@ -0,0 +1,14 @@
+import junit.framework.Test;
+import junit.framework.TestSuite;
+
+public class JUnitSuite {
+ public static Test suite() {
+ TestSuite suite = new TestSuite();
+ suite.addTestSuite(NBHM_Tester2.class);
+ suite.addTestSuite(NBHML_Tester2.class);
+ suite.addTestSuite(nbhs_tester.class);
+ suite.addTestSuite(nbsi_tester.class);
+ return suite;
+ }
+ public static void main(String[] args) { junit.textui.TestRunner.run(suite()); }
+}
549 Testing/NBHM_Tester/NBHMID_Tester2.java
@@ -0,0 +1,549 @@
+/*
+ * Written by Cliff Click and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ */
+
+import java.io.*;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.*;
+import junit.framework.TestCase;
+import org.cliffc.high_scale_lib.*;
+import static org.hamcrest.CoreMatchers.*;
+import static org.junit.Assert.*;
+
+// Test NonBlockingHashMap via JUnit
+public class NBHMID_Tester2 extends TestCase {
+ public static void main(String args[]) {
+ org.junit.runner.JUnitCore.main("Testing.NBHM_Tester.NBHMID_Tester2");
+ }
+
+ private NonBlockingIdentityHashMap<String,String> _nbhm;
+ protected void setUp () { _nbhm = new NonBlockingIdentityHashMap<String,String>(); }
+ protected void tearDown() { _nbhm = null; }
+
+ // Throw a ClassCastException if I see a tombstone during key-compares
+ private static class KeyBonk {
+ final int _x;
+ KeyBonk( int i ) { _x=i; }
+ public boolean equals( Object o ) {
+ if( o == null ) return false;
+ return ((KeyBonk)o)._x // Throw CCE here
+ == this._x;
+ }
+ public int hashCode() { return (_x>>2); }
+ public String toString() { return "Bonk_"+Integer.toString(_x); }
+ }
+
+ // Test some basic stuff; add a few keys, remove a few keys
+ public void testBasic() {
+ assertTrue ( _nbhm.isEmpty() );
+ assertThat ( _nbhm.putIfAbsent("k1","v1"), nullValue() );
+ checkSizes (1);
+ assertThat ( _nbhm.putIfAbsent("k2","v2"), nullValue() );
+ checkSizes (2);
+ assertTrue ( _nbhm.containsKey("k2") );
+ assertThat ( _nbhm.put("k1","v1a"), is("v1") );
+ assertThat ( _nbhm.put("k2","v2a"), is("v2") );
+ checkSizes (2);
+ assertThat ( _nbhm.putIfAbsent("k2","v2b"), is("v2a") );
+ assertThat ( _nbhm.remove("k1"), is("v1a") );
+ assertFalse( _nbhm.containsKey("k1") );
+ checkSizes (1);
+ assertThat ( _nbhm.remove("k1"), nullValue() );
+ assertThat ( _nbhm.remove("k2"), is("v2a") );
+ checkSizes (0);
+ assertThat ( _nbhm.remove("k2"), nullValue() );
+ assertThat ( _nbhm.remove("k3"), nullValue() );
+ assertTrue ( _nbhm.isEmpty() );
+
+ assertThat ( _nbhm.put("k0","v0"), nullValue() );
+ assertTrue ( _nbhm.containsKey("k0") );
+ checkSizes (1);
+ assertThat ( _nbhm.remove("k0"), is("v0") );
+ assertFalse( _nbhm.containsKey("k0") );
+ checkSizes (0);
+
+ assertThat ( _nbhm.replace("k0","v0"), nullValue() );
+ assertFalse( _nbhm.containsKey("k0") );
+ assertThat ( _nbhm.put("k0","v0"), nullValue() );
+ assertEquals(_nbhm.replace("k0","v0a"), "v0" );
+ assertEquals(_nbhm.get("k0"), "v0a" );
+ assertThat ( _nbhm.remove("k0"), is("v0a") );
+ assertFalse( _nbhm.containsKey("k0") );
+ checkSizes (0);
+
+ assertThat ( _nbhm.replace("k1","v1"), nullValue() );
+ assertFalse( _nbhm.containsKey("k1") );
+ assertThat ( _nbhm.put("k1","v1"), nullValue() );
+ assertEquals(_nbhm.replace("k1","v1a"), "v1" );
+ assertEquals(_nbhm.get("k1"), "v1a" );
+ assertThat ( _nbhm.remove("k1"), is("v1a") );
+ assertFalse( _nbhm.containsKey("k1") );
+ checkSizes (0);
+
+ // Insert & Remove KeyBonks until the table resizes and we start
+ // finding Tombstone keys- and KeyBonk's equals-call with throw a
+ // ClassCastException if it sees a non-KeyBonk.
+ NonBlockingIdentityHashMap<KeyBonk,String> dumb = new NonBlockingIdentityHashMap<KeyBonk,String>();
+ for( int i=0; i<10000; i++ ) {
+ final KeyBonk happy1 = new KeyBonk(i);
+ assertThat( dumb.put(happy1,"and"), nullValue() );
+ if( (i&1)==0 ) dumb.remove(happy1);
+ final KeyBonk happy2 = new KeyBonk(i); // 'equals' but not '=='
+ dumb.get(happy2);
+ }
+ }
+
+ // Check all iterators for correct size counts
+ private void checkSizes(int expectedSize) {
+ assertEquals( "size()", _nbhm.size(), expectedSize );
+ Collection<String> vals = _nbhm.values();
+ checkSizes("values()",vals.size(),vals.iterator(),expectedSize);
+ Set<String> keys = _nbhm.keySet();
+ checkSizes("keySet()",keys.size(),keys.iterator(),expectedSize);
+ Set<Entry<String,String>> ents = _nbhm.entrySet();
+ checkSizes("entrySet()",ents.size(),ents.iterator(),expectedSize);
+ }
+
+ // Check that the iterator iterates the correct number of times
+ private void checkSizes(String msg, int sz, Iterator it, int expectedSize) {
+ assertEquals( msg, expectedSize, sz );
+ int result = 0;
+ while (it.hasNext()) {
+ result++;
+ it.next();
+ }
+ assertEquals( msg, expectedSize, result );
+ }
+
+
+ public void testIteration() {
+ assertTrue ( _nbhm.isEmpty() );
+ assertThat ( _nbhm.put("k1","v1"), nullValue() );
+ assertThat ( _nbhm.put("k2","v2"), nullValue() );
+
+ String str1 = "";
+ for( Iterator<Map.Entry<String,String>> i = _nbhm.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<String,String> e = i.next();
+ str1 += e.getKey();
+ }
+ assertThat("found all entries",str1,anyOf(is("k1k2"),is("k2k1")));
+
+ String str2 = "";
+ for( Iterator<String> i = _nbhm.keySet().iterator(); i.hasNext(); ) {
+ String key = i.next();
+ str2 += key;
+ }
+ assertThat("found all keys",str2,anyOf(is("k1k2"),is("k2k1")));
+
+ String str3 = "";
+ for( Iterator<String> i = _nbhm.values().iterator(); i.hasNext(); ) {
+ String val = i.next();
+ str3 += val;
+ }
+ assertThat("found all vals",str3,anyOf(is("v1v2"),is("v2v1")));
+
+ assertThat("toString works",_nbhm.toString(), anyOf(is("{k1=v1, k2=v2}"),is("{k2=v2, k1=v1}")));
+ }
+
+ public void testSerial() {
+ assertTrue ( _nbhm.isEmpty() );
+ final String k1 = "k1";
+ final String k2 = "k2";
+ assertThat ( _nbhm.put(k1,"v1"), nullValue() );
+ assertThat ( _nbhm.put(k2,"v2"), nullValue() );
+
+ // Serialize it out
+ try {
+ FileOutputStream fos = new FileOutputStream("NBHM_test.txt");
+ ObjectOutputStream out = new ObjectOutputStream(fos);
+ out.writeObject(_nbhm);
+ out.close();
+ } catch(IOException ex) {
+ ex.printStackTrace();
+ }
+
+ // Read it back
+ try {
+ File f = new File("NBHM_test.txt");
+ FileInputStream fis = new FileInputStream(f);
+ ObjectInputStream in = new ObjectInputStream(fis);
+ NonBlockingIdentityHashMap nbhm = (NonBlockingIdentityHashMap)in.readObject();
+ in.close();
+ assertThat("serialization works",nbhm.toString(), anyOf(is("{k1=v1, k2=v2}"),is("{k2=v2, k1=v1}")));
+ if( !f.delete() ) throw new IOException("delete failed");
+ } catch(IOException ex) {
+ ex.printStackTrace();
+ } catch(ClassNotFoundException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ public void testIterationBig2() {
+ final int CNT = 10000;
+ NonBlockingIdentityHashMap<Integer,String> nbhm = new NonBlockingIdentityHashMap<Integer,String>();
+ final String v = "v";
+ for( int i=0; i<CNT; i++ ) {
+ final Integer z = new Integer(i);
+ String s0 = nbhm.get(z);
+ assertThat( s0, nullValue() );
+ nbhm.put(z,v);
+ String s1 = nbhm.get(z);
+ assertThat( s1, is(v) );
+ }
+ assertThat( nbhm.size(), is(CNT) );
+ }
+
+ public void testIterationBig() {
+ final int CNT = 10000;
+ String [] keys = new String[CNT];
+ String [] vals = new String[CNT];
+ assertThat( _nbhm.size(), is(0) );
+ for( int i=0; i<CNT; i++ )
+ _nbhm.put(keys[i]=("k"+i),vals[i]=("v"+i));
+ assertThat( _nbhm.size(), is(CNT) );
+
+ int sz =0;
+ int sum = 0;
+ for( String s : _nbhm.keySet() ) {
+ sz++;
+ assertThat("",s.charAt(0),is('k'));
+ int x = Integer.parseInt(s.substring(1));
+ sum += x;
+ assertTrue(x>=0 && x<=(CNT-1));
+ }
+ assertThat("Found 10000 ints",sz,is(CNT));
+ assertThat("Found all integers in list",sum,is(CNT*(CNT-1)/2));
+
+ assertThat( "can remove 3", _nbhm.remove(keys[3]), is(vals[3]) );
+ assertThat( "can remove 4", _nbhm.remove(keys[4]), is(vals[4]) );
+ sz =0;
+ sum = 0;
+ for( String s : _nbhm.keySet() ) {
+ sz++;
+ assertThat("",s.charAt(0),is('k'));
+ int x = Integer.parseInt(s.substring(1));
+ sum += x;
+ assertTrue(x>=0 && x<=(CNT-1));
+ String v = _nbhm.get(s);
+ assertThat("",v.charAt(0),is('v'));
+ assertThat("",s.substring(1),is(v.substring(1)));
+ }
+ assertThat("Found "+(CNT-2)+" ints",sz,is(CNT-2));
+ assertThat("Found all integers in list",sum,is(CNT*(CNT-1)/2 - (3+4)));
+ }
+
+ // Do some simple concurrent testing
+ public void testConcurrentSimple() throws InterruptedException {
+ final NonBlockingIdentityHashMap<String,String> nbhm = new NonBlockingIdentityHashMap<String,String>();
+ final String [] keys = new String[20000];
+ for( int i=0; i<20000; i++ )
+ keys[i]="k"+i;
+
+ // In 2 threads, add & remove even & odd elements concurrently
+ Thread t1 = new Thread() { public void run() { work_helper(nbhm,"T1",1,keys); } };
+ t1.start();
+ work_helper(nbhm,"T0",0,keys);
+ t1.join();
+
+ // In the end, all members should be removed
+ StringBuffer buf = new StringBuffer();
+ buf.append("Should be emptyset but has these elements: {");
+ boolean found = false;
+ for( String x : nbhm.keySet() ) {
+ buf.append(" ").append(x);
+ found = true;
+ }
+ if( found ) System.out.println(buf+" }");
+ assertThat( "concurrent size=0", nbhm.size(), is(0) );
+ for( String x : nbhm.keySet() ) {
+ assertTrue("No elements so never get here",false);
+ }
+ }
+
+ void work_helper(NonBlockingIdentityHashMap<String,String> nbhm, String thrd, int d, String[] keys) {
+ final int ITERS = 20000;
+ for( int j=0; j<10; j++ ) {
+ long start = System.nanoTime();
+ for( int i=d; i<ITERS; i+=2 )
+ assertThat( "this key not in there, so putIfAbsent must work",
+ nbhm.putIfAbsent(keys[i],thrd), is((String)null) );
+ for( int i=d; i<ITERS; i+=2 )
+ assertTrue( nbhm.remove(keys[i],thrd) );
+ double delta_nanos = System.nanoTime()-start;
+ double delta_secs = delta_nanos/1000000000.0;
+ double ops = ITERS*2;
+ //System.out.println("Thrd"+thrd+" "+(ops/delta_secs)+" ops/sec size="+nbhm.size());
+ }
+ }
+
+ public final void testNonBlockingIdentityHashMapSize() {
+ NonBlockingIdentityHashMap<Long,String> items = new NonBlockingIdentityHashMap<Long,String>();
+ items.put(Long.valueOf(100), "100");
+ items.put(Long.valueOf(101), "101");
+
+ assertEquals("keySet().size()", 2, items.keySet().size());
+ assertTrue("keySet().contains(100)", items.keySet().contains(Long.valueOf(100)));
+ assertTrue("keySet().contains(101)", items.keySet().contains(Long.valueOf(101)));
+
+ assertEquals("values().size()", 2, items.values().size());
+ assertTrue("values().contains(\"100\")", items.values().contains("100"));
+ assertTrue("values().contains(\"101\")", items.values().contains("101"));
+
+ assertEquals("entrySet().size()", 2, items.entrySet().size());
+ boolean found100 = false;
+ boolean found101 = false;
+ for (Entry<Long, String> entry : items.entrySet()) {
+ if (entry.getKey().equals(Long.valueOf(100))) {
+ assertEquals("entry[100].getValue()==\"100\"", "100", entry.getValue());
+ found100 = true;
+ } else if (entry.getKey().equals(Long.valueOf(101))) {
+ assertEquals("entry[101].getValue()==\"101\"", "101", entry.getValue());
+ found101 = true;
+ }
+ }
+ assertTrue("entrySet().contains([100])", found100);
+ assertTrue("entrySet().contains([101])", found101);
+ }
+
+ // Concurrent insertion & then iterator test.
+ static public void testNonBlockingIdentityHashMapIterator() throws InterruptedException {
+ final int ITEM_COUNT1 = 1000;
+ final int THREAD_COUNT = 5;
+ final int PER_CNT = ITEM_COUNT1/THREAD_COUNT;
+ final int ITEM_COUNT = PER_CNT*THREAD_COUNT; // fix roundoff for odd thread counts
+
+ NonBlockingIdentityHashMap<Long,TestKey> nbhml = new NonBlockingIdentityHashMap<Long,TestKey>();
+ // use a barrier to open the gate for all threads at once to avoid rolling
+ // start and no actual concurrency
+ final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT);
+ final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT);
+ final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex);
+ for( int i=0; i<THREAD_COUNT; i++ ) {
+ co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i*PER_CNT));
+ }
+ for( int retCount = 0; retCount < THREAD_COUNT; retCount++ ) {
+ co.take();
+ }
+ ex.shutdown();
+
+ assertEquals("values().size()", ITEM_COUNT, nbhml.values().size());
+ assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size());
+ int itemCount = 0;
+ for( TestKey K : nbhml.values() )
+ itemCount++;
+ assertEquals("values().iterator() count", ITEM_COUNT, itemCount);
+ }
+
+ // --- NBHMLFeeder ---
+ // Class to be called from another thread, to get concurrent installs into
+ // the table.
+ static private class NBHMLFeeder implements Callable<Object> {
+ static private final Random _rand = new Random(System.currentTimeMillis());
+ private final NonBlockingIdentityHashMap<Long,TestKey> _map;
+ private final int _count;
+ private final CyclicBarrier _barrier;
+ private final long _offset;
+ public NBHMLFeeder(final NonBlockingIdentityHashMap<Long,TestKey> map, final int count, final CyclicBarrier barrier, final long offset) {
+ _map = map;
+ _count = count;
+ _barrier = barrier;
+ _offset = offset;
+ }
+ public Object call() throws Exception {
+ _barrier.await(); // barrier, to force racing start
+ for( long j=0; j<_count; j++ )
+ _map.put(j+_offset, new TestKey(_rand.nextLong(),_rand.nextInt (), (short) _rand.nextInt(Short.MAX_VALUE)));
+ return null;
+ }
+ }
+
+ // --- TestKey ---
+ // Funny key tests all sorts of things, has a pre-wired hashCode & equals.
+ static private final class TestKey {
+ public final int _type;
+ public final long _id;
+ public final int _hash;
+ public TestKey(final long id, final int type, int hash) {
+ _id = id;
+ _type = type;
+ _hash = hash;
+ }
+ public int hashCode() { return _hash; }
+ public boolean equals(Object object) {
+ if (null == object) return false;
+ if (object == this) return true;
+ if (object.getClass() != this.getClass()) return false;
+ final TestKey other = (TestKey) object;
+ return (this._type == other._type && this._id == other._id);
+ }
+ public String toString() { return String.format("%s:%d,%d,%d", getClass().getSimpleName(), _id, _type, _hash); }
+ }
+
+ // --- Customer Test Case 3 ------------------------------------------------
+ private TestKeyFeeder getTestKeyFeeder() {
+ final TestKeyFeeder feeder = new TestKeyFeeder();
+ feeder.checkedPut(10401000001844L, 657829272, 680293140); // section 12
+ feeder.checkedPut(10401000000614L, 657829272, 401326994); // section 12
+ feeder.checkedPut(10400345749304L, 2095121916, -9852212); // section 12
+ feeder.checkedPut(10401000002204L, 657829272, 14438460); // section 12
+ feeder.checkedPut(10400345749234L, 1186831289, -894006017); // section 12
+ feeder.checkedPut(10401000500234L, 969314784, -2112018706); // section 12
+ feeder.checkedPut(10401000000284L, 657829272, 521425852); // section 12
+ feeder.checkedPut(10401000002134L, 657829272, 208406306); // section 12
+ feeder.checkedPut(10400345749254L, 2095121916, -341939818); // section 12
+ feeder.checkedPut(10401000500384L, 969314784, -2136811544); // section 12
+ feeder.checkedPut(10401000001944L, 657829272, 935194952); // section 12
+ feeder.checkedPut(10400345749224L, 1186831289, -828214183); // section 12
+ feeder.checkedPut(10400345749244L, 2095121916, -351234120); // section 12
+ feeder.checkedPut(10400333128994L, 2095121916, -496909430); // section 12
+ feeder.checkedPut(10400333197934L, 2095121916, 2147144926); // section 12
+ feeder.checkedPut(10400333197944L, 2095121916, -2082366964); // section 12
+ feeder.checkedPut(10400336947684L, 2095121916, -1404212288); // section 12
+ feeder.checkedPut(10401000000594L, 657829272, 124369790); // section 12
+ feeder.checkedPut(10400331896264L, 2095121916, -1028383492); // section 12
+ feeder.checkedPut(10400332415044L, 2095121916, 1629436704); // section 12
+ feeder.checkedPut(10400345749614L, 1186831289, 1027996827); // section 12
+ feeder.checkedPut(10401000500424L, 969314784, -1871616544); // section 12
+ feeder.checkedPut(10400336947694L, 2095121916, -1468802722); // section 12
+ feeder.checkedPut(10410002672481L, 2154973, 1515288586); // section 12
+ feeder.checkedPut(10410345749171L, 2154973, 2084791828); // section 12
+ feeder.checkedPut(10400004960671L, 2154973, 1554754674); // section 12
+ feeder.checkedPut(10410009983601L, 2154973, -2049707334); // section 12
+ feeder.checkedPut(10410335811601L, 2154973, 1547385114); // section 12
+ feeder.checkedPut(10410000005951L, 2154973, -1136117016); // section 12
+ feeder.checkedPut(10400004938331L, 2154973, -1361373018); // section 12
+ feeder.checkedPut(10410001490421L, 2154973, -818792874); // section 12
+ feeder.checkedPut(10400001187131L, 2154973, 649763142); // section 12
+ feeder.checkedPut(10410000409071L, 2154973, -614460616); // section 12
+ feeder.checkedPut(10410333717391L, 2154973, 1343531416); // section 12
+ feeder.checkedPut(10410336680071L, 2154973, -914544144); // section 12
+ feeder.checkedPut(10410002068511L, 2154973, -746995576); // section 12
+ feeder.checkedPut(10410336207851L, 2154973, 863146156); // section 12
+ feeder.checkedPut(10410002365251L, 2154973, 542724164); // section 12
+ feeder.checkedPut(10400335812581L, 2154973, 2146284796); // section 12
+ feeder.checkedPut(10410337345361L, 2154973, -384625318); // section 12
+ feeder.checkedPut(10410000409091L, 2154973, -528258556); // section 12
+ return feeder;
+ }
+
+ // ---
+ static private class TestKeyFeeder {
+ private final Hashtable<Integer, List<TestKey>> _items = new Hashtable<Integer, List<TestKey>>();
+ private int _size = 0;
+ public int size() { return _size; }
+ // Put items into the hashtable, sorted by 'type' into LinkedLists.
+ public void checkedPut(final long id, final int type, final int hash) {
+ _size++;
+ final TestKey item = new TestKey(id, type, hash);
+ if( !_items.containsKey(type) )
+ _items.put(type, new LinkedList<TestKey>());
+ _items.get(type).add(item);
+ }
+
+ public NonBlockingIdentityHashMap<Long,TestKey> getMapMultithreaded() throws InterruptedException, ExecutionException {
+ final int threadCount = _items.keySet().size();
+ final NonBlockingIdentityHashMap<Long,TestKey> map = new NonBlockingIdentityHashMap<Long,TestKey>();
+
+ // use a barrier to open the gate for all threads at once to avoid rolling start and no actual concurrency
+ final CyclicBarrier barrier = new CyclicBarrier(threadCount);
+ final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
+ final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex);
+ for( Integer type : _items.keySet() ) {
+ // A linked-list of things to insert
+ List<TestKey> items = _items.get(type);
+ TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier);
+ co.submit(feeder);
+ }
+
+ // wait for all threads to return
+ int itemCount = 0;
+ for( int retCount = 0; retCount < threadCount; retCount++ ) {
+ final Future<Integer> result = co.take();
+ itemCount += result.get();
+ }
+ ex.shutdown();
+ return map;
+ }
+ }
+
+ // --- TestKeyFeederThread
+ static private class TestKeyFeederThread implements Callable<Integer> {
+ private final int _type;
+ private final NonBlockingIdentityHashMap<Long,TestKey> _map;
+ private final List<TestKey> _items;
+ private final CyclicBarrier _barrier;
+ public TestKeyFeederThread(final int type, final List<TestKey> items, final NonBlockingIdentityHashMap<Long,TestKey> map, final CyclicBarrier barrier) {
+ _type = type;
+ _map = map;
+ _items = items;
+ _barrier = barrier;
+ }
+
+ public Integer call() throws Exception {
+ _barrier.await();
+ int count = 0;
+ for( TestKey item : _items ) {
+ if (_map.contains(item._id)) {
+ System.err.printf("COLLISION DETECTED: %s exists\n", item.toString());
+ }
+ final TestKey exists = _map.putIfAbsent(item._id, item);
+ if (exists == null) {
+ count++;
+ } else {
+ System.err.printf("COLLISION DETECTED: %s exists as %s\n", item.toString(), exists.toString());
+ }
+ }
+ return count;
+ }
+ }
+
+ // ---
+ public void testNonBlockingIdentityHashMapIteratorMultithreaded() throws InterruptedException, ExecutionException {
+ TestKeyFeeder feeder = getTestKeyFeeder();
+ final int itemCount = feeder.size();
+
+ // validate results
+ final NonBlockingIdentityHashMap<Long,TestKey> items = feeder.getMapMultithreaded();
+ assertEquals("size()", itemCount, items.size());
+
+ assertEquals("values().size()", itemCount, items.values().size());
+
+ assertEquals("entrySet().size()", itemCount, items.entrySet().size());
+
+ int iteratorCount = 0;
+ for( TestKey m : items.values() )
+ iteratorCount++;
+ // sometimes a different result comes back the second time
+ int iteratorCount2 = 0;
+ for( Iterator<TestKey> it = items.values().iterator(); it.hasNext(); ) {
+ iteratorCount2++;
+ it.next();
+ }
+ assertEquals("iterator counts differ", iteratorCount, iteratorCount2);
+ assertEquals("values().iterator() count", itemCount, iteratorCount);
+ }
+
+ // This test is a copy of the JCK test Hashtable2027, which is incorrect.
+ // The test requires a particular order of values to appear in the esa
+ // array - but this is not part of the spec. A different implementation
+ // might put the same values into the array but in a different order.
+ //public void testToArray() {
+ // NonBlockingIdentityHashMap ht = new NonBlockingIdentityHashMap();
+ //
+ // ht.put("Nine", new Integer(9));
+ // ht.put("Ten", new Integer(10));
+ // ht.put("Ten1", new Integer(100));
+ //
+ // Collection es = ht.values();
+ //
+ // Object [] esa = es.toArray();
+ //
+ // ht.remove("Ten1");
+ //
+ // assertEquals( "size check", es.size(), 2 );
+ // assertEquals( "iterator_order[0]", new Integer( 9), esa[0] );
+ // assertEquals( "iterator_order[1]", new Integer(10), esa[1] );
+ //}
+}
510 Testing/NBHM_Tester/NBHML_Tester2.java
@@ -0,0 +1,510 @@
+/*
+ * Written by Cliff Click and released to the public domain, as explained at
+ * http://creativecommons.org/licenses/publicdomain
+ * Additional test cases provided by Andy Martin of TeleAtlas.
+ */
+
+import java.io.*;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.*;
+import junit.framework.TestCase;
+import org.cliffc.high_scale_lib.*;
+import static org.hamcrest.CoreMatchers.*;
+import static org.junit.Assert.*;
+
+// Test NonBlockingHashMapLong via JUnit
+public class NBHML_Tester2 extends TestCase {
+ public static void main(String args[]) {
+ org.junit.runner.JUnitCore.main("Testing.NBHM_Tester.NBHML_Tester2");
+ }
+
+ private NonBlockingHashMapLong<String> _nbhml;
+ protected void setUp () { _nbhml = new NonBlockingHashMapLong<String>(); }
+ protected void tearDown() { _nbhml = null; }
+
+ // Test some basic stuff; add a few keys, remove a few keys
+ public void testBasic() {
+ assertTrue ( _nbhml.isEmpty() );
+ assertThat ( _nbhml.put(1,"v1"), nullValue() );
+ checkSizes (1);
+ assertThat ( _nbhml.putIfAbsent(2,"v2"), nullValue() );
+ checkSizes (2);
+ assertTrue ( _nbhml.containsKey(2) );
+ assertThat ( _nbhml.put(1,"v1a"), is("v1") );
+ assertThat ( _nbhml.put(2,"v2a"), is("v2") );
+ checkSizes (2);
+ assertThat ( _nbhml.putIfAbsent(2,"v2b"), is("v2a") );
+ assertThat ( _nbhml.remove(1), is("v1a") );
+ assertFalse( _nbhml.containsKey(1) );
+ checkSizes (1);
+ assertThat ( _nbhml.remove(1), nullValue() );
+ assertThat ( _nbhml.remove(2), is("v2a") );
+ checkSizes (0);
+ assertThat ( _nbhml.remove(2), nullValue() );
+ assertThat ( _nbhml.remove("k3"), nullValue() );
+ assertTrue ( _nbhml.isEmpty() );
+
+ assertThat ( _nbhml.put(0,"v0"), nullValue() );
+ assertTrue ( _nbhml.containsKey(0) );
+ checkSizes (1);
+ assertThat ( _nbhml.remove(0), is("v0") );
+ assertFalse( _nbhml.containsKey(0) );
+ checkSizes (0);
+
+ assertThat ( _nbhml.replace(0,"v0"), nullValue() );
+ assertFalse( _nbhml.containsKey(0) );
+ assertThat ( _nbhml.put(0,"v0"), nullValue() );
+ assertEquals(_nbhml.replace(0,"v0a"), "v0" );
+ assertEquals(_nbhml.get(0), "v0a" );
+ assertThat ( _nbhml.remove(0), is("v0a") );
+ assertFalse( _nbhml.containsKey(0) );
+ checkSizes (0);
+
+ assertThat ( _nbhml.replace(1,"v1"), nullValue() );
+ assertFalse( _nbhml.containsKey(1) );
+ assertThat ( _nbhml.put(1,"v1"), nullValue() );
+ assertEquals(_nbhml.replace(1,"v1a"), "v1" );
+ assertEquals(_nbhml.get(1), "v1a" );
+ assertThat ( _nbhml.remove(1), is("v1a") );
+ assertFalse( _nbhml.containsKey(1) );
+ checkSizes (0);
+
+ // Simple insert of simple keys, with no reprobing on insert until the
+ // table gets full exactly. Then do a 'get' on the totally full table.
+ NonBlockingHashMapLong<Object> map = new NonBlockingHashMapLong<Object>(32);
+ for( int i = 1; i < 32; i++ )
+ map.put(i, new Object());
+ map.get(33); // this causes a NPE
+ }
+
+ // Check all iterators for correct size counts
+ private void checkSizes(int expectedSize) {
+ assertEquals( "size()", _nbhml.size(), expectedSize );
+ Collection<String> vals = _nbhml.values();
+ checkSizes("values()",vals.size(),vals.iterator(),expectedSize);
+ Set<Long> keys = _nbhml.keySet();
+ checkSizes("keySet()",keys.size(),keys.iterator(),expectedSize);
+ Set<Entry<Long,String>> ents = _nbhml.entrySet();
+ checkSizes("entrySet()",ents.size(),ents.iterator(),expectedSize);
+ }
+
+ // Check that the iterator iterates the correct number of times
+ private void checkSizes(String msg, int sz, Iterator it, int expectedSize) {
+ assertEquals( msg, expectedSize, sz );
+ int result = 0;
+ while (it.hasNext()) {
+ result++;
+ it.next();
+ }
+ assertEquals( msg, expectedSize, result );
+ }
+
+
+ public void testIterationBig2() {
+ final int CNT = 10000;
+ assertThat( _nbhml.size(), is(0) );
+ final String v = "v";
+ for( int i=0; i<CNT; i++ ) {
+ _nbhml.put(i,v);
+ String s = _nbhml.get(i);
+ assertThat( s, is(v) );
+ }
+ assertThat( _nbhml.size(), is(CNT) );
+ }
+
+
+ public void testIteration() {
+ assertTrue ( _nbhml.isEmpty() );
+ assertThat ( _nbhml.put(1,"v1"), nullValue() );
+ assertThat ( _nbhml.put(2,"v2"), nullValue() );
+
+ String str1 = "";
+ for( Iterator<Map.Entry<Long,String>> i = _nbhml.entrySet().iterator(); i.hasNext(); ) {
+ Map.Entry<Long,String> e = i.next();
+ str1 += e.getKey();
+ }
+ assertThat("found all entries",str1,anyOf(is("12"),is("21")));
+
+ String str2 = "";
+ for( Iterator<Long> i = _nbhml.keySet().iterator(); i.hasNext(); ) {
+ Long key = i.next();
+ str2 += key;
+ }
+ assertThat("found all keys",str2,anyOf(is("12"),is("21")));
+
+ String str3 = "";
+ for( Iterator<String> i = _nbhml.values().iterator(); i.hasNext(); ) {
+ String val = i.next();
+ str3 += val;
+ }
+ assertThat("found all vals",str3,anyOf(is("v1v2"),is("v2v1")));
+
+ assertThat("toString works",_nbhml.toString(), anyOf(is("{1=v1, 2=v2}"),is("{2=v2, 1=v1}")));
+ }
+
+ public void testSerial() {
+ assertTrue ( _nbhml.isEmpty() );
+ assertThat ( _nbhml.put(0x12345678L,"v1"), nullValue() );
+ assertThat ( _nbhml.put(0x87654321L,"v2"), nullValue() );
+
+ // Serialize it out
+ try {
+ FileOutputStream fos = new FileOutputStream("NBHML_test.txt");
+ ObjectOutputStream out = new ObjectOutputStream(fos);
+ out.writeObject(_nbhml);
+ out.close();
+ } catch(IOException ex) {
+ ex.printStackTrace();
+ }
+
+ // Read it back
+ try {
+ File f = new File("NBHML_test.txt");
+ FileInputStream fis = new FileInputStream(f);
+ ObjectInputStream in = new ObjectInputStream(fis);
+ NonBlockingHashMapLong nbhml = (NonBlockingHashMapLong)in.readObject();
+ in.close();
+ assertEquals(_nbhml.toString(),nbhml.toString());
+ if( !f.delete() ) throw new IOException("delete failed");
+ } catch(IOException ex) {
+ ex.printStackTrace();
+ } catch(ClassNotFoundException ex) {
+ ex.printStackTrace();
+ }
+
+ }
+
+ public void testIterationBig() {
+ final int CNT = 10000;
+ assertThat( _nbhml.size(), is(0) );
+ for( int i=0; i<CNT; i++ )
+ _nbhml.put(i,"v"+i);
+ assertThat( _nbhml.size(), is(CNT) );
+
+ int sz =0;
+ int sum = 0;
+ for( long x : _nbhml.keySet() ) {
+ sz++;
+ sum += x;
+ assertTrue(x>=0 && x<=(CNT-1));
+ }
+ assertThat("Found 10000 ints",sz,is(CNT));
+ assertThat("Found all integers in list",sum,is(CNT*(CNT-1)/2));
+
+ assertThat( "can remove 3", _nbhml.remove(3), is("v3") );
+ assertThat( "can remove 4", _nbhml.remove(4), is("v4") );
+ sz =0;
+ sum = 0;
+ for( long x : _nbhml.keySet() ) {
+ sz++;
+ sum += x;
+ assertTrue(x>=0 && x<=(CNT-1));
+ String v = _nbhml.get(x);
+ assertThat("",v.charAt(0),is('v'));
+ assertThat("",x,is(Long.parseLong(v.substring(1))));
+ }
+ assertThat("Found "+(CNT-2)+" ints",sz,is(CNT-2));
+ assertThat("Found all integers in list",sum,is(CNT*(CNT-1)/2 - (3+4)));
+ }
+
+ // Do some simple concurrent testing
+ public void testConcurrentSimple() throws InterruptedException {
+ final NonBlockingHashMapLong<String> nbhml = new NonBlockingHashMapLong<String>();
+
+ // In 2 threads, add & remove even & odd elements concurrently
+ final int num_thrds = 2;
+    Thread[] ts = new Thread[num_thrds];
+ for( int i=1; i<num_thrds; i++ ) {
+ final int x = i;
+ ts[i] = new Thread() { public void run() { work_helper(nbhml,x,num_thrds); } };
+ }
+ for( int i=1; i<num_thrds; i++ )
+ ts[i].start();
+ work_helper(nbhml,0,num_thrds);
+ for( int i=1; i<num_thrds; i++ )
+ ts[i].join();
+
+ // In the end, all members should be removed
+ StringBuffer buf = new StringBuffer();
+ buf.append("Should be emptyset but has these elements: {");
+ boolean found = false;
+ for( long x : nbhml.keySet() ) {
+ buf.append(" ").append(x);
+ found = true;
+ }
+ if( found ) System.out.println(buf+" }");
+ assertThat( "concurrent size=0", nbhml.size(), is(0) );
+ for( long x : nbhml.keySet() ) {
+ assertTrue("No elements so never get here",false);
+ }
+ }
+
+ void work_helper(NonBlockingHashMapLong<String> nbhml, int d, int num_thrds) {
+ String thrd = "T"+d;
+ final int ITERS = 20000;
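+    // Thread 'd' owns keys d, d+num_thrds, d+2*num_thrds, ..., so the
+    // putIfAbsent/remove phases below never collide on a key across threads.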
+ for( int j=0; j<10; j++ ) {
+ //long start = System.nanoTime();
+ for( int i=d; i<ITERS; i+=num_thrds )
+ assertThat( "key "+i+" not in there, so putIfAbsent must work",
+ nbhml.putIfAbsent((long)i,thrd), is((String)null) );
+ for( int i=d; i<ITERS; i+=num_thrds )
+ assertTrue( nbhml.remove((long)i,thrd) );
+ //double delta_nanos = System.nanoTime()-start;
+ //double delta_secs = delta_nanos/1000000000.0;
+ //double ops = ITERS*2;
+ //System.out.println("Thrd"+thrd+" "+(ops/delta_secs)+" ops/sec size="+nbhml.size());
+ }
+ }
+
+
+ // --- Customer Test Case 1 ------------------------------------------------
+ public final void testNonBlockingHashMapSize() {
+ NonBlockingHashMapLong<String> items = new NonBlockingHashMapLong<String>();
+ items.put(Long.valueOf(100), "100");
+ items.put(Long.valueOf(101), "101");
+
+ assertEquals("keySet().size()", 2, items.keySet().size());
+ assertTrue("keySet().contains(100)", items.keySet().contains(Long.valueOf(100)));
+ assertTrue("keySet().contains(101)", items.keySet().contains(Long.valueOf(101)));
+
+ assertEquals("values().size()", 2, items.values().size());
+ assertTrue("values().contains(\"100\")", items.values().contains("100"));
+ assertTrue("values().contains(\"101\")", items.values().contains("101"));
+
+ assertEquals("entrySet().size()", 2, items.entrySet().size());
+ boolean found100 = false;
+ boolean found101 = false;
+ for (Entry<Long, String> entry : items.entrySet()) {
+ if (entry.getKey().equals(Long.valueOf(100))) {
+ assertEquals("entry[100].getValue()==\"100\"", "100", entry.getValue());
+ found100 = true;
+ } else if (entry.getKey().equals(Long.valueOf(101))) {
+ assertEquals("entry[101].getValue()==\"101\"", "101", entry.getValue());
+ found101 = true;
+ }
+ }
+ assertTrue("entrySet().contains([100])", found100);
+ assertTrue("entrySet().contains([101])", found101);
+ }
+
+ // --- Customer Test Case 2 ------------------------------------------------
+ // Concurrent insertion & then iterator test.
+ static public void testNonBlockingHashMapIterator() throws InterruptedException {
+ final int ITEM_COUNT1 = 1000;
+ final int THREAD_COUNT = 5;
+ final int PER_CNT = ITEM_COUNT1/THREAD_COUNT;
+ final int ITEM_COUNT = PER_CNT*THREAD_COUNT; // fix roundoff for odd thread counts
+
+ NonBlockingHashMapLong<TestKey> nbhml = new NonBlockingHashMapLong<TestKey>();
+ // use a barrier to open the gate for all threads at once to avoid rolling
+ // start and no actual concurrency
+ final CyclicBarrier barrier = new CyclicBarrier(THREAD_COUNT);
+ final ExecutorService ex = Executors.newFixedThreadPool(THREAD_COUNT);
+ final CompletionService<Object> co = new ExecutorCompletionService<Object>(ex);
+ for( int i=0; i<THREAD_COUNT; i++ ) {
+ co.submit(new NBHMLFeeder(nbhml, PER_CNT, barrier, i*PER_CNT));
+ }
+ for( int retCount = 0; retCount < THREAD_COUNT; retCount++ ) {
+ co.take();
+ }
+ ex.shutdown();
+
+ assertEquals("values().size()", ITEM_COUNT, nbhml.values().size());
+ assertEquals("entrySet().size()", ITEM_COUNT, nbhml.entrySet().size());
+ int itemCount = 0;
+ for( TestKey K : nbhml.values() )
+ itemCount++;
+ assertEquals("values().iterator() count", ITEM_COUNT, itemCount);
+ }
+
+ // --- NBHMLFeeder ---
+ // Class to be called from another thread, to get concurrent installs into
+ // the table.
+ static private class NBHMLFeeder implements Callable<Object> {
+ static private final Random _rand = new Random(System.currentTimeMillis());
+ private final NonBlockingHashMapLong<TestKey> _map;
+ private final int _count;
+ private final CyclicBarrier _barrier;
+ private final long _offset;
+ public NBHMLFeeder(final NonBlockingHashMapLong<TestKey> map, final int count, final CyclicBarrier barrier, final long offset) {
+ _map = map;
+ _count = count;
+ _barrier = barrier;
+ _offset = offset;
+ }
+ public Object call() throws Exception {
+ _barrier.await(); // barrier, to force racing start
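+      // Each feeder owns the disjoint key range [_offset, _offset+_count),
+      // so the racing threads contend on the table, never on the same key.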
+ for( long j=0; j<_count; j++ )
+ _map.put(j+_offset, new TestKey(_rand.nextLong(),_rand.nextInt (), (short) _rand.nextInt(Short.MAX_VALUE)));
+ return null;
+ }
+ }
+
+ // --- TestKey ---
+ // Funny key tests all sorts of things, has a pre-wired hashCode & equals.
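+  // Note: equals() compares only _id and _type while hashCode() returns the
+  // independently supplied _hash, so distinct keys can deliberately share a
+  // hash code, exercising the map's collision handling.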
+ static private final class TestKey {
+ public final int _type;
+ public final long _id;
+ public final int _hash;
+ public TestKey(final long id, final int type, int hash) {
+ _id = id;
+ _type = type;
+ _hash = hash;
+ }
+ public int hashCode() { return _hash; }
+ public boolean equals(Object object) {
+ if (null == object) return false;
+ if (object == this) return true;
+ if (object.getClass() != this.getClass()) return false;
+ final TestKey other = (TestKey) object;
+ return (this._type == other._type && this._id == other._id);
+ }
+ public String toString() { return String.format("%s:%d,%d,%d", getClass().getSimpleName(), _id, _type, _hash); }
+ }
+
+ // --- Customer Test Case 3 ------------------------------------------------
+ private TestKeyFeeder getTestKeyFeeder() {
+ final TestKeyFeeder feeder = new TestKeyFeeder();
+ feeder.checkedPut(10401000001844L, 657829272, 680293140); // section 12
+ feeder.checkedPut(10401000000614L, 657829272, 401326994); // section 12
+ feeder.checkedPut(10400345749304L, 2095121916, -9852212); // section 12
+ feeder.checkedPut(10401000002204L, 657829272, 14438460); // section 12
+ feeder.checkedPut(10400345749234L, 1186831289, -894006017); // section 12
+ feeder.checkedPut(10401000500234L, 969314784, -2112018706); // section 12
+ feeder.checkedPut(10401000000284L, 657829272, 521425852); // section 12
+ feeder.checkedPut(10401000002134L, 657829272, 208406306); // section 12
+ feeder.checkedPut(10400345749254L, 2095121916, -341939818); // section 12
+ feeder.checkedPut(10401000500384L, 969314784, -2136811544); // section 12
+ feeder.checkedPut(10401000001944L, 657829272, 935194952); // section 12
+ feeder.checkedPut(10400345749224L, 1186831289, -828214183); // section 12
+ feeder.checkedPut(10400345749244L, 2095121916, -351234120); // section 12
+ feeder.checkedPut(10400333128994L, 2095121916, -496909430); // section 12
+ feeder.checkedPut(10400333197934L, 2095121916, 2147144926); // section 12
+ feeder.checkedPut(10400333197944L, 2095121916, -2082366964); // section 12
+ feeder.checkedPut(10400336947684L, 2095121916, -1404212288); // section 12
+ feeder.checkedPut(10401000000594L, 657829272, 124369790); // section 12
+ feeder.checkedPut(10400331896264L, 2095121916, -1028383492); // section 12
+ feeder.checkedPut(10400332415044L, 2095121916, 1629436704); // section 12
+ feeder.checkedPut(10400345749614L, 1186831289, 1027996827); // section 12
+ feeder.checkedPut(10401000500424L, 969314784, -1871616544); // section 12
+ feeder.checkedPut(10400336947694L, 2095121916, -1468802722); // section 12
+ feeder.checkedPut(10410002672481L, 2154973, 1515288586); // section 12
+ feeder.checkedPut(10410345749171L, 2154973, 2084791828); // section 12
+ feeder.checkedPut(10400004960671L, 2154973, 1554754674); // section 12
+ feeder.checkedPut(10410009983601L, 2154973, -2049707334); // section 12
+ feeder.checkedPut(10410335811601L, 2154973, 1547385114); // section 12
+ feeder.checkedPut(10410000005951L, 2154973, -1136117016); // section 12
+ feeder.checkedPut(10400004938331L, 2154973, -1361373018); // section 12
+ feeder.checkedPut(10410001490421L, 2154973, -818792874); // section 12
+ feeder.checkedPut(10400001187131L, 2154973, 649763142); // section 12
+ feeder.checkedPut(10410000409071L, 2154973, -614460616); // section 12
+ feeder.checkedPut(10410333717391L, 2154973, 1343531416); // section 12
+ feeder.checkedPut(10410336680071L, 2154973, -914544144); // section 12
+ feeder.checkedPut(10410002068511L, 2154973, -746995576); // section 12
+ feeder.checkedPut(10410336207851L, 2154973, 863146156); // section 12
+ feeder.checkedPut(10410002365251L, 2154973, 542724164); // section 12
+ feeder.checkedPut(10400335812581L, 2154973, 2146284796); // section 12
+ feeder.checkedPut(10410337345361L, 2154973, -384625318); // section 12
+ feeder.checkedPut(10410000409091L, 2154973, -528258556); // section 12
+ return feeder;
+ }
+
+ // ---
+ static private class TestKeyFeeder {
+ private final Hashtable<Integer, List<TestKey>> _items = new Hashtable<Integer, List<TestKey>>();
+ private int _size = 0;
+ public int size() { return _size; }
+ // Put items into the hashtable, sorted by 'type' into LinkedLists.
+ public void checkedPut(final long id, final int type, final int hash) {
+ _size++;
+ final TestKey item = new TestKey(id, type, hash);
+ if( !_items.containsKey(type) )
+ _items.put(type, new LinkedList<TestKey>());
+ _items.get(type).add(item);
+ }
+
+ public NonBlockingHashMapLong<TestKey> getMapMultithreaded() throws InterruptedException, ExecutionException {
+ final int threadCount = _items.keySet().size();
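+      // One feeder thread per distinct key 'type'; each thread installs that
+      // type's full list of keys concurrently with the others.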
+ final NonBlockingHashMapLong<TestKey> map = new NonBlockingHashMapLong<TestKey>();
+
+ // use a barrier to open the gate for all threads at once to avoid rolling start and no actual concurrency
+ final CyclicBarrier barrier = new CyclicBarrier(threadCount);
+ final ExecutorService ex = Executors.newFixedThreadPool(threadCount);
+ final CompletionService<Integer> co = new ExecutorCompletionService<Integer>(ex);
+ for( Integer type : _items.keySet() ) {
+ // A linked-list of things to insert
+ List<TestKey> items = _items.get(type);
+ TestKeyFeederThread feeder = new TestKeyFeederThread(type, items, map, barrier);
+ co.submit(feeder);
+ }
+
+ // wait for all threads to return
+ int itemCount = 0;
+ for( int retCount = 0; retCount < threadCount; retCount++ ) {
+ final Future<Integer> result = co.take();
+ itemCount += result.get();
+ }
+ ex.shutdown();
+ return map;
+ }
+ }
+
+ // --- TestKeyFeederThread
+ static private class TestKeyFeederThread implements Callable<Integer> {
+ private final int _type;
+ private final NonBlockingHashMapLong<TestKey> _map;
+ private final List<TestKey> _items;
+ private final CyclicBarrier _barrier;
+ public TestKeyFeederThread(final int type, final List<TestKey> items, final NonBlockingHashMapLong<TestKey> map, final CyclicBarrier barrier) {
+ _type = type;
+ _map = map;
+ _items = items;
+ _barrier = barrier;
+ }
+
+ public Integer call() throws Exception {
+ _barrier.await();
+ int count = 0;
+ for( TestKey item : _items ) {
+        if (_map.containsKey(item._id)) { // containsKey, not contains(): the legacy contains() tests values, not keys
+ System.err.printf("COLLISION DETECTED: %s exists\n", item.toString());
+ }
+ final TestKey exists = _map.putIfAbsent(item._id, item);
+ if (exists == null) {
+ count++;
+ } else {
+ System.err.printf("COLLISION DETECTED: %s exists as %s\n", item.toString(), exists.toString());
+ }
+ }
+ return count;
+ }
+ }
+
+ // ---
+ public void testNonBlockingHashMapIteratorMultithreaded() throws InterruptedException, ExecutionException {
+ TestKeyFeeder feeder = getTestKeyFeeder();
+ final int itemCount = feeder.size();
+
+ // validate results
+ final NonBlockingHashMapLong<TestKey> items = feeder.getMapMultithreaded();
+ assertEquals("size()", itemCount, items.size());
+
+ assertEquals("values().size()", itemCount, items.values().size());
+
+ assertEquals("entrySet().size()", itemCount, items.entrySet().size());
+
+ int iteratorCount = 0;
+ for( TestKey m : items.values() )
+ iteratorCount++;
+ // sometimes a different result comes back the second time
+ int iteratorCount2 = 0;
+ for( Iterator<TestKey> it = items.values().iterator(); it.hasNext(); ) {
+ iteratorCount2++;
+ it.next();
+ }
+ assertEquals("iterator counts differ", iteratorCount, iteratorCount2);
+ assertEquals("values().iterator() count", itemCount, iteratorCount);
+ }
+
+}
667 Testing/NBHM_Tester/NBHM_Tester.java
@@ -0,0 +1,667 @@
+// A Java Program to more formally test the ideas in my Non-Blocking-Hash-Map
+
+import java.util.*;
+import java.util.concurrent.*;
+
+class NBHM_Tester {
+ // Set of States for an individual State Machine.
+ // Each State is really a pair of memory words.
+ // The first word is only 0, K, X; the 2nd word is only 0, A/a, B/b, _ or x.
+ enum S { // States
+ BAD (0),
+ MT (1), // No Key, No Value
+ X0 (2), // Key is X'd out (slot is dead, nothing to copy)
+ K0 (3), // Key only, Value is NULL
+ KA (4), // Key/Value-A pair
+ Ka (5), // Key/Value-A' pair
+ KB (6), // Key/Value-B pair
+ Kb (7), // Key/Value-B' pair
+ K_ (8), // Key/Tombstone - deleted
+ KX (9); // Key/X pair - copied
+
+ // A field to let me cheapo map to integers
+ final int _idx;
+ S(int idx) { _idx=idx; }
+ static final int MAX = values().length;
+
+ // --- compute_reached ---------------------------------------------------
+ // Used to test sanity of the allowed-transitions
+ private void compute_reached(boolean [] reached) {
+ if( reached[_idx] ) return; // Already reached this state
+ reached[_idx] = true; // First time reached this state
+ S[] T = _allowed_transitions; // Short handy name
+ // Visit all transitions...
+ for( int i=0; i<T.length; i+= 2 )
+ if( T[i] == this ) // If see a transition starting from this state
+ T[i+1].compute_reached(reached); // compute reaching from here
+ }
+
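+    // prime() toggles a value state with its primed twin (KA <-> Ka, KB <-> Kb);
+    // primed values mark entries boxed mid-copy, and states with no primed
+    // form map to BAD.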
+ public static S [] _prime = {
+ BAD,BAD,BAD,BAD,
+ Ka,KA,Kb,KB,
+ BAD,BAD
+ };
+ public S prime() { return _prime[_idx]; }
+ };
+
+ // List of allowed-transitions as S-pairs
+ public static final S[] _allowed_transitions = {
+ S.BAD, S.MT, // Bogus starting transition
+ S.MT, S.X0, // Empty -> dead_slot
+ S.MT, S.K0, // Empty -> Key insertion
+
+ S.K0, S.KA, // Key -> Key/A pair
+ S.K0, S.Ka, // Key -> Key/A' pair
+ S.K0, S.KB, // Key -> Key/B pair
+ S.K0, S.Kb, // Key -> Key/B' pair
+ S.K0, S.K_, // Key -> deleted
+
+ S.KA, S.KB, // Key/A -> Key/B
+ S.KA, S.K_, // Key/A -> deleted
+
+ S.KB, S.KA, // Key/B -> Key/A
+ S.KB, S.K_, // Key/B -> deleted
+
+ S.K_, S.KA, // deleted -> Key/A
+ S.K_, S.KB, // deleted -> Key/B
+
+ S.Ka, S.KA, // Key/A' -> Key/A (strip prime)
+ S.Ka, S.Kb, // Key/A' -> Key/B'
+ S.Ka, S.K0, // Key/A' -> Key alone (same as deleted-prime)
+ S.Ka, S.KB, // Key/A' -> Key/B (last write overrides copy)
+ S.Ka, S.K_, // Key/A' -> Key delete
+
+ S.Kb, S.KB, // Key/B' -> Key/B (strip prime)
+ S.Kb, S.Ka, // Key/B' -> Key/A'
+ S.Kb, S.K0, // Key/B' -> Key alone (same as deleted-prime)
+ S.Kb, S.KA, // Key/B' -> Key/A (last write overrides copy)
+ S.Kb, S.K_, // Key/B' -> Key delete
+
+ S.K0, S.KX, // Key -> copied
+ S.KA, S.KX, // Key/A -> copied
+ S.KB, S.KX, // Key/B -> copied
+ S.K_, S.KX, // deleted -> copied
+
+ null
+ };
+ // power-of-2 larger than _allowed_transitions.length
+ private static final int LOG2TRAN = 6;
+ private static final int MAXTRAN = 1<<LOG2TRAN;
+
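+  // Build a 2-D lookup: sm[from][to] holds the index of the (from,to) pair in
+  // _allowed_transitions, or 0 if the transition is illegal.  The fill loop
+  // starts at i=2 to skip the bogus BAD->MT pair and stops before the trailing
+  // null sentinel.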
+ private static final int[][] fill_state_machine() {
+ int [][] sm = new int[S.MAX][S.MAX];
+ S[] T = _allowed_transitions; // Short handy name
+ // Visit all transitions...
+ for( int i=2; i<T.length-1; i+= 2 )
+ sm[T[i+0]._idx][T[i+1]._idx] = i;
+ return sm;
+ }
+ // Array of allowed transitions
+ public static final int[][] _state_machine = fill_state_machine();
+ // Is this transition allowed as part of the state-machine?
+ public static final int is_SM(S x,S y) { return _state_machine[x._idx][y._idx]; }
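+  // e.g. is_SM(S.MT,S.K0) is non-zero (a key may be installed into an empty
+  // slot), while is_SM(S.K0,S.MT) is 0 (an installed key never disappears).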
+
+
+ // --- Thrd ----------------------------------------------------------------
+ // Notion of an action performed by a single thread, such as 'put(K,A)' or
+ // 'delete(K)' - always with respect to key K. This action will turn into a
+  // series of state-machine transitions (or perhaps a request to move to a
+  // newer state machine).
+
+ public static abstract class Thrd {
+ final String _name; // Nice thread name
+ final int _tid; // This thread index; invariant: _Thrds[_tid]==this
+ final boolean _ordered[]; // This thread is ordered after what other threads?
+ static int _tids; // Max number of threads
+ static final Thrd[] _thrds = new Thrd[10]; // Array of them
+ // Thread that can begin at any time
+ Thrd( String name ) {
+ _tid = _tids++;
+ _name = name;
+ _thrds[_tid] = this;
+ _ordered = null; // shortcut for un-ordered
+ }
+ // Thread that must wait until thread t0 has seen 'at_goal'
+ Thrd( String name, Thrd t0 ) {
+ _tid = _tids++;
+ _name = name;
+ _thrds[_tid] = this;
+ _ordered = new boolean[_thrds.length];
+ _ordered[t0._tid] = true;
+ }
+ // Thread that must wait until threads t0 and t1 have seen 'at_goal'
+ Thrd( String name, Thrd t0, Thrd t1 ) {
+ _tid = _tids++;
+ _name = name;
+ _thrds[_tid] = this;
+ _ordered = new boolean[_thrds.length];
+ _ordered[t0._tid] = true;
+ _ordered[t1._tid] = true;
+ }
+
+ //abstract History step(History h);
+ //abstract boolean at_goal(History h);
+ public String toString() { return _name; }
+ // threads cannot start until prior-ordered-threads finish.
+ // passed in an array of active Thrds (or NULL)
+ public boolean can_start( Thrd[] thrds ) {
+ if( _ordered == null ) return true;
+ for( int i=0; i<thrds.length; i++ )
+ if( thrds[i] != null && _ordered[thrds[i]._tid] )