From aed43d181a88be2a42d9aa6a288dca9a288cc544 Mon Sep 17 00:00:00 2001
From: Chris Vest
Date: Mon, 21 Aug 2017 12:14:42 +0200
Subject: [PATCH] Add IncrementalXXH64 as an option for producing index entry ids

This can help alleviate situations where many transactions with a large
number of updates via merge would end up with false deadlocks due to
collisions on their index entry hashes.

We cannot use the new hash function by default because that would be an
HA protocol change, but we can hide it behind a feature toggle.
---
 .../kernel/impl/locking/ResourceTypes.java | 122 +++++++++++++++++-
 1 file changed, 115 insertions(+), 7 deletions(-)

diff --git a/community/kernel/src/main/java/org/neo4j/kernel/impl/locking/ResourceTypes.java b/community/kernel/src/main/java/org/neo4j/kernel/impl/locking/ResourceTypes.java
index cb17598aba0dc..ac3ccef8d0700 100644
--- a/community/kernel/src/main/java/org/neo4j/kernel/impl/locking/ResourceTypes.java
+++ b/community/kernel/src/main/java/org/neo4j/kernel/impl/locking/ResourceTypes.java
@@ -19,15 +19,18 @@
  */
 package org.neo4j.kernel.impl.locking;

+import java.lang.reflect.Array;
 import java.util.HashMap;
 import java.util.Map;

+import org.neo4j.hashing.IncrementalXXH64;
 import org.neo4j.kernel.api.properties.DefinedProperty;
 import org.neo4j.kernel.api.properties.Property;
 import org.neo4j.kernel.api.schema.IndexQuery;
 import org.neo4j.kernel.impl.util.concurrent.LockWaitStrategies;
 import org.neo4j.storageengine.api.lock.ResourceType;
 import org.neo4j.storageengine.api.lock.WaitStrategy;
+import org.neo4j.unsafe.impl.internal.dragons.FeatureToggles;

 import static org.neo4j.collection.primitive.hopscotch.HopScotchHashingAlgorithm.DEFAULT_HASHING;

@@ -40,7 +43,11 @@ public enum ResourceTypes implements ResourceType
     INDEX_ENTRY( 4, LockWaitStrategies.INCREMENTAL_BACKOFF ),
     LEGACY_INDEX( 5, LockWaitStrategies.INCREMENTAL_BACKOFF );

-    private static final Map<Integer, ResourceType> idToType = new HashMap<>();
+    private static final boolean useStrongHashing =
+            FeatureToggles.flag( ResourceTypes.class, "useStrongHashing", false );
+
+    private static final Map<Integer, ResourceType> idToType = new HashMap<>();
+
     static
     {
         for ( ResourceTypes resourceTypes : ResourceTypes.values() )
@@ -71,35 +78,52 @@ public WaitStrategy waitStrategy()
         return waitStrategy;
     }

+    /**
+     * The schema index entry hashing method used up until 2.2.0.
+     */
     public static long legacyIndexResourceId( String name, String key )
     {
-        return (long)name.hashCode() << 32 | key.hashCode();
+        return (long) name.hashCode() << 32 | key.hashCode();
     }

     /**
      * This is the schema index entry hashing method used since 2.2.0 and onwards.
+     * <p>
+     * Use the {@link ResourceTypes#useStrongHashing} feature toggle to use a stronger hash function, which will become
+     * the default in a future release. Note that changing this hash function is effectively a
+     * clustering protocol change in HA setups. Causal cluster setups are unaffected because followers do not take any
+     * locks on the cluster leader.
      */
     public static long indexEntryResourceId( long labelId, IndexQuery.ExactPredicate... predicates )
     {
-        return indexEntryResourceId( labelId, predicates, 0 );
+        if ( !useStrongHashing )
+        {
+            // Default
+            return indexEntryResourceId_2_2_0( labelId, predicates, 0 );
+        }
+        else
+        {
+            // Opt-in
+            return indexEntryResourceId_4_x( labelId, predicates );
+        }
     }

-    private static long indexEntryResourceId( long labelId, IndexQuery.ExactPredicate[] predicates, int i )
+    private static long indexEntryResourceId_2_2_0( long labelId, IndexQuery.ExactPredicate[] predicates, int i )
     {
         int propertyKeyId = predicates[i].propertyKeyId();
         Object value = predicates[i].value();
         // Note:
         // It is important that single-property indexes only hash with this particular call; no additional hashing!
-        long hash = indexEntryResourceId( labelId, propertyKeyId, stringOf( propertyKeyId, value ) );
+        long hash = indexEntryResourceId_2_2_0( labelId, propertyKeyId, stringOf( propertyKeyId, value ) );
         i++;
         if ( i < predicates.length )
         {
-            hash = hash( hash + indexEntryResourceId( labelId, predicates, i ) );
+            hash = hash( hash + indexEntryResourceId_2_2_0( labelId, predicates, i ) );
         }
         return hash;
     }

-    private static long indexEntryResourceId( long labelId, long propertyKeyId, String propertyValue )
+    private static long indexEntryResourceId_2_2_0( long labelId, long propertyKeyId, String propertyValue )
     {
         long hob = hash( labelId + hash( propertyKeyId ) );
         hob <<= 32;
@@ -135,4 +159,88 @@ public static ResourceType fromId( int typeId )
     {
         return idToType.get( typeId );
     }
+
+    /**
+     * This is a stronger, full 64-bit hashing method for schema index entries that we should use by default in a
+     * future release, where we will also upgrade the HA protocol version. Currently this is indicated by the "4_x"
+     * name suffix, but any version where the HA protocol version changes anyway would be just as good an opportunity.
+     *
+     * @see IncrementalXXH64
+     */
+    public static long indexEntryResourceId_4_x( long labelId, IndexQuery.ExactPredicate... predicates )
+    {
+        long hash = IncrementalXXH64.init( 0x0123456789abcdefL );
+
+        hash = IncrementalXXH64.update( hash, labelId );
+
+        for ( IndexQuery.ExactPredicate predicate : predicates )
+        {
+            int propertyKeyId = predicate.propertyKeyId();
+            Object value = predicate.value();
+            Class<?> type = value.getClass();
+
+            hash = IncrementalXXH64.update( hash, propertyKeyId );
+
+            if ( type == String.class )
+            {
+                String str = (String) value;
+                int length = str.length();
+
+                hash = IncrementalXXH64.update( hash, length );
+
+                for ( int i = 0; i < length; i++ )
+                {
+                    hash = IncrementalXXH64.update( hash, str.charAt( i ) );
+                }
+            }
+            else if ( type.isArray() )
+            {
+                int length = Array.getLength( value );
+                Class<?> componentType = type.getComponentType();
+
+                hash = IncrementalXXH64.update( hash, length );
+
+                if ( componentType == String.class )
+                {
+                    for ( int i = 0; i < length; i++ )
+                    {
+                        String str = (String) Array.get( value, i );
+                        int len = str.length();
+
+                        hash = IncrementalXXH64.update( hash, len );
+
+                        for ( int j = 0; j < len; j++ )
+                        {
+                            hash = IncrementalXXH64.update( hash, str.charAt( j ) );
+                        }
+                    }
+                }
+                else if ( componentType == Double.class )
+                {
+                    for ( int i = 0; i < length; i++ )
+                    {
+                        hash = IncrementalXXH64.update(
+                                hash, Double.doubleToLongBits( (Double) Array.get( value, i ) ) );
+                    }
+                }
+                else
+                {
+                    for ( int i = 0; i < length; i++ )
+                    {
+                        hash = IncrementalXXH64.update( hash, ((Number) Array.get( value, i )).longValue() );
+                    }
+                }
+            }
+            else if ( type == Double.class )
+            {
+                hash = IncrementalXXH64.update( hash, Double.doubleToLongBits( (Double) value ) );
+            }
+            else
+            {
+                hash = IncrementalXXH64.update( hash, ((Number) value).longValue() );
+            }
+        }
+
+        return IncrementalXXH64.finalise( hash );
+    }
 }
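
Reviewer note: for anyone who wants to exercise the opt-in path, the flag is read through FeatureToggles in a static field initialiser, which (if I read FeatureToggles right) resolves to a JVM system property named after the declaring class, i.e. org.neo4j.kernel.impl.locking.ResourceTypes.useStrongHashing. Below is a minimal, hypothetical usage sketch of how both hash paths could be exercised; the -D property name and the IndexQuery.exact( propertyKeyId, value ) factory are assumptions based on the surrounding code, not something this patch adds.

    // Hypothetical usage sketch -- not part of the patch. Assumes FeatureToggles
    // resolves flags to a "<fully.qualified.ClassName>.<flagName>" system property,
    // and that IndexQuery.exact( propertyKeyId, value ) is the ExactPredicate factory.
    //
    // Run with the stronger hash enabled (must be set before ResourceTypes is
    // loaded, because the flag is read when the class initialises):
    //
    //   java -Dorg.neo4j.kernel.impl.locking.ResourceTypes.useStrongHashing=true IndexEntryHashingDemo

    import org.neo4j.kernel.api.schema.IndexQuery;
    import org.neo4j.kernel.impl.locking.ResourceTypes;

    public class IndexEntryHashingDemo
    {
        public static void main( String[] args )
        {
            long labelId = 42;
            IndexQuery.ExactPredicate name = IndexQuery.exact( 7, "Alice" );
            IndexQuery.ExactPredicate age = IndexQuery.exact( 8, 33L );

            // With the toggle off this is the 2.2.0-era 32+32 bit composite hash;
            // with it on, the full 64-bit IncrementalXXH64 variant added by this patch.
            long resourceId = ResourceTypes.indexEntryResourceId( labelId, name, age );
            System.out.println( "index entry resource id: " + resourceId );
        }
    }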