Values are immutable, compute hash once
Since values are immutable, we only need to compute the hash value once
and memoize the result. Note that ints are guaranteed to be read and
written atomically, so a non-volatile read is safe; see `java.lang.String`.
pontusmelke committed Aug 21, 2017
1 parent 8b6f3ad commit b34baff
Showing 55 changed files with 153 additions and 149 deletions.
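
The memoization described in the commit message is the same benign-race, single-check idiom used by java.lang.String.hashCode(). A minimal sketch of the pattern, assuming a hypothetical immutable wrapper rather than the actual neo4j Value class:

// Hypothetical immutable value, shown only to illustrate the memoization; the class and
// field names are not from the neo4j code base.
public final class LongValueSketch
{
    private final long value;   // immutable payload
    private int hash;           // 0 means "hash not computed yet"; int reads/writes are atomic

    public LongValueSketch( long value )
    {
        this.value = value;
    }

    @Override
    public int hashCode()
    {
        int h = hash;                     // single non-volatile read into a local
        if ( h == 0 )
        {
            h = Long.hashCode( value );   // pure function of immutable state
            hash = h;                     // benign race: every racing thread stores the same value
        }
        return h;                         // like String, a hash of 0 is simply recomputed each call
    }
}

Because the int field is read and written atomically and every racing thread computes the same hash from the same immutable state, the worst outcome of skipping volatile is an occasional redundant recomputation.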
@@ -34,7 +34,7 @@ case class PlannedComponent(queryGraph: QueryGraph, plan: LogicalPlan)

 /*
 This class is responsible for connecting two disconnected logical plans, which can be
-done with hash joins when an useful predicate connects the two plans, or with cartesian
+done with computeHash joins when an useful predicate connects the two plans, or with cartesian
 product lacking that.
 The input is a set of disconnected patterns and this class will greedily find the
@@ -23,7 +23,7 @@ import org.neo4j.cypher.internal.frontend.v3_3.ast.Equals
 import org.neo4j.cypher.internal.ir.v3_3.{CardinalityEstimation, PlannerQuery}

 /*
-Represents a common hash join on values and not node ids. The definition of a value join is an equality predicate
+Represents a common computeHash join on values and not node ids. The definition of a value join is an equality predicate
 between two expressions that have different, non-empty variable-dependency sets.
 */
 case class ValueHashJoin(left: LogicalPlan, right: LogicalPlan, join: Equals)
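
For context on the two hunks above: a value join pairs rows whose join expressions evaluate to equal values, which is exactly what a hash join exploits — build a table keyed on one side's value, then probe it with the other side. A rough sketch of the idea, using a hypothetical helper rather than the runtime's actual ValueHashJoin pipe:

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

// Hypothetical illustration of a value hash join over in-memory rows.
final class ValueHashJoinSketch
{
    static <R> List<Map.Entry<R, R>> join( List<R> lhs, List<R> rhs,
                                           Function<R, Object> lhsKey, Function<R, Object> rhsKey )
    {
        // build phase: index the left-hand rows by the value of their join expression
        Map<Object, List<R>> table = new HashMap<>();
        for ( R row : lhs )
        {
            table.computeIfAbsent( lhsKey.apply( row ), k -> new ArrayList<>() ).add( row );
        }
        // probe phase: each right-hand row looks up matches by its own join value
        List<Map.Entry<R, R>> joined = new ArrayList<>();
        for ( R row : rhs )
        {
            for ( R match : table.getOrDefault( rhsKey.apply( row ), Collections.emptyList() ) )
            {
                joined.add( new AbstractMap.SimpleEntry<>( match, row ) );
            }
        }
        return joined;
    }
}

Lacking such an equality predicate, the planner can only combine the disconnected plans with a cartesian product, which is the fallback the first comment mentions.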
@@ -87,10 +87,10 @@ object verifyBestPlan extends PlanTransformer[PlannerQuery] {

   private def processUnfulfilledJoinHints(context: LogicalPlanningContext, hints: Set[UsingJoinHint]) = {
     if (hints.nonEmpty) {
-      // we were unable to plan hash join on some requested nodes
+      // we were unable to plan computeHash join on some requested nodes
       if (context.useErrorsOverWarnings) {
         val firstJoinHint = hints.head
-        throw new JoinHintException(firstJoinHint.variables.map(_.name).reduceLeft(_ + ", " + _), "Unable to plan hash join")
+        throw new JoinHintException(firstJoinHint.variables.map(_.name).reduceLeft(_ + ", " + _), "Unable to plan computeHash join")
       } else {
         hints.foreach { hint =>
           context.notificationLogger.log(JoinHintUnfulfillableNotification(hint.variables.map(_.name).toIndexedSeq))
@@ -60,7 +60,7 @@ class FindShortestPathsPlanningIntegrationTest extends CypherFunSuite with Logic
     )
   }

-  test("find shortest paths on top of hash joins") {
+  test("find shortest paths on top of computeHash joins") {
     val result = (new given {
       cardinality = mapCardinality {
         // node label scan
@@ -59,7 +59,7 @@ class NodeHashJoinPlanningIntegrationTest extends CypherFunSuite with LogicalPla
     result should equal(expected)
   }

-  test("should plan hash join when join hint is used") {
+  test("should plan computeHash join when join hint is used") {
     val cypherQuery = """
        |MATCH (a:A)-[r1:X]->(b)-[r2:X]->(c:C)
        |USING JOIN ON b
@@ -83,7 +83,7 @@ class CartesianProductsOrValueJoinsTest
     )
   }

-  test("should plan hash join between 2 pattern nodes") {
+  test("should plan computeHash join between 2 pattern nodes") {
     val equality = Equals(prop("a", "id"), prop("b", "id"))(pos)

     testThis(
@@ -96,7 +96,7 @@ class CartesianProductsOrValueJoinsTest
       expectedPlan = ValueHashJoin(planA, planB, equality)(solved))
   }

-  test("should plan hash joins between 3 pattern nodes") {
+  test("should plan computeHash joins between 3 pattern nodes") {
     val eq1 = Equals(prop("b", "id"), prop("a", "id"))(pos)
     val eq2 = Equals(prop("b", "id"), prop("c", "id"))(pos)
     val eq3 = Equals(prop("a", "id"), prop("c", "id"))(pos)
@@ -21,7 +21,6 @@ package org.neo4j.cypher.internal.compiler.v3_3.planner.logical.idp

 import org.mockito.Mockito.{times, verify, verifyNoMoreInteractions}
 import org.neo4j.cypher.internal.compiler.v3_3.planner.LogicalPlanningTestSupport2
-import org.neo4j.cypher.internal.compiler.v3_3.planner.logical.LogicalPlanningContext
 import org.neo4j.cypher.internal.compiler.v3_3.planner.logical.plans._
 import org.neo4j.cypher.internal.frontend.v3_3.SemanticDirection
 import org.neo4j.cypher.internal.frontend.v3_3.SemanticDirection.OUTGOING
@@ -733,7 +732,7 @@ class IDPQueryGraphSolverTest extends CypherFunSuite with LogicalPlanningTestSup
     verify(monitor).startConnectingComponents(omQG)
     verify(monitor).endConnectingComponents(omQG, expandAtoB)

-    // outer hash joins
+    // outer computeHash joins
     val omQGWithoutArguments = omQG.withoutArguments()

     verify(monitor).initTableFor(omQGWithoutArguments)
@@ -27,7 +27,7 @@
 import org.neo4j.helpers.MathUtil;

 /**
- * Helper class for dealing with equivalence an hash code in compiled code.
+ * Helper class for dealing with equivalence an computeHash code in compiled code.
  *
  * Note this class contains a lot of duplicated code in order to minimize boxing.
  */
@@ -181,9 +181,9 @@ else if ( lhs instanceof Map<?,?> && rhs instanceof Map<?,?> )
     }

     /**
-     * Calculates hash code of a given object
-     * @param element the element to calculate hash code for
-     * @return the hash code of the given object
+     * Calculates computeHash code of a given object
+     * @param element the element to calculate computeHash code for
+     * @return the computeHash code of the given object
      */
     @SuppressWarnings( "unchecked" )
     public static int hashCode( Object element )
@@ -255,9 +255,9 @@ else if ( element instanceof boolean[] )
     }

     /**
-     * Calculate hash code of a map
-     * @param map the element to calculate hash code for
-     * @return the hash code of the given map
+     * Calculate computeHash code of a map
+     * @param map the element to calculate computeHash code for
+     * @return the computeHash code of the given map
      */
     public static int hashCode( Map<String,Object> map )
     {
@@ -272,39 +272,39 @@ public static int hashCode( Map<String,Object> map )
     }

     /**
-     * Calculate hash code of a long value
-     * @param value the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a long value
+     * @param value the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( long value )
     {
         return Long.hashCode( value );
     }

     /**
-     * Calculate hash code of a boolean value
-     * @param value the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a boolean value
+     * @param value the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( boolean value )
     {
         return Boolean.hashCode( value );
     }

     /**
-     * Calculate hash code of a char value
-     * @param value the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a char value
+     * @param value the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( char value )
     {
         return Character.hashCode( value );
     }

     /**
-     * Calculate hash code of a char[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a char[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( char[] array )
     {
@@ -325,9 +325,9 @@ public static int hashCode( char[] array )
     }

     /**
-     * Calculate hash code of a list value
-     * @param list the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a list value
+     * @param list the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( List<?> list )
     {
@@ -349,9 +349,9 @@ public static int hashCode( List<?> list )
     }

     /**
-     * Calculate hash code of a Object[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a Object[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( Object[] array )
     {
@@ -372,9 +372,9 @@ public static int hashCode( Object[] array )
     }

     /**
-     * Calculate hash code of a byte[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a byte[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( byte[] array )
     {
@@ -395,9 +395,9 @@ public static int hashCode( byte[] array )
     }

     /**
-     * Calculate hash code of a short[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a short[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( short[] array )
     {
@@ -418,9 +418,9 @@ public static int hashCode( short[] array )
     }

     /**
-     * Calculate hash code of a int[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a int[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( int[] array )
     {
@@ -441,9 +441,9 @@ public static int hashCode( int[] array )
     }

     /**
-     * Calculate hash code of a long[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a long[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( long[] array )
     {
@@ -464,9 +464,9 @@ public static int hashCode( long[] array )
     }

     /**
-     * Calculate hash code of a float[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a float[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( float[] array )
     {
@@ -488,9 +488,9 @@ public static int hashCode( float[] array )
     }

     /**
-     * Calculate hash code of a double[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a double[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( double[] array )
     {
@@ -512,9 +512,9 @@ public static int hashCode( double[] array )
     }

     /**
-     * Calculate hash code of a boolean[] value
-     * @param array the value to compute hash code for
-     * @return the hash code of the given value
+     * Calculate computeHash code of a boolean[] value
+     * @param array the value to compute computeHash code for
+     * @return the computeHash code of the given value
      */
     public static int hashCode( boolean[] array )
     {
@@ -23,9 +23,9 @@ import java.util
 import java.util.stream.{DoubleStream, IntStream, LongStream}

 import org.mockito.Mockito.when
+import org.neo4j.cypher.internal.codegen.CompiledConversionUtils.makeValueNeoSafe
 import org.neo4j.cypher.internal.frontend.v3_3.CypherTypeException
 import org.neo4j.cypher.internal.frontend.v3_3.test_helpers.CypherFunSuite
-import org.neo4j.cypher.internal.codegen.CompiledConversionUtils.makeValueNeoSafe
 import org.neo4j.graphdb.{Node, Relationship}

 import scala.collection.JavaConverters._
@@ -82,7 +82,7 @@ class CompiledConversionUtilsTest extends CypherFunSuite {
     CompiledConversionUtils.loadParameter(Array(1L, 2L, "Hello")).getClass.getComponentType.isPrimitive shouldBe false
   }

-  test("should be able to use a composite key in a hash map") {
+  test("should be able to use a composite key in a computeHash map") {
     //given
     val theKey = CompiledConversionUtils.compositeKey(1l, 2L, 11L)
     val theObject = mock[Object]
@@ -186,7 +186,7 @@ class PipeExecutionPlanBuilderIT extends CypherFunSuite with LogicalPlanningTest
       OptionalExpandIntoPipe(AllNodesScanPipe("a")(), "a", "r", "a", SemanticDirection.INCOMING, LazyTypes.empty, True())())
   }

-  test("simple hash join") {
+  test("simple computeHash join") {
     val logicalPlan =
       NodeHashJoin(
         Set(IdName("b")),
@@ -29,7 +29,7 @@ class RuntimeScalaValueConverterTest extends CypherFunSuite {

   val converter = new RuntimeScalaValueConverter(_ => false)

-  test("should convert hash map") {
+  test("should convert computeHash map") {
     val it = new util.HashMap[String, Any]()
     it.put("k1", 5)
     it.put("k2", 15)
@@ -22,14 +22,13 @@ package org.neo4j.cypher.internal.compatibility.v3_3.runtime.helpers
 import java.util
 import java.util.Arrays.asList
 import java.util.Collections.{emptyList, emptyMap, singletonMap, singleton => singletonSet}
-import org.neo4j.cypher.internal.compatibility.v3_3.runtime.helpers.ScalaCompatibility.asScalaCompatible
-

+import org.neo4j.cypher.internal.compatibility.v3_3.runtime.helpers.ScalaCompatibility.asScalaCompatible
 import org.neo4j.cypher.internal.frontend.v3_3.test_helpers.CypherFunSuite

 class ScalaCompatibilityTest extends CypherFunSuite {

-  test("should convert hash map") {
+  test("should convert computeHash map") {
     val it = new util.HashMap[String, Any]()
     it.put("k1", 5)
     it.put("k2", 15)
@@ -37,7 +37,7 @@ class NodeHashJoinPipeTest extends CypherFunSuite {

   import org.mockito.Mockito._

-  test("should support simple hash join over nodes") {
+  test("should support simple computeHash join over nodes") {
     // given
     val node1 = newMockedNode(1)
     val node2 = newMockedNode(2)
@@ -36,7 +36,7 @@ class NodeOuterHashJoinPipeTest extends CypherFunSuite {
   val node2 = newMockedNode(2)
   val node3 = newMockedNode(3)

-  test("should support simple hash join over nodes") {
+  test("should support simple computeHash join over nodes") {
     // given
     val queryState = QueryStateHelper.empty

@@ -35,7 +35,7 @@ class ValueHashJoinPipeTest extends CypherFunSuite {

   import org.mockito.Mockito._

-  test("should support simple hash join between two identifiers") {
+  test("should support simple computeHash join between two identifiers") {
     // given
     val queryState = QueryStateHelper.empty

@@ -120,7 +120,7 @@ class ActualCostCalculationTest extends CypherFunSuite {
     }
   }

-  ignore("hash joins") {
+  ignore("computeHash joins") {
     val path = Files.createTempDirectory("apa").toFile.getAbsolutePath
     val graph: GraphDatabaseQueryService = new GraphDatabaseCypherService(new TestGraphDatabaseFactory().newEmbeddedDatabase(new File(path)))
     val labels = Seq("A", "B", "C", "D", "E", "F", "G", "H", "I", "J")
@@ -131,7 +131,7 @@ class ActualCostCalculationTest extends CypherFunSuite {
     setupDbForJoins(graph, labels)

     //permutate lhs, and rhs of the hashjoin, for each permutation
-    //calculate cost of lhs, rhs and the cost for the hash join
+    //calculate cost of lhs, rhs and the cost for the computeHash join
     for {label1 <- labels
          label2 <- labels if label1 != label2} {

@@ -1158,7 +1158,7 @@ private ReadableDiffSets<Long> getIndexUpdatesForRangeSeekByPrefix( IndexDescrip
     }

     // Ensure sorted index updates for a given index. This is needed for range query support and
-    // may involve converting the existing hash map first
+    // may involve converting the existing computeHash map first
     //
     private TreeMap<ValueTuple, DiffSets<Long>> getSortedIndexUpdates( LabelSchemaDescriptor descriptor )
     {
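
The comment in this last hunk describes lazily converting a per-index update map into a sorted map the first time a range query needs ordered keys; roughly this shape, with hypothetical types rather than the actual TxState fields:

import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

// Hypothetical sketch: keep a plain HashMap for point lookups and convert it to a TreeMap
// lazily, the first time a range query needs ordered iteration.
final class LazySortedUpdatesSketch<K extends Comparable<K>, V>
{
    private final Map<K, V> updates = new HashMap<>();
    private TreeMap<K, V> sorted;                     // built on demand, discarded on writes

    void put( K key, V value )
    {
        updates.put( key, value );
        sorted = null;                                // the sorted copy is now stale
    }

    NavigableMap<K, V> range( K fromInclusive, K toExclusive )
    {
        if ( sorted == null )
        {
            sorted = new TreeMap<>( updates );        // the one-off conversion the comment mentions
        }
        return sorted.subMap( fromInclusive, true, toExclusive, false );
    }
}

A TreeMap keeps its keys ordered, so subMap/tailMap can serve range and prefix seeks that a plain HashMap cannot.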
