From 24d66df0794e3c3733f9d2edf1386ca6892f6d7b Mon Sep 17 00:00:00 2001 From: Yang Song Date: Tue, 17 Jul 2018 02:05:20 -0400 Subject: [PATCH 1/3] Added `intersects` link to table of contents (#107) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index c0a10bc86..fe4e427d2 100644 --- a/README.md +++ b/README.md @@ -41,6 +41,7 @@ It's modeled after Python's `json.tool`, reading from stdin and writing to stdou * [in_collection](#in_collection) * [has_substring](#has_substring) * [contains](#contains) + * [intersects](#intersects) * [has_edge_degree](#has_edge_degree) * [Type coercions](#type-coercions) * [Meta fields](#meta-fields) From baa456f15f3ad10eae76e376d40f112e28de1685 Mon Sep 17 00:00:00 2001 From: Predrag Gruevski Date: Tue, 17 Jul 2018 14:14:05 -0400 Subject: [PATCH 2/3] Ensure that OrientDB query plans only consider good start points. (#102) * Ensure that OrientDB query plans only consider good start points. * Fix many of the tests. * Fix scheduling of optional traversal sub-queries. * Locations with non-local filtering are always ineligible. * More tests, comments, and cleanup. * Fix lint, enable a few py3-specific pylint checks. * Fix test cases after rebase. 
--- .pylintrc | 12 +- graphql_compiler/compiler/emit_match.py | 29 +- graphql_compiler/compiler/helpers.py | 9 + .../ir_lowering_gremlin/ir_lowering.py | 10 +- .../compiler/ir_lowering_match/__init__.py | 5 +- .../compiler/ir_lowering_match/ir_lowering.py | 27 +- .../compiler/ir_lowering_match/utils.py | 28 +- .../workarounds/orientdb_class_with_while.py | 28 +- .../workarounds/orientdb_query_execution.py | 380 ++++++++++++++++++ graphql_compiler/tests/test_compiler.py | 95 ++++- graphql_compiler/tests/test_input_data.py | 25 ++ graphql_compiler/tests/test_ir_generation.py | 37 ++ graphql_compiler/tests/test_ir_lowering.py | 8 +- .../tests/test_testing_invariants.py | 4 +- 14 files changed, 601 insertions(+), 96 deletions(-) create mode 100644 graphql_compiler/compiler/workarounds/orientdb_query_execution.py diff --git a/.pylintrc b/.pylintrc index b3c5d56c3..c2d4843d9 100644 --- a/.pylintrc +++ b/.pylintrc @@ -81,6 +81,7 @@ enable=abstract-class-instantiated, bad-str-strip-call, bad-super-call, bare-except, + basestring-builtin, binary-op-exception, boolean-datetime boolean-datetime, @@ -99,6 +100,7 @@ enable=abstract-class-instantiated, deprecated-lambda, deprecated-method, deprecated-module, + dict-iter-method, dict-view-method, duplicate-argument-name, duplicate-bases, @@ -110,7 +112,6 @@ enable=abstract-class-instantiated, expression-not-assigned, file-builtin, filter-builtin-not-iterating, - fixme, format-combined-specification, format-needs-mapping, function-redefined, @@ -149,6 +150,7 @@ enable=abstract-class-instantiated, lost-exception, lowercase-l-suffix, map-builtin-not-iterating, + metaclass-assignment, method-hidden, misplaced-bare-raise, misplaced-future, @@ -229,6 +231,7 @@ enable=abstract-class-instantiated, unexpected-keyword-arg, unexpected-special-method-signature, unichr-builtin, + unicode-builtin, unnecessary-lambda, unnecessary-pass, unnecessary-semicolon, @@ -250,6 +253,7 @@ enable=abstract-class-instantiated, using-cmp-argument, 
using-constant-test, wildcard-import, + xrange-builtin, yield-inside-async-function, yield-outside-function, zip-builtin-not-iterating, @@ -259,15 +263,11 @@ enable=abstract-class-instantiated, docstyle, # Disabled for now, maybe enable in the future: # abstract-method, # needs some cleanup first - # basestring-builtin, # will come with Python 3 support - # dict-iter-method, # will come with Python 3 support - # metaclass-assignment, # will come with Python 3 support # no-absolute-import, # maybe with Python 3 support # parameter_documentation, # needs some cleanup and maybe configuration - # unicode-builtin, # will come with Python 3 support # unused-argument, # needs some cleanup and per-line suppression, # buggy / unclear how to suppress only a single function - # xrange-builtin, # will come with Python 3 support + # fixme, # sometimes there is a legitimate need for a TODO # Consider the following sample rc files for errors to enable/disable: diff --git a/graphql_compiler/compiler/emit_match.py b/graphql_compiler/compiler/emit_match.py index 0285bbf3a..3f3dbffaa 100644 --- a/graphql_compiler/compiler/emit_match.py +++ b/graphql_compiler/compiler/emit_match.py @@ -6,7 +6,7 @@ from .blocks import Filter, QueryRoot, Recurse, Traverse from .expressions import TrueLiteral -from .helpers import validate_safe_string +from .helpers import get_only_element_from_collection, validate_safe_string def _get_vertex_location_name(location): @@ -20,31 +20,30 @@ def _get_vertex_location_name(location): def _first_step_to_match(match_step): """Transform the very first MATCH step into a MATCH query string.""" - if not isinstance(match_step.root_block, QueryRoot): - raise AssertionError(u'Expected QueryRoot root block, received: ' - u'{} {}'.format(match_step.root_block, match_step)) + parts = [] - match_step.root_block.validate() + if match_step.root_block is not None: + if not isinstance(match_step.root_block, QueryRoot): + raise AssertionError(u'Expected None or QueryRoot root 
block, received: ' + u'{} {}'.format(match_step.root_block, match_step)) + + match_step.root_block.validate() - start_class_set = match_step.root_block.start_class - if len(start_class_set) != 1: - raise AssertionError(u'Attempted to emit MATCH but did not have exactly one start class: ' - u'{} {}'.format(start_class_set, match_step)) - start_class = list(start_class_set)[0] + start_class = get_only_element_from_collection(match_step.root_block.start_class) + parts.append(u'class: %s' % (start_class,)) # MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'. if match_step.coerce_type_block is not None: raise AssertionError(u'Invalid MATCH step: {}'.format(match_step)) - parts = [ - u'class: %s' % (start_class,), - ] - if match_step.where_block: match_step.where_block.validate() parts.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),)) - if match_step.as_block: + if match_step.as_block is None: + raise AssertionError(u'Found a MATCH step without a corresponding Location. 
' + u'This should never happen: {}'.format(match_step)) + else: match_step.as_block.validate() parts.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),)) diff --git a/graphql_compiler/compiler/helpers.py b/graphql_compiler/compiler/helpers.py index 5f941d495..526ac9673 100644 --- a/graphql_compiler/compiler/helpers.py +++ b/graphql_compiler/compiler/helpers.py @@ -3,6 +3,7 @@ from collections import namedtuple import string +import funcy from graphql import GraphQLList, GraphQLNonNull, GraphQLString, is_type from graphql.language.ast import InlineFragment from graphql.type.definition import GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType @@ -23,6 +24,14 @@ ('directive', 'field_ast', 'field_name', 'field_type')) +def get_only_element_from_collection(one_element_collection): + """Assert that the collection has exactly one element, then return that element.""" + if len(one_element_collection) != 1: + raise AssertionError(u'Expected a collection with exactly one element, but got: {}' + .format(one_element_collection)) + return funcy.first(one_element_collection) + + def get_ast_field_name(ast): """Return the normalized field name for the given AST node.""" replacements = { diff --git a/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py b/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py index 142b9e245..9bf317a60 100644 --- a/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py +++ b/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py @@ -19,7 +19,8 @@ from ..expressions import (BinaryComposition, FoldedOutputContextField, Literal, LocalField, NullLiteral) from ..helpers import (STANDARD_DATE_FORMAT, STANDARD_DATETIME_FORMAT, FoldScopeLocation, - strip_non_null_from_type, validate_safe_string) + get_only_element_from_collection, strip_non_null_from_type, + validate_safe_string) from ..ir_lowering_common import extract_folds_from_ir_blocks @@ -55,12 +56,7 @@ def 
lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints): for block in ir_blocks: new_block = block if isinstance(block, CoerceType): - if len(block.target_class) != 1: - raise AssertionError(u'Expected only a single target class for the type coercion, ' - u'but received {}'.format(block.target_class)) - - # Sets are not indexable, so we have to grab the first element of its iterator. - target_class = next(x for x in block.target_class) + target_class = get_only_element_from_collection(block.target_class) if target_class in equivalent_type_names: new_block = CoerceType(equivalent_type_names[target_class]) diff --git a/graphql_compiler/compiler/ir_lowering_match/__init__.py b/graphql_compiler/compiler/ir_lowering_match/__init__.py index 49669e328..636314caa 100644 --- a/graphql_compiler/compiler/ir_lowering_match/__init__.py +++ b/graphql_compiler/compiler/ir_lowering_match/__init__.py @@ -19,7 +19,8 @@ convert_optional_traversals_to_compound_match_query, lower_context_field_expressions, prune_non_existent_outputs) from ..match_query import convert_to_match_query -from ..workarounds import orientdb_class_with_while, orientdb_eval_scheduling +from ..workarounds import (orientdb_class_with_while, orientdb_eval_scheduling, + orientdb_query_execution) from .utils import construct_where_filter_predicate ############## @@ -106,5 +107,7 @@ def lower_ir(ir_blocks, location_types, type_equivalence_hints=None): compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries( compound_match_query) + compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points( + compound_match_query, location_types) return compound_match_query diff --git a/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py b/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py index 975eb2d6c..091738835 100644 --- a/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py +++ b/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py @@ 
-8,15 +8,13 @@ us to convert this Expression into other Expressions, using data already present in the IR, to simplify the final code generation step. """ -import funcy.py2 as funcy import six -from ..blocks import Backtrack, CoerceType, Filter, MarkLocation, QueryRoot +from ..blocks import Backtrack, CoerceType, MarkLocation, QueryRoot from ..expressions import (BinaryComposition, ContextField, ContextFieldExistence, FalseLiteral, - FoldedOutputContextField, Literal, LocalField, TernaryConditional, - TrueLiteral) + FoldedOutputContextField, Literal, TernaryConditional, TrueLiteral) from ..helpers import FoldScopeLocation -from .utils import CompoundMatchQuery +from .utils import convert_coerce_type_to_instanceof_filter ################################## @@ -324,21 +322,10 @@ def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks): """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks.""" new_folded_ir_blocks = [] for block in folded_ir_blocks: - new_block = block - if isinstance(block, CoerceType): - coerce_type_target = block.target_class - if len(coerce_type_target) != 1: - raise AssertionError(u'Unexpected "coerce_type_target" for MATCH query: ' - u'{}'.format(coerce_type_target)) - coerce_type_target = funcy.first(coerce_type_target) - - # INSTANCEOF requires the target class to be passed in as a string, - # so we make the target class a string literal. 
- new_predicate = BinaryComposition( - u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target)) - - new_block = Filter(new_predicate) + new_block = convert_coerce_type_to_instanceof_filter(block) + else: + new_block = block new_folded_ir_blocks.append(new_block) @@ -361,4 +348,4 @@ def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query new_match_query = truncate_repeated_single_step_traversals(match_query) lowered_match_queries.append(new_match_query) - return CompoundMatchQuery(match_queries=lowered_match_queries) + return compound_match_query._replace(match_queries=lowered_match_queries) diff --git a/graphql_compiler/compiler/ir_lowering_match/utils.py b/graphql_compiler/compiler/ir_lowering_match/utils.py index b7024f51a..c3797a44a 100644 --- a/graphql_compiler/compiler/ir_lowering_match/utils.py +++ b/graphql_compiler/compiler/ir_lowering_match/utils.py @@ -3,9 +3,33 @@ import six -from ..expressions import (BinaryComposition, Expression, LocalField, NullLiteral, +from ..blocks import Filter +from ..expressions import (BinaryComposition, Expression, Literal, LocalField, NullLiteral, SelectEdgeContextField, TrueLiteral, UnaryTransformation, ZeroLiteral) -from ..helpers import Location, is_vertex_field_name +from ..helpers import Location, get_only_element_from_collection, is_vertex_field_name + + +def convert_coerce_type_to_instanceof_filter(coerce_type_block): + """Create an "INSTANCEOF" Filter block from a CoerceType block.""" + coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class) + + # INSTANCEOF requires the target class to be passed in as a string, + # so we make the target class a string literal. 
+ new_predicate = BinaryComposition( + u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target)) + + return Filter(new_predicate) + + +def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block): + """Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.""" + instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block) + + if where_block: + # There was already a Filter block -- we'll merge the two predicates together. + return Filter(BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate)) + else: + return instanceof_filter def expression_list_to_conjunction(expression_list): diff --git a/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py b/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py index 88a8d5c16..c00f75075 100644 --- a/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py +++ b/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py @@ -4,30 +4,8 @@ For details, see: https://github.com/orientechnologies/orientdb/issues/8129 """ -import funcy - -from ..blocks import Filter, Recurse -from ..expressions import BinaryComposition, Literal, LocalField - - -def _coerce_block_to_filter_block(coerce_type_block, where_block): - """Create an "INSTANCEOF" Filter block from a CoerceType block.""" - coerce_type_target = coerce_type_block.target_class - if len(coerce_type_target) != 1: - raise AssertionError(u'Unexpected "coerce_type_target" for MATCH query: ' - u'{}'.format(coerce_type_target)) - coerce_type_target = funcy.first(coerce_type_target) - - # INSTANCEOF requires the target class to be passed in as a string, - # so we make the target class a string literal. - new_predicate = BinaryComposition( - u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target)) - - if where_block: - # There was already a Filter block -- we'll merge the two predicates together. 
- new_predicate = BinaryComposition(u'&&', new_predicate, where_block.predicate) - - return Filter(new_predicate) +from ..blocks import Recurse +from ..ir_lowering_match.utils import convert_coerce_type_and_add_to_where_block def workaround_type_coercions_in_recursions(match_query): @@ -52,7 +30,7 @@ def workaround_type_coercions_in_recursions(match_query): has_recurse_root = isinstance(match_step.root_block, Recurse) if has_coerce_type and has_recurse_root: - new_where_block = _coerce_block_to_filter_block( + new_where_block = convert_coerce_type_and_add_to_where_block( match_step.coerce_type_block, match_step.where_block) new_match_step = match_step._replace(coerce_type_block=None, where_block=new_where_block) diff --git a/graphql_compiler/compiler/workarounds/orientdb_query_execution.py b/graphql_compiler/compiler/workarounds/orientdb_query_execution.py new file mode 100644 index 000000000..0f0e003f9 --- /dev/null +++ b/graphql_compiler/compiler/workarounds/orientdb_query_execution.py @@ -0,0 +1,380 @@ +# Copyright 2018-present Kensho Technologies, LLC. +"""Workarounds for OrientDB scheduler issue that causes poor query planning for certain queries. + +For purposes of query planning, the OrientDB query planner ignores "where:" clauses +that hit indexes but do not use the "=" operator. For example, "CONTAINS" can be used to check +that a field covered by an index is in a specified list of values, and can therefore be covered +by an index, but OrientDB will ignore this. When no equality ("=") checks on indexed columns +are present, OrientDB will generate a query plan that starts execution at the class with +lowest cardinality, which can lead to excessive numbers of scanned and discarded records. + +Assuming the query planner creates a query plan where a location with CONTAINS is +the first in the execution order, the execution system will apply indexes +to speed up this operation. 
Therefore, it's sufficient to trick the query planner into +always creating such a query plan, even though it thinks indexes cannot be used in the query. + +Valid query execution start points for the OrientDB query planner must satisfy the following: + - Must not be "optional: true". + - Must not have a "while:" clause nor follow a location that has one. + - Must have a "class:" defined. This class is used for cardinality estimation, and to + look for available indexes that may cover any "where:" clause that may be present. + +The optimizations in this file improve performance by enabling execution start points according +to the following assumptions: + 1. Start points with "where:" clauses that reference only local fields (i.e. not tagged values + from other query locations) are always better than start points without a "where:". + This is because the filter will have to be applied one way or the other, so we might as well + apply it early. + 2. If no such start points are available, we'd like to make available as many start points + as possible, since we'd like OrientDB to start at the start point whose class has + the lowest possible cardinality. + +The process of applying the optimizations is as follows: + - Exclude and ignore all query steps that are inside a fold, optional, or recursion scope, + or have a "where:" clause that references a non-local (i.e. tagged) field. + - Find all remaining query steps with "where:" clauses that reference only local fields. + - If any are found, we guide our actions from assumption 1 above: + - Ensure they have a defined "class:" -- i.e. the OrientDB scheduler will consider them + valid start points. + - Then, prune all other query steps (ones without such "where:" clauses) by removing their + "class:" clause, making them invalid as query start points for OrientDB's scheduler. 
+ - If none are found, we guide our actions from assumption 2 above: + - Ensure that all query points not inside fold, optional, or recursion scope contain + a "class:" clause. That increases the number of available query start points, + so OrientDB can choose the start point of lowest cardinality. +""" + +from ..blocks import CoerceType, QueryRoot, Recurse, Traverse +from ..expressions import ContextField, ContextFieldExistence +from ..helpers import get_only_element_from_collection +from ..ir_lowering_match.utils import convert_coerce_type_and_add_to_where_block + + +def _is_local_filter(filter_block): + """Return True if the Filter block references no non-local fields, and False otherwise.""" + # We need the "result" value of this function to be mutated within the "visitor_fn". + # Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here: + # https://www.python.org/dev/peps/pep-3104/ + # Instead, we use a dict to store the value we need mutated, since the "visitor_fn" + # can mutate state in the parent scope, but not rebind variables in it without "nonlocal". + # TODO(predrag): Revisit this if we drop support for Python 2. + result = { + 'is_local_filter': True + } + filter_predicate = filter_block.predicate + + def visitor_fn(expression): + """Expression visitor function that looks for uses of non-local fields.""" + non_local_expression_types = (ContextField, ContextFieldExistence) + + if isinstance(expression, non_local_expression_types): + result['is_local_filter'] = False + + # Don't change the expression. + return expression + + filter_predicate.visit_and_update(visitor_fn) + + return result['is_local_filter'] + + +def _classify_query_locations(match_query): + """Classify query locations into three groups: preferred, eligible, ineligible. + + - Ineligible locations are ones that cannot be the starting point of query execution. 
+ These include locations within recursions, locations that are the target of + an optional traversal, and locations with an associated "where:" clause with non-local filter. + - Preferred locations are ones that are eligible to be the starting point, and also have + an associated "where:" clause that references no non-local fields -- only local fields, + literals, and variables. + - Eligible locations are all locations that do not fall into either of these two categories. + + Args: + match_query: MatchQuery object describing the query being analyzed for optimization + + Returns: + tuple (preferred, eligible, ineligible) where each element is a set of Location objects. + The three sets are disjoint. + """ + preferred_locations = set() + eligible_locations = set() + ineligible_locations = set() + + # Any query must have at least one traversal with at least one step. + # The first step in this traversal must be a QueryRoot. + first_match_step = match_query.match_traversals[0][0] + if not isinstance(first_match_step.root_block, QueryRoot): + raise AssertionError(u'First step of first traversal unexpectedly was not QueryRoot: ' + u'{} {}'.format(first_match_step, match_query)) + + # The first step in the first traversal cannot possibly be inside an optional, recursion, + # or fold. Its location is always an eligible start location for a query. + # We need to determine whether it is merely eligible, or actually a preferred location. + if first_match_step.where_block is not None: + if _is_local_filter(first_match_step.where_block): + preferred_locations.add(first_match_step.as_block.location) + else: + # TODO(predrag): Fix once we have a proper fix for tag-and-filter in the same scope. + # Either the locally-scoped tag will have to generate a LocalField + # instead of a ContextField, or we'll have to rework the local filter + # detection code in this module. + raise AssertionError(u'The first step of the first traversal somehow had a non-local ' + u'filter. 
This should not be possible, since there is nowhere ' + u'for the tagged value to have come from. Values: {} {}' + .format(first_match_step, match_query)) + else: + eligible_locations.add(first_match_step.as_block.location) + + # This loop will repeat the analysis of the first step of the first traversal. + # QueryRoots other than the first are required to always be at a location whose status + # (preferred / eligible / ineligible) is already known. Since we already processed + # the first QueryRoot above, the rest of the loop can assume all QueryRoots are like that. + for current_traversal in match_query.match_traversals: + for match_step in current_traversal: + current_step_location = match_step.as_block.location + + if isinstance(match_step.root_block, QueryRoot): + already_encountered_location = any(( + current_step_location in preferred_locations, + current_step_location in eligible_locations, + current_step_location in ineligible_locations, + )) + + if not already_encountered_location: + raise AssertionError(u'Unexpectedly encountered a location in QueryRoot whose ' + u'status has not been determined: {} {} {}' + .format(current_step_location, match_step, match_query)) + + at_eligible_or_preferred_location = ( + current_step_location in preferred_locations or + current_step_location in eligible_locations) + + # This location has already been encountered and processed. + # Other than setting the "at_eligible_or_preferred_location" state for the sake of + # the following MATCH steps, there is nothing further to be done. + continue + elif isinstance(match_step.root_block, Recurse): + # All Recurse blocks cause locations within to be ineligible. + at_eligible_or_preferred_location = False + elif isinstance(match_step.root_block, Traverse): + # Optional Traverse blocks cause locations within to be ineligible. 
+ # Non-optional Traverse blocks do not change the eligibility of locations within: + # if the pre-Traverse location was eligible, so will the location within, + # and if it was not eligible, neither will the location within. + if match_step.root_block.optional: + at_eligible_or_preferred_location = False + else: + raise AssertionError(u'Unreachable condition reached: {} {} {}' + .format(match_step.root_block, match_step, match_query)) + + if not at_eligible_or_preferred_location: + ineligible_locations.add(current_step_location) + elif match_step.where_block is not None: + if _is_local_filter(match_step.where_block): + # This location has a local filter, and is not otherwise ineligible (it's not + # in a recursion etc.). Therefore, it's a preferred query start location. + preferred_locations.add(current_step_location) + else: + # Locations with non-local filters are never eligible locations, since they + # depend on another location being executed before them. + ineligible_locations.add(current_step_location) + else: + # No local filtering (i.e. not preferred), but also not ineligible. Eligible it is. + eligible_locations.add(current_step_location) + + return preferred_locations, eligible_locations, ineligible_locations + + +def _calculate_type_bound_at_step(match_step): + """Return the GraphQL type bound at the given step, or None if no bound is given.""" + current_type_bounds = [] + + if isinstance(match_step.root_block, QueryRoot): + # The QueryRoot start class is a type bound. + current_type_bounds.extend(match_step.root_block.start_class) + + if match_step.coerce_type_block is not None: + # The CoerceType target class is also a type bound. + current_type_bounds.extend(match_step.coerce_type_block.target_class) + + if current_type_bounds: + # A type bound exists. Assert that there is exactly one bound, defined in precisely one way. + return get_only_element_from_collection(current_type_bounds) + else: + # No type bound exists at this MATCH step. 
+ return None + + +def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound, + location, match_query): + """Ensure that the two bounds either are an exact match, or one of them is None.""" + if all((current_type_bound is not None, + previous_type_bound is not None, + current_type_bound != previous_type_bound)): + raise AssertionError( + u'Conflicting type bounds calculated at location {}: {} vs {} ' + u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query)) + + +def _expose_only_preferred_locations(match_query, location_types, + preferred_locations, eligible_locations): + """Return a MATCH query where only preferred locations are valid as query start locations.""" + preferred_location_types = dict() + eligible_location_types = dict() + + new_match_traversals = [] + for current_traversal in match_query.match_traversals: + new_traversal = [] + for match_step in current_traversal: + new_step = match_step + current_step_location = match_step.as_block.location + + if current_step_location in preferred_locations: + # This location is preferred. We have to make sure that at least one occurrence + # of this location in the MATCH query has an associated "class:" clause, + # which would be generated by a type bound at the corresponding MATCH step. + current_type_bound = _calculate_type_bound_at_step(match_step) + previous_type_bound = preferred_location_types.get(current_step_location, None) + + if previous_type_bound is not None: + # The location is already valid. If so, make sure that this step either does + # not have any type bounds (e.g. via QueryRoot or CoerceType blocks), + # or has type bounds that match the previously-decided type bound. + _assert_type_bounds_are_not_conflicting( + current_type_bound, previous_type_bound, current_step_location, match_query) + else: + # The location is not yet known to be valid. 
 If it does not have + a type bound in this MATCH step, add a type coercion to the type + registered in "location_types". + if current_type_bound is None: + current_type_bound = location_types[current_step_location].name + new_step = match_step._replace( + coerce_type_block=CoerceType({current_type_bound})) + + preferred_location_types[current_step_location] = current_type_bound + elif current_step_location in eligible_locations: + # This location is eligible, but not preferred. We have to make sure + # none of the MATCH steps with this location have type bounds, and therefore + # will not produce a corresponding "class:" clause in the resulting MATCH query. + current_type_bound = _calculate_type_bound_at_step(match_step) + previous_type_bound = eligible_location_types.get(current_step_location, None) + if current_type_bound is not None: + # There is a type bound here that we need to neutralize. + _assert_type_bounds_are_not_conflicting( + current_type_bound, previous_type_bound, current_step_location, match_query) + + # Record the deduced type bound, so that if we encounter this location again, + # we ensure that we again infer the same type bound. + eligible_location_types[current_step_location] = current_type_bound + + if current_type_bound == location_types[current_step_location].name: + # The type bound here is already implied by the GraphQL query structure. + # We can simply delete the QueryRoot / CoerceType blocks that impart it. + if isinstance(match_step.root_block, QueryRoot): + new_root_block = None + else: + new_root_block = match_step.root_block + + new_step = match_step._replace( + root_block=new_root_block, coerce_type_block=None) + else: + # The type bound here is not already implied by the GraphQL query structure. + # This should only be possible via a CoerceType block. Lower this CoerceType + # block into a Filter with INSTANCEOF to ensure the resulting query has the + # same semantics, while making the location invalid as a query start point.
+ if (isinstance(match_step.root_block, QueryRoot) or + match_step.coerce_type_block is None): + raise AssertionError(u'Unexpected MATCH step applying a type bound not ' + u'already implied by the GraphQL query structure: ' + u'{} {}'.format(match_step, match_query)) + + new_where_block = convert_coerce_type_and_add_to_where_block( + match_step.coerce_type_block, match_step.where_block) + new_step = match_step._replace( + coerce_type_block=None, where_block=new_where_block) + else: + # There is no type bound that OrientDB can find defined at this location. + # No action is necessary. + pass + else: + # This location is neither preferred nor eligible. + # No action is necessary at this location. + pass + + new_traversal.append(new_step) + new_match_traversals.append(new_traversal) + return match_query._replace(match_traversals=new_match_traversals) + + +def _expose_all_eligible_locations(match_query, location_types, eligible_locations): + """Return a MATCH query where all eligible locations are valid as query start locations.""" + eligible_location_types = dict() + + new_match_traversals = [] + for current_traversal in match_query.match_traversals: + new_traversal = [] + for match_step in current_traversal: + new_step = match_step + current_step_location = match_step.as_block.location + + if current_step_location in eligible_locations: + # This location is eligible. We need to make sure it has an associated type bound, + # so that it produces a "class:" clause that will make it a valid query start + # location. It either already has such a type bound, or we can use the type + # implied by the GraphQL query structure to add one. 
+ current_type_bound = _calculate_type_bound_at_step(match_step) + previous_type_bound = eligible_location_types.get(current_step_location, None) + if current_type_bound is None: + current_type_bound = location_types[current_step_location].name + new_coerce_type_block = CoerceType({current_type_bound}) + new_step = match_step._replace(coerce_type_block=new_coerce_type_block) + else: + # There is a type bound here. We simply ensure that the bound is not conflicting + # with any other type bound at a different MATCH step with the same location. + _assert_type_bounds_are_not_conflicting( + current_type_bound, previous_type_bound, current_step_location, match_query) + + # Record the deduced type bound, so that if we encounter this location again, + # we ensure that we again infer the same type bound. + eligible_location_types[current_step_location] = current_type_bound + else: + # This function may only be called if there are no preferred locations. Since this + # location cannot be preferred, and is not eligible, it must be ineligible. + # No action is necessary in this case. + pass + + new_traversal.append(new_step) + new_match_traversals.append(new_traversal) + return match_query._replace(match_traversals=new_match_traversals) + + +def expose_ideal_query_execution_start_points(compound_match_query, location_types): + """Ensure that OrientDB only considers desirable query start points in query planning.""" + new_queries = [] + + for match_query in compound_match_query.match_queries: + location_classification = _classify_query_locations(match_query) + preferred_locations, eligible_locations, _ = location_classification + + if preferred_locations: + # Convert all eligible locations into non-eligible ones, by removing + # their "class:" clause. The "class:" clause is provided either by having + # a QueryRoot block or a CoerceType block in the MatchStep corresponding + # to the location. 
We remove it by converting the class check into + # an "INSTANCEOF" Filter block, which OrientDB is unable to optimize away. + new_query = _expose_only_preferred_locations( + match_query, location_types, preferred_locations, eligible_locations) + elif eligible_locations: + # Make sure that all eligible locations have a "class:" clause by adding + # a CoerceType block that is a no-op as guaranteed by the schema. This merely + # ensures that OrientDB is able to use each of these locations as a query start point, + # and will choose the one whose class is of lowest cardinality. + new_query = _expose_all_eligible_locations( + match_query, location_types, eligible_locations) + else: + raise AssertionError(u'This query has no preferred or eligible query start locations. ' + u'This is almost certainly a bug: {}'.format(match_query)) + + new_queries.append(new_query) + + return compound_match_query._replace(match_queries=new_queries) diff --git a/graphql_compiler/tests/test_compiler.py b/graphql_compiler/tests/test_compiler.py index a8464a0d3..4c7dc7076 100644 --- a/graphql_compiler/tests/test_compiler.py +++ b/graphql_compiler/tests/test_compiler.py @@ -206,6 +206,7 @@ def test_traverse_and_output(self): class: Animal, as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__out_Animal_ParentOf___1 }} RETURN $matches @@ -237,6 +238,7 @@ def test_optional_traverse_after_mandatory_traverse(self): class: Animal, as: Animal___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__out_Animal_OfSpecies___1 }} , {{ class: Animal, @@ -284,9 +286,9 @@ def test_traverse_filter_and_output(self): Animal__out_Animal_ParentOf___1.name AS `parent_name` FROM ( MATCH {{ - class: Animal, as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, where: (((name = {wanted}) OR (alias CONTAINS {wanted}))), as: Animal__out_Animal_ParentOf___1 }} @@ -315,9 +317,9 @@ def test_name_or_alias_filter_on_interface_type(self): Animal__out_Entity_Related___1.name AS 
`related_entity` FROM ( MATCH {{ - class: Animal, as: Animal___1 }}.out('Entity_Related') {{ + class: Entity, where: (((name = {wanted}) OR (alias CONTAINS {wanted}))), as: Animal__out_Entity_Related___1 }} @@ -382,6 +384,7 @@ def test_filter_on_optional_variable_equality(self): class: Animal, as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__out_Animal_ParentOf___1 }}.out('Animal_FedAt') {{ optional: true, @@ -763,7 +766,6 @@ def test_complex_optional_variables(self): optional: true, as: Animal__out_Animal_ParentOf__out_Animal_FedAt___1 }} , {{ - class: Animal, as: Animal__out_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ as: Animal__out_Animal_ParentOf__in_Animal_ParentOf___1 @@ -915,6 +917,7 @@ def test_simple_fragment(self): class: Animal, as: Animal__out_Entity_Related___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__out_Entity_Related__out_Animal_OfSpecies___1 }} RETURN $matches @@ -951,6 +954,7 @@ def test_typename_output(self): class: Animal, as: Animal___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__out_Animal_OfSpecies___1 }} RETURN $matches @@ -1786,9 +1790,9 @@ def test_has_edge_degree_op_filter_with_optional(self): Species___1.name AS `species_name` FROM ( MATCH {{ - class: Species, as: Species___1 }}.in('Animal_OfSpecies') {{ + class: Animal, where: (( (({child_count} = 0) AND (out_Animal_ParentOf IS null)) OR ((out_Animal_ParentOf IS NOT null) AND @@ -1847,9 +1851,9 @@ def test_has_edge_degree_op_filter_with_fold(self): Species___1.name AS `species_name` FROM ( MATCH {{ - class: Species, as: Species___1 }}.in('Animal_OfSpecies') {{ + class: Animal, where: (( (({child_count} = 0) AND (out_Animal_ParentOf IS null)) OR ((out_Animal_ParentOf IS NOT null) AND @@ -1928,7 +1932,6 @@ def test_filter_on_fragment_in_union(self): Species___1.name AS `species_name` FROM ( MATCH {{ - class: Species, as: Species___1 }}.out('Species_Eats') {{ class: Food, @@ -2236,6 +2239,7 @@ def 
test_fold_after_traverse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }} RETURN $matches @@ -2363,6 +2367,7 @@ def test_traverse_and_fold_and_traverse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }} RETURN $matches @@ -3076,8 +3081,10 @@ def test_optional_and_traverse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__in_Animal_ParentOf___1 }} RETURN $matches @@ -3218,10 +3225,13 @@ def test_optional_and_deep_traverse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__out_Animal_ParentOf___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__in_Animal_ParentOf__out_Animal_ParentOf __out_Animal_OfSpecies___1 }} @@ -3278,9 +3288,9 @@ def test_traverse_and_optional_and_traverse(self): Animal__in_Animal_ParentOf___1.name AS `child_name` FROM ( MATCH {{ - class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, where: (( (out_Animal_ParentOf IS null) OR @@ -3304,10 +3314,13 @@ def test_traverse_and_optional_and_traverse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__out_Animal_ParentOf___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__in_Animal_ParentOf__out_Animal_ParentOf __out_Animal_OfSpecies___1 }} @@ -3666,11 +3679,13 @@ def test_coercion_on_interface_within_optional_traversal(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.out('Entity_Related') {{ class: Animal, as: 
Animal__in_Animal_ParentOf__out_Entity_Related___1 }}.out('Animal_OfSpecies') {{ + class: Species, as: Animal__in_Animal_ParentOf__out_Entity_Related __out_Animal_OfSpecies___1 }} @@ -3719,9 +3734,9 @@ def test_filter_on_optional_traversal_equality(self): Animal___1.name AS `animal_name` FROM ( MATCH {{ - class: Animal, as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, where: (( (out_Animal_ParentOf IS null) OR @@ -3730,7 +3745,6 @@ def test_filter_on_optional_traversal_equality(self): as: Animal__out_Animal_ParentOf___1 }} , {{ - class: Animal, as: Animal___1 }}.out('Animal_FedAt') {{ as: Animal__out_Animal_FedAt___1 @@ -3746,10 +3760,13 @@ def test_filter_on_optional_traversal_equality(self): class: Animal, as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__out_Animal_ParentOf___1 }}.out('Animal_ParentOf') {{ + class: Animal, as: Animal__out_Animal_ParentOf__out_Animal_ParentOf___1 }}.out('Animal_FedAt') {{ + class: Event, as: Animal__out_Animal_ParentOf__out_Animal_ParentOf __out_Animal_FedAt___1 }} , @@ -3840,8 +3857,10 @@ def test_filter_on_optional_traversal_name_or_alias(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__in_Animal_ParentOf___1 }} , {{ @@ -3918,6 +3937,7 @@ def test_complex_optional_traversal_variables(self): where: ((name = {animal_name})), as: Animal___1 }}.out('Animal_ParentOf') {{ + class: Animal, where: (( (in_Animal_ParentOf IS null) OR @@ -3988,7 +4008,6 @@ def test_complex_optional_traversal_variables(self): as: Animal__out_Animal_ParentOf__out_Animal_FedAt___1 }} , {{ - class: Animal, as: Animal__out_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ as: Animal__out_Animal_ParentOf__in_Animal_ParentOf___1 @@ -4153,6 +4172,7 @@ def test_simple_optional_recurse(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: 
Animal__in_Animal_ParentOf___1 }}.out('Animal_ParentOf') {{ while: ($depth < 3), @@ -4229,14 +4249,17 @@ def test_multiple_traverse_within_optional(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__in_Animal_ParentOf___1 }} , {{ class: Animal, as: Animal__in_Animal_ParentOf___1 }}.out('Animal_FedAt') {{ + class: Event, as: Animal__in_Animal_ParentOf__out_Animal_FedAt___1 }} RETURN $matches @@ -4408,7 +4431,7 @@ def test_optional_traversal_and_fold_traversal(self): (in_Animal_ParentOf.size() = 0))), as: Animal___1 }} - RETURN $matches + RETURN $matches ) LET $Animal___1___out_Animal_ParentOf = @@ -4425,11 +4448,13 @@ def test_optional_traversal_and_fold_traversal(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__in_Animal_ParentOf___1 }} - RETURN $matches + RETURN $matches ) LET $Animal___1___out_Animal_ParentOf @@ -4486,7 +4511,7 @@ def test_fold_traversal_and_optional_traversal(self): (in_Animal_ParentOf.size() = 0))), as: Animal___1 }} - RETURN $matches + RETURN $matches ) LET $Animal___1___out_Animal_ParentOf = @@ -4503,11 +4528,13 @@ def test_fold_traversal_and_optional_traversal(self): class: Animal, as: Animal___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf___1 }}.in('Animal_ParentOf') {{ + class: Animal, as: Animal__in_Animal_ParentOf__in_Animal_ParentOf___1 }} - RETURN $matches + RETURN $matches ) LET $Animal___1___out_Animal_ParentOf @@ -4573,3 +4600,43 @@ def test_between_lowering(self): ''' check_test_data(self, test_data, expected_match, expected_gremlin) + + def test_coercion_and_filter_with_tag(self): + test_data = test_input_data.coercion_and_filter_with_tag() + + expected_match = ''' + SELECT + Animal___1.name AS `origin`, + 
Animal__out_Entity_Related___1.name AS `related_name` + FROM ( + MATCH {{ + class: Animal, + as: Animal___1 + }}.out('Entity_Related') {{ + class: Animal, + where: ((name LIKE ('%' + ($matched.Animal___1.name + '%')))), + as: Animal__out_Entity_Related___1 + }} + RETURN $matches + ) + ''' + expected_gremlin = ''' + g.V('@class', 'Animal') + .as('Animal___1') + .out('Entity_Related') + .filter{it, m -> + ( + ['Animal'].contains(it['@class']) + && + it.name.contains(m.Animal___1.name) + ) + } + .as('Animal__out_Entity_Related___1') + .back('Animal___1') + .transform{it, m -> new com.orientechnologies.orient.core.record.impl.ODocument([ + origin: m.Animal___1.name, + related_name: m.Animal__out_Entity_Related___1.name + ])} + ''' + + check_test_data(self, test_data, expected_match, expected_gremlin) diff --git a/graphql_compiler/tests/test_input_data.py b/graphql_compiler/tests/test_input_data.py index 3ef91fec7..21fd4eca9 100644 --- a/graphql_compiler/tests/test_input_data.py +++ b/graphql_compiler/tests/test_input_data.py @@ -2119,3 +2119,28 @@ def between_lowering(): expected_output_metadata=expected_output_metadata, expected_input_metadata=expected_input_metadata, type_equivalence_hints=None) + + +def coercion_and_filter_with_tag(): + graphql_input = '''{ + Animal { + name @output(out_name: "origin") @tag(tag_name: "related") + out_Entity_Related { + ... 
on Animal { + name @filter(op_name: "has_substring", value: ["%related"]) + @output(out_name: "related_name") + } + } + } + }''' + expected_output_metadata = { + 'origin': OutputMetadata(type=GraphQLString, optional=False), + 'related_name': OutputMetadata(type=GraphQLString, optional=False), + } + expected_input_metadata = {} + + return CommonTestData( + graphql_input=graphql_input, + expected_output_metadata=expected_output_metadata, + expected_input_metadata=expected_input_metadata, + type_equivalence_hints=None) diff --git a/graphql_compiler/tests/test_ir_generation.py b/graphql_compiler/tests/test_ir_generation.py index 7a975094f..307d42314 100644 --- a/graphql_compiler/tests/test_ir_generation.py +++ b/graphql_compiler/tests/test_ir_generation.py @@ -3371,3 +3371,40 @@ def test_between_lowering(self): } check_test_data(self, test_data, expected_blocks, expected_location_types) + + def test_coercion_and_filter_with_tag(self): + test_data = test_input_data.coercion_and_filter_with_tag() + + base_location = helpers.Location(('Animal',)) + related_location = base_location.navigate_to_subpath('out_Entity_Related') + + expected_blocks = [ + blocks.QueryRoot({'Animal'}), + blocks.MarkLocation(base_location), + + blocks.Traverse('out', 'Entity_Related'), + blocks.CoerceType({'Animal'}), + + blocks.Filter( + expressions.BinaryComposition( + u'has_substring', + expressions.LocalField('name'), + expressions.ContextField(base_location.navigate_to_field('name')), + ), + ), + blocks.MarkLocation(related_location), + + blocks.Backtrack(base_location), + blocks.ConstructResult({ + 'origin': expressions.OutputContextField( + base_location.navigate_to_field('name'), GraphQLString), + 'related_name': expressions.OutputContextField( + related_location.navigate_to_field('name'), GraphQLString), + }), + ] + expected_location_types = { + base_location: 'Animal', + related_location: 'Animal', + } + + check_test_data(self, test_data, expected_blocks, expected_location_types) diff 
--git a/graphql_compiler/tests/test_ir_lowering.py b/graphql_compiler/tests/test_ir_lowering.py index 1973aaa5e..11e4449de 100644 --- a/graphql_compiler/tests/test_ir_lowering.py +++ b/graphql_compiler/tests/test_ir_lowering.py @@ -5,8 +5,8 @@ from graphql import GraphQLString from ..compiler import ir_lowering_common, ir_lowering_gremlin, ir_lowering_match, ir_sanity_checks -from ..compiler.blocks import (Backtrack, ConstructResult, EndOptional, Filter, MarkLocation, - QueryRoot, Traverse) +from ..compiler.blocks import (Backtrack, CoerceType, ConstructResult, EndOptional, Filter, + MarkLocation, QueryRoot, Traverse) from ..compiler.expressions import (BinaryComposition, ContextField, ContextFieldExistence, FalseLiteral, Literal, LocalField, NullLiteral, OutputContextField, TernaryConditional, TrueLiteral, @@ -706,8 +706,6 @@ def test_between_lowering_inverted_inequalities(self): final_query = ir_lowering_match.lower_comparisons_to_between(match_query) check_test_data(self, expected_final_query, final_query) - # Disabled until OrientDB fixes the limitation against traversing from an optional vertex. - # For details, see https://github.com/orientechnologies/orientdb/issues/6788 def test_optional_traversal_edge_case(self): # Both Animal and out_Animal_ParentOf have an out_Animal_FedAt field, # ensure the correct such field is picked out after full lowering. 
@@ -786,9 +784,11 @@ def test_optional_traversal_edge_case(self): QueryRoot({'Animal'}), MarkLocation(base_location), Traverse('out', 'Animal_ParentOf'), + CoerceType({'Animal'}), MarkLocation(child_location), Traverse('out', 'Animal_FedAt'), + CoerceType({'Event'}), MarkLocation(child_fed_at_location), ConstructResult({ diff --git a/graphql_compiler/tests/test_testing_invariants.py b/graphql_compiler/tests/test_testing_invariants.py index e17d973b6..617e903c5 100644 --- a/graphql_compiler/tests/test_testing_invariants.py +++ b/graphql_compiler/tests/test_testing_invariants.py @@ -50,7 +50,7 @@ def test_ir_generation_test_invariants(self): ir_generation_test_names = get_test_function_names_from_class(IrGenerationTests) for expected_test_function_name in self.expected_test_functions: if expected_test_function_name not in ir_generation_test_names: - raise AssertionError(u'Test case "{}" not found in ir_generation_tests.py.' + raise AssertionError(u'Test case "{}" not found in test_ir_generation.py.' .format(expected_test_function_name)) def test_compiler_test_invariants(self): @@ -60,5 +60,5 @@ def test_compiler_test_invariants(self): compiler_test_names = get_test_function_names_from_class(CompilerTests) for expected_test_function_name in self.expected_test_functions: if expected_test_function_name not in compiler_test_names: - raise AssertionError(u'Test case "{}" not found in compiler_tests.py.' + raise AssertionError(u'Test case "{}" not found in test_compiler.py.' .format(expected_test_function_name)) From cc373cea0e4dd8e591566a8b05909a7d9d52f5dd Mon Sep 17 00:00:00 2001 From: Predrag Gruevski Date: Tue, 17 Jul 2018 14:22:06 -0400 Subject: [PATCH 3/3] Release v1.7.0. 
--- CHANGELOG.md | 13 +++++++++++++ graphql_compiler/__init__.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a4e6da93..14177c1b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,19 @@ ## Current development version +## v1.7.0 + +- Add a new `@filter` operator: `intersects`. [#100](https://github.com/kensho-technologies/graphql-compiler/pull/100) +- Add an optimization that helps OrientDB choose a good starting point for query evaluation. [#102](https://github.com/kensho-technologies/graphql-compiler/pull/102) + +The new optimization pass manages what type information is visible at different points in +the generated query. By exposing additional type information, or hiding existing type information, +the compiler maximizes the likelihood that OrientDB will start evaluating the query at the +location of lowest cardinality. This produces a massive performance benefit -- up to 1000x +on some queries! + +Thanks to `yangsong97` for making his first contribution with the `intersects` operator! + ## v1.6.2 - Fix incorrect filtering in `@optional` locations. [#95](https://github.com/kensho-technologies/graphql-compiler/pull/95) diff --git a/graphql_compiler/__init__.py b/graphql_compiler/__init__.py index 353141035..3224f12ec 100644 --- a/graphql_compiler/__init__.py +++ b/graphql_compiler/__init__.py @@ -14,7 +14,7 @@ __package_name__ = 'graphql-compiler' -__version__ = '1.6.2' +__version__ = '1.7.0' def graphql_to_match(schema, graphql_query, parameters, type_equivalence_hints=None):