Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Amend metadata table for "explain" support. (#141)
Amend metadata table for "explain" support. (#141)
* Amend metadata table for "explain" support. * QueryMetadataTable.get_explain_infos() returns information required for explain analysis. It is in the form of a list of ExplainInfo objects. * ExplainFilterInfo provides: - op_name: The operation is useful for estimating complexity, i.e. == is faster than between. - field_name: For checking whether the field has an index or not. - args: If there are $args, we can check the size of the user-provided input. * ExplainRecurseInfo provides the recursion depth for now. * fix ExplainFilterInfo -> FilterInfo. * fix lint error * remove unused import * lint issue * sort imports * Cleanup explain infos. - metadata table now has separate calls for FilterInfo and RecurseInfo - Unittest checks if any extra infos appear. - Unittest is deriving from the class and using self.assertEqual - setdefault is used instead of defaultdict * fix unused import * Allow multiple RecurseInfos. We only keep vertices in the metadata table. Multiple recursions at the same vertex for different edges should be recorded together. The name and direction of each edge are also added to the RecurseInfo. * unit test cleanup - test method names reflect the test query name from test_input_data - has_edge_degree test added - multiple filters on same field test added - test expectations are simplified with variables * fix lint spacing * review fixes - FilterInfo field_name renamed to fields and is now a tuple. Most operators have one field, but name_or_alias sets a (name, alias) tuple here. This makes it more consistent for callers to process a filter's fields. - FilterInfo args field is now a tuple instead of a list. - assertEqual order is fixed for better error display in tests. - One more test case for name_or_alias - Test expectation indentations improved. * fix lint errors * Simplify statement. * fix indent
- Loading branch information
1 parent
695fa5a
commit 4a97610
Showing
4 changed files
with
193 additions
and
5 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,132 @@ | ||
# Copyright 2018-present Kensho Technologies, LLC. | ||
import unittest | ||
|
||
from . import test_input_data | ||
from ..compiler.compiler_frontend import graphql_to_ir | ||
from ..compiler.helpers import Location | ||
from ..compiler.metadata import FilterInfo, RecurseInfo | ||
from .test_helpers import get_schema | ||
|
||
|
||
class ExplainInfoTests(unittest.TestCase):
    """Ensure we get correct information about filters and recursion."""

    def setUp(self):
        """Initialize the test schema once for all tests."""
        self.schema = get_schema()

    def check(self, graphql_test, expected_filters, expected_recurses):
        """Verify that the query produces the expected explain infos.

        Args:
            graphql_test: callable (from test_input_data) returning an object
                          whose .graphql_input attribute is the query to compile
            expected_filters: iterable of (Location, [FilterInfo, ...]) pairs
            expected_recurses: iterable of (Location, [RecurseInfo, ...]) pairs
        """
        ir_and_metadata = graphql_to_ir(self.schema, graphql_test().graphql_input)
        meta = ir_and_metadata.query_metadata_table
        # Unfortunately literal dicts don't accept Location() as keys,
        # so expectations are passed in as pairs and converted here.
        expected_filters = dict(expected_filters)
        expected_recurses = dict(expected_recurses)
        for location, _ in meta.registered_locations:
            # pop() both compares and consumes the expectation for this
            # location, correctly handling an expected empty info list as well.
            filters = meta.get_filter_infos(location)
            self.assertEqual(expected_filters.pop(location, []), filters)
            recurse = meta.get_recurse_infos(location)
            self.assertEqual(expected_recurses.pop(location, []), recurse)
        # Anything left over refers to a location the query never registered;
        # comparing against {} shows the offending locations on failure.
        self.assertEqual({}, expected_filters)
        self.assertEqual({}, expected_recurses)

    def test_traverse_filter_and_output(self):
        """Check a single name_or_alias filter recorded inside a traversal."""
        loc = Location(('Animal', 'out_Animal_ParentOf'), None, 1)
        filters = [
            FilterInfo(fields=('name', 'alias'), op_name='name_or_alias', args=('$wanted',)),
        ]

        self.check(test_input_data.traverse_filter_and_output,
                   [(loc, filters)],
                   [])

    def test_complex_optional_traversal_variables(self):
        """Check filters recorded at two different locations, including 'between'."""
        loc1 = Location(('Animal',), None, 1)
        filters1 = [
            FilterInfo(fields=('name',), op_name='=', args=('$animal_name',)),
        ]

        loc2 = Location(('Animal', 'in_Animal_ParentOf', 'out_Animal_FedAt'), None, 1)
        filters2 = [
            FilterInfo(fields=('name',), op_name='=', args=('%parent_fed_at_event',)),
            FilterInfo(fields=('event_date',),
                       op_name='between',
                       args=('%other_child_fed_at', '%parent_fed_at')),
        ]

        self.check(test_input_data.complex_optional_traversal_variables,
                   [(loc1, filters1), (loc2, filters2)],
                   [])

    def test_coercion_filters_and_multiple_outputs_within_fold_scope(self):
        """Check that no filter or recurse infos are reported for this query."""
        self.check(test_input_data.coercion_filters_and_multiple_outputs_within_fold_scope,
                   [],
                   [])

    def test_multiple_filters(self):
        """Check two filters on the same field at one location."""
        loc = Location(('Animal',), None, 1)
        filters = [
            FilterInfo(fields=('name',), op_name='>=', args=('$lower_bound',)),
            FilterInfo(fields=('name',), op_name='<', args=('$upper_bound',))
        ]

        self.check(test_input_data.multiple_filters,
                   [(loc, filters)],
                   [])

    def test_has_edge_degree_op_filter(self):
        """Check a has_edge_degree filter, whose field is an edge name."""
        loc = Location(('Animal',), None, 1)
        filters = [
            FilterInfo(fields=('in_Animal_ParentOf',),
                       op_name='has_edge_degree',
                       args=('$child_count',))
        ]

        self.check(test_input_data.has_edge_degree_op_filter,
                   [(loc, filters)],
                   [])

    def test_simple_recurse(self):
        """Check a single recursion reported with its edge direction, name, and depth."""
        loc = Location(('Animal',), None, 1)
        recurses = [
            RecurseInfo(edge_direction='out', edge_name='Animal_ParentOf', depth=1)
        ]

        self.check(test_input_data.simple_recurse,
                   [],
                   [(loc, recurses)])

    def test_two_consecutive_recurses(self):
        """Check multiple recursions at one vertex, plus a filter at the same location."""
        loc = Location(('Animal',), None, 1)
        filters = [
            FilterInfo(fields=('name', 'alias'),
                       op_name='name_or_alias',
                       args=('$animal_name_or_alias',))
        ]
        recurses = [
            RecurseInfo(edge_direction='out', edge_name='Animal_ParentOf', depth=2),
            RecurseInfo(edge_direction='in', edge_name='Animal_ParentOf', depth=2)
        ]

        self.check(test_input_data.two_consecutive_recurses,
                   [(loc, filters)],
                   [(loc, recurses)])

    def test_filter_on_optional_traversal_name_or_alias(self):
        """Check a name_or_alias filter using a tagged (%-prefixed) argument."""
        loc = Location(('Animal', 'out_Animal_ParentOf'), None, 1)
        filters = [
            FilterInfo(fields=('name', 'alias'),
                       op_name='name_or_alias',
                       args=('%grandchild_name',))
        ]

        self.check(test_input_data.filter_on_optional_traversal_name_or_alias,
                   [(loc, filters)],
                   [])