Skip to content

Commit

Permalink
Merge branch 'master' into feat_snapshot_tests
Browse files Browse the repository at this point in the history
  • Loading branch information
amartyashankha committed Jul 10, 2018
2 parents 8de2455 + 0b62747 commit be15d9d
Show file tree
Hide file tree
Showing 58 changed files with 1,810 additions and 257 deletions.
3 changes: 2 additions & 1 deletion .travis.yml
Expand Up @@ -10,9 +10,10 @@ install:
before_script:
- docker-compose up -d
script:
- ./scripts/copyright_line_check.sh
- isort --check-only --verbose --recursive graphql_compiler/
- flake8 --exclude **/snap_*.py graphql_compiler/
- pydocstyle graphql_compiler/
- isort --check-only --verbose --recursive graphql_compiler/
- pylint graphql_compiler/
- bandit -r graphql_compiler/
- py.test --cov=graphql_compiler graphql_compiler/tests
Expand Down
1 change: 1 addition & 0 deletions AUTHORS.md
@@ -1 +1,2 @@
Predrag Gruevski (github: obi1kenobi)
Amartya Shankha Biswas (github: amartyashankha)
16 changes: 16 additions & 0 deletions CHANGELOG.md
Expand Up @@ -2,6 +2,22 @@

## Current development version

## v1.6.2

- Fix incorrect filtering in `@optional` locations. [#95](https://github.com/kensho-technologies/graphql-compiler/pull/95)

Thanks to `amartyashankha` for the fix!

## v1.6.1

- Fix a bad compilation bug on `@fold` and `@optional` in the same scope. [#86](https://github.com/kensho-technologies/graphql-compiler/pull/86)

Thanks to `amartyashankha` for the fix!

## v1.6.0

- Add full support for `Decimal` data, including both filtering and output. [#91](https://github.com/kensho-technologies/graphql-compiler/pull/91)

## v1.5.0

- Allow expanding vertex fields within `@optional` scopes. [#83](https://github.com/kensho-technologies/graphql-compiler/pull/83)
Expand Down
10 changes: 8 additions & 2 deletions CONTRIBUTING.md
Expand Up @@ -33,13 +33,19 @@ This project follows the

Additionally, any contributions must pass the following set of lint and style checks with no issues:
```
isort --check-only --verbose --recursive graphql_compiler/
flake8 graphql_compiler/
pydocstyle graphql_compiler/
isort --check-only --verbose --recursive graphql_compiler/
pylint graphql_compiler/
bandit -r graphql_compiler/
```

Finally, all python files in the repository must display the copyright of the project,
to protect the terms of the license. Please make sure that your files start with a line like:
```
# Copyright 20xx-present Kensho Technologies, LLC.
```
3 changes: 2 additions & 1 deletion README.md
Expand Up @@ -1153,4 +1153,5 @@ software distributed under the License is distributed on an "AS IS" BASIS, WITHO
CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
governing permissions and limitations under the License.

Copyright 2017 Kensho Technologies, LLC.
Copyright 2017-present Kensho Technologies, LLC. The present date is determined by the timestamp
of the most recent commit in the repository.
6 changes: 3 additions & 3 deletions graphql_compiler/__init__.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Commonly-used functions and data types from this package."""
from .compiler import CompilationResult, OutputMetadata # noqa
from .compiler import compile_graphql_to_gremlin, compile_graphql_to_match # noqa
Expand All @@ -10,11 +10,11 @@
from .exceptions import GraphQLValidationError, GraphQLInvalidArgumentError # noqa

from .schema import DIRECTIVES # noqa
from .schema import GraphQLDate, GraphQLDateTime # noqa
from .schema import GraphQLDate, GraphQLDateTime, GraphQLDecimal # noqa


__package_name__ = 'graphql-compiler'
__version__ = '1.5.0'
__version__ = '1.6.2'


def graphql_to_match(schema, graphql_query, parameters, type_equivalence_hints=None):
Expand Down
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/__init__.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
from .common import CompilationResult, compile_graphql_to_gremlin, compile_graphql_to_match # noqa
from .common import GREMLIN_LANGUAGE, MATCH_LANGUAGE # noqa
from .compiler_frontend import OutputMetadata # noqa
10 changes: 9 additions & 1 deletion graphql_compiler/compiler/blocks.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Definitions of the basic blocks of the compiler."""

import six
Expand Down Expand Up @@ -444,3 +444,11 @@ class EndOptional(MarkerBlock):
def validate(self):
"""In isolation, EndOptional blocks are always valid."""
pass


class GlobalOperationsStart(MarkerBlock):
    """Marker separating the MATCH traversal blocks from the global operations that follow."""

    def validate(self):
        """Validate the block; GlobalOperationsStart carries no state and is always valid."""
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/common.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
from collections import namedtuple

from . import emit_gremlin, emit_match, ir_lowering_gremlin, ir_lowering_match
Expand Down
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/compiler_entities.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Base classes for compiler entity objects like basic blocks and expressions."""

from abc import ABCMeta, abstractmethod
Expand Down
89 changes: 82 additions & 7 deletions graphql_compiler/compiler/compiler_frontend.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Front-end for GraphQL to database queries compiler.
High-level overview of the GraphQL ingestion process that outputs the compiler's
Expand Down Expand Up @@ -82,6 +82,26 @@
is_vertex_field_name, strip_non_null_from_type, validate_safe_string)


# LocationStackEntry contains the following:
# - location: Location object correspoding to an inserted MarkLocation block
# - num_traverses: Int counter for the number of traverses intserted after the last MarkLocation
# (corresponding Location stored in `location`)
LocationStackEntry = namedtuple('LocationStackEntry', ('location', 'num_traverses'))


def _construct_location_stack_entry(location, num_traverses):
"""Return a LocationStackEntry namedtuple with the specified parameters."""
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "num_traverses" {}. This is not allowed.'
.format(num_traverses))
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "location" {}. This is not allowed.'
.format(location))
return LocationStackEntry(location=location, num_traverses=num_traverses)


# The OutputMetadata will have the following types for its members:
# - type: a GraphQL type object, like String or Integer, describing the type of that output value
# - optional: boolean, whether the output is part of an optional traversal and
Expand Down Expand Up @@ -357,12 +377,18 @@ def _compile_vertex_ast(schema, current_schema_type, ast,
inner_location, context)
basic_blocks.extend(inner_basic_blocks)

# The length of the stack should be the same before exiting this function
initial_marked_location_stack_size = len(context['marked_location_stack'])

# step V-3: mark the graph position, and process output_source directive
if not is_in_fold_scope(context):
# We only mark the position if we aren't in a folded scope.
# Folded scopes don't actually traverse to the location, so it's never really visited.
context['location_types'][location] = strip_non_null_from_type(current_schema_type)
basic_blocks.append(_mark_location(location))
# The following append is the Location corresponding to the initial MarkLocation
# for the current vertex and the `num_traverses` counter set to 0.
context['marked_location_stack'].append(_construct_location_stack_entry(location, 0))

output_source = _process_output_source_directive(schema, current_schema_type, ast,
location, context, unique_local_directives)
Expand Down Expand Up @@ -395,13 +421,16 @@ def _compile_vertex_ast(schema, current_schema_type, ast,

if edge_traversal_is_optional:
# Entering an optional block!
# Make sure there's a tag right before it for the optional Backtrack to jump back to.
# Otherwise, the traversal could rewind to an old tag and might ignore
# entire stretches of applied filtering.
if not isinstance(basic_blocks[-1], blocks.MarkLocation):
# Make sure there's a marked location right before it for the optional Backtrack
# to jump back to. Otherwise, the traversal could rewind to an old marked location
# and might ignore entire stretches of applied filtering.
if context['marked_location_stack'][-1].num_traverses > 0:
location = location.revisit()
context['location_types'][location] = strip_non_null_from_type(current_schema_type)
basic_blocks.append(_mark_location(location))
context['marked_location_stack'].pop()
new_stack_entry = _construct_location_stack_entry(location, 0)
context['marked_location_stack'].append(new_stack_entry)

# Remember where the topmost optional context started.
topmost_optional = context.get('optional', None)
Expand All @@ -412,7 +441,8 @@ def _compile_vertex_ast(schema, current_schema_type, ast,
edge_direction, edge_name = _get_edge_direction_and_name(field_name)

if fold_directive:
fold_scope_location = FoldScopeLocation(location, (edge_direction, edge_name))
current_location = context['marked_location_stack'][-1].location
fold_scope_location = FoldScopeLocation(current_location, (edge_direction, edge_name))
fold_block = blocks.Fold(fold_scope_location)
basic_blocks.append(fold_block)
context['fold'] = fold_scope_location
Expand All @@ -428,6 +458,14 @@ def _compile_vertex_ast(schema, current_schema_type, ast,
optional=edge_traversal_is_optional,
within_optional_scope=within_optional_scope))

if not fold_directive and not is_in_fold_scope(context):
# Current block is either a Traverse or a Recurse that is not within any fold context.
# Increment the `num_traverses` counter.
old_location_stack_entry = context['marked_location_stack'][-1]
new_location_stack_entry = _construct_location_stack_entry(
old_location_stack_entry.location, old_location_stack_entry.num_traverses + 1)
context['marked_location_stack'][-1] = new_location_stack_entry

inner_basic_blocks = _compile_ast_node_to_ir(schema, field_schema_type, field_ast,
inner_location, context)
basic_blocks.extend(inner_basic_blocks)
Expand Down Expand Up @@ -457,14 +495,30 @@ def _compile_vertex_ast(schema, current_schema_type, ast,
basic_blocks.append(blocks.Backtrack(location, optional=True))

# Exiting optional block!
# Add a tag right after the optional, to ensure future Backtrack blocks
# Add a MarkLocation right after the optional, to ensure future Backtrack blocks
# return to a position after the optional set of blocks.
location = location.revisit()
context['location_types'][location] = strip_non_null_from_type(current_schema_type)
basic_blocks.append(_mark_location(location))
context['marked_location_stack'].pop()
new_stack_entry = _construct_location_stack_entry(location, 0)
context['marked_location_stack'].append(new_stack_entry)
else:
basic_blocks.append(blocks.Backtrack(location))

# Pop off the initial Location for the current vertex.
if not is_in_fold_scope(context):
context['marked_location_stack'].pop()

# Check that the length of the stack remains the same as when control entered this function.
final_marked_location_stack_size = len(context['marked_location_stack'])
if initial_marked_location_stack_size != final_marked_location_stack_size:
raise AssertionError(u'Size of stack changed from {} to {} after executing this function.'
u'This should never happen : {}'
.format(initial_marked_location_stack_size,
final_marked_location_stack_size,
context['marked_location_stack']))

return basic_blocks


Expand Down Expand Up @@ -624,11 +678,32 @@ def _compile_root_ast_to_ir(schema, ast, type_equivalence_hints=None):
# Construct the start location of the query, and the starting context object.
location = Location((base_start_type,))
context = {
# 'tags' is a dict containing
# - location: Location where the tag was defined
# - optional: boolean representing whether the tag was defined within an @optional scope
# - type: GraphQLType of the tagged value
'tags': dict(),
# 'outputs' is a dict mapping each output name to another dict which contains
# - location: Location where to output from
# - optional: boolean representing whether the output was defined within an @optional scope
# - type: GraphQLType of the output
# - fold: FoldScopeLocation object if the current output was defined within a fold scope,
# and None otherwise
'outputs': dict(),
# 'inputs' is a dict mapping input parameter names to their respective expected GraphQL
# types, as automatically inferred by inspecting the query structure
'inputs': dict(),
# 'location_types' is a dict mapping each Location to its GraphQLType
# (schema type of the location)
'location_types': dict(),
# 'type_equivalence_hints' is a dict mapping GraphQL types to equivalent GraphQL unions
'type_equivalence_hints': type_equivalence_hints or dict(),
# The marked_location_stack explicitly maintains a stack (implemented as list)
# of namedtuples (each corresponding to a MarkLocation) containing:
# - location: the location within the corresponding MarkLocation object
# - num_traverses: the number of Recurse and Traverse blocks created
# after the corresponding MarkLocation
'marked_location_stack': []
}

# Add the query root basic block to the output.
Expand Down
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/context_helpers.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Helper functions for dealing with the frontend "context" object."""

from ..exceptions import GraphQLCompilationError
Expand Down
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/directive_helpers.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Helper functions for dealing with GraphQL directives."""

from graphql.language.ast import InlineFragment
Expand Down
2 changes: 1 addition & 1 deletion graphql_compiler/compiler/emit_gremlin.py
@@ -1,4 +1,4 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Convert lowered IR basic blocks to Gremlin query strings."""


Expand Down
15 changes: 14 additions & 1 deletion graphql_compiler/compiler/emit_match.py
@@ -1,10 +1,11 @@
# Copyright 2017 Kensho Technologies, LLC.
# Copyright 2017-present Kensho Technologies, LLC.
"""Convert lowered IR basic blocks to MATCH query strings."""
from collections import deque

import six

from .blocks import Filter, QueryRoot, Recurse, Traverse
from .expressions import TrueLiteral
from .helpers import validate_safe_string


Expand Down Expand Up @@ -156,6 +157,14 @@ def _construct_output_to_match(output_block):
return u'SELECT %s FROM' % (u', '.join(selections),)


def _construct_where_to_match(where_block):
    """Emit the WHERE clause of a MATCH query for the given Filter block."""
    predicate = where_block.predicate
    # A TrueLiteral predicate is a no-op filter; it should never reach query emission.
    if predicate == TrueLiteral:
        raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}'
                             .format(where_block))
    return u'WHERE ' + predicate.to_match()


##############
# Public API #
##############
Expand Down Expand Up @@ -197,6 +206,10 @@ def emit_code_from_single_match_query(match_query):
# Represent and add the SELECT clauses with the proper output data.
query_data.appendleft(_construct_output_to_match(match_query.output_block))

# Represent and add the WHERE clause with the proper filters.
if match_query.where_block is not None:
query_data.append(_construct_where_to_match(match_query.where_block))

return u' '.join(query_data)


Expand Down

0 comments on commit be15d9d

Please sign in to comment.