From 4796d024d489c7ca2a9c9ebe3e4e9ef557457cca Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Fri, 31 May 2019 20:57:44 -0400 Subject: [PATCH 01/16] Getting ES fields from a `StringFilterES` with a `django_filters.filters.CharFilter` --- graphene_django/elasticsearch/__init__.py | 9 +++ .../elasticsearch/filter/__init__.py | 9 +++ .../elasticsearch/filter/filters.py | 45 +++++++++++++++ .../elasticsearch/filter/filterset.py | 55 +++++++++++++++++++ graphene_django/utils/__init__.py | 2 + graphene_django/utils/utils.py | 8 +++ 6 files changed, 128 insertions(+) create mode 100644 graphene_django/elasticsearch/__init__.py create mode 100644 graphene_django/elasticsearch/filter/__init__.py create mode 100644 graphene_django/elasticsearch/filter/filters.py create mode 100644 graphene_django/elasticsearch/filter/filterset.py diff --git a/graphene_django/elasticsearch/__init__.py b/graphene_django/elasticsearch/__init__.py new file mode 100644 index 000000000..31497d3cd --- /dev/null +++ b/graphene_django/elasticsearch/__init__.py @@ -0,0 +1,9 @@ +import warnings +from ..utils import DJANGO_ELASTICSEARCH_DSL_INSTALLED + +if not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + warnings.warn( + "Use of elasticsearch integration requires the django_elasticsearch_dsl package " + "be installed. You can do so using `pip install django_elasticsearch_dsl`", + ImportWarning, + ) diff --git a/graphene_django/elasticsearch/filter/__init__.py b/graphene_django/elasticsearch/filter/__init__.py new file mode 100644 index 000000000..1f318b608 --- /dev/null +++ b/graphene_django/elasticsearch/filter/__init__.py @@ -0,0 +1,9 @@ +import warnings +from ...utils import DJANGO_FILTER_INSTALLED + +if not DJANGO_FILTER_INSTALLED: + warnings.warn( + "Use of django elasticsearch filtering requires the django-filter package " + "be installed. You can do so using `pip install django-filter`", + ImportWarning, + ) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py new file mode 100644 index 000000000..b1d6b9d25 --- /dev/null +++ b/graphene_django/elasticsearch/filter/filters.py @@ -0,0 +1,45 @@ +"""Filters to ElasticSearch""" +from collections import OrderedDict +from django_filters import CharFilter +from elasticsearch_dsl import Q + + +class StringFilterES(object): # pylint: disable=R0902 + """String Fields specific to ElasticSearch.""" + + default_expr = 'contain' + filter_class = CharFilter + + variants = { + "contain": lambda name, value: Q('match', + **{name: { + "query": value, + "fuzziness": "auto" + }}), + + "term": lambda name, value: Q('term', **{name: value}), + } + + def __init__(self, name=None, attr=None): + """ + :param name: Name of the field. This is the name that will be exported. + :param attr: Path to the index attr that will be used as filter. + """ + assert name or attr, "At least the field name or the field attr should be passed" + self.field_name = name or attr.replace('.', '_') + self.fields = self.generate_fields() + + def generate_fields(self): + """ + All FilterSet objects should specify its fields for the introspection. + + :return: A mapping of field to Filter type of field with all the suffix + expressions combinations. 
+ """ + fields = OrderedDict() + for variant in self.variants: + variant_name = self.field_name if variant in ["default", self.default_expr] \ + else "%s_%s" % (self.field_name, variant) + fields[variant_name] = self.filter_class(field_name=variant_name) + + return fields diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py new file mode 100644 index 000000000..4bf15e97e --- /dev/null +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -0,0 +1,55 @@ +"""Fields""" +from collections import OrderedDict +from django.utils import six +from django_filters.filterset import BaseFilterSet + +from .filters import StringFilterES + + +class FilterSetESMetaclass(type): + """Captures the meta class of the filterSet class.""" + + def __new__(mcs, name, bases, attrs): + """Get filters declared explicitly in the class""" + + declared_filters = mcs.get_declared_filters(bases, attrs) + attrs['declared_filters'] = declared_filters + + new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs) + + if issubclass(new_class, BaseFilterSet): + base_filters = OrderedDict() + for name, filter_field in six.iteritems(declared_filters): + base_filters.update(filter_field.fields) + new_class.base_filters = base_filters + + return new_class + + @classmethod + def get_declared_filters(mcs, bases, attrs): + """ + Get the filters declared in the class. + :param bases: base classes of the current class + :param attrs: attributes captured to be included as metadata + :return: An OrderedDict of filter fields declared in the class as static fields. + """ + + # List of filters declared in the class as static fields. + filters = [ + (filter_name, attrs.pop(filter_name)) + for filter_name, obj in list(attrs.items()) + if isinstance(obj, StringFilterES) + ] + + # Merge declared filters from base classes + for base in reversed(bases): + if hasattr(base, 'declared_filters'): + filters = [(name, field) for name, field in base.declared_filters.items() if name not in attrs] \ + + filters + + return OrderedDict(filters) + + +class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)): + """FilterSet specific for ElasticSearch.""" + pass diff --git a/graphene_django/utils/__init__.py b/graphene_django/utils/__init__.py index f9c388dc6..73c871deb 100644 --- a/graphene_django/utils/__init__.py +++ b/graphene_django/utils/__init__.py @@ -1,5 +1,6 @@ from .utils import ( DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, get_reverse_fields, maybe_queryset, get_model_fields, @@ -10,6 +11,7 @@ __all__ = [ "DJANGO_FILTER_INSTALLED", + "DJANGO_ELASTICSEARCH_DSL_INSTALLED", "get_reverse_fields", "maybe_queryset", "get_model_fields", diff --git a/graphene_django/utils/utils.py b/graphene_django/utils/utils.py index 02c47eec7..5195e252e 100644 --- a/graphene_django/utils/utils.py +++ b/graphene_django/utils/utils.py @@ -12,6 +12,14 @@ DJANGO_FILTER_INSTALLED = False +try: + import django_elasticsearch_dsl # noqa + + DJANGO_ELASTICSEARCH_DSL_INSTALLED = True +except ImportError: + DJANGO_ELASTICSEARCH_DSL_INSTALLED = False + + def get_reverse_fields(model, local_field_names): for name, attr in model.__dict__.items(): # Don't duplicate any local fields From fc186ea23d668919af746f7713568a549f9cdc0f Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Fri, 31 May 2019 22:16:36 -0400 Subject: [PATCH 02/16] Adding testing for `graphene_django/elasticsearch/*` --- .../elasticsearch/tests/__init__.py | 0 .../elasticsearch/tests/filters.py | 8 
++++++ .../elasticsearch/tests/test_fields.py | 27 +++++++++++++++++++ graphene_django/utils/utils.py | 2 +- setup.py | 1 + 5 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 graphene_django/elasticsearch/tests/__init__.py create mode 100644 graphene_django/elasticsearch/tests/filters.py create mode 100644 graphene_django/elasticsearch/tests/test_fields.py diff --git a/graphene_django/elasticsearch/tests/__init__.py b/graphene_django/elasticsearch/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py new file mode 100644 index 000000000..2cabc80f9 --- /dev/null +++ b/graphene_django/elasticsearch/tests/filters.py @@ -0,0 +1,8 @@ + +from graphene_django.elasticsearch.filter import filters +from graphene_django.elasticsearch.filter.filterset import FilterSetES + + +class ArticleFilterES(FilterSetES): + + headline = filters.StringFilterES(attr='headline') diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py new file mode 100644 index 000000000..62f73d670 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -0,0 +1,27 @@ +import pytest + +from graphene_django.filter.tests.test_fields import assert_arguments, ArticleNode + +from graphene_django.elasticsearch.tests.filters import ArticleFilterES +from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED + + +pytestmark = [] + +if DJANGO_FILTER_INSTALLED and DJANGO_ELASTICSEARCH_DSL_INSTALLED: + from graphene_django.filter import ( + DjangoFilterConnectionField, + ) +else: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_filter_string_fields(): + field = DjangoFilterConnectionField(ArticleNode, filterset_class=ArticleFilterES) + assert_arguments(field, "headline", "headline_term") diff --git a/graphene_django/utils/utils.py b/graphene_django/utils/utils.py index 5195e252e..c15ff634c 100644 --- a/graphene_django/utils/utils.py +++ b/graphene_django/utils/utils.py @@ -13,7 +13,7 @@ try: - import django_elasticsearch_dsl # noqa + import elasticsearch_dsl # noqa DJANGO_ELASTICSEARCH_DSL_INSTALLED = True except ImportError: diff --git a/setup.py b/setup.py index e622a718b..473b0413c 100644 --- a/setup.py +++ b/setup.py @@ -22,6 +22,7 @@ "django-filter<2;python_version<'3'", "django-filter>=2;python_version>='3'", "pytest-django>=3.3.2", + "elasticsearch-dsl<7.0", ] + rest_framework_require From 5b4d8144ee118f8f2cce03b9c1c6e30ae378f730 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Fri, 31 May 2019 22:43:08 -0400 Subject: [PATCH 03/16] changing `django_filters` types in the ES filters by `graphene` types --- graphene_django/elasticsearch/filter/filters.py | 7 +++---- graphene_django/filter/utils.py | 10 ++++++++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index b1d6b9d25..8c2faec2d 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -1,15 +1,13 @@ """Filters to ElasticSearch""" from collections import OrderedDict -from django_filters import CharFilter from elasticsearch_dsl import Q +from graphene import String class StringFilterES(object): # pylint: disable=R0902 """String Fields 
specific to ElasticSearch.""" default_expr = 'contain' - filter_class = CharFilter - variants = { "contain": lambda name, value: Q('match', **{name: { @@ -27,6 +25,7 @@ def __init__(self, name=None, attr=None): """ assert name or attr, "At least the field name or the field attr should be passed" self.field_name = name or attr.replace('.', '_') + self.argument = String().Argument() self.fields = self.generate_fields() def generate_fields(self): @@ -40,6 +39,6 @@ def generate_fields(self): for variant in self.variants: variant_name = self.field_name if variant in ["default", self.default_expr] \ else "%s_%s" % (self.field_name, variant) - fields[variant_name] = self.filter_class(field_name=variant_name) + fields[variant_name] = self.argument return fields diff --git a/graphene_django/filter/utils.py b/graphene_django/filter/utils.py index cfa5621a1..1487793f4 100644 --- a/graphene_django/filter/utils.py +++ b/graphene_django/filter/utils.py @@ -1,4 +1,5 @@ import six +from graphene import Argument from .filterset import custom_filterset_factory, setup_filterset @@ -12,8 +13,13 @@ def get_filtering_args_from_filterset(filterset_class, type): args = {} for name, filter_field in six.iteritems(filterset_class.base_filters): - field_type = convert_form_field(filter_field.field).Argument() - field_type.description = filter_field.label + + if not isinstance(filter_field, Argument): + field_type = convert_form_field(filter_field.field).Argument() + field_type.description = filter_field.label + else: + field_type = filter_field + args[name] = field_type return args From 837d74f941b2fc614f9dd12ed7152f8c004098a9 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Sun, 2 Jun 2019 20:13:14 -0400 Subject: [PATCH 04/16] generating queries from filters to resolve the data first in ES. 
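This patch wires the pieces together: each StringFilterES turns its GraphQL
arguments into an elasticsearch_dsl.Q, FilterSetES combines them under a bool
query, and DjangoESFilterConnectionField/QuerysetBridge run the search and map
the hits back to a Django queryset via to_queryset(). A rough usage sketch
(ArticleDocument, ArticleFilterES and ArticleNode are illustrative names,
mirroring the test fixtures added later in this series):

    from django_elasticsearch_dsl import DocType, Index
    from graphene import ObjectType
    from graphene_django.elasticsearch.filter import filters
    from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField
    from graphene_django.elasticsearch.filter.filterset import FilterSetES

    articles_index = Index('articles')

    @articles_index.doc_type
    class ArticleDocument(DocType):
        class Meta(object):
            model = Article  # a Django model with a `headline` field

    class ArticleFilterES(FilterSetES):
        class Meta(object):
            index = ArticleDocument

        headline = filters.StringFilterES(attr='headline')

    class Query(ObjectType):
        # resolved against Elasticsearch first, then converted to a queryset
        articles = DjangoESFilterConnectionField(
            ArticleNode, filterset_class=ArticleFilterES
        )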
--- .../elasticsearch/filter/bridges.py | 25 +++++++++++ .../elasticsearch/filter/fields.py | 17 +++++++ .../elasticsearch/filter/filters.py | 26 ++++++++++- .../elasticsearch/filter/filterset.py | 45 ++++++++++++++++++- graphene_django/filter/utils.py | 6 +-- graphene_django/utils/utils.py | 2 +- setup.py | 2 +- 7 files changed, 116 insertions(+), 7 deletions(-) create mode 100644 graphene_django/elasticsearch/filter/bridges.py create mode 100644 graphene_django/elasticsearch/filter/fields.py diff --git a/graphene_django/elasticsearch/filter/bridges.py b/graphene_django/elasticsearch/filter/bridges.py new file mode 100644 index 000000000..0de147ce9 --- /dev/null +++ b/graphene_django/elasticsearch/filter/bridges.py @@ -0,0 +1,25 @@ + +class QuerysetBridge(object): + """Bridge to Queryset through ES query""" + + def __init__(self, search): + """Taking as search, the ES search resolved by DjangoESFilterConnectionField""" + self.search = search + + def get_queryset(self): + """Returning self as Queryset to be the bridge""" + return self + + def apply_query(self, method, *args, **kwargs): + """Helper method to apply mutation to ES Query""" + if hasattr(self.search, method): + self.search = getattr(self.search, method)(*args, **kwargs) + + def __len__(self): + """Bridget method to response the ES count as QS len""" + return self.search.count() + + def __getitem__(self, k): + """Applying slice to ES and generating a QS from that""" + _slice = self.search.__getitem__(k) + return _slice.to_queryset() diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py new file mode 100644 index 000000000..bbacadbf2 --- /dev/null +++ b/graphene_django/elasticsearch/filter/fields.py @@ -0,0 +1,17 @@ +from graphene_django.elasticsearch.filter.bridges import QuerysetBridge +from graphene_django.filter import DjangoFilterConnectionField +from elasticsearch_dsl.query import Query + + +class DjangoESFilterConnectionField(DjangoFilterConnectionField): + """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge""" + + def get_manager(self): + """Retuning a QuerysetBridge to replace the direct use over the QS""" + return QuerysetBridge(search=self.filterset_class._meta.index.search()) + + def merge_querysets(cls, default_queryset, queryset): + """Merge ES queries""" + if isinstance(default_queryset, Query): + return default_queryset & queryset + return default_queryset.query(queryset) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 8c2faec2d..358616fbc 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -39,6 +39,30 @@ def generate_fields(self): for variant in self.variants: variant_name = self.field_name if variant in ["default", self.default_expr] \ else "%s_%s" % (self.field_name, variant) - fields[variant_name] = self.argument + fields[variant_name] = self return fields + + def get_q(self, arguments): + """ + :param arguments: parameters of the query. + :return: Returns a elasticsearch_dsl.Q query object. 
+ """ + queries = [] + + for argument, value in arguments.iteritems(): + if argument in self.fields: + + if argument == self.field_name: + suffix_expr = self.default_expr or 'default' + else: + argument_split = argument.split("_") + suffix_expr = argument_split[len(argument_split) - 1] + + if suffix_expr in self.variants: + query = self.variants.get(suffix_expr, None) + + if query: + queries.extend([query(self.field_name, value)]) + + return Q("bool", must=queries[0]) if len(queries) == 1 else Q("bool", must={"bool": {"should": queries}}) diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 4bf15e97e..556e2aadc 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -1,11 +1,24 @@ """Fields""" from collections import OrderedDict + +from elasticsearch_dsl import Q from django.utils import six from django_filters.filterset import BaseFilterSet from .filters import StringFilterES +class FilterSetESOptions(object): + """Basic FilterSetES options to Metadata""" + def __init__(self, options=None): + """ + The field option is combined with the index to automatically generate + filters. + """ + self.index = getattr(options, 'index', None) + self.model = self.index._doc_type.model if self.index else None + + class FilterSetESMetaclass(type): """Captures the meta class of the filterSet class.""" @@ -23,6 +36,7 @@ def __new__(mcs, name, bases, attrs): base_filters.update(filter_field.fields) new_class.base_filters = base_filters + new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) return new_class @classmethod @@ -52,4 +66,33 @@ def get_declared_filters(mcs, bases, attrs): class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)): """FilterSet specific for ElasticSearch.""" - pass + def __init__(self, data, queryset, request): + """ + Receiving params necessaries to resolved the data + :param data: argument passed to query + :param queryset: a ES queryset + :param request: the context of request + """ + self.data = data + self.es_query = queryset + self.request = request + + @property + def qs(self): + """Returning ES queryset as QS""" + query_base = self.generate_q() + self.es_query.apply_query("query", query_base) + self.es_query.apply_query("source", ["id"]) + return self.es_query + + def generate_q(self): + """ + Generate a query for each filter. + :return: Generates a super query with bool as root, and combines all sub-queries from each argument. 
+ """ + query_base = Q("bool") + for name, filter_es in six.iteritems(self.declared_filters): + query_filter = filter_es.get_q(self.data) + if query_filter is not None: + query_base += query_filter + return query_base diff --git a/graphene_django/filter/utils.py b/graphene_django/filter/utils.py index 1487793f4..3c09619be 100644 --- a/graphene_django/filter/utils.py +++ b/graphene_django/filter/utils.py @@ -1,5 +1,5 @@ import six -from graphene import Argument +from django_filters import Filter from .filterset import custom_filterset_factory, setup_filterset @@ -14,11 +14,11 @@ def get_filtering_args_from_filterset(filterset_class, type): args = {} for name, filter_field in six.iteritems(filterset_class.base_filters): - if not isinstance(filter_field, Argument): + if isinstance(filter_field, Filter): field_type = convert_form_field(filter_field.field).Argument() field_type.description = filter_field.label else: - field_type = filter_field + field_type = filter_field.argument args[name] = field_type diff --git a/graphene_django/utils/utils.py b/graphene_django/utils/utils.py index c15ff634c..5195e252e 100644 --- a/graphene_django/utils/utils.py +++ b/graphene_django/utils/utils.py @@ -13,7 +13,7 @@ try: - import elasticsearch_dsl # noqa + import django_elasticsearch_dsl # noqa DJANGO_ELASTICSEARCH_DSL_INSTALLED = True except ImportError: diff --git a/setup.py b/setup.py index 473b0413c..c5a21232f 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ "django-filter<2;python_version<'3'", "django-filter>=2;python_version>='3'", "pytest-django>=3.3.2", - "elasticsearch-dsl<7.0", + "django_elasticsearch_dsl", ] + rest_framework_require From 75946f97f5a7d822a42983b18b12640a642684c0 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Mon, 3 Jun 2019 01:06:18 -0400 Subject: [PATCH 05/16] testing interaction between DjangoFilterConnectionField and DjangoESFilterConnectionField to get a QS --- .../elasticsearch/filter/fields.py | 4 +- .../elasticsearch/filter/filterset.py | 2 +- .../elasticsearch/tests/filters.py | 26 +++++++++ .../elasticsearch/tests/test_fields.py | 56 ++++++++++++++++++- 4 files changed, 83 insertions(+), 5 deletions(-) diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py index bbacadbf2..5324d0b41 100644 --- a/graphene_django/elasticsearch/filter/fields.py +++ b/graphene_django/elasticsearch/filter/fields.py @@ -1,13 +1,13 @@ +from elasticsearch_dsl.query import Query from graphene_django.elasticsearch.filter.bridges import QuerysetBridge from graphene_django.filter import DjangoFilterConnectionField -from elasticsearch_dsl.query import Query class DjangoESFilterConnectionField(DjangoFilterConnectionField): """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge""" def get_manager(self): - """Retuning a QuerysetBridge to replace the direct use over the QS""" + """Returning a QuerysetBridge to replace the direct use over the QS""" return QuerysetBridge(search=self.filterset_class._meta.index.search()) def merge_querysets(cls, default_queryset, queryset): diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 556e2aadc..cb1079339 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -92,7 +92,7 @@ def generate_q(self): """ query_base = Q("bool") for name, filter_es in six.iteritems(self.declared_filters): - query_filter = filter_es.get_q(self.data) + query_filter = 
filter_es.get_q(self.data) if len(self.data) else None if query_filter is not None: query_base += query_filter return query_base diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index 2cabc80f9..c0aed52d0 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -1,8 +1,34 @@ +from graphene import ObjectType +from django_elasticsearch_dsl import DocType, Index +from graphene_django.tests.models import Article +from graphene_django.filter.tests.test_fields import ArticleNode from graphene_django.elasticsearch.filter import filters +from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField from graphene_django.elasticsearch.filter.filterset import FilterSetES +ads_index = Index('articles') + + +@ads_index.doc_type +class ArticleDocument(DocType): + """Article document describing Index""" + class Meta(object): + """Metaclass config""" + model = Article + class ArticleFilterES(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument headline = filters.StringFilterES(attr='headline') + + +class ESFilterQuery(ObjectType): + """A query for ES fields""" + articles = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterES + ) diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index 62f73d670..5fab90e81 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -1,9 +1,13 @@ +from datetime import datetime + import pytest +from mock import mock +from graphene import Schema +from graphene_django.tests.models import Article, Reporter from graphene_django.filter.tests.test_fields import assert_arguments, ArticleNode - -from graphene_django.elasticsearch.tests.filters import ArticleFilterES from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED +from graphene_django.elasticsearch.tests.filters import ArticleFilterES, ESFilterQuery pytestmark = [] @@ -25,3 +29,51 @@ def test_filter_string_fields(): field = DjangoFilterConnectionField(ArticleNode, filterset_class=ArticleFilterES) assert_arguments(field, "headline", "headline_term") + + +def test_filter_query(): + r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com") + + a1 = Article.objects.create( + headline="a1", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + a2 = Article.objects.create( + headline="a2", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + + query = """ + query { + articles { + edges { + node { + headline + } + } + } + } + """ + + mock_count = mock.Mock(return_value=3) + mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( + return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) + ))) + + with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\ + mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice): + + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert not result.errors + + assert len(result.data["articles"]["edges"]) == 2 + assert result.data["articles"]["edges"][0]["node"]["headline"] == "a1" + assert result.data["articles"]["edges"][1]["node"]["headline"] == "a2" From 25a5ceb2a8f6bd7aad00da4804f19e32923365b5 Mon Sep 17 00:00:00 2001 From: 
Alejandro Nunez Capote Date: Mon, 3 Jun 2019 18:22:14 -0400 Subject: [PATCH 06/16] generating filters from meta specification --- .../elasticsearch/filter/filters.py | 21 ++- .../elasticsearch/filter/filterset.py | 166 +++++++++++++++++- .../elasticsearch/tests/filters.py | 36 +++- .../elasticsearch/tests/test_fields.py | 53 +++--- 4 files changed, 244 insertions(+), 32 deletions(-) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 358616fbc..dbca8a9f4 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -1,5 +1,7 @@ """Filters to ElasticSearch""" from collections import OrderedDict + +import six from elasticsearch_dsl import Q from graphene import String @@ -18,13 +20,15 @@ class StringFilterES(object): # pylint: disable=R0902 "term": lambda name, value: Q('term', **{name: value}), } - def __init__(self, name=None, attr=None): + def __init__(self, name=None, attr=None, lookup_expressions=None, default_expr=None): """ :param name: Name of the field. This is the name that will be exported. :param attr: Path to the index attr that will be used as filter. """ assert name or attr, "At least the field name or the field attr should be passed" self.field_name = name or attr.replace('.', '_') + self.default_expr = default_expr or self.default_expr + self.lookup_expressions = lookup_expressions self.argument = String().Argument() self.fields = self.generate_fields() @@ -36,9 +40,16 @@ def generate_fields(self): expressions combinations. """ fields = OrderedDict() - for variant in self.variants: - variant_name = self.field_name if variant in ["default", self.default_expr] \ - else "%s_%s" % (self.field_name, variant) + if self.lookup_expressions: + + for variant in self.lookup_expressions: + if variant in self.variants: + variant_name = self.field_name if variant in ["default", self.default_expr] \ + else "%s_%s" % (self.field_name, variant) + fields[variant_name] = self + + else: + variant_name = self.field_name fields[variant_name] = self return fields @@ -50,7 +61,7 @@ def get_q(self, arguments): """ queries = [] - for argument, value in arguments.iteritems(): + for argument, value in six.iteritems(arguments): if argument in self.fields: if argument == self.field_name: diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index cb1079339..3be0149b5 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -1,12 +1,20 @@ """Fields""" +import copy from collections import OrderedDict - from elasticsearch_dsl import Q +from django_elasticsearch_dsl import ObjectField, StringField, TextField from django.utils import six +from django_filters.utils import try_dbfield from django_filters.filterset import BaseFilterSet from .filters import StringFilterES +# Basic conversion from ES fields to FilterES fields +FILTER_FOR_ESFIELD_DEFAULTS = { + StringField: {'filter_class': StringFilterES}, + TextField: {'filter_class': StringFilterES}, +} + class FilterSetESOptions(object): """Basic FilterSetES options to Metadata""" @@ -14,8 +22,68 @@ def __init__(self, options=None): """ The field option is combined with the index to automatically generate filters. 
+ + The includes option accept two kind of syntax: + - a list of field names + - a dictionary of field names mapped to a list of expressions + + Example: + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + includes = ['username', 'last_login'] + + or + + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + includes = { + 'username': ['term'] + 'last_login': ['lte', 'gte] + } + + The list syntax will create an filter with a behavior by default, + for each field included in includes. The dictionary syntax will + create a filter for each expression declared for its corresponding + field. + + Note that the generated filters will not overwrite filters + declared on the FilterSet. + + Example: + class UserFilter(FilterSetES): + username = StringFieldES('username', core_type='text', expr=['partial']) + class Meta: + index = UserIndex + includes = { + 'username': ['term', 'word'] + } + + A query with username as a parameter, will match those words with the + username value as substring + + The excludes option accept a list of field names. + + Example: + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = ['username', 'last_login'] + + or + + It is necessary to provide includes or excludes. You cant provide a excludes empty to generate all fields """ self.index = getattr(options, 'index', None) + self.includes = getattr(options, 'includes', None) + self.excludes = getattr(options, 'excludes', None) + + if self.index is None: + raise ValueError('You need provide a Index in Meta.') + if self.excludes is None and self.includes is None: + raise ValueError('You need provide includes or excludes field in Meta.') + self.model = self.index._doc_type.model if self.index else None @@ -31,12 +99,15 @@ def __new__(mcs, name, bases, attrs): new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs) if issubclass(new_class, BaseFilterSet): + new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) base_filters = OrderedDict() for name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) + + meta_filters = mcs.get_meta_filters(new_class._meta) + base_filters.update(OrderedDict(meta_filters)) new_class.base_filters = base_filters - new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) return new_class @classmethod @@ -63,6 +134,95 @@ def get_declared_filters(mcs, bases, attrs): return OrderedDict(filters) + @classmethod + def get_meta_filters(mcs, meta): + """ + Get filters from Meta configuration + :return: Field extracted from index and from the FilterSetES. 
+ """ + index_fields = mcs.get_index_fields(meta) + + meta_filters = [] + for name, index_field, data in index_fields: + + if isinstance(index_field, ObjectField): + meta_filters.extend((name, mcs.get_filter_object(name, index_field, data))) + else: + meta_filters.append((name, mcs.get_filter_exp(name, index_field, data))) + + return meta_filters + + @classmethod + def get_index_fields(mcs, meta): + """ + Get fields from index that appears in the meta class configuration of the filter_set + :return: Tuple of (name, field, lookup_expr) describing name of the field, ES class of the field and lookup_expr + """ + index_fields = meta.index._doc_type._fields() + meta_includes = meta.includes + meta_excludes = meta.excludes + + if isinstance(meta_includes, dict): + # The lookup_expr are defined in Meta + filter_fields = [(name, index_fields[name], data) for name, data in meta_includes.items()] + elif meta_includes is not None: + # The lookup_expr are not defined + filter_fields = [(name, index_fields[name], None) for name in meta_includes] + else: + # No `includes` are declared in meta, so all not `excludes` fields from index will be converted to filters + filter_fields = [(name, field, None) for name, field in index_fields.items() if name not in meta_excludes] + return filter_fields + + @classmethod + def get_filter_object(mcs, name, field, data): + """Get filters from ObjectField""" + index_fields = [] + + properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {}) + + for inner_name, inner_field in properties.items(): + + if data and inner_name not in data: + # This inner field is not filterable + continue + inner_data = data[inner_name] if data else None + + index_fields.append(mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name)) + + return index_fields + + @classmethod + def get_filter_exp(mcs, name, field, data=None, root=None): + """Initialize filter""" + field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} + filter_class = field_data.get('filter_class') + + extra = field_data.get('extra', {}) + kwargs = copy.deepcopy(extra) + + # Get lookup_expr from configuration + if data and 'lookup_exprs' in data: + if 'lookup_exprs' in kwargs: + kwargs['lookup_exprs'] = set(kwargs['lookup_exprs']).intersection(set(data['lookup_exprs'])) + else: + kwargs['lookup_exprs'] = set(data['lookup_exprs']) + elif 'lookup_exprs' in kwargs: + kwargs['lookup_exprs'] = set(kwargs['lookup_exprs']) + + kwargs['name'], kwargs['attr'] = mcs.get_name(name, root, data) + return filter_class(**kwargs) + + @staticmethod + def get_name(name, root, data): + """Get names of the field and the path to resolve it""" + field_name = data.get('name', None) if data else None + attr = data.get('attr', None) if data else None + if not field_name: + field_name = '{root}_{name}'.format(root=root, name=name) if root else name + if not attr: + attr = '{root}.{name}'.format(root=root, name=name) if root else name + return field_name, attr + class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)): """FilterSet specific for ElasticSearch.""" @@ -91,7 +251,7 @@ def generate_q(self): :return: Generates a super query with bool as root, and combines all sub-queries from each argument. 
""" query_base = Q("bool") - for name, filter_es in six.iteritems(self.declared_filters): + for name, filter_es in six.iteritems(self.base_filters): query_filter = filter_es.get_q(self.data) if len(self.data) else None if query_filter is not None: query_base += query_filter diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index c0aed52d0..5265e75d3 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -16,19 +16,49 @@ class ArticleDocument(DocType): class Meta(object): """Metaclass config""" model = Article + fields = [ + 'headline', + ] -class ArticleFilterES(FilterSetES): +class ArticleFilterESAsField(FilterSetES): """Article Filter for ES""" class Meta(object): """Metaclass data""" index = ArticleDocument + includes = [] headline = filters.StringFilterES(attr='headline') +class ArticleFilterESInMeta(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + includes = ['headline'] + + +class ArticleFilterESInMetaDict(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + includes = { + 'headline': { + 'lookup_expressions': ['term', 'contains'] + } + } + + class ESFilterQuery(ObjectType): """A query for ES fields""" - articles = DjangoESFilterConnectionField( - ArticleNode, filterset_class=ArticleFilterES + articles_as_field = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESAsField + ) + articles_in_meta = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESInMeta + ) + articles_in_meta_dict = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESInMetaDict ) diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index 5fab90e81..6bdc6ba9f 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -4,19 +4,15 @@ from mock import mock from graphene import Schema + +from graphene_django.elasticsearch.filter import filters from graphene_django.tests.models import Article, Reporter -from graphene_django.filter.tests.test_fields import assert_arguments, ArticleNode from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED -from graphene_django.elasticsearch.tests.filters import ArticleFilterES, ESFilterQuery - +from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument pytestmark = [] -if DJANGO_FILTER_INSTALLED and DJANGO_ELASTICSEARCH_DSL_INSTALLED: - from graphene_django.filter import ( - DjangoFilterConnectionField, - ) -else: +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: pytestmark.append( pytest.mark.skipif( True, reason="django_filters not installed or not compatible" @@ -26,14 +22,8 @@ pytestmark.append(pytest.mark.django_db) -def test_filter_string_fields(): - field = DjangoFilterConnectionField(ArticleNode, filterset_class=ArticleFilterES) - assert_arguments(field, "headline", "headline_term") - - -def test_filter_query(): +def fake_data(): r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com") - a1 = Article.objects.create( headline="a1", pub_date=datetime.now(), @@ -48,10 +38,15 @@ def test_filter_query(): reporter=r1, editor=r1, ) + return a1, a2 + + +def filter_generation(field, query_str, spected_arguments): + a1, a2 = 
fake_data() query = """ query { - articles { + %s(%s) { edges { node { headline @@ -59,21 +54,37 @@ def test_filter_query(): } } } - """ + """ % (field, query_str) mock_count = mock.Mock(return_value=3) mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) ))) + mock_query = mock.Mock(return_value=ArticleDocument.search()) with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\ - mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice): + mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice),\ + mock.patch('elasticsearch_dsl.Search.query', mock_query): schema = Schema(query=ESFilterQuery) result = schema.execute(query) assert not result.errors - assert len(result.data["articles"]["edges"]) == 2 - assert result.data["articles"]["edges"][0]["node"]["headline"] == "a1" - assert result.data["articles"]["edges"][1]["node"]["headline"] == "a2" + mock_query.assert_called_with(filters.StringFilterES(attr='headline').get_q(spected_arguments)) + + assert len(result.data[field]["edges"]) == 2 + assert result.data[field]["edges"][0]["node"]["headline"] == "a1" + assert result.data[field]["edges"][1]["node"]["headline"] == "a2" + + +def test_filter_as_field(): + filter_generation("articlesAsField", "headline: \"A text\"", {"headline": "A text"}) + + +def test_filter_in_meta(): + filter_generation("articlesInMeta", "headline: \"A text\"", {"headline": "A text"}) + + +def test_filter_in_meta_dict(): + filter_generation("articlesInMetaDict", "headline: \"A text\"", {"headline": "A text"}) From 5c57ffccd7cd248b39a86f02ec5f0b91a7db2c2a Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Tue, 4 Jun 2019 15:17:32 -0400 Subject: [PATCH 07/16] added order by feature as meta. 
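This adds an `order_by` option to the FilterSetES Meta: the metaclass builds a
`sort` argument from it (an enum of the sortable fields plus an asc/desc order
enum), and FilterSetES.qs applies it to the Elasticsearch search. A sketch of
how it is used (illustrative, mirroring the test fixtures in this series):

    class ArticleFilterES(FilterSetES):
        class Meta(object):
            index = ArticleDocument
            includes = []
            order_by = ['id']  # or a dict mapping the exposed name to the ES field

        headline = filters.StringFilterES(
            attr='headline', lookup_expressions=['term', 'contain']
        )

which can then be queried as, e.g.:

    articlesAsField(headline: "A text", sort: {order: desc, field: id}) { ... }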
--- .../elasticsearch/filter/fields.py | 15 ++ .../elasticsearch/filter/filters.py | 12 +- .../elasticsearch/filter/filterset.py | 142 +++++++++++++++--- .../elasticsearch/tests/filters.py | 5 +- .../elasticsearch/tests/test_fields.py | 33 +++- graphene_django/filter/utils.py | 2 +- 6 files changed, 178 insertions(+), 31 deletions(-) diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py index 5324d0b41..cf1219b6e 100644 --- a/graphene_django/elasticsearch/filter/fields.py +++ b/graphene_django/elasticsearch/filter/fields.py @@ -1,10 +1,25 @@ from elasticsearch_dsl.query import Query + from graphene_django.elasticsearch.filter.bridges import QuerysetBridge from graphene_django.filter import DjangoFilterConnectionField class DjangoESFilterConnectionField(DjangoFilterConnectionField): """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge""" + def __init__(self, object_type, *args, **kwargs): + """Validating field allowed for this connection""" + fields = kwargs.get('fields', None) + if fields is not None: + raise ValueError('DjangoESFilterConnectionField do not permit argument fields yet.') + + order_by = kwargs.get('order_by', None) + if order_by is not None: + raise ValueError('DjangoESFilterConnectionField do not permit argument order_by yet.') + + filterset_class = kwargs.get('filterset_class', None) + if filterset_class is None: + raise ValueError('You should provide a FilterSetES as filterset_class argument.') + super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs) def get_manager(self): """Returning a QuerysetBridge to replace the direct use over the QS""" diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index dbca8a9f4..a467cd314 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -29,7 +29,7 @@ def __init__(self, name=None, attr=None, lookup_expressions=None, default_expr=N self.field_name = name or attr.replace('.', '_') self.default_expr = default_expr or self.default_expr self.lookup_expressions = lookup_expressions - self.argument = String().Argument() + self.argument = String() self.fields = self.generate_fields() def generate_fields(self): @@ -54,8 +54,9 @@ def generate_fields(self): return fields - def get_q(self, arguments): + def generate_es_query(self, arguments): """ + Generating a query based on the arguments passed to graphene field :param arguments: parameters of the query. :return: Returns a elasticsearch_dsl.Q query object. 
""" @@ -77,3 +78,10 @@ def get_q(self, arguments): queries.extend([query(self.field_name, value)]) return Q("bool", must=queries[0]) if len(queries) == 1 else Q("bool", must={"bool": {"should": queries}}) + + def Argument(self): + """ + Defining graphene Argument type for this filter + :return: A Argument type + """ + return self.argument.Argument() diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 3be0149b5..f49d0241d 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -2,8 +2,10 @@ import copy from collections import OrderedDict from elasticsearch_dsl import Q -from django_elasticsearch_dsl import ObjectField, StringField, TextField +from graphene import Enum, InputObjectType, Field +from django_elasticsearch_dsl import StringField, TextField from django.utils import six + from django_filters.utils import try_dbfield from django_filters.filterset import BaseFilterSet @@ -16,6 +18,19 @@ } +class OrderEnum(Enum): + """Order enum to desc-asc""" + asc = 'asc' + desc = 'desc' + + @property + def description(self): + """Description to order enum""" + if self == OrderEnum.asc: + return 'Ascendant order' + return 'Descendant order' + + class FilterSetESOptions(object): """Basic FilterSetES options to Metadata""" def __init__(self, options=None): @@ -71,13 +86,33 @@ class Meta: index = UserIndex excludes = ['username', 'last_login'] + It is necessary to provide includes or excludes. You cant provide a excludes empty to generate all fields + + You can also pass sort_by to Meta to allow field be ordered + + Example: + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = [] + order_by = ['username', 'last_login'] + or - It is necessary to provide includes or excludes. 
You cant provide a excludes empty to generate all fields + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = [] + order_by = { + 'username': user.name + 'last_login': last_login + } + """ self.index = getattr(options, 'index', None) self.includes = getattr(options, 'includes', None) self.excludes = getattr(options, 'excludes', None) + self.order_by = getattr(options, 'order_by', None) if self.index is None: raise ValueError('You need provide a Index in Meta.') @@ -101,11 +136,21 @@ def __new__(mcs, name, bases, attrs): if issubclass(new_class, BaseFilterSet): new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) base_filters = OrderedDict() + for name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) meta_filters = mcs.get_meta_filters(new_class._meta) - base_filters.update(OrderedDict(meta_filters)) + base_filters.update(meta_filters) + + sort_fields = {} + + if new_class._meta.order_by is not None: + sort_fields = mcs.generate_sort_field(new_class._meta.order_by) + sort_type = mcs.create_sort_enum(name, sort_fields) + base_filters['sort'] = sort_type() + + new_class.sort_fields = sort_fields new_class.base_filters = base_filters return new_class @@ -142,13 +187,11 @@ def get_meta_filters(mcs, meta): """ index_fields = mcs.get_index_fields(meta) - meta_filters = [] + meta_filters = OrderedDict() for name, index_field, data in index_fields: - if isinstance(index_field, ObjectField): - meta_filters.extend((name, mcs.get_filter_object(name, index_field, data))) - else: - meta_filters.append((name, mcs.get_filter_exp(name, index_field, data))) + filter_class = mcs.get_filter_exp(name, index_field, data) + meta_filters.update(filter_class.fields) return meta_filters @@ -201,13 +244,14 @@ def get_filter_exp(mcs, name, field, data=None, root=None): kwargs = copy.deepcopy(extra) # Get lookup_expr from configuration - if data and 'lookup_exprs' in data: - if 'lookup_exprs' in kwargs: - kwargs['lookup_exprs'] = set(kwargs['lookup_exprs']).intersection(set(data['lookup_exprs'])) + if data and 'lookup_expressions' in data: + if 'lookup_expressions' in kwargs: + kwargs['lookup_expressions'] = set(kwargs['lookup_expressions'])\ + .intersection(set(data['lookup_expressions'])) else: - kwargs['lookup_exprs'] = set(data['lookup_exprs']) - elif 'lookup_exprs' in kwargs: - kwargs['lookup_exprs'] = set(kwargs['lookup_exprs']) + kwargs['lookup_expressions'] = set(data['lookup_expressions']) + elif 'lookup_expressions' in kwargs: + kwargs['lookup_expressions'] = set(kwargs['lookup_expressions']) kwargs['name'], kwargs['attr'] = mcs.get_name(name, root, data) return filter_class(**kwargs) @@ -223,6 +267,49 @@ def get_name(name, root, data): attr = '{root}.{name}'.format(root=root, name=name) if root else name return field_name, attr + @staticmethod + def create_sort_enum(name, sort_fields): + """ + Create enum to sort by fields. 
+ As graphene is typed, it is necessary generate a Enum by Field + to have inside, the document fields allowed to be ordered + """ + + sort_enum_name = "{}SortFields".format(name) + sort_descriptions = {field: "Sort by {field}".format(field=field) for field in + sort_fields.keys()} + sort_fields = [(field, field) for field in sort_fields.keys()] + + class EnumWithDescriptionsType(object): + """Set description to enum fields""" + + @property + def description(self): + """Description to EnumSort""" + return sort_descriptions[self.name] + + enum = Enum(sort_enum_name, sort_fields, type=EnumWithDescriptionsType) + + class SortType(InputObjectType): + """Sort Type""" + order = Field(OrderEnum) + field = Field(enum, required=True) + + sort_name = "{}Sort".format(name) + sort_type = type(sort_name, (SortType,), {}) + return sort_type + + @staticmethod + def generate_sort_field(order_by): + """To normalize the sort field data""" + if not order_by: + sort_fields = {} + elif isinstance(order_by, dict): + sort_fields = order_by.copy() + else: + sort_fields = {field: field for field in order_by} + return sort_fields + class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)): """FilterSet specific for ElasticSearch.""" @@ -240,19 +327,34 @@ def __init__(self, data, queryset, request): @property def qs(self): """Returning ES queryset as QS""" - query_base = self.generate_q() + query_base = self.generate_es_query() self.es_query.apply_query("query", query_base) self.es_query.apply_query("source", ["id"]) + + if 'sort' in self.data: + sort_data = self.data['sort'].copy() + field_name = self.sort_fields[sort_data.pop('field')] + self.es_query.apply_query("sort", {field_name: sort_data}) + return self.es_query - def generate_q(self): + def generate_es_query(self): """ Generate a query for each filter. :return: Generates a super query with bool as root, and combines all sub-queries from each argument. 
""" query_base = Q("bool") - for name, filter_es in six.iteritems(self.base_filters): - query_filter = filter_es.get_q(self.data) if len(self.data) else None - if query_filter is not None: - query_base += query_filter + # if the query have data + if len(self.data): + # for each field passed to the query + for name in self.data: + filter_es = self.base_filters.get(name) + # If a target filter is en FilterEs + if isinstance(filter_es, StringFilterES): + # It is generated a query or response None if the filter don't have data + query_filter = filter_es.generate_es_query(self.data) + + if query_filter is not None: + query_base += query_filter + return query_base diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index 5265e75d3..787304e06 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -27,8 +27,9 @@ class Meta(object): """Metaclass data""" index = ArticleDocument includes = [] + order_by = ['id'] - headline = filters.StringFilterES(attr='headline') + headline = filters.StringFilterES(attr='headline', lookup_expressions=['term', 'contain']) class ArticleFilterESInMeta(FilterSetES): @@ -46,7 +47,7 @@ class Meta(object): index = ArticleDocument includes = { 'headline': { - 'lookup_expressions': ['term', 'contains'] + 'lookup_expressions': ['term', 'contain'] } } diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index 6bdc6ba9f..e8c7d4839 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -41,7 +41,7 @@ def fake_data(): return a1, a2 -def filter_generation(field, query_str, spected_arguments): +def filter_generation(field, query_str, expected_arguments, method_to_mock="query"): a1, a2 = fake_data() query = """ @@ -64,14 +64,14 @@ def filter_generation(field, query_str, spected_arguments): with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\ mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice),\ - mock.patch('elasticsearch_dsl.Search.query', mock_query): + mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query): schema = Schema(query=ESFilterQuery) result = schema.execute(query) assert not result.errors - mock_query.assert_called_with(filters.StringFilterES(attr='headline').get_q(spected_arguments)) + mock_query.assert_called_with(expected_arguments) assert len(result.data[field]["edges"]) == 2 assert result.data[field]["edges"][0]["node"]["headline"] == "a1" @@ -79,12 +79,33 @@ def filter_generation(field, query_str, spected_arguments): def test_filter_as_field(): - filter_generation("articlesAsField", "headline: \"A text\"", {"headline": "A text"}) + filter_generation( + "articlesAsField", + "headline: \"A text\"", + filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + ) + + +def test_filter_as_field_order_by(): + filter_generation( + "articlesAsField", + "headline: \"A text\", sort:{order:desc, field:id}", + {'id': {'order': 'desc'}}, + "sort" + ) def test_filter_in_meta(): - filter_generation("articlesInMeta", "headline: \"A text\"", {"headline": "A text"}) + filter_generation( + "articlesInMeta", + "headline: \"A text\"", + filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + ) def test_filter_in_meta_dict(): - filter_generation("articlesInMetaDict", "headline: \"A text\"", {"headline": "A text"}) + 
filter_generation( + "articlesInMetaDict", + "headline: \"A text\"", + filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + ) diff --git a/graphene_django/filter/utils.py b/graphene_django/filter/utils.py index 3c09619be..ef1310fab 100644 --- a/graphene_django/filter/utils.py +++ b/graphene_django/filter/utils.py @@ -18,7 +18,7 @@ def get_filtering_args_from_filterset(filterset_class, type): field_type = convert_form_field(filter_field.field).Argument() field_type.description = filter_field.label else: - field_type = filter_field.argument + field_type = filter_field.Argument() args[name] = field_type From 4e4387d674f70a03f3f7d0228be15b1ca060fc04 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Thu, 6 Jun 2019 00:36:14 -0400 Subject: [PATCH 08/16] added processors for all type of es query --- .../elasticsearch/filter/bridges.py | 16 +- .../elasticsearch/filter/fields.py | 9 +- .../elasticsearch/filter/filters.py | 103 +++++------ .../elasticsearch/filter/filterset.py | 89 ++++++---- .../elasticsearch/filter/processors.py | 167 ++++++++++++++++++ .../elasticsearch/tests/filters.py | 21 ++- .../elasticsearch/tests/test_fields.py | 27 ++- 7 files changed, 328 insertions(+), 104 deletions(-) create mode 100644 graphene_django/elasticsearch/filter/processors.py diff --git a/graphene_django/elasticsearch/filter/bridges.py b/graphene_django/elasticsearch/filter/bridges.py index 0de147ce9..a98771241 100644 --- a/graphene_django/elasticsearch/filter/bridges.py +++ b/graphene_django/elasticsearch/filter/bridges.py @@ -6,10 +6,6 @@ def __init__(self, search): """Taking as search, the ES search resolved by DjangoESFilterConnectionField""" self.search = search - def get_queryset(self): - """Returning self as Queryset to be the bridge""" - return self - def apply_query(self, method, *args, **kwargs): """Helper method to apply mutation to ES Query""" if hasattr(self.search, method): @@ -23,3 +19,15 @@ def __getitem__(self, k): """Applying slice to ES and generating a QS from that""" _slice = self.search.__getitem__(k) return _slice.to_queryset() + + +class ManagerBridge(object): + """Bridge to Queryset through ES query""" + + def __init__(self, search_manager): + """Taking as search, the ES search resolved by DjangoESFilterConnectionField""" + self.search_manager = search_manager + + def get_queryset(self): + """Returning self as Queryset to be the bridge""" + return QuerysetBridge(search=self.search_manager()) diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py index cf1219b6e..dfce2c8cb 100644 --- a/graphene_django/elasticsearch/filter/fields.py +++ b/graphene_django/elasticsearch/filter/fields.py @@ -1,6 +1,6 @@ from elasticsearch_dsl.query import Query -from graphene_django.elasticsearch.filter.bridges import QuerysetBridge +from graphene_django.elasticsearch.filter.bridges import ManagerBridge from graphene_django.filter import DjangoFilterConnectionField @@ -19,11 +19,14 @@ def __init__(self, object_type, *args, **kwargs): filterset_class = kwargs.get('filterset_class', None) if filterset_class is None: raise ValueError('You should provide a FilterSetES as filterset_class argument.') + super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs) + self.manager = ManagerBridge(search_manager=self.filterset_class._meta.index.search) + def get_manager(self): - """Returning a QuerysetBridge to replace the direct use over the QS""" - return 
QuerysetBridge(search=self.filterset_class._meta.index.search()) + """Returning a ManagerBridge to replace the direct use over the Model manager""" + return self.manager def merge_querysets(cls, default_queryset, queryset): """Merge ES queries""" diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index a467cd314..fc34c0ca8 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -1,58 +1,47 @@ """Filters to ElasticSearch""" -from collections import OrderedDict +from graphene import String, Boolean, Int +from graphene_django.elasticsearch.filter.processors import PROCESSORS -import six -from elasticsearch_dsl import Q -from graphene import String +class FilterES(object): + """Fields specific to ElasticSearch.""" + default_processor = 'term' + default_argument = String() -class StringFilterES(object): # pylint: disable=R0902 - """String Fields specific to ElasticSearch.""" - - default_expr = 'contain' - variants = { - "contain": lambda name, value: Q('match', - **{name: { - "query": value, - "fuzziness": "auto" - }}), - - "term": lambda name, value: Q('term', **{name: value}), - } - - def __init__(self, name=None, attr=None, lookup_expressions=None, default_expr=None): + def __init__(self, field_name, field_name_es=None, lookup_expressions=None, + default_processor=None, argument=None): """ :param name: Name of the field. This is the name that will be exported. :param attr: Path to the index attr that will be used as filter. """ - assert name or attr, "At least the field name or the field attr should be passed" - self.field_name = name or attr.replace('.', '_') - self.default_expr = default_expr or self.default_expr - self.lookup_expressions = lookup_expressions - self.argument = String() - self.fields = self.generate_fields() + self.field_name = field_name - def generate_fields(self): - """ - All FilterSet objects should specify its fields for the introspection. + if isinstance(field_name_es, list): + self.field_name_es = field_name_es + else: + self.field_name_es = [field_name_es or field_name] - :return: A mapping of field to Filter type of field with all the suffix - expressions combinations. - """ - fields = OrderedDict() - if self.lookup_expressions: + self.default_filter_processor = default_processor or self.default_processor + + self.lookup_expressions = lookup_expressions + self.processor = None + if self.lookup_expressions: for variant in self.lookup_expressions: - if variant in self.variants: - variant_name = self.field_name if variant in ["default", self.default_expr] \ - else "%s_%s" % (self.field_name, variant) - fields[variant_name] = self + if variant in PROCESSORS: + self.processor = self.build_processor(variant) + else: + raise ValueError('We do not have processor: %s.' % variant) else: - variant_name = self.field_name - fields[variant_name] = self + self.processor = self.build_processor(self.default_processor) + + self.fields = self.processor.generate_field() + self.argument = argument or self.default_argument - return fields + def build_processor(self, variant): + processor_class = PROCESSORS[variant] + return processor_class(self, self.processor) def generate_es_query(self, arguments): """ @@ -60,24 +49,7 @@ def generate_es_query(self, arguments): :param arguments: parameters of the query. :return: Returns a elasticsearch_dsl.Q query object. 
""" - queries = [] - - for argument, value in six.iteritems(arguments): - if argument in self.fields: - - if argument == self.field_name: - suffix_expr = self.default_expr or 'default' - else: - argument_split = argument.split("_") - suffix_expr = argument_split[len(argument_split) - 1] - - if suffix_expr in self.variants: - query = self.variants.get(suffix_expr, None) - - if query: - queries.extend([query(self.field_name, value)]) - - return Q("bool", must=queries[0]) if len(queries) == 1 else Q("bool", must={"bool": {"should": queries}}) + return self.processor.generate_es_query(arguments) def Argument(self): """ @@ -85,3 +57,18 @@ def Argument(self): :return: A Argument type """ return self.argument.Argument() + + +class StringFilterES(FilterES): + """String Fields specific to ElasticSearch.""" + default_processor = 'contains' + + +class BoolFilterES(FilterES): + """Boolean filter to ES""" + default_argument = Boolean() + + +class NumberFilterES(FilterES): + """Filter to an numeric value to ES""" + default_argument = Int() diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index f49d0241d..94199b791 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -2,19 +2,40 @@ import copy from collections import OrderedDict from elasticsearch_dsl import Q -from graphene import Enum, InputObjectType, Field -from django_elasticsearch_dsl import StringField, TextField +from graphene import Enum, InputObjectType, Field, Int, Float +from django_elasticsearch_dsl import StringField, TextField, BooleanField, IntegerField, FloatField, LongField, \ + ShortField, DoubleField, DateField, KeywordField from django.utils import six from django_filters.utils import try_dbfield from django_filters.filterset import BaseFilterSet -from .filters import StringFilterES +from .filters import StringFilterES, FilterES, BoolFilterES, NumberFilterES # Basic conversion from ES fields to FilterES fields FILTER_FOR_ESFIELD_DEFAULTS = { StringField: {'filter_class': StringFilterES}, TextField: {'filter_class': StringFilterES}, + BooleanField: {'filter_class': BoolFilterES}, + IntegerField: {'filter_class': NumberFilterES}, + FloatField: {'filter_class': NumberFilterES, + 'extra': { + 'argument': Int() + }}, + LongField: {'filter_class': NumberFilterES, + 'extra': { + 'argument': Int() + }}, + ShortField: {'filter_class': NumberFilterES, + 'extra': { + 'argument': Int() + }}, + DoubleField: {'filter_class': NumberFilterES, + 'extra': { + 'argument': Float() + }}, + DateField: {'filter_class': StringFilterES}, + KeywordField: {'filter_class': StringFilterES}, } @@ -54,9 +75,12 @@ class UserFilter(FilterSetES): class Meta: index = UserIndex includes = { - 'username': ['term'] - 'last_login': ['lte', 'gte] - } + 'username': { + 'field_name': 'graphene_field', + 'field_name_es': 'elasticsearch_field', + 'lookup_expressions': ['term', 'contains'] + } + } The list syntax will create an filter with a behavior by default, for each field included in includes. 
The dictionary syntax will @@ -68,11 +92,12 @@ class Meta: Example: class UserFilter(FilterSetES): - username = StringFieldES('username', core_type='text', expr=['partial']) + username = StringFieldES(field_name='username', lookup_expressions=['contains']) class Meta: index = UserIndex includes = { - 'username': ['term', 'word'] + 'username': { + 'lookup_expressions': ['term', 'contains'] } A query with username as a parameter, will match those words with the @@ -127,7 +152,7 @@ class FilterSetESMetaclass(type): def __new__(mcs, name, bases, attrs): """Get filters declared explicitly in the class""" - + # get declared as field declared_filters = mcs.get_declared_filters(bases, attrs) attrs['declared_filters'] = declared_filters @@ -135,16 +160,20 @@ def __new__(mcs, name, bases, attrs): if issubclass(new_class, BaseFilterSet): new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) - base_filters = OrderedDict() + # get declared as meta + meta_filters = mcs.get_meta_filters(new_class._meta) + + declared_filters.update(meta_filters) + new_class.filters_es = declared_filters + + # recollecting registered graphene fields + base_filters = OrderedDict() for name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) - meta_filters = mcs.get_meta_filters(new_class._meta) - base_filters.update(meta_filters) - + # adding sort field sort_fields = {} - if new_class._meta.order_by is not None: sort_fields = mcs.generate_sort_field(new_class._meta.order_by) sort_type = mcs.create_sort_enum(name, sort_fields) @@ -166,9 +195,9 @@ def get_declared_filters(mcs, bases, attrs): # List of filters declared in the class as static fields. filters = [ - (filter_name, attrs.pop(filter_name)) + (obj.field_name, attrs.pop(filter_name)) for filter_name, obj in list(attrs.items()) - if isinstance(obj, StringFilterES) + if isinstance(obj, FilterES) ] # Merge declared filters from base classes @@ -191,7 +220,7 @@ def get_meta_filters(mcs, meta): for name, index_field, data in index_fields: filter_class = mcs.get_filter_exp(name, index_field, data) - meta_filters.update(filter_class.fields) + meta_filters.update({name: filter_class}) return meta_filters @@ -229,7 +258,6 @@ def get_filter_object(mcs, name, field, data): # This inner field is not filterable continue inner_data = data[inner_name] if data else None - index_fields.append(mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name)) return index_fields @@ -245,27 +273,23 @@ def get_filter_exp(mcs, name, field, data=None, root=None): # Get lookup_expr from configuration if data and 'lookup_expressions' in data: - if 'lookup_expressions' in kwargs: - kwargs['lookup_expressions'] = set(kwargs['lookup_expressions'])\ - .intersection(set(data['lookup_expressions'])) - else: - kwargs['lookup_expressions'] = set(data['lookup_expressions']) + kwargs['lookup_expressions'] = set(data['lookup_expressions']) elif 'lookup_expressions' in kwargs: kwargs['lookup_expressions'] = set(kwargs['lookup_expressions']) - kwargs['name'], kwargs['attr'] = mcs.get_name(name, root, data) + kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data) return filter_class(**kwargs) @staticmethod def get_name(name, root, data): """Get names of the field and the path to resolve it""" - field_name = data.get('name', None) if data else None - attr = data.get('attr', None) if data else None + field_name = data.get('field_name', None) if data else None + field_name_es = data.get('field_name_es', None) if data else None 
if not field_name: field_name = '{root}_{name}'.format(root=root, name=name) if root else name - if not attr: - attr = '{root}.{name}'.format(root=root, name=name) if root else name - return field_name, attr + if not field_name_es: + field_name_es = '{root}.{name}'.format(root=root, name=name) if root else name + return field_name, field_name_es @staticmethod def create_sort_enum(name, sort_fields): @@ -347,12 +371,11 @@ def generate_es_query(self): # if the query have data if len(self.data): # for each field passed to the query - for name in self.data: - filter_es = self.base_filters.get(name) + for name, filter in six.iteritems(self.filters_es): # If a target filter is en FilterEs - if isinstance(filter_es, StringFilterES): + if isinstance(filter, FilterES): # It is generated a query or response None if the filter don't have data - query_filter = filter_es.generate_es_query(self.data) + query_filter = filter.generate_es_query(self.data) if query_filter is not None: query_base += query_filter diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py new file mode 100644 index 000000000..ecd037f4c --- /dev/null +++ b/graphene_django/elasticsearch/filter/processors.py @@ -0,0 +1,167 @@ +from collections import OrderedDict + +from elasticsearch_dsl import Q +from graphene import List + + +class Processor(object): + suffix_expr = 'term' + + def __init__(self, filter_es, parent_processor=None): + """ + Abstract processor to generate graphene field and ES query to lookups + :type filter_es: graphene_django.elasticsearch.filter.filterset.FilterES + :type parent_processor: graphene_django.elasticsearch.filter.filterset.Processor + """ + self.filter_es = filter_es + self.parent_processor = parent_processor + self.variant_name = self._get_variant_name() + + def generate_field(self): + """Field Decorator""" + self_field = self._build_field() + + if self.parent_processor is not None: + parent_fields = self.parent_processor.generate_field() + parent_fields.update(self_field) + return parent_fields + + else: + return self_field + + def get_type(self): + return self.filter_es.argument + + def generate_es_query(self, data): + + if self.variant_name in data: + value = data.get(self.variant_name) + self_query = self._build_query(value) + else: + self_query = Q("bool") + + if self.parent_processor is not None: + parent_query = self.parent_processor.generate_es_query(data) + parent_query += self_query + return parent_query + + else: + return self_query + + def _build_field(self): + variant_name = self.variant_name + + return OrderedDict({variant_name: self.filter_es}) + + def _get_variant_name(self): + if self.suffix_expr == self.filter_es.default_filter_processor: + variant_name = self.filter_es.field_name + + else: + variant_name = "%s_%s" % (self.filter_es.field_name, self.suffix_expr) + + return variant_name + + def _build_query(self, value): + result = len(self.filter_es.field_name_es) + + if result > 1: + queries = [self._get_query(name, value) for name in self.filter_es.field_name_es] + return Q("bool", must={"bool": {"should": queries}}) + + return Q("bool", must=self._get_query(self.filter_es.field_name_es[0], value)) + + @staticmethod + def _get_query(name, value): + return Q('term', **{name: value}) + + +class TermProcessor(Processor): + pass + + +class ContainsProcessor(Processor): + suffix_expr = 'contains' + + @staticmethod + def _get_query(name, value): + return Q('match', + **{name: { + "query": value, + "fuzziness": "auto" + }}) + + 
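+# Each Processor subclass below only overrides _get_query() (and, where the
+# argument type changes, get_type()); the shared chaining logic in Processor
+# lets a single FilterES expose several lookup variants at once. For example
+# (illustrative, not part of this patch): lookup_expressions=['term', 'contains']
+# on a StringFilterES named "headline" yields the GraphQL arguments `headline`
+# (contains, the default) and `headlineTerm`, producing Q('match', ...) and
+# Q('term', ...) respectively.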
+class RegexProcessor(Processor): + suffix_expr = 'regex' + + @staticmethod + def _get_query(name, value): + return Q('wildcard', **{name: value}) + + +class PhraseProcessor(Processor): + suffix_expr = 'phrase' + + @staticmethod + def _get_query(name, value): + return Q('match_phrase', + **{name: { + "query": value + }}) + + +class PrefixProcessor(Processor): + suffix_expr = 'prefix' + + @staticmethod + def _get_query(name, value): + return Q('match_phrase_prefix', + **{name: { + "query": value + }}) + + +class InProcessor(Processor): + suffix_expr = 'in' + + def get_type(self): + return List(self.filter_es.argument.Argument().type) + + +class ExitsProcessor(Processor): + suffix_expr = 'exits' + + @staticmethod + def _get_query(name, value): + return Q('bool', **{ + 'must' if value else 'must_not': {'exists': {'field': name}} + }) + + +class LteProcessor(Processor): + suffix_expr = 'lte' + + @staticmethod + def _get_query(name, value): + return Q("bool", must={'range': {name: {'lte': value}}}) + + +class GteProcessor(Processor): + suffix_expr = 'gte' + + @staticmethod + def _get_query(name, value): + return Q("bool", must={'range': {name: {'gte': value}}}) + + +PROCESSORS = { + "contains": ContainsProcessor, + "term": TermProcessor, + "regex": RegexProcessor, + "phrase": PhraseProcessor, + "prefix": PrefixProcessor, + "in": InProcessor, + "lte": LteProcessor, + "gte": GteProcessor, +} diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index 787304e06..1a01e8d69 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -29,7 +29,7 @@ class Meta(object): includes = [] order_by = ['id'] - headline = filters.StringFilterES(attr='headline', lookup_expressions=['term', 'contain']) + headline = filters.StringFilterES(field_name='headline', lookup_expressions=['term', 'contains']) class ArticleFilterESInMeta(FilterSetES): @@ -47,11 +47,25 @@ class Meta(object): index = ArticleDocument includes = { 'headline': { - 'lookup_expressions': ['term', 'contain'] + 'lookup_expressions': ['term', 'contains'] } } +class ArticleFilterMultiField(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + includes = [] + + headline = filters.StringFilterES( + field_name='contain', + field_name_es=['headline', 'lang'], + lookup_expressions=['contains'] + ) + + class ESFilterQuery(ObjectType): """A query for ES fields""" articles_as_field = DjangoESFilterConnectionField( @@ -63,3 +77,6 @@ class ESFilterQuery(ObjectType): articles_in_meta_dict = DjangoESFilterConnectionField( ArticleNode, filterset_class=ArticleFilterESInMetaDict ) + articles_in_multi_field = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterMultiField + ) diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index e8c7d4839..dc30d00bb 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -78,11 +78,19 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer assert result.data[field]["edges"][1]["node"]["headline"] == "a2" -def test_filter_as_field(): +def test_filter_string(): filter_generation( "articlesAsField", "headline: \"A text\"", - filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + 
filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + ) + + +def test_filter_string_date(): + filter_generation( + "articlesAsField", + "headline: \"A text\"", + filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), ) @@ -99,7 +107,7 @@ def test_filter_in_meta(): filter_generation( "articlesInMeta", "headline: \"A text\"", - filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), ) @@ -107,5 +115,16 @@ def test_filter_in_meta_dict(): filter_generation( "articlesInMetaDict", "headline: \"A text\"", - filters.StringFilterES(attr='headline').generate_es_query({"headline": "A text"}), + filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + ) + + +def test_filter_in_multi_field(): + filter_generation( + "articlesInMultiField", + "contain: \"A text\"", + filters.StringFilterES( + field_name='contain', + field_name_es=['headline', 'lang'], + ).generate_es_query({"contain": "A text"}), ) From 1836b8b4c2121ac6b6ae73001b6cace041675dd6 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Fri, 7 Jun 2019 14:43:24 -0400 Subject: [PATCH 09/16] Improve test coverage --- .../elasticsearch/filter/fields.py | 16 +- .../elasticsearch/filter/filters.py | 21 +- .../elasticsearch/filter/filterset.py | 44 ++-- .../elasticsearch/filter/processors.py | 103 +++++++- .../filter/{bridges.py => proxy.py} | 6 +- .../elasticsearch/tests/filters.py | 42 +++- .../elasticsearch/tests/test_fields.py | 219 ++++++++++++++++-- 7 files changed, 389 insertions(+), 62 deletions(-) rename graphene_django/elasticsearch/filter/{bridges.py => proxy.py} (89%) diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py index dfce2c8cb..cd21fd001 100644 --- a/graphene_django/elasticsearch/filter/fields.py +++ b/graphene_django/elasticsearch/filter/fields.py @@ -1,13 +1,13 @@ -from elasticsearch_dsl.query import Query - -from graphene_django.elasticsearch.filter.bridges import ManagerBridge +from graphene_django.elasticsearch.filter.proxy import ManagerProxy from graphene_django.filter import DjangoFilterConnectionField class DjangoESFilterConnectionField(DjangoFilterConnectionField): """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge""" def __init__(self, object_type, *args, **kwargs): - """Validating field allowed for this connection""" + """Validating field allowed for this connection + :param object_type: DjangoObjectType + """ fields = kwargs.get('fields', None) if fields is not None: raise ValueError('DjangoESFilterConnectionField do not permit argument fields yet.') @@ -22,14 +22,8 @@ def __init__(self, object_type, *args, **kwargs): super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs) - self.manager = ManagerBridge(search_manager=self.filterset_class._meta.index.search) + self.manager = ManagerProxy(search_manager=self.filterset_class._meta.index.search) def get_manager(self): """Returning a ManagerBridge to replace the direct use over the Model manager""" return self.manager - - def merge_querysets(cls, default_queryset, queryset): - """Merge ES queries""" - if isinstance(default_queryset, Query): - return default_queryset & queryset - return default_queryset.query(queryset) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 
fc34c0ca8..4044a995c 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -11,8 +11,11 @@ class FilterES(object): def __init__(self, field_name, field_name_es=None, lookup_expressions=None, default_processor=None, argument=None): """ - :param name: Name of the field. This is the name that will be exported. - :param attr: Path to the index attr that will be used as filter. + :param field_name: Name of the field. This is the name that will be exported. + :param field_name_es: Path to the index attr that will be used as filter. + :param lookup_expressions: List of processor. + :param default_processor: Processor by default used when lookup_expressions in empty. + :param argument: Gaphene type base for this field. """ self.field_name = field_name @@ -36,10 +39,15 @@ def __init__(self, field_name, field_name_es=None, lookup_expressions=None, else: self.processor = self.build_processor(self.default_processor) - self.fields = self.processor.generate_field() self.argument = argument or self.default_argument + self.fields = self.processor.generate_field() def build_processor(self, variant): + """ + Create a new processor based on the name + :param variant: Processor name + :return: Returns a Processor instance + """ processor_class = PROCESSORS[variant] return processor_class(self, self.processor) @@ -51,13 +59,6 @@ def generate_es_query(self, arguments): """ return self.processor.generate_es_query(arguments) - def Argument(self): - """ - Defining graphene Argument type for this filter - :return: A Argument type - """ - return self.argument.Argument() - class StringFilterES(FilterES): """String Fields specific to ElasticSearch.""" diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 94199b791..012bc1cb1 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -20,7 +20,7 @@ IntegerField: {'filter_class': NumberFilterES}, FloatField: {'filter_class': NumberFilterES, 'extra': { - 'argument': Int() + 'argument': Float() }}, LongField: {'filter_class': NumberFilterES, 'extra': { @@ -32,7 +32,7 @@ }}, DoubleField: {'filter_class': NumberFilterES, 'extra': { - 'argument': Float() + 'argument': Int() }}, DateField: {'filter_class': StringFilterES}, KeywordField: {'filter_class': StringFilterES}, @@ -169,7 +169,7 @@ def __new__(mcs, name, bases, attrs): # recollecting registered graphene fields base_filters = OrderedDict() - for name, filter_field in six.iteritems(declared_filters): + for filter_name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) # adding sort field @@ -212,13 +212,13 @@ def get_declared_filters(mcs, bases, attrs): def get_meta_filters(mcs, meta): """ Get filters from Meta configuration + :param meta: A FilterSetESOptions instance with meta options :return: Field extracted from index and from the FilterSetES. 
""" index_fields = mcs.get_index_fields(meta) meta_filters = OrderedDict() for name, index_field, data in index_fields: - filter_class = mcs.get_filter_exp(name, index_field, data) meta_filters.update({name: filter_class}) @@ -228,6 +228,7 @@ def get_meta_filters(mcs, meta): def get_index_fields(mcs, meta): """ Get fields from index that appears in the meta class configuration of the filter_set + :param meta: A FilterSetESOptions instance with meta options :return: Tuple of (name, field, lookup_expr) describing name of the field, ES class of the field and lookup_expr """ index_fields = meta.index._doc_type._fields() @@ -247,7 +248,12 @@ def get_index_fields(mcs, meta): @classmethod def get_filter_object(mcs, name, field, data): - """Get filters from ObjectField""" + """ + Get filters from ObjectField + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + """ index_fields = [] properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {}) @@ -264,7 +270,13 @@ def get_filter_object(mcs, name, field, data): @classmethod def get_filter_exp(mcs, name, field, data=None, root=None): - """Initialize filter""" + """ + Initialize filter + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + :param root: root name + """ field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} filter_class = field_data.get('filter_class') @@ -274,15 +286,18 @@ def get_filter_exp(mcs, name, field, data=None, root=None): # Get lookup_expr from configuration if data and 'lookup_expressions' in data: kwargs['lookup_expressions'] = set(data['lookup_expressions']) - elif 'lookup_expressions' in kwargs: - kwargs['lookup_expressions'] = set(kwargs['lookup_expressions']) kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data) return filter_class(**kwargs) @staticmethod def get_name(name, root, data): - """Get names of the field and the path to resolve it""" + """ + Get names of the field and the path to resolve it + :param name: name of the field + :param data: lookup_expr + :param root: root name + """ field_name = data.get('field_name', None) if data else None field_name_es = data.get('field_name_es', None) if data else None if not field_name: @@ -297,6 +312,8 @@ def create_sort_enum(name, sort_fields): Create enum to sort by fields. 
As graphene is typed, it is necessary generate a Enum by Field to have inside, the document fields allowed to be ordered + :param name: name of the field + :param sort_fields: Field allowed to be ordered """ sort_enum_name = "{}SortFields".format(name) @@ -325,10 +342,11 @@ class SortType(InputObjectType): @staticmethod def generate_sort_field(order_by): - """To normalize the sort field data""" - if not order_by: - sort_fields = {} - elif isinstance(order_by, dict): + """ + To normalize the sort field data + :param order_by: Sort data + """ + if isinstance(order_by, dict): sort_fields = order_by.copy() else: sort_fields = {field: field for field in order_by} diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py index ecd037f4c..7180f152c 100644 --- a/graphene_django/elasticsearch/filter/processors.py +++ b/graphene_django/elasticsearch/filter/processors.py @@ -1,7 +1,7 @@ from collections import OrderedDict from elasticsearch_dsl import Q -from graphene import List +from graphene import List, Boolean class Processor(object): @@ -10,8 +10,8 @@ class Processor(object): def __init__(self, filter_es, parent_processor=None): """ Abstract processor to generate graphene field and ES query to lookups - :type filter_es: graphene_django.elasticsearch.filter.filterset.FilterES - :type parent_processor: graphene_django.elasticsearch.filter.filterset.Processor + :param filter_es: A FilterES target + :param parent_processor: Next Processor to the generate field chain """ self.filter_es = filter_es self.parent_processor = parent_processor @@ -30,10 +30,14 @@ def generate_field(self): return self_field def get_type(self): + """Define the argument for graphene field""" return self.filter_es.argument def generate_es_query(self, data): - + """ + Define the argument for graphene field + :param data: Data passed to field in the query + """ if self.variant_name in data: value = data.get(self.variant_name) self_query = self._build_query(value) @@ -49,11 +53,19 @@ def generate_es_query(self, data): return self_query def _build_field(self): + """ + Specific detail about field creation to be overwrite if necessary. + :return: A field + """ variant_name = self.variant_name - return OrderedDict({variant_name: self.filter_es}) + return OrderedDict({variant_name: self.get_type()}) def _get_variant_name(self): + """ + Make a variant based on filter name and processor suffix + :return: A variant name + """ if self.suffix_expr == self.filter_es.default_filter_processor: variant_name = self.filter_es.field_name @@ -63,6 +75,11 @@ def _get_variant_name(self): return variant_name def _build_query(self, value): + """ + Make a query based on specific processor query + :param value: Value passed to this processor + :return: A elasticsearch Query + """ result = len(self.filter_es.field_name_es) if result > 1: @@ -73,18 +90,32 @@ def _build_query(self, value): @staticmethod def _get_query(name, value): + """ + Specific detail about query creation to be overwrite if necessary. 
+ :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('term', **{name: value}) class TermProcessor(Processor): + """Have a same behavior of parent this is only with semantic proposal""" pass class ContainsProcessor(Processor): + """fuzzy search""" suffix_expr = 'contains' @staticmethod def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('match', **{name: { "query": value, @@ -93,18 +124,32 @@ def _get_query(name, value): class RegexProcessor(Processor): + """Search based on regular expressions""" suffix_expr = 'regex' @staticmethod def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('wildcard', **{name: value}) class PhraseProcessor(Processor): + """Search by the union of many terms""" suffix_expr = 'phrase' @staticmethod def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('match_phrase', **{name: { "query": value @@ -112,10 +157,17 @@ def _get_query(name, value): class PrefixProcessor(Processor): + """Search by the prefix of the terms""" suffix_expr = 'prefix' @staticmethod def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('match_phrase_prefix', **{name: { "query": value @@ -123,36 +175,72 @@ def _get_query(name, value): class InProcessor(Processor): + """Search by many value for a field""" suffix_expr = 'in' + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q('terms', **{name: value}) + def get_type(self): + """Change base argument by a list of base argument""" return List(self.filter_es.argument.Argument().type) class ExitsProcessor(Processor): + """Search by if the field is in the document""" suffix_expr = 'exits' @staticmethod def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ return Q('bool', **{ 'must' if value else 'must_not': {'exists': {'field': name}} }) + def get_type(self): + return Boolean() + class LteProcessor(Processor): + """Search by range less than""" suffix_expr = 'lte' @staticmethod def _get_query(name, value): - return Q("bool", must={'range': {name: {'lte': value}}}) + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q('range', **{name: {'lte': value}}) class GteProcessor(Processor): + """Search by range greater than""" suffix_expr = 'gte' @staticmethod def _get_query(name, value): - return Q("bool", must={'range': {name: {'gte': value}}}) + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("range", **{name: 
{'gte': value}}) PROCESSORS = { @@ -162,6 +250,7 @@ def _get_query(name, value): "phrase": PhraseProcessor, "prefix": PrefixProcessor, "in": InProcessor, + "exits": ExitsProcessor, "lte": LteProcessor, "gte": GteProcessor, } diff --git a/graphene_django/elasticsearch/filter/bridges.py b/graphene_django/elasticsearch/filter/proxy.py similarity index 89% rename from graphene_django/elasticsearch/filter/bridges.py rename to graphene_django/elasticsearch/filter/proxy.py index a98771241..f253d2a1f 100644 --- a/graphene_django/elasticsearch/filter/bridges.py +++ b/graphene_django/elasticsearch/filter/proxy.py @@ -1,5 +1,5 @@ -class QuerysetBridge(object): +class QuerysetProxy(object): """Bridge to Queryset through ES query""" def __init__(self, search): @@ -21,7 +21,7 @@ def __getitem__(self, k): return _slice.to_queryset() -class ManagerBridge(object): +class ManagerProxy(object): """Bridge to Queryset through ES query""" def __init__(self, search_manager): @@ -30,4 +30,4 @@ def __init__(self, search_manager): def get_queryset(self): """Returning self as Queryset to be the bridge""" - return QuerysetBridge(search=self.search_manager()) + return QuerysetProxy(search=self.search_manager()) diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index 1a01e8d69..ca603b816 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -17,7 +17,12 @@ class Meta(object): """Metaclass config""" model = Article fields = [ + 'id', 'headline', + 'pub_date', + 'pub_date_time', + 'lang', + 'importance', ] @@ -37,7 +42,8 @@ class ArticleFilterESInMeta(FilterSetES): class Meta(object): """Metaclass data""" index = ArticleDocument - includes = ['headline'] + includes = ['id', 'headline'] + order_by = {'id': 'es_id'} class ArticleFilterESInMetaDict(FilterSetES): @@ -47,7 +53,17 @@ class Meta(object): index = ArticleDocument includes = { 'headline': { - 'lookup_expressions': ['term', 'contains'] + 'lookup_expressions': [ + 'term', + 'contains', + 'regex', + 'phrase', + 'prefix', + 'in', + 'exits', + 'lte', + 'gte', + ] } } @@ -66,6 +82,22 @@ class Meta(object): ) +class ArticleFilterGenerateAll(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + excludes = [] + + +class ArticleFilterExcludes(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + excludes = ['headline'] + + class ESFilterQuery(ObjectType): """A query for ES fields""" articles_as_field = DjangoESFilterConnectionField( @@ -80,3 +112,9 @@ class ESFilterQuery(ObjectType): articles_in_multi_field = DjangoESFilterConnectionField( ArticleNode, filterset_class=ArticleFilterMultiField ) + articles_in_generate_all = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterGenerateAll + ) + articles_in_excludes = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterExcludes + ) diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index dc30d00bb..c7752a07e 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -1,11 +1,15 @@ from datetime import datetime import pytest +from py.test import raises from mock import mock -from graphene import Schema +from elasticsearch_dsl.query import Bool, Match, Term, Wildcard, MatchPhrase, MatchPhrasePrefix, Range, Terms, 
Exists +from graphene import Schema, ObjectType -from graphene_django.elasticsearch.filter import filters +from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField +from graphene_django.elasticsearch.filter.filterset import FilterSetES +from graphene_django.filter.tests.test_fields import ArticleNode from graphene_django.tests.models import Article, Reporter from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument @@ -41,9 +45,7 @@ def fake_data(): return a1, a2 -def filter_generation(field, query_str, expected_arguments, method_to_mock="query"): - a1, a2 = fake_data() - +def generate_query(field, query_str): query = """ query { %s(%s) { @@ -55,6 +57,13 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer } } """ % (field, query_str) + return query + + +def filter_generation(field, query_str, expected_arguments, method_to_mock="query"): + a1, a2 = fake_data() + + query = generate_query(field, query_str) mock_count = mock.Mock(return_value=3) mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( @@ -62,10 +71,9 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer ))) mock_query = mock.Mock(return_value=ArticleDocument.search()) - with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\ - mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice),\ + with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \ + mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \ mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query): - schema = Schema(query=ESFilterQuery) result = schema.execute(query) @@ -82,7 +90,7 @@ def test_filter_string(): filter_generation( "articlesAsField", "headline: \"A text\"", - filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), ) @@ -90,7 +98,7 @@ def test_filter_string_date(): filter_generation( "articlesAsField", "headline: \"A text\"", - filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), ) @@ -103,11 +111,20 @@ def test_filter_as_field_order_by(): ) +def test_filter_as_field_order_by_dict(): + filter_generation( + "articlesInMeta", + "headline: \"A text\", sort:{order:desc, field:id}", + {'es_id': {'order': 'desc'}}, + "sort" + ) + + def test_filter_in_meta(): filter_generation( "articlesInMeta", "headline: \"A text\"", - filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), ) @@ -115,7 +132,7 @@ def test_filter_in_meta_dict(): filter_generation( "articlesInMetaDict", "headline: \"A text\"", - filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}), + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), ) @@ -123,8 +140,178 @@ def test_filter_in_multi_field(): filter_generation( "articlesInMultiField", "contain: \"A text\"", - filters.StringFilterES( - field_name='contain', - field_name_es=['headline', 'lang'], - ).generate_es_query({"contain": "A text"}), + Bool(must=[Bool(should=[ + Match(headline={'query': 'A text', 'fuzziness': 'auto'}), + Match(lang={'query': 'A text', 
'fuzziness': 'auto'}) + ])]), + ) + + +def test_filter_generating_all(): + filter_generation( + "articlesInGenerateAll", + "headline: \"A text\", " + "pubDate: \"0000-00-00\", " + "pubDateTime: \"00:00:00\", " + "lang: \"es\", " + "importance: 1, ", + Bool(must=[ + Match(headline={'query': 'A text', 'fuzziness': 'auto'}), + Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}), + Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}), + Match(lang={'query': 'es', 'fuzziness': 'auto'}), + Term(importance=1) + ]), + ) + + +def test_filter_generating_exclude(): + query = generate_query("articlesInExcludes", "headline: \"A text\", ") + + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert len(result.errors) > 0 + + +def test_filter_bad_processor(): + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + index = ArticleDocument + includes = { + 'headline': { + 'lookup_expressions': ['bad_processor'] + } + } + + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "bad_processor" in str(error_info.value) + + +def test_filter_field_without_filterset_class(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode + ) + + assert "filterset_class" in str(error_info.value) + + +def test_filter_field_with_fields(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, fields=['headline'] + ) + + assert "fields" in str(error_info.value) + + +def test_filter_field_with_order_by(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, order_by=['headline'] + ) + + assert "order_by" in str(error_info.value) + + +def test_filter_filterset_without_index(): + with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "Index in Meta" in str(error_info.value) + + +def test_filter_filterset_without_xcludes(): + with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + index = ArticleDocument + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "includes or excludes field in Meta" in str(error_info.value) + + +def test_processor_term(): + filter_generation( + "articlesInMetaDict", + "headlineTerm: \"A text\"", + Bool(must=[Term(headline='A text')]), + ) + + +def test_processor_regex(): + filter_generation( + "articlesInMetaDict", + "headlineRegex: \"A text\"", + Bool(must=[Wildcard(headline='A text')]), + ) + + +def test_processor_phrase(): + filter_generation( + "articlesInMetaDict", + "headlinePhrase: \"A text\"", + Bool(must=[MatchPhrase(headline={'query': 'A text'})]), + ) + + +def test_processor_prefix(): + filter_generation( + "articlesInMetaDict", + "headlinePrefix: \"A text\"", + Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]), + ) + + +def test_processor_in(): + filter_generation( + "articlesInMetaDict", + "headlineIn: [\"A text 1\", \"A text 2\"]", + Bool(must=[Terms(headline=['A text 1', 'A text 2'])]), + ) + + +def test_processor_exits(): + filter_generation( + "articlesInMetaDict", + "headlineExits: true", + 
Bool(must=[Bool(must=[Exists(field='headline')])]), + ) + + +def test_processor_lte(): + filter_generation( + "articlesInMetaDict", + "headlineLte: \"A text\"", + Bool(must=Range(headline={'lte': 'A text'})), + ) + + +def test_processor_gte(): + filter_generation( + "articlesInMetaDict", + "headlineGte: \"A text\"", + Bool(must=Range(headline={'gte': 'A text'})), ) From 8692c822668cd24eb683e940dfd6cb4b64f7cbce Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Sat, 8 Jun 2019 09:41:46 -0400 Subject: [PATCH 10/16] Add feature to processor to generate based on related_models --- .../elasticsearch/filter/filterset.py | 45 ++++++++----------- .../elasticsearch/tests/filters.py | 14 ++++-- .../elasticsearch/tests/test_fields.py | 8 ++++ 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 012bc1cb1..aa4ae4b08 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -4,7 +4,7 @@ from elasticsearch_dsl import Q from graphene import Enum, InputObjectType, Field, Int, Float from django_elasticsearch_dsl import StringField, TextField, BooleanField, IntegerField, FloatField, LongField, \ - ShortField, DoubleField, DateField, KeywordField + ShortField, DoubleField, DateField, KeywordField, ObjectField from django.utils import six from django_filters.utils import try_dbfield @@ -18,22 +18,10 @@ TextField: {'filter_class': StringFilterES}, BooleanField: {'filter_class': BoolFilterES}, IntegerField: {'filter_class': NumberFilterES}, - FloatField: {'filter_class': NumberFilterES, - 'extra': { - 'argument': Float() - }}, - LongField: {'filter_class': NumberFilterES, - 'extra': { - 'argument': Int() - }}, - ShortField: {'filter_class': NumberFilterES, - 'extra': { - 'argument': Int() - }}, - DoubleField: {'filter_class': NumberFilterES, - 'extra': { - 'argument': Int() - }}, + FloatField: {'filter_class': NumberFilterES, 'argument': Float()}, + LongField: {'filter_class': NumberFilterES, 'argument': Int()}, + ShortField: {'filter_class': NumberFilterES, 'argument': Int()}, + DoubleField: {'filter_class': NumberFilterES, 'argument': Int()}, DateField: {'filter_class': StringFilterES}, KeywordField: {'filter_class': StringFilterES}, } @@ -219,8 +207,12 @@ def get_meta_filters(mcs, meta): meta_filters = OrderedDict() for name, index_field, data in index_fields: - filter_class = mcs.get_filter_exp(name, index_field, data) - meta_filters.update({name: filter_class}) + if isinstance(index_field, ObjectField): + filters_class = mcs.get_filter_object(name, index_field, data) + meta_filters.update(filters_class) + else: + filter_class = mcs.get_filter_exp(name, index_field, data) + meta_filters.update({name: filter_class}) return meta_filters @@ -254,7 +246,7 @@ def get_filter_object(mcs, name, field, data): :param field: ES index field :param data: lookup_expr """ - index_fields = [] + index_fields = OrderedDict() properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {}) @@ -263,8 +255,11 @@ def get_filter_object(mcs, name, field, data): if data and inner_name not in data: # This inner field is not filterable continue + inner_data = data[inner_name] if data else None - index_fields.append(mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name)) + + filter_exp = mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name) + index_fields.update({inner_name: filter_exp}) return 
index_fields @@ -280,14 +275,10 @@ def get_filter_exp(mcs, name, field, data=None, root=None): field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} filter_class = field_data.get('filter_class') - extra = field_data.get('extra', {}) - kwargs = copy.deepcopy(extra) - - # Get lookup_expr from configuration - if data and 'lookup_expressions' in data: - kwargs['lookup_expressions'] = set(data['lookup_expressions']) + kwargs = copy.deepcopy(data) if data is not None else {} kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data) + return filter_class(**kwargs) @staticmethod diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index ca603b816..ab19b8cce 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -1,7 +1,7 @@ from graphene import ObjectType -from django_elasticsearch_dsl import DocType, Index +from django_elasticsearch_dsl import DocType, Index, fields -from graphene_django.tests.models import Article +from graphene_django.tests.models import Article, Reporter from graphene_django.filter.tests.test_fields import ArticleNode from graphene_django.elasticsearch.filter import filters from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField @@ -24,6 +24,13 @@ class Meta(object): 'lang', 'importance', ] + related_models = (Reporter,) + + reporter = fields.ObjectField(properties={ + 'id': fields.IntegerField(), + 'first_name': fields.KeywordField(), + 'email': fields.KeywordField(), + }) class ArticleFilterESAsField(FilterSetES): @@ -64,7 +71,8 @@ class Meta(object): 'lte', 'gte', ] - } + }, + 'reporter': {}, } diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index c7752a07e..61e6584e3 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -136,6 +136,14 @@ def test_filter_in_meta_dict(): ) +def test_filter_in_meta_dict_foreign(): + filter_generation( + "articlesInMetaDict", + "reporterEamail: \"A mail\"", + Bool(must=[Match(reporter__email={'query': 'A mail', 'fuzziness': 'auto'})]), + ) + + def test_filter_in_multi_field(): filter_generation( "articlesInMultiField", From 721894fe1a4367293cfb923afbea3e3e321f00a8 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Sun, 9 Jun 2019 10:38:46 -0400 Subject: [PATCH 11/16] refactoring of elasticsearch filter test --- .../elasticsearch/tests/commons.py | 68 ++++ .../elasticsearch/tests/test_fields.py | 325 ------------------ .../elasticsearch/tests/test_filter_fields.py | 98 ++++++ .../tests/test_filter_filters.py | 114 ++++++ .../tests/test_filter_processor.py | 80 +++++ 5 files changed, 360 insertions(+), 325 deletions(-) create mode 100644 graphene_django/elasticsearch/tests/commons.py delete mode 100644 graphene_django/elasticsearch/tests/test_fields.py create mode 100644 graphene_django/elasticsearch/tests/test_filter_fields.py create mode 100644 graphene_django/elasticsearch/tests/test_filter_filters.py create mode 100644 graphene_django/elasticsearch/tests/test_filter_processor.py diff --git a/graphene_django/elasticsearch/tests/commons.py b/graphene_django/elasticsearch/tests/commons.py new file mode 100644 index 000000000..0848517dd --- /dev/null +++ b/graphene_django/elasticsearch/tests/commons.py @@ -0,0 +1,68 @@ +from datetime import datetime + +from mock import mock + +from graphene import Schema + 
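+# Shared helpers for the Elasticsearch filter tests: fake_data() seeds two
+# Article rows, generate_query() builds a relay-style GraphQL query string,
+# and filter_generation() patches the django_elasticsearch_dsl Search methods
+# so each test can assert the exact elasticsearch_dsl query object produced.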
+from graphene_django.tests.models import Article, Reporter +from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument + + +def fake_data(): + r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com") + a1 = Article.objects.create( + headline="a1", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + a2 = Article.objects.create( + headline="a2", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + return a1, a2 + + +def generate_query(field, query_str): + query = """ + query { + %s(%s) { + edges { + node { + headline + } + } + } + } + """ % (field, query_str) + return query + + +def filter_generation(field, query_str, expected_arguments, method_to_mock="query"): + a1, a2 = fake_data() + + query = generate_query(field, query_str) + + mock_count = mock.Mock(return_value=3) + mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( + return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) + ))) + mock_query = mock.Mock(return_value=ArticleDocument.search()) + + with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \ + mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \ + mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query): + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert not result.errors + + mock_query.assert_called_with(expected_arguments) + + assert len(result.data[field]["edges"]) == 2 + assert result.data[field]["edges"][0]["node"]["headline"] == "a1" + assert result.data[field]["edges"][1]["node"]["headline"] == "a2" diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py deleted file mode 100644 index 61e6584e3..000000000 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ /dev/null @@ -1,325 +0,0 @@ -from datetime import datetime - -import pytest -from py.test import raises -from mock import mock - -from elasticsearch_dsl.query import Bool, Match, Term, Wildcard, MatchPhrase, MatchPhrasePrefix, Range, Terms, Exists -from graphene import Schema, ObjectType - -from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField -from graphene_django.elasticsearch.filter.filterset import FilterSetES -from graphene_django.filter.tests.test_fields import ArticleNode -from graphene_django.tests.models import Article, Reporter -from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED -from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument - -pytestmark = [] - -if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: - pytestmark.append( - pytest.mark.skipif( - True, reason="django_filters not installed or not compatible" - ) - ) - -pytestmark.append(pytest.mark.django_db) - - -def fake_data(): - r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com") - a1 = Article.objects.create( - headline="a1", - pub_date=datetime.now(), - pub_date_time=datetime.now(), - reporter=r1, - editor=r1, - ) - a2 = Article.objects.create( - headline="a2", - pub_date=datetime.now(), - pub_date_time=datetime.now(), - reporter=r1, - editor=r1, - ) - return a1, a2 - - -def generate_query(field, query_str): - query = """ - query { - %s(%s) { - edges { - node { - headline - } - } - } - } - """ % (field, query_str) - return query - - -def filter_generation(field, query_str, 
expected_arguments, method_to_mock="query"): - a1, a2 = fake_data() - - query = generate_query(field, query_str) - - mock_count = mock.Mock(return_value=3) - mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( - return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) - ))) - mock_query = mock.Mock(return_value=ArticleDocument.search()) - - with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \ - mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \ - mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query): - schema = Schema(query=ESFilterQuery) - result = schema.execute(query) - - assert not result.errors - - mock_query.assert_called_with(expected_arguments) - - assert len(result.data[field]["edges"]) == 2 - assert result.data[field]["edges"][0]["node"]["headline"] == "a1" - assert result.data[field]["edges"][1]["node"]["headline"] == "a2" - - -def test_filter_string(): - filter_generation( - "articlesAsField", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), - ) - - -def test_filter_string_date(): - filter_generation( - "articlesAsField", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), - ) - - -def test_filter_as_field_order_by(): - filter_generation( - "articlesAsField", - "headline: \"A text\", sort:{order:desc, field:id}", - {'id': {'order': 'desc'}}, - "sort" - ) - - -def test_filter_as_field_order_by_dict(): - filter_generation( - "articlesInMeta", - "headline: \"A text\", sort:{order:desc, field:id}", - {'es_id': {'order': 'desc'}}, - "sort" - ) - - -def test_filter_in_meta(): - filter_generation( - "articlesInMeta", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), - ) - - -def test_filter_in_meta_dict(): - filter_generation( - "articlesInMetaDict", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), - ) - - -def test_filter_in_meta_dict_foreign(): - filter_generation( - "articlesInMetaDict", - "reporterEamail: \"A mail\"", - Bool(must=[Match(reporter__email={'query': 'A mail', 'fuzziness': 'auto'})]), - ) - - -def test_filter_in_multi_field(): - filter_generation( - "articlesInMultiField", - "contain: \"A text\"", - Bool(must=[Bool(should=[ - Match(headline={'query': 'A text', 'fuzziness': 'auto'}), - Match(lang={'query': 'A text', 'fuzziness': 'auto'}) - ])]), - ) - - -def test_filter_generating_all(): - filter_generation( - "articlesInGenerateAll", - "headline: \"A text\", " - "pubDate: \"0000-00-00\", " - "pubDateTime: \"00:00:00\", " - "lang: \"es\", " - "importance: 1, ", - Bool(must=[ - Match(headline={'query': 'A text', 'fuzziness': 'auto'}), - Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}), - Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}), - Match(lang={'query': 'es', 'fuzziness': 'auto'}), - Term(importance=1) - ]), - ) - - -def test_filter_generating_exclude(): - query = generate_query("articlesInExcludes", "headline: \"A text\", ") - - schema = Schema(query=ESFilterQuery) - result = schema.execute(query) - - assert len(result.errors) > 0 - - -def test_filter_bad_processor(): - class ArticleFilterBadProcessor(FilterSetES): - """Article Filter for ES""" - - class Meta(object): - """Metaclass data""" - index = ArticleDocument - includes = { - 'headline': { - 'lookup_expressions': ['bad_processor'] - } - } - - with raises(ValueError) as error_info: - 
DjangoESFilterConnectionField( - ArticleNode, filterset_class=ArticleFilterBadProcessor - ) - - assert "bad_processor" in str(error_info.value) - - -def test_filter_field_without_filterset_class(): - with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode - ) - - assert "filterset_class" in str(error_info.value) - - -def test_filter_field_with_fields(): - with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode, fields=['headline'] - ) - - assert "fields" in str(error_info.value) - - -def test_filter_field_with_order_by(): - with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode, order_by=['headline'] - ) - - assert "order_by" in str(error_info.value) - - -def test_filter_filterset_without_index(): - with raises(ValueError) as error_info: - class ArticleFilterBadProcessor(FilterSetES): - """Article Filter for ES""" - - class Meta(object): - """Metaclass data""" - - DjangoESFilterConnectionField( - ArticleNode, filterset_class=ArticleFilterBadProcessor - ) - - assert "Index in Meta" in str(error_info.value) - - -def test_filter_filterset_without_xcludes(): - with raises(ValueError) as error_info: - class ArticleFilterBadProcessor(FilterSetES): - """Article Filter for ES""" - - class Meta(object): - """Metaclass data""" - index = ArticleDocument - - DjangoESFilterConnectionField( - ArticleNode, filterset_class=ArticleFilterBadProcessor - ) - - assert "includes or excludes field in Meta" in str(error_info.value) - - -def test_processor_term(): - filter_generation( - "articlesInMetaDict", - "headlineTerm: \"A text\"", - Bool(must=[Term(headline='A text')]), - ) - - -def test_processor_regex(): - filter_generation( - "articlesInMetaDict", - "headlineRegex: \"A text\"", - Bool(must=[Wildcard(headline='A text')]), - ) - - -def test_processor_phrase(): - filter_generation( - "articlesInMetaDict", - "headlinePhrase: \"A text\"", - Bool(must=[MatchPhrase(headline={'query': 'A text'})]), - ) - - -def test_processor_prefix(): - filter_generation( - "articlesInMetaDict", - "headlinePrefix: \"A text\"", - Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]), - ) - - -def test_processor_in(): - filter_generation( - "articlesInMetaDict", - "headlineIn: [\"A text 1\", \"A text 2\"]", - Bool(must=[Terms(headline=['A text 1', 'A text 2'])]), - ) - - -def test_processor_exits(): - filter_generation( - "articlesInMetaDict", - "headlineExits: true", - Bool(must=[Bool(must=[Exists(field='headline')])]), - ) - - -def test_processor_lte(): - filter_generation( - "articlesInMetaDict", - "headlineLte: \"A text\"", - Bool(must=Range(headline={'lte': 'A text'})), - ) - - -def test_processor_gte(): - filter_generation( - "articlesInMetaDict", - "headlineGte: \"A text\"", - Bool(must=Range(headline={'gte': 'A text'})), - ) diff --git a/graphene_django/elasticsearch/tests/test_filter_fields.py b/graphene_django/elasticsearch/tests/test_filter_fields.py new file mode 100644 index 000000000..b5eea4359 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_fields.py @@ -0,0 +1,98 @@ +import pytest +from py.test import raises + +from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField +from graphene_django.elasticsearch.filter.filterset import FilterSetES +from graphene_django.filter.tests.test_fields import ArticleNode +from graphene_django.elasticsearch.tests.filters import ArticleDocument +from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED + 
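+# These tests exercise validation at schema-construction time (an unknown
+# processor name, a missing filterset_class, unsupported `fields`/`order_by`
+# arguments, and incomplete Meta options), so each one asserts on the
+# ValueError raised rather than on a generated Elasticsearch query.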
+pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_filter_bad_processor(): + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + index = ArticleDocument + includes = { + 'headline': { + 'lookup_expressions': ['bad_processor'] + } + } + + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "bad_processor" in str(error_info.value) + + +def test_filter_field_without_filterset_class(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode + ) + + assert "filterset_class" in str(error_info.value) + + +def test_filter_field_with_fields(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, fields=['headline'] + ) + + assert "fields" in str(error_info.value) + + +def test_filter_field_with_order_by(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, order_by=['headline'] + ) + + assert "order_by" in str(error_info.value) + + +def test_filter_filterset_without_index(): + with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "Index in Meta" in str(error_info.value) + + +def test_filter_filterset_without_xcludes(): + with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + index = ArticleDocument + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "includes or excludes field in Meta" in str(error_info.value) diff --git a/graphene_django/elasticsearch/tests/test_filter_filters.py b/graphene_django/elasticsearch/tests/test_filter_filters.py new file mode 100644 index 000000000..a20ff72a1 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_filters.py @@ -0,0 +1,114 @@ +import pytest +from elasticsearch_dsl.query import Bool, Match, Term +from graphene import Schema + +from graphene_django.elasticsearch.tests.commons import filter_generation, generate_query +from graphene_django.elasticsearch.tests.filters import ESFilterQuery +from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED + +pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_filter_string(): + filter_generation( + "articlesAsField", + "headline: \"A text\"", + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + ) + + +def test_filter_string_date(): + filter_generation( + "articlesAsField", + "headline: \"A text\"", + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + ) + + +def test_filter_as_field_order_by(): + filter_generation( + "articlesAsField", + "headline: \"A text\", sort:{order:desc, field:id}", + {'id': {'order': 'desc'}}, + "sort" + ) + + +def 
test_filter_as_field_order_by_dict(): + filter_generation( + "articlesInMeta", + "headline: \"A text\", sort:{order:desc, field:id}", + {'es_id': {'order': 'desc'}}, + "sort" + ) + + +def test_filter_in_meta(): + filter_generation( + "articlesInMeta", + "headline: \"A text\"", + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + ) + + +def test_filter_in_meta_dict(): + filter_generation( + "articlesInMetaDict", + "headline: \"A text\"", + Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + ) + + +def test_filter_in_meta_dict_foreign(): + filter_generation( + "articlesInMetaDict", + "reporterEmail: \"A mail\"", + Bool(must=[Match(reporter__email={'query': 'A mail', 'fuzziness': 'auto'})]), + ) + + +def test_filter_in_multi_field(): + filter_generation( + "articlesInMultiField", + "contain: \"A text\"", + Bool(must=[Bool(should=[ + Match(headline={'query': 'A text', 'fuzziness': 'auto'}), + Match(lang={'query': 'A text', 'fuzziness': 'auto'}) + ])]), + ) + + +def test_filter_generating_all(): + filter_generation( + "articlesInGenerateAll", + "headline: \"A text\", " + "pubDate: \"0000-00-00\", " + "pubDateTime: \"00:00:00\", " + "lang: \"es\", " + "importance: 1, ", + Bool(must=[ + Match(headline={'query': 'A text', 'fuzziness': 'auto'}), + Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}), + Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}), + Match(lang={'query': 'es', 'fuzziness': 'auto'}), + Term(importance=1) + ]), + ) + + +def test_filter_generating_exclude(): + query = generate_query("articlesInExcludes", "headline: \"A text\", ") + + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert len(result.errors) > 0 diff --git a/graphene_django/elasticsearch/tests/test_filter_processor.py b/graphene_django/elasticsearch/tests/test_filter_processor.py new file mode 100644 index 000000000..2d677e7a7 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_processor.py @@ -0,0 +1,80 @@ +import pytest +from elasticsearch_dsl.query import Bool, Term, Wildcard, MatchPhrase, MatchPhrasePrefix, Range, Terms, Exists + +from graphene_django.elasticsearch.tests.commons import filter_generation +from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED + +pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_processor_term(): + filter_generation( + "articlesInMetaDict", + "headlineTerm: \"A text\"", + Bool(must=[Term(headline='A text')]), + ) + + +def test_processor_regex(): + filter_generation( + "articlesInMetaDict", + "headlineRegex: \"A text\"", + Bool(must=[Wildcard(headline='A text')]), + ) + + +def test_processor_phrase(): + filter_generation( + "articlesInMetaDict", + "headlinePhrase: \"A text\"", + Bool(must=[MatchPhrase(headline={'query': 'A text'})]), + ) + + +def test_processor_prefix(): + filter_generation( + "articlesInMetaDict", + "headlinePrefix: \"A text\"", + Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]), + ) + + +def test_processor_in(): + filter_generation( + "articlesInMetaDict", + "headlineIn: [\"A text 1\", \"A text 2\"]", + Bool(must=[Terms(headline=['A text 1', 'A text 2'])]), + ) + + +def test_processor_exits(): + filter_generation( + "articlesInMetaDict", + "headlineExits: true", + 
Bool(must=[Bool(must=[Exists(field='headline')])]), + ) + + +def test_processor_lte(): + filter_generation( + "articlesInMetaDict", + "headlineLte: \"A text\"", + Bool(must=Range(headline={'lte': 'A text'})), + ) + + +def test_processor_gte(): + filter_generation( + "articlesInMetaDict", + "headlineGte: \"A text\"", + Bool(must=Range(headline={'gte': 'A text'})), + ) From fa0de7b33414a2905e87a7662f486476ed2ff52f Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Sun, 9 Jun 2019 11:49:16 -0400 Subject: [PATCH 12/16] replace the query generator decorator with an observable of processors --- .../elasticsearch/filter/filters.py | 7 +++--- .../elasticsearch/filter/filterset.py | 23 +++++++++++-------- .../elasticsearch/filter/observable.py | 18 +++++++++++++++ .../elasticsearch/filter/processors.py | 21 +++++------------ 4 files changed, 40 insertions(+), 29 deletions(-) create mode 100644 graphene_django/elasticsearch/filter/observable.py diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 4044a995c..2c557859e 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -51,13 +51,12 @@ def build_processor(self, variant): processor_class = PROCESSORS[variant] return processor_class(self, self.processor) - def generate_es_query(self, arguments): + def attach_processor(self, observer): """ Generating a query based on the arguments passed to graphene field - :param arguments: parameters of the query. - :return: Returns a elasticsearch_dsl.Q query object. + :param observer: observer to attach the processors. """ - return self.processor.generate_es_query(arguments) + return self.processor.to_attach(observer) class StringFilterES(FilterES): diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index aa4ae4b08..83fbd2a6c 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -10,6 +10,7 @@ from django_filters.utils import try_dbfield from django_filters.filterset import BaseFilterSet +from graphene_django.elasticsearch.filter.observable import FieldResolverObservable from .filters import StringFilterES, FilterES, BoolFilterES, NumberFilterES # Basic conversion from ES fields to FilterES fields @@ -153,12 +154,13 @@ def __new__(mcs, name, bases, attrs): meta_filters = mcs.get_meta_filters(new_class._meta) declared_filters.update(meta_filters) - new_class.filters_es = declared_filters - # recollecting registered graphene fields + # recollecting registered graphene fields and attaching to observable base_filters = OrderedDict() + observable = FieldResolverObservable() for filter_name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) + filter_field.attach_processor(observable) # adding sort field sort_fields = {} @@ -169,6 +171,7 @@ def __new__(mcs, name, bases, attrs): new_class.sort_fields = sort_fields new_class.base_filters = base_filters + new_class.observable = observable return new_class @@ -380,13 +383,13 @@ def generate_es_query(self): # if the query have data if len(self.data): # for each field passed to the query - for name, filter in six.iteritems(self.filters_es): - # If a target filter is en FilterEs - if isinstance(filter, FilterES): - # It is generated a query or response None if the filter don't have data - query_filter = filter.generate_es_query(self.data) - - if query_filter is not None: - 
query_base += query_filter + for name, value in six.iteritems(self.data): + # ignore sort field + if name == "sort": + continue + + # dispatch observable resolve + resolve = self.observable.resolve(name, value) + query_base += resolve return query_base diff --git a/graphene_django/elasticsearch/filter/observable.py b/graphene_django/elasticsearch/filter/observable.py new file mode 100644 index 000000000..d4273ebbf --- /dev/null +++ b/graphene_django/elasticsearch/filter/observable.py @@ -0,0 +1,18 @@ + +class FieldResolverObservable(object): + """Observable to attach processor by field and resolve it with the field value""" + + def __init__(self): + """A new Observable by filterset""" + super(FieldResolverObservable).__init__() + self._fields = {} + + def attach(self, field, processor): + """Add processor to fields""" + self._fields[field] = processor + + def resolve(self, field, value): + """Execute processor of the specific field with the value""" + if field in self._fields: + processor = self._fields[field] + return processor.build_query(value) diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py index 7180f152c..8a6e8e3d6 100644 --- a/graphene_django/elasticsearch/filter/processors.py +++ b/graphene_django/elasticsearch/filter/processors.py @@ -33,24 +33,15 @@ def get_type(self): """Define the argument for graphene field""" return self.filter_es.argument - def generate_es_query(self, data): + def to_attach(self, observer): """ - Define the argument for graphene field - :param data: Data passed to field in the query + Add this processor to FieldResolverObservable + :param observer: observer to attach the processors. """ - if self.variant_name in data: - value = data.get(self.variant_name) - self_query = self._build_query(value) - else: - self_query = Q("bool") + observer.attach(self.variant_name, self) if self.parent_processor is not None: - parent_query = self.parent_processor.generate_es_query(data) - parent_query += self_query - return parent_query - - else: - return self_query + self.parent_processor.to_attach(observer) def _build_field(self): """ @@ -74,7 +65,7 @@ def _get_variant_name(self): return variant_name - def _build_query(self, value): + def build_query(self, value): """ Make a query based on specific processor query :param value: Value passed to this processor From 3778806baec578bf3313f57c00f8070590cacd78 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Mon, 10 Jun 2019 12:15:14 -0400 Subject: [PATCH 13/16] create a ProcessorFactory --- .../elasticsearch/filter/filters.py | 18 ++------- .../elasticsearch/filter/processors.py | 39 +++++++++++++------ 2 files changed, 31 insertions(+), 26 deletions(-) diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 2c557859e..27aaca68b 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -1,6 +1,6 @@ """Filters to ElasticSearch""" from graphene import String, Boolean, Int -from graphene_django.elasticsearch.filter.processors import PROCESSORS +from graphene_django.elasticsearch.filter.processors import ProcessorFactory class FilterES(object): @@ -31,26 +31,14 @@ def __init__(self, field_name, field_name_es=None, lookup_expressions=None, self.processor = None if self.lookup_expressions: for variant in self.lookup_expressions: - if variant in PROCESSORS: - self.processor = self.build_processor(variant) - else: - raise ValueError('We do 
not have processor: %s.' % variant) + self.processor = ProcessorFactory.make_processor(variant, self, self.processor) else: - self.processor = self.build_processor(self.default_processor) + self.processor = ProcessorFactory.make_processor(self.default_processor, self, self.processor) self.argument = argument or self.default_argument self.fields = self.processor.generate_field() - def build_processor(self, variant): - """ - Create a new processor based on the name - :param variant: Processor name - :return: Returns a Processor instance - """ - processor_class = PROCESSORS[variant] - return processor_class(self, self.processor) - def attach_processor(self, observer): """ Generating a query based on the arguments passed to graphene field diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py index 8a6e8e3d6..03acbfdec 100644 --- a/graphene_django/elasticsearch/filter/processors.py +++ b/graphene_django/elasticsearch/filter/processors.py @@ -234,14 +234,31 @@ def _get_query(name, value): return Q("range", **{name: {'gte': value}}) -PROCESSORS = { - "contains": ContainsProcessor, - "term": TermProcessor, - "regex": RegexProcessor, - "phrase": PhraseProcessor, - "prefix": PrefixProcessor, - "in": InProcessor, - "exits": ExitsProcessor, - "lte": LteProcessor, - "gte": GteProcessor, -} +class ProcessorFactory(object): + processors = { + "contains": ContainsProcessor, + "term": TermProcessor, + "regex": RegexProcessor, + "phrase": PhraseProcessor, + "prefix": PrefixProcessor, + "in": InProcessor, + "exits": ExitsProcessor, + "lte": LteProcessor, + "gte": GteProcessor, + } + + @classmethod + def make_processor(cls, variant, filter_es, parent_processor): + """ + Create a new processor based on the name + :param variant: Processor name + :param filter_es: Target filter + :param parent_processor: Parent in the chain + :return: Returns a Processor instance + """ + if variant in cls.processors: + processor_class = cls.processors[variant] + return processor_class(filter_es, parent_processor) + + else: + raise ValueError('We do not have processor: %s.' 
% variant) From 68e940c15dec6bc16afbb301290939fef332e336 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Mon, 10 Jun 2019 12:19:19 -0400 Subject: [PATCH 14/16] run `make format` --- .../elasticsearch/filter/fields.py | 27 ++-- .../elasticsearch/filter/filters.py | 26 +++- .../elasticsearch/filter/filterset.py | 117 +++++++++++------- .../elasticsearch/filter/observable.py | 1 - .../elasticsearch/filter/processors.py | 65 +++++----- graphene_django/elasticsearch/filter/proxy.py | 1 - .../elasticsearch/tests/commons.py | 25 ++-- .../elasticsearch/tests/filters.py | 80 +++++++----- .../elasticsearch/tests/test_filter_fields.py | 27 ++-- .../tests/test_filter_filters.py | 84 +++++++------ .../tests/test_filter_processor.py | 46 ++++--- 11 files changed, 300 insertions(+), 199 deletions(-) diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py index cd21fd001..0e1369d13 100644 --- a/graphene_django/elasticsearch/filter/fields.py +++ b/graphene_django/elasticsearch/filter/fields.py @@ -4,25 +4,36 @@ class DjangoESFilterConnectionField(DjangoFilterConnectionField): """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge""" + def __init__(self, object_type, *args, **kwargs): """Validating field allowed for this connection :param object_type: DjangoObjectType """ - fields = kwargs.get('fields', None) + fields = kwargs.get("fields", None) if fields is not None: - raise ValueError('DjangoESFilterConnectionField do not permit argument fields yet.') + raise ValueError( + "DjangoESFilterConnectionField do not permit argument fields yet." + ) - order_by = kwargs.get('order_by', None) + order_by = kwargs.get("order_by", None) if order_by is not None: - raise ValueError('DjangoESFilterConnectionField do not permit argument order_by yet.') + raise ValueError( + "DjangoESFilterConnectionField do not permit argument order_by yet." + ) - filterset_class = kwargs.get('filterset_class', None) + filterset_class = kwargs.get("filterset_class", None) if filterset_class is None: - raise ValueError('You should provide a FilterSetES as filterset_class argument.') + raise ValueError( + "You should provide a FilterSetES as filterset_class argument." + ) - super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs) + super(DjangoESFilterConnectionField, self).__init__( + object_type, *args, **kwargs + ) - self.manager = ManagerProxy(search_manager=self.filterset_class._meta.index.search) + self.manager = ManagerProxy( + search_manager=self.filterset_class._meta.index.search + ) def get_manager(self): """Returning a ManagerBridge to replace the direct use over the Model manager""" diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py index 27aaca68b..745ed9318 100644 --- a/graphene_django/elasticsearch/filter/filters.py +++ b/graphene_django/elasticsearch/filter/filters.py @@ -5,11 +5,18 @@ class FilterES(object): """Fields specific to ElasticSearch.""" - default_processor = 'term' + + default_processor = "term" default_argument = String() - def __init__(self, field_name, field_name_es=None, lookup_expressions=None, - default_processor=None, argument=None): + def __init__( + self, + field_name, + field_name_es=None, + lookup_expressions=None, + default_processor=None, + argument=None, + ): """ :param field_name: Name of the field. This is the name that will be exported. :param field_name_es: Path to the index attr that will be used as filter. 
@@ -31,10 +38,14 @@ def __init__(self, field_name, field_name_es=None, lookup_expressions=None, self.processor = None if self.lookup_expressions: for variant in self.lookup_expressions: - self.processor = ProcessorFactory.make_processor(variant, self, self.processor) + self.processor = ProcessorFactory.make_processor( + variant, self, self.processor + ) else: - self.processor = ProcessorFactory.make_processor(self.default_processor, self, self.processor) + self.processor = ProcessorFactory.make_processor( + self.default_processor, self, self.processor + ) self.argument = argument or self.default_argument self.fields = self.processor.generate_field() @@ -49,14 +60,17 @@ def attach_processor(self, observer): class StringFilterES(FilterES): """String Fields specific to ElasticSearch.""" - default_processor = 'contains' + + default_processor = "contains" class BoolFilterES(FilterES): """Boolean filter to ES""" + default_argument = Boolean() class NumberFilterES(FilterES): """Filter to an numeric value to ES""" + default_argument = Int() diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 83fbd2a6c..70d17e187 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -3,8 +3,19 @@ from collections import OrderedDict from elasticsearch_dsl import Q from graphene import Enum, InputObjectType, Field, Int, Float -from django_elasticsearch_dsl import StringField, TextField, BooleanField, IntegerField, FloatField, LongField, \ - ShortField, DoubleField, DateField, KeywordField, ObjectField +from django_elasticsearch_dsl import ( + StringField, + TextField, + BooleanField, + IntegerField, + FloatField, + LongField, + ShortField, + DoubleField, + DateField, + KeywordField, + ObjectField, +) from django.utils import six from django_filters.utils import try_dbfield @@ -15,34 +26,36 @@ # Basic conversion from ES fields to FilterES fields FILTER_FOR_ESFIELD_DEFAULTS = { - StringField: {'filter_class': StringFilterES}, - TextField: {'filter_class': StringFilterES}, - BooleanField: {'filter_class': BoolFilterES}, - IntegerField: {'filter_class': NumberFilterES}, - FloatField: {'filter_class': NumberFilterES, 'argument': Float()}, - LongField: {'filter_class': NumberFilterES, 'argument': Int()}, - ShortField: {'filter_class': NumberFilterES, 'argument': Int()}, - DoubleField: {'filter_class': NumberFilterES, 'argument': Int()}, - DateField: {'filter_class': StringFilterES}, - KeywordField: {'filter_class': StringFilterES}, + StringField: {"filter_class": StringFilterES}, + TextField: {"filter_class": StringFilterES}, + BooleanField: {"filter_class": BoolFilterES}, + IntegerField: {"filter_class": NumberFilterES}, + FloatField: {"filter_class": NumberFilterES, "argument": Float()}, + LongField: {"filter_class": NumberFilterES, "argument": Int()}, + ShortField: {"filter_class": NumberFilterES, "argument": Int()}, + DoubleField: {"filter_class": NumberFilterES, "argument": Int()}, + DateField: {"filter_class": StringFilterES}, + KeywordField: {"filter_class": StringFilterES}, } class OrderEnum(Enum): """Order enum to desc-asc""" - asc = 'asc' - desc = 'desc' + + asc = "asc" + desc = "desc" @property def description(self): """Description to order enum""" if self == OrderEnum.asc: - return 'Ascendant order' - return 'Descendant order' + return "Ascendant order" + return "Descendant order" class FilterSetESOptions(object): """Basic FilterSetES options to Metadata""" + def 
__init__(self, options=None): """ The field option is combined with the index to automatically generate @@ -123,15 +136,15 @@ class Meta: } """ - self.index = getattr(options, 'index', None) - self.includes = getattr(options, 'includes', None) - self.excludes = getattr(options, 'excludes', None) - self.order_by = getattr(options, 'order_by', None) + self.index = getattr(options, "index", None) + self.includes = getattr(options, "includes", None) + self.excludes = getattr(options, "excludes", None) + self.order_by = getattr(options, "order_by", None) if self.index is None: - raise ValueError('You need provide a Index in Meta.') + raise ValueError("You need provide a Index in Meta.") if self.excludes is None and self.includes is None: - raise ValueError('You need provide includes or excludes field in Meta.') + raise ValueError("You need provide includes or excludes field in Meta.") self.model = self.index._doc_type.model if self.index else None @@ -143,12 +156,12 @@ def __new__(mcs, name, bases, attrs): """Get filters declared explicitly in the class""" # get declared as field declared_filters = mcs.get_declared_filters(bases, attrs) - attrs['declared_filters'] = declared_filters + attrs["declared_filters"] = declared_filters new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs) if issubclass(new_class, BaseFilterSet): - new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None)) + new_class._meta = FilterSetESOptions(getattr(new_class, "Meta", None)) # get declared as meta meta_filters = mcs.get_meta_filters(new_class._meta) @@ -167,7 +180,7 @@ def __new__(mcs, name, bases, attrs): if new_class._meta.order_by is not None: sort_fields = mcs.generate_sort_field(new_class._meta.order_by) sort_type = mcs.create_sort_enum(name, sort_fields) - base_filters['sort'] = sort_type() + base_filters["sort"] = sort_type() new_class.sort_fields = sort_fields new_class.base_filters = base_filters @@ -193,9 +206,12 @@ def get_declared_filters(mcs, bases, attrs): # Merge declared filters from base classes for base in reversed(bases): - if hasattr(base, 'declared_filters'): - filters = [(name, field) for name, field in base.declared_filters.items() if name not in attrs] \ - + filters + if hasattr(base, "declared_filters"): + filters = [ + (name, field) + for name, field in base.declared_filters.items() + if name not in attrs + ] + filters return OrderedDict(filters) @@ -232,13 +248,19 @@ def get_index_fields(mcs, meta): if isinstance(meta_includes, dict): # The lookup_expr are defined in Meta - filter_fields = [(name, index_fields[name], data) for name, data in meta_includes.items()] + filter_fields = [ + (name, index_fields[name], data) for name, data in meta_includes.items() + ] elif meta_includes is not None: # The lookup_expr are not defined filter_fields = [(name, index_fields[name], None) for name in meta_includes] else: # No `includes` are declared in meta, so all not `excludes` fields from index will be converted to filters - filter_fields = [(name, field, None) for name, field in index_fields.items() if name not in meta_excludes] + filter_fields = [ + (name, field, None) + for name, field in index_fields.items() + if name not in meta_excludes + ] return filter_fields @classmethod @@ -251,7 +273,9 @@ def get_filter_object(mcs, name, field, data): """ index_fields = OrderedDict() - properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {}) + properties = field._doc_class._doc_type.mapping.properties._params.get( + "properties", {} + ) for 
inner_name, inner_field in properties.items(): @@ -261,7 +285,9 @@ def get_filter_object(mcs, name, field, data): inner_data = data[inner_name] if data else None - filter_exp = mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name) + filter_exp = mcs.get_filter_exp( + inner_name, inner_field, inner_data, root=name + ) index_fields.update({inner_name: filter_exp}) return index_fields @@ -276,11 +302,11 @@ def get_filter_exp(mcs, name, field, data=None, root=None): :param root: root name """ field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} - filter_class = field_data.get('filter_class') + filter_class = field_data.get("filter_class") kwargs = copy.deepcopy(data) if data is not None else {} - kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data) + kwargs["field_name"], kwargs["field_name_es"] = mcs.get_name(name, root, data) return filter_class(**kwargs) @@ -292,12 +318,14 @@ def get_name(name, root, data): :param data: lookup_expr :param root: root name """ - field_name = data.get('field_name', None) if data else None - field_name_es = data.get('field_name_es', None) if data else None + field_name = data.get("field_name", None) if data else None + field_name_es = data.get("field_name_es", None) if data else None if not field_name: - field_name = '{root}_{name}'.format(root=root, name=name) if root else name + field_name = "{root}_{name}".format(root=root, name=name) if root else name if not field_name_es: - field_name_es = '{root}.{name}'.format(root=root, name=name) if root else name + field_name_es = ( + "{root}.{name}".format(root=root, name=name) if root else name + ) return field_name, field_name_es @staticmethod @@ -311,8 +339,9 @@ def create_sort_enum(name, sort_fields): """ sort_enum_name = "{}SortFields".format(name) - sort_descriptions = {field: "Sort by {field}".format(field=field) for field in - sort_fields.keys()} + sort_descriptions = { + field: "Sort by {field}".format(field=field) for field in sort_fields.keys() + } sort_fields = [(field, field) for field in sort_fields.keys()] class EnumWithDescriptionsType(object): @@ -327,6 +356,7 @@ def description(self): class SortType(InputObjectType): """Sort Type""" + order = Field(OrderEnum) field = Field(enum, required=True) @@ -349,6 +379,7 @@ def generate_sort_field(order_by): class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)): """FilterSet specific for ElasticSearch.""" + def __init__(self, data, queryset, request): """ Receiving params necessaries to resolved the data @@ -367,9 +398,9 @@ def qs(self): self.es_query.apply_query("query", query_base) self.es_query.apply_query("source", ["id"]) - if 'sort' in self.data: - sort_data = self.data['sort'].copy() - field_name = self.sort_fields[sort_data.pop('field')] + if "sort" in self.data: + sort_data = self.data["sort"].copy() + field_name = self.sort_fields[sort_data.pop("field")] self.es_query.apply_query("sort", {field_name: sort_data}) return self.es_query diff --git a/graphene_django/elasticsearch/filter/observable.py b/graphene_django/elasticsearch/filter/observable.py index d4273ebbf..c127fda26 100644 --- a/graphene_django/elasticsearch/filter/observable.py +++ b/graphene_django/elasticsearch/filter/observable.py @@ -1,4 +1,3 @@ - class FieldResolverObservable(object): """Observable to attach processor by field and resolve it with the field value""" diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py index 03acbfdec..8f8bd3d13 
100644 --- a/graphene_django/elasticsearch/filter/processors.py +++ b/graphene_django/elasticsearch/filter/processors.py @@ -5,7 +5,7 @@ class Processor(object): - suffix_expr = 'term' + suffix_expr = "term" def __init__(self, filter_es, parent_processor=None): """ @@ -74,7 +74,9 @@ def build_query(self, value): result = len(self.filter_es.field_name_es) if result > 1: - queries = [self._get_query(name, value) for name in self.filter_es.field_name_es] + queries = [ + self._get_query(name, value) for name in self.filter_es.field_name_es + ] return Q("bool", must={"bool": {"should": queries}}) return Q("bool", must=self._get_query(self.filter_es.field_name_es[0], value)) @@ -87,17 +89,19 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('term', **{name: value}) + return Q("term", **{name: value}) class TermProcessor(Processor): """Have a same behavior of parent this is only with semantic proposal""" + pass class ContainsProcessor(Processor): """fuzzy search""" - suffix_expr = 'contains' + + suffix_expr = "contains" @staticmethod def _get_query(name, value): @@ -107,16 +111,13 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('match', - **{name: { - "query": value, - "fuzziness": "auto" - }}) + return Q("match", **{name: {"query": value, "fuzziness": "auto"}}) class RegexProcessor(Processor): """Search based on regular expressions""" - suffix_expr = 'regex' + + suffix_expr = "regex" @staticmethod def _get_query(name, value): @@ -126,12 +127,13 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('wildcard', **{name: value}) + return Q("wildcard", **{name: value}) class PhraseProcessor(Processor): """Search by the union of many terms""" - suffix_expr = 'phrase' + + suffix_expr = "phrase" @staticmethod def _get_query(name, value): @@ -141,15 +143,13 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('match_phrase', - **{name: { - "query": value - }}) + return Q("match_phrase", **{name: {"query": value}}) class PrefixProcessor(Processor): """Search by the prefix of the terms""" - suffix_expr = 'prefix' + + suffix_expr = "prefix" @staticmethod def _get_query(name, value): @@ -159,15 +159,13 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('match_phrase_prefix', - **{name: { - "query": value - }}) + return Q("match_phrase_prefix", **{name: {"query": value}}) class InProcessor(Processor): """Search by many value for a field""" - suffix_expr = 'in' + + suffix_expr = "in" @staticmethod def _get_query(name, value): @@ -177,7 +175,7 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('terms', **{name: value}) + return Q("terms", **{name: value}) def get_type(self): """Change base argument by a list of base argument""" @@ -186,7 +184,8 @@ def get_type(self): class ExitsProcessor(Processor): """Search by if the field is in the document""" - suffix_expr = 'exits' + + suffix_expr = "exits" @staticmethod def _get_query(name, value): @@ -196,9 +195,9 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('bool', **{ - 'must' if value else 'must_not': {'exists': {'field': name}} - }) + return Q( + "bool", **{"must" if 
value else "must_not": {"exists": {"field": name}}} + ) def get_type(self): return Boolean() @@ -206,7 +205,8 @@ def get_type(self): class LteProcessor(Processor): """Search by range less than""" - suffix_expr = 'lte' + + suffix_expr = "lte" @staticmethod def _get_query(name, value): @@ -216,12 +216,13 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q('range', **{name: {'lte': value}}) + return Q("range", **{name: {"lte": value}}) class GteProcessor(Processor): """Search by range greater than""" - suffix_expr = 'gte' + + suffix_expr = "gte" @staticmethod def _get_query(name, value): @@ -231,7 +232,7 @@ def _get_query(name, value): :param value: Value passed to this processor :return: A elasticsearch Query """ - return Q("range", **{name: {'gte': value}}) + return Q("range", **{name: {"gte": value}}) class ProcessorFactory(object): @@ -261,4 +262,4 @@ def make_processor(cls, variant, filter_es, parent_processor): return processor_class(filter_es, parent_processor) else: - raise ValueError('We do not have processor: %s.' % variant) + raise ValueError("We do not have processor: %s." % variant) diff --git a/graphene_django/elasticsearch/filter/proxy.py b/graphene_django/elasticsearch/filter/proxy.py index f253d2a1f..bada412f0 100644 --- a/graphene_django/elasticsearch/filter/proxy.py +++ b/graphene_django/elasticsearch/filter/proxy.py @@ -1,4 +1,3 @@ - class QuerysetProxy(object): """Bridge to Queryset through ES query""" diff --git a/graphene_django/elasticsearch/tests/commons.py b/graphene_django/elasticsearch/tests/commons.py index 0848517dd..ee6ebca18 100644 --- a/graphene_django/elasticsearch/tests/commons.py +++ b/graphene_django/elasticsearch/tests/commons.py @@ -38,7 +38,10 @@ def generate_query(field, query_str): } } } - """ % (field, query_str) + """ % ( + field, + query_str, + ) return query @@ -48,14 +51,22 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer query = generate_query(field, query_str) mock_count = mock.Mock(return_value=3) - mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock( - return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) - ))) + mock_slice = mock.Mock( + return_value=mock.Mock( + to_queryset=mock.Mock( + return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) + ) + ) + ) mock_query = mock.Mock(return_value=ArticleDocument.search()) - with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \ - mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \ - mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query): + with mock.patch( + "django_elasticsearch_dsl.search.Search.count", mock_count + ), mock.patch( + "django_elasticsearch_dsl.search.Search.__getitem__", mock_slice + ), mock.patch( + "elasticsearch_dsl.Search.%s" % method_to_mock, mock_query + ): schema = Schema(query=ESFilterQuery) result = schema.execute(query) diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index ab19b8cce..bf1c4a359 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -7,107 +7,119 @@ from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField from graphene_django.elasticsearch.filter.filterset import FilterSetES -ads_index = Index('articles') +ads_index = Index("articles") @ads_index.doc_type class ArticleDocument(DocType): """Article document 
describing Index""" + class Meta(object): """Metaclass config""" + model = Article - fields = [ - 'id', - 'headline', - 'pub_date', - 'pub_date_time', - 'lang', - 'importance', - ] + fields = ["id", "headline", "pub_date", "pub_date_time", "lang", "importance"] related_models = (Reporter,) - reporter = fields.ObjectField(properties={ - 'id': fields.IntegerField(), - 'first_name': fields.KeywordField(), - 'email': fields.KeywordField(), - }) + reporter = fields.ObjectField( + properties={ + "id": fields.IntegerField(), + "first_name": fields.KeywordField(), + "email": fields.KeywordField(), + } + ) class ArticleFilterESAsField(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument includes = [] - order_by = ['id'] + order_by = ["id"] - headline = filters.StringFilterES(field_name='headline', lookup_expressions=['term', 'contains']) + headline = filters.StringFilterES( + field_name="headline", lookup_expressions=["term", "contains"] + ) class ArticleFilterESInMeta(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument - includes = ['id', 'headline'] - order_by = {'id': 'es_id'} + includes = ["id", "headline"] + order_by = {"id": "es_id"} class ArticleFilterESInMetaDict(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument includes = { - 'headline': { - 'lookup_expressions': [ - 'term', - 'contains', - 'regex', - 'phrase', - 'prefix', - 'in', - 'exits', - 'lte', - 'gte', + "headline": { + "lookup_expressions": [ + "term", + "contains", + "regex", + "phrase", + "prefix", + "in", + "exits", + "lte", + "gte", ] }, - 'reporter': {}, + "reporter": {}, } class ArticleFilterMultiField(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument includes = [] headline = filters.StringFilterES( - field_name='contain', - field_name_es=['headline', 'lang'], - lookup_expressions=['contains'] + field_name="contain", + field_name_es=["headline", "lang"], + lookup_expressions=["contains"], ) class ArticleFilterGenerateAll(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument excludes = [] class ArticleFilterExcludes(FilterSetES): """Article Filter for ES""" + class Meta(object): """Metaclass data""" + index = ArticleDocument - excludes = ['headline'] + excludes = ["headline"] class ESFilterQuery(ObjectType): """A query for ES fields""" + articles_as_field = DjangoESFilterConnectionField( ArticleNode, filterset_class=ArticleFilterESAsField ) diff --git a/graphene_django/elasticsearch/tests/test_filter_fields.py b/graphene_django/elasticsearch/tests/test_filter_fields.py index b5eea4359..7dbd73014 100644 --- a/graphene_django/elasticsearch/tests/test_filter_fields.py +++ b/graphene_django/elasticsearch/tests/test_filter_fields.py @@ -5,7 +5,10 @@ from graphene_django.elasticsearch.filter.filterset import FilterSetES from graphene_django.filter.tests.test_fields import ArticleNode from graphene_django.elasticsearch.tests.filters import ArticleDocument -from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) pytestmark = [] @@ -25,12 +28,9 @@ class ArticleFilterBadProcessor(FilterSetES): class Meta(object): """Metaclass data""" + index = ArticleDocument - includes = { - 'headline': { - 
'lookup_expressions': ['bad_processor'] - } - } + includes = {"headline": {"lookup_expressions": ["bad_processor"]}} with raises(ValueError) as error_info: DjangoESFilterConnectionField( @@ -42,33 +42,28 @@ class Meta(object): def test_filter_field_without_filterset_class(): with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode - ) + DjangoESFilterConnectionField(ArticleNode) assert "filterset_class" in str(error_info.value) def test_filter_field_with_fields(): with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode, fields=['headline'] - ) + DjangoESFilterConnectionField(ArticleNode, fields=["headline"]) assert "fields" in str(error_info.value) def test_filter_field_with_order_by(): with raises(ValueError) as error_info: - DjangoESFilterConnectionField( - ArticleNode, order_by=['headline'] - ) + DjangoESFilterConnectionField(ArticleNode, order_by=["headline"]) assert "order_by" in str(error_info.value) def test_filter_filterset_without_index(): with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): """Article Filter for ES""" @@ -84,11 +79,13 @@ class Meta(object): def test_filter_filterset_without_xcludes(): with raises(ValueError) as error_info: + class ArticleFilterBadProcessor(FilterSetES): """Article Filter for ES""" class Meta(object): """Metaclass data""" + index = ArticleDocument DjangoESFilterConnectionField( diff --git a/graphene_django/elasticsearch/tests/test_filter_filters.py b/graphene_django/elasticsearch/tests/test_filter_filters.py index a20ff72a1..dea043e0b 100644 --- a/graphene_django/elasticsearch/tests/test_filter_filters.py +++ b/graphene_django/elasticsearch/tests/test_filter_filters.py @@ -2,9 +2,15 @@ from elasticsearch_dsl.query import Bool, Match, Term from graphene import Schema -from graphene_django.elasticsearch.tests.commons import filter_generation, generate_query +from graphene_django.elasticsearch.tests.commons import ( + filter_generation, + generate_query, +) from graphene_django.elasticsearch.tests.filters import ESFilterQuery -from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) pytestmark = [] @@ -21,92 +27,100 @@ def test_filter_string(): filter_generation( "articlesAsField", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + 'headline: "A text"', + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), ) def test_filter_string_date(): filter_generation( "articlesAsField", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + 'headline: "A text"', + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), ) def test_filter_as_field_order_by(): filter_generation( "articlesAsField", - "headline: \"A text\", sort:{order:desc, field:id}", - {'id': {'order': 'desc'}}, - "sort" + 'headline: "A text", sort:{order:desc, field:id}', + {"id": {"order": "desc"}}, + "sort", ) def test_filter_as_field_order_by_dict(): filter_generation( "articlesInMeta", - "headline: \"A text\", sort:{order:desc, field:id}", - {'es_id': {'order': 'desc'}}, - "sort" + 'headline: "A text", sort:{order:desc, field:id}', + {"es_id": {"order": "desc"}}, + "sort", ) def test_filter_in_meta(): filter_generation( "articlesInMeta", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 
'auto'})]), + 'headline: "A text"', + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), ) def test_filter_in_meta_dict(): filter_generation( "articlesInMetaDict", - "headline: \"A text\"", - Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]), + 'headline: "A text"', + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), ) def test_filter_in_meta_dict_foreign(): filter_generation( "articlesInMetaDict", - "reporterEmail: \"A mail\"", - Bool(must=[Match(reporter__email={'query': 'A mail', 'fuzziness': 'auto'})]), + 'reporterEmail: "A mail"', + Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})]), ) def test_filter_in_multi_field(): filter_generation( "articlesInMultiField", - "contain: \"A text\"", - Bool(must=[Bool(should=[ - Match(headline={'query': 'A text', 'fuzziness': 'auto'}), - Match(lang={'query': 'A text', 'fuzziness': 'auto'}) - ])]), + 'contain: "A text"', + Bool( + must=[ + Bool( + should=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(lang={"query": "A text", "fuzziness": "auto"}), + ] + ) + ] + ), ) def test_filter_generating_all(): filter_generation( "articlesInGenerateAll", - "headline: \"A text\", " - "pubDate: \"0000-00-00\", " - "pubDateTime: \"00:00:00\", " - "lang: \"es\", " + 'headline: "A text", ' + 'pubDate: "0000-00-00", ' + 'pubDateTime: "00:00:00", ' + 'lang: "es", ' "importance: 1, ", - Bool(must=[ - Match(headline={'query': 'A text', 'fuzziness': 'auto'}), - Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}), - Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}), - Match(lang={'query': 'es', 'fuzziness': 'auto'}), - Term(importance=1) - ]), + Bool( + must=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), + Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), + Match(lang={"query": "es", "fuzziness": "auto"}), + Term(importance=1), + ] + ), ) def test_filter_generating_exclude(): - query = generate_query("articlesInExcludes", "headline: \"A text\", ") + query = generate_query("articlesInExcludes", 'headline: "A text", ') schema = Schema(query=ESFilterQuery) result = schema.execute(query) diff --git a/graphene_django/elasticsearch/tests/test_filter_processor.py b/graphene_django/elasticsearch/tests/test_filter_processor.py index 2d677e7a7..c3cbd1c3d 100644 --- a/graphene_django/elasticsearch/tests/test_filter_processor.py +++ b/graphene_django/elasticsearch/tests/test_filter_processor.py @@ -1,8 +1,20 @@ import pytest -from elasticsearch_dsl.query import Bool, Term, Wildcard, MatchPhrase, MatchPhrasePrefix, Range, Terms, Exists +from elasticsearch_dsl.query import ( + Bool, + Term, + Wildcard, + MatchPhrase, + MatchPhrasePrefix, + Range, + Terms, + Exists, +) from graphene_django.elasticsearch.tests.commons import filter_generation -from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) pytestmark = [] @@ -19,40 +31,40 @@ def test_processor_term(): filter_generation( "articlesInMetaDict", - "headlineTerm: \"A text\"", - Bool(must=[Term(headline='A text')]), + 'headlineTerm: "A text"', + Bool(must=[Term(headline="A text")]), ) def test_processor_regex(): filter_generation( "articlesInMetaDict", - "headlineRegex: \"A text\"", - Bool(must=[Wildcard(headline='A text')]), + 'headlineRegex: "A text"', + 
Bool(must=[Wildcard(headline="A text")]), ) def test_processor_phrase(): filter_generation( "articlesInMetaDict", - "headlinePhrase: \"A text\"", - Bool(must=[MatchPhrase(headline={'query': 'A text'})]), + 'headlinePhrase: "A text"', + Bool(must=[MatchPhrase(headline={"query": "A text"})]), ) def test_processor_prefix(): filter_generation( "articlesInMetaDict", - "headlinePrefix: \"A text\"", - Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]), + 'headlinePrefix: "A text"', + Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})]), ) def test_processor_in(): filter_generation( "articlesInMetaDict", - "headlineIn: [\"A text 1\", \"A text 2\"]", - Bool(must=[Terms(headline=['A text 1', 'A text 2'])]), + 'headlineIn: ["A text 1", "A text 2"]', + Bool(must=[Terms(headline=["A text 1", "A text 2"])]), ) @@ -60,21 +72,21 @@ def test_processor_exits(): filter_generation( "articlesInMetaDict", "headlineExits: true", - Bool(must=[Bool(must=[Exists(field='headline')])]), + Bool(must=[Bool(must=[Exists(field="headline")])]), ) def test_processor_lte(): filter_generation( "articlesInMetaDict", - "headlineLte: \"A text\"", - Bool(must=Range(headline={'lte': 'A text'})), + 'headlineLte: "A text"', + Bool(must=Range(headline={"lte": "A text"})), ) def test_processor_gte(): filter_generation( "articlesInMetaDict", - "headlineGte: \"A text\"", - Bool(must=Range(headline={'gte': 'A text'})), + 'headlineGte: "A text"', + Bool(must=Range(headline={"gte": "A text"})), ) From 4b0ca35e2c780e308c3c539e73f95fb4a0e3feb2 Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Mon, 10 Jun 2019 13:11:42 -0400 Subject: [PATCH 15/16] resolving issues with test --- .../elasticsearch/filter/observable.py | 2 +- .../elasticsearch/tests/commons.py | 4 +- .../tests/test_filter_filters.py | 40 ++++++++++--------- .../tests/test_filter_processor.py | 16 ++++---- 4 files changed, 33 insertions(+), 29 deletions(-) diff --git a/graphene_django/elasticsearch/filter/observable.py b/graphene_django/elasticsearch/filter/observable.py index c127fda26..e3c4d32c0 100644 --- a/graphene_django/elasticsearch/filter/observable.py +++ b/graphene_django/elasticsearch/filter/observable.py @@ -3,7 +3,7 @@ class FieldResolverObservable(object): def __init__(self): """A new Observable by filterset""" - super(FieldResolverObservable).__init__() + super(FieldResolverObservable, self).__init__() self._fields = {} def attach(self, field, processor): diff --git a/graphene_django/elasticsearch/tests/commons.py b/graphene_django/elasticsearch/tests/commons.py index ee6ebca18..f2e817f38 100644 --- a/graphene_django/elasticsearch/tests/commons.py +++ b/graphene_django/elasticsearch/tests/commons.py @@ -45,7 +45,7 @@ def generate_query(field, query_str): return query -def filter_generation(field, query_str, expected_arguments, method_to_mock="query"): +def filter_generation(field, query_str, verify_arguments, method_to_mock="query"): a1, a2 = fake_data() query = generate_query(field, query_str) @@ -72,7 +72,7 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer assert not result.errors - mock_query.assert_called_with(expected_arguments) + verify_arguments(mock_query) assert len(result.data[field]["edges"]) == 2 assert result.data[field]["edges"][0]["node"]["headline"] == "a1" diff --git a/graphene_django/elasticsearch/tests/test_filter_filters.py b/graphene_django/elasticsearch/tests/test_filter_filters.py index dea043e0b..00577b3b9 100644 --- a/graphene_django/elasticsearch/tests/test_filter_filters.py 
+++ b/graphene_django/elasticsearch/tests/test_filter_filters.py @@ -28,7 +28,7 @@ def test_filter_string(): filter_generation( "articlesAsField", 'headline: "A text"', - Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), + lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), ) @@ -36,7 +36,7 @@ def test_filter_string_date(): filter_generation( "articlesAsField", 'headline: "A text"', - Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), + lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), ) @@ -44,7 +44,7 @@ def test_filter_as_field_order_by(): filter_generation( "articlesAsField", 'headline: "A text", sort:{order:desc, field:id}', - {"id": {"order": "desc"}}, + lambda mock: mock.assert_called_with({"id": {"order": "desc"}}), "sort", ) @@ -53,7 +53,7 @@ def test_filter_as_field_order_by_dict(): filter_generation( "articlesInMeta", 'headline: "A text", sort:{order:desc, field:id}', - {"es_id": {"order": "desc"}}, + lambda mock: mock.assert_called_with({"es_id": {"order": "desc"}}), "sort", ) @@ -62,7 +62,7 @@ def test_filter_in_meta(): filter_generation( "articlesInMeta", 'headline: "A text"', - Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), + lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), ) @@ -70,7 +70,7 @@ def test_filter_in_meta_dict(): filter_generation( "articlesInMetaDict", 'headline: "A text"', - Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]), + lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), ) @@ -78,7 +78,7 @@ def test_filter_in_meta_dict_foreign(): filter_generation( "articlesInMetaDict", 'reporterEmail: "A mail"', - Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})]), + lambda mock: mock.assert_called_with(Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})])), ) @@ -86,7 +86,7 @@ def test_filter_in_multi_field(): filter_generation( "articlesInMultiField", 'contain: "A text"', - Bool( + lambda mock: mock.assert_called_with(Bool( must=[ Bool( should=[ @@ -95,11 +95,23 @@ def test_filter_in_multi_field(): ] ) ] - ), + )), ) +def compare_must_array(must, other_must): + assert len(must) == len(other_must) + + for target in must: + assert target in other_must + + def test_filter_generating_all(): + spected_query = Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), + Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), + Match(lang={"query": "es", "fuzziness": "auto"}), Term(importance=1), ]) + filter_generation( "articlesInGenerateAll", 'headline: "A text", ' @@ -107,15 +119,7 @@ def test_filter_generating_all(): 'pubDateTime: "00:00:00", ' 'lang: "es", ' "importance: 1, ", - Bool( - must=[ - Match(headline={"query": "A text", "fuzziness": "auto"}), - Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), - Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), - Match(lang={"query": "es", "fuzziness": "auto"}), - Term(importance=1), - ] - ), + lambda mock: compare_must_array(mock.call_args[0][0].must, spected_query.must), ) diff --git a/graphene_django/elasticsearch/tests/test_filter_processor.py b/graphene_django/elasticsearch/tests/test_filter_processor.py index c3cbd1c3d..3cf6989ba 100644 --- 
a/graphene_django/elasticsearch/tests/test_filter_processor.py +++ b/graphene_django/elasticsearch/tests/test_filter_processor.py @@ -32,7 +32,7 @@ def test_processor_term(): filter_generation( "articlesInMetaDict", 'headlineTerm: "A text"', - Bool(must=[Term(headline="A text")]), + lambda mock: mock.assert_called_with(Bool(must=[Term(headline="A text")])), ) @@ -40,7 +40,7 @@ def test_processor_regex(): filter_generation( "articlesInMetaDict", 'headlineRegex: "A text"', - Bool(must=[Wildcard(headline="A text")]), + lambda mock: mock.assert_called_with(Bool(must=[Wildcard(headline="A text")])), ) @@ -48,7 +48,7 @@ def test_processor_phrase(): filter_generation( "articlesInMetaDict", 'headlinePhrase: "A text"', - Bool(must=[MatchPhrase(headline={"query": "A text"})]), + lambda mock: mock.assert_called_with(Bool(must=[MatchPhrase(headline={"query": "A text"})])), ) @@ -56,7 +56,7 @@ def test_processor_prefix(): filter_generation( "articlesInMetaDict", 'headlinePrefix: "A text"', - Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})]), + lambda mock: mock.assert_called_with(Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})])), ) @@ -64,7 +64,7 @@ def test_processor_in(): filter_generation( "articlesInMetaDict", 'headlineIn: ["A text 1", "A text 2"]', - Bool(must=[Terms(headline=["A text 1", "A text 2"])]), + lambda mock: mock.assert_called_with(Bool(must=[Terms(headline=["A text 1", "A text 2"])])), ) @@ -72,7 +72,7 @@ def test_processor_exits(): filter_generation( "articlesInMetaDict", "headlineExits: true", - Bool(must=[Bool(must=[Exists(field="headline")])]), + lambda mock: mock.assert_called_with(Bool(must=[Bool(must=[Exists(field="headline")])])), ) @@ -80,7 +80,7 @@ def test_processor_lte(): filter_generation( "articlesInMetaDict", 'headlineLte: "A text"', - Bool(must=Range(headline={"lte": "A text"})), + lambda mock: mock.assert_called_with(Bool(must=Range(headline={"lte": "A text"}))), ) @@ -88,5 +88,5 @@ def test_processor_gte(): filter_generation( "articlesInMetaDict", 'headlineGte: "A text"', - Bool(must=Range(headline={"gte": "A text"})), + lambda mock: mock.assert_called_with(Bool(must=Range(headline={"gte": "A text"}))), ) From 53e13559c48b3e5e393d17f93ac125406df1519a Mon Sep 17 00:00:00 2001 From: Alejandro Nunez Capote Date: Mon, 10 Jun 2019 13:19:01 -0400 Subject: [PATCH 16/16] run `make format` to `elasticsearch.tests.*` --- .../tests/test_filter_filters.py | 55 ++++++++++++------- .../tests/test_filter_processor.py | 24 ++++++-- 2 files changed, 54 insertions(+), 25 deletions(-) diff --git a/graphene_django/elasticsearch/tests/test_filter_filters.py b/graphene_django/elasticsearch/tests/test_filter_filters.py index 00577b3b9..4c10a3248 100644 --- a/graphene_django/elasticsearch/tests/test_filter_filters.py +++ b/graphene_django/elasticsearch/tests/test_filter_filters.py @@ -28,7 +28,9 @@ def test_filter_string(): filter_generation( "articlesAsField", 'headline: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), ) @@ -36,7 +38,9 @@ def test_filter_string_date(): filter_generation( "articlesAsField", 'headline: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), ) @@ -62,7 +66,9 @@ def 
test_filter_in_meta(): filter_generation( "articlesInMeta", 'headline: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), ) @@ -70,7 +76,9 @@ def test_filter_in_meta_dict(): filter_generation( "articlesInMetaDict", 'headline: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})])), + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), ) @@ -78,7 +86,9 @@ def test_filter_in_meta_dict_foreign(): filter_generation( "articlesInMetaDict", 'reporterEmail: "A mail"', - lambda mock: mock.assert_called_with(Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})])), + lambda mock: mock.assert_called_with( + Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})]) + ), ) @@ -86,16 +96,18 @@ def test_filter_in_multi_field(): filter_generation( "articlesInMultiField", 'contain: "A text"', - lambda mock: mock.assert_called_with(Bool( - must=[ - Bool( - should=[ - Match(headline={"query": "A text", "fuzziness": "auto"}), - Match(lang={"query": "A text", "fuzziness": "auto"}), - ] - ) - ] - )), + lambda mock: mock.assert_called_with( + Bool( + must=[ + Bool( + should=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(lang={"query": "A text", "fuzziness": "auto"}), + ] + ) + ] + ) + ), ) @@ -107,10 +119,15 @@ def compare_must_array(must, other_must): def test_filter_generating_all(): - spected_query = Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"}), - Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), - Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), - Match(lang={"query": "es", "fuzziness": "auto"}), Term(importance=1), ]) + spected_query = Bool( + must=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), + Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), + Match(lang={"query": "es", "fuzziness": "auto"}), + Term(importance=1), + ] + ) filter_generation( "articlesInGenerateAll", diff --git a/graphene_django/elasticsearch/tests/test_filter_processor.py b/graphene_django/elasticsearch/tests/test_filter_processor.py index 3cf6989ba..04d808473 100644 --- a/graphene_django/elasticsearch/tests/test_filter_processor.py +++ b/graphene_django/elasticsearch/tests/test_filter_processor.py @@ -48,7 +48,9 @@ def test_processor_phrase(): filter_generation( "articlesInMetaDict", 'headlinePhrase: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[MatchPhrase(headline={"query": "A text"})])), + lambda mock: mock.assert_called_with( + Bool(must=[MatchPhrase(headline={"query": "A text"})]) + ), ) @@ -56,7 +58,9 @@ def test_processor_prefix(): filter_generation( "articlesInMetaDict", 'headlinePrefix: "A text"', - lambda mock: mock.assert_called_with(Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})])), + lambda mock: mock.assert_called_with( + Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})]) + ), ) @@ -64,7 +68,9 @@ def test_processor_in(): filter_generation( "articlesInMetaDict", 'headlineIn: ["A text 1", "A text 2"]', - lambda mock: mock.assert_called_with(Bool(must=[Terms(headline=["A text 1", "A text 2"])])), + lambda mock: mock.assert_called_with( + Bool(must=[Terms(headline=["A text 1", 
"A text 2"])]) + ), ) @@ -72,7 +78,9 @@ def test_processor_exits(): filter_generation( "articlesInMetaDict", "headlineExits: true", - lambda mock: mock.assert_called_with(Bool(must=[Bool(must=[Exists(field="headline")])])), + lambda mock: mock.assert_called_with( + Bool(must=[Bool(must=[Exists(field="headline")])]) + ), ) @@ -80,7 +88,9 @@ def test_processor_lte(): filter_generation( "articlesInMetaDict", 'headlineLte: "A text"', - lambda mock: mock.assert_called_with(Bool(must=Range(headline={"lte": "A text"}))), + lambda mock: mock.assert_called_with( + Bool(must=Range(headline={"lte": "A text"})) + ), ) @@ -88,5 +98,7 @@ def test_processor_gte(): filter_generation( "articlesInMetaDict", 'headlineGte: "A text"', - lambda mock: mock.assert_called_with(Bool(must=Range(headline={"gte": "A text"}))), + lambda mock: mock.assert_called_with( + Bool(must=Range(headline={"gte": "A text"})) + ), )