diff --git a/graphene_django/elasticsearch/__init__.py b/graphene_django/elasticsearch/__init__.py
new file mode 100644
index 000000000..31497d3cd
--- /dev/null
+++ b/graphene_django/elasticsearch/__init__.py
@@ -0,0 +1,9 @@
+import warnings
+from ..utils import DJANGO_ELASTICSEARCH_DSL_INSTALLED
+
+if not DJANGO_ELASTICSEARCH_DSL_INSTALLED:
+    warnings.warn(
+        "Use of elasticsearch integration requires the django_elasticsearch_dsl package "
+        "be installed. You can do so using `pip install django_elasticsearch_dsl`",
+        ImportWarning,
+    )
diff --git a/graphene_django/elasticsearch/filter/__init__.py b/graphene_django/elasticsearch/filter/__init__.py
new file mode 100644
index 000000000..1f318b608
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/__init__.py
@@ -0,0 +1,9 @@
+import warnings
+from ...utils import DJANGO_FILTER_INSTALLED
+
+if not DJANGO_FILTER_INSTALLED:
+    warnings.warn(
+        "Use of django elasticsearch filtering requires the django-filter package "
+        "be installed. You can do so using `pip install django-filter`",
+        ImportWarning,
+    )
diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py
new file mode 100644
index 000000000..0e1369d13
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/fields.py
@@ -0,0 +1,40 @@
+from graphene_django.elasticsearch.filter.proxy import ManagerProxy
+from graphene_django.filter import DjangoFilterConnectionField
+
+
+class DjangoESFilterConnectionField(DjangoFilterConnectionField):
+    """A Field that replaces the DjangoFilterConnectionField manager with a ManagerProxy"""
+
+    def __init__(self, object_type, *args, **kwargs):
+        """Validating fields allowed for this connection
+        :param object_type: DjangoObjectType
+        """
+        # `fields` and `order_by` are not supported yet; fail loudly instead of
+        # silently ignoring them.
+        fields = kwargs.get("fields", None)
+        if fields is not None:
+            raise ValueError(
+                "DjangoESFilterConnectionField does not permit argument fields yet."
+            )
+
+        order_by = kwargs.get("order_by", None)
+        if order_by is not None:
+            raise ValueError(
+                "DjangoESFilterConnectionField does not permit argument order_by yet."
+            )
+
+        filterset_class = kwargs.get("filterset_class", None)
+        if filterset_class is None:
+            raise ValueError(
+                "You should provide a FilterSetES as filterset_class argument."
+            )
+
+        super(DjangoESFilterConnectionField, self).__init__(
+            object_type, *args, **kwargs
+        )
+
+        # Bridge the ES index search callable so it can stand in for a Django manager.
+        self.manager = ManagerProxy(
+            search_manager=self.filterset_class._meta.index.search
+        )
+
+    def get_manager(self):
+        """Returning a ManagerProxy to replace the direct use over the Model manager"""
+        return self.manager
diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py
new file mode 100644
index 000000000..745ed9318
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/filters.py
@@ -0,0 +1,76 @@
+"""Filters to ElasticSearch"""
+from graphene import String, Boolean, Int
+from graphene_django.elasticsearch.filter.processors import ProcessorFactory
+
+
+class FilterES(object):
+    """Fields specific to ElasticSearch."""
+
+    default_processor = "term"
+    default_argument = String()
+
+    def __init__(
+        self,
+        field_name,
+        field_name_es=None,
+        lookup_expressions=None,
+        default_processor=None,
+        argument=None,
+    ):
+        """
+        :param field_name: Name of the field. This is the name that will be exported.
+        :param field_name_es: Path to the index attr that will be used as filter.
+        :param lookup_expressions: List of processors.
+        :param default_processor: Processor used by default when lookup_expressions is empty.
+        :param argument: Graphene type base for this field.
+        """
+        self.field_name = field_name
+
+        # A single ES path is normalized to a one-element list so processors can
+        # always iterate over field_name_es.
+        if isinstance(field_name_es, list):
+            self.field_name_es = field_name_es
+        else:
+            self.field_name_es = [field_name_es or field_name]
+
+        self.default_filter_processor = default_processor or self.default_processor
+
+        self.lookup_expressions = lookup_expressions
+
+        # Build the processor chain: each lookup expression wraps the previous one.
+        self.processor = None
+        if self.lookup_expressions:
+            for variant in self.lookup_expressions:
+                self.processor = ProcessorFactory.make_processor(
+                    variant, self, self.processor
+                )
+
+        else:
+            self.processor = ProcessorFactory.make_processor(
+                self.default_processor, self, self.processor
+            )
+
+        self.argument = argument or self.default_argument
+        self.fields = self.processor.generate_field()
+
+    def attach_processor(self, observer):
+        """
+        Attach this filter's processor chain to the observable so each graphene
+        argument can later be resolved into an ES query.
+        :param observer: observer to attach the processors.
+        """
+        return self.processor.to_attach(observer)
+
+
+class StringFilterES(FilterES):
+    """String Fields specific to ElasticSearch."""
+
+    default_processor = "contains"
+
+
+class BoolFilterES(FilterES):
+    """Boolean filter to ES"""
+
+    default_argument = Boolean()
+
+
+class NumberFilterES(FilterES):
+    """Filter to a numeric value to ES"""
+
+    default_argument = Int()
diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py
new file mode 100644
index 000000000..70d17e187
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/filterset.py
@@ -0,0 +1,426 @@
+"""Fields"""
+import copy
+from collections import OrderedDict
+from elasticsearch_dsl import Q
+from graphene import Enum, InputObjectType, Field, Int, Float
+from django_elasticsearch_dsl import (
+    StringField,
+    TextField,
+    BooleanField,
+    IntegerField,
+    FloatField,
+    LongField,
+    ShortField,
+    DoubleField,
+    DateField,
+    KeywordField,
+    ObjectField,
+)
+from django.utils import six
+
+from django_filters.utils import try_dbfield
+from django_filters.filterset import BaseFilterSet
+
+from graphene_django.elasticsearch.filter.observable import FieldResolverObservable
+from .filters import StringFilterES, FilterES, BoolFilterES, NumberFilterES
+
+# Basic conversion from ES fields to FilterES fields
+FILTER_FOR_ESFIELD_DEFAULTS = {
+    StringField: {"filter_class": StringFilterES},
+    TextField: {"filter_class": StringFilterES},
+    BooleanField: {"filter_class": BoolFilterES},
+    IntegerField: {"filter_class": NumberFilterES},
+    FloatField: {"filter_class": NumberFilterES, "argument": Float()},
+    LongField: {"filter_class": NumberFilterES, "argument": Int()},
+    ShortField: {"filter_class": NumberFilterES, "argument": Int()},
+    # Doubles are floating-point; an Int() argument would truncate values.
+    DoubleField: {"filter_class": NumberFilterES, "argument": Float()},
+    DateField: {"filter_class": StringFilterES},
+    KeywordField: {"filter_class": StringFilterES},
+}
+
+
+class OrderEnum(Enum):
+    """Order enum to desc-asc"""
+
+    asc = "asc"
+    desc = "desc"
+
+    @property
+    def description(self):
+        """Description to order enum"""
+        if self == OrderEnum.asc:
+            return "Ascendant order"
+        return "Descendant order"
+
+
+class FilterSetESOptions(object):
+    """Basic FilterSetES options to Metadata"""
+
+    def __init__(self, options=None):
+        """
+        The field option is combined with the index to automatically generate
+        filters.
+
+        The includes option accepts two kinds of syntax:
+        - a list of field names
+        - a dictionary of field names mapped to a list of expressions
+
+        Example:
+            class UserFilter(FilterSetES):
+                class Meta:
+                    index = UserIndex
+                    includes = ['username', 'last_login']
+
+        or
+
+            class UserFilter(FilterSetES):
+                class Meta:
+                    index = UserIndex
+                    includes = {
+                        'username': {
+                            'field_name': 'graphene_field',
+                            'field_name_es': 'elasticsearch_field',
+                            'lookup_expressions': ['term', 'contains']
+                        }
+                    }
+
+        The list syntax will create a filter with a behavior by default,
+        for each field included in includes. The dictionary syntax will
+        create a filter for each expression declared for its corresponding
+        field.
+ + Note that the generated filters will not overwrite filters + declared on the FilterSet. + + Example: + class UserFilter(FilterSetES): + username = StringFieldES(field_name='username', lookup_expressions=['contains']) + class Meta: + index = UserIndex + includes = { + 'username': { + 'lookup_expressions': ['term', 'contains'] + } + + A query with username as a parameter, will match those words with the + username value as substring + + The excludes option accept a list of field names. + + Example: + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = ['username', 'last_login'] + + It is necessary to provide includes or excludes. You cant provide a excludes empty to generate all fields + + You can also pass sort_by to Meta to allow field be ordered + + Example: + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = [] + order_by = ['username', 'last_login'] + + or + + class UserFilter(FilterSetES): + class Meta: + index = UserIndex + excludes = [] + order_by = { + 'username': user.name + 'last_login': last_login + } + + """ + self.index = getattr(options, "index", None) + self.includes = getattr(options, "includes", None) + self.excludes = getattr(options, "excludes", None) + self.order_by = getattr(options, "order_by", None) + + if self.index is None: + raise ValueError("You need provide a Index in Meta.") + if self.excludes is None and self.includes is None: + raise ValueError("You need provide includes or excludes field in Meta.") + + self.model = self.index._doc_type.model if self.index else None + + +class FilterSetESMetaclass(type): + """Captures the meta class of the filterSet class.""" + + def __new__(mcs, name, bases, attrs): + """Get filters declared explicitly in the class""" + # get declared as field + declared_filters = mcs.get_declared_filters(bases, attrs) + attrs["declared_filters"] = declared_filters + + new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs) + + if 
issubclass(new_class, BaseFilterSet): + new_class._meta = FilterSetESOptions(getattr(new_class, "Meta", None)) + + # get declared as meta + meta_filters = mcs.get_meta_filters(new_class._meta) + + declared_filters.update(meta_filters) + + # recollecting registered graphene fields and attaching to observable + base_filters = OrderedDict() + observable = FieldResolverObservable() + for filter_name, filter_field in six.iteritems(declared_filters): + base_filters.update(filter_field.fields) + filter_field.attach_processor(observable) + + # adding sort field + sort_fields = {} + if new_class._meta.order_by is not None: + sort_fields = mcs.generate_sort_field(new_class._meta.order_by) + sort_type = mcs.create_sort_enum(name, sort_fields) + base_filters["sort"] = sort_type() + + new_class.sort_fields = sort_fields + new_class.base_filters = base_filters + new_class.observable = observable + + return new_class + + @classmethod + def get_declared_filters(mcs, bases, attrs): + """ + Get the filters declared in the class. + :param bases: base classes of the current class + :param attrs: attributes captured to be included as metadata + :return: An OrderedDict of filter fields declared in the class as static fields. + """ + + # List of filters declared in the class as static fields. + filters = [ + (obj.field_name, attrs.pop(filter_name)) + for filter_name, obj in list(attrs.items()) + if isinstance(obj, FilterES) + ] + + # Merge declared filters from base classes + for base in reversed(bases): + if hasattr(base, "declared_filters"): + filters = [ + (name, field) + for name, field in base.declared_filters.items() + if name not in attrs + ] + filters + + return OrderedDict(filters) + + @classmethod + def get_meta_filters(mcs, meta): + """ + Get filters from Meta configuration + :param meta: A FilterSetESOptions instance with meta options + :return: Field extracted from index and from the FilterSetES. 
+ """ + index_fields = mcs.get_index_fields(meta) + + meta_filters = OrderedDict() + for name, index_field, data in index_fields: + if isinstance(index_field, ObjectField): + filters_class = mcs.get_filter_object(name, index_field, data) + meta_filters.update(filters_class) + else: + filter_class = mcs.get_filter_exp(name, index_field, data) + meta_filters.update({name: filter_class}) + + return meta_filters + + @classmethod + def get_index_fields(mcs, meta): + """ + Get fields from index that appears in the meta class configuration of the filter_set + :param meta: A FilterSetESOptions instance with meta options + :return: Tuple of (name, field, lookup_expr) describing name of the field, ES class of the field and lookup_expr + """ + index_fields = meta.index._doc_type._fields() + meta_includes = meta.includes + meta_excludes = meta.excludes + + if isinstance(meta_includes, dict): + # The lookup_expr are defined in Meta + filter_fields = [ + (name, index_fields[name], data) for name, data in meta_includes.items() + ] + elif meta_includes is not None: + # The lookup_expr are not defined + filter_fields = [(name, index_fields[name], None) for name in meta_includes] + else: + # No `includes` are declared in meta, so all not `excludes` fields from index will be converted to filters + filter_fields = [ + (name, field, None) + for name, field in index_fields.items() + if name not in meta_excludes + ] + return filter_fields + + @classmethod + def get_filter_object(mcs, name, field, data): + """ + Get filters from ObjectField + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + """ + index_fields = OrderedDict() + + properties = field._doc_class._doc_type.mapping.properties._params.get( + "properties", {} + ) + + for inner_name, inner_field in properties.items(): + + if data and inner_name not in data: + # This inner field is not filterable + continue + + inner_data = data[inner_name] if data else None + + filter_exp = 
mcs.get_filter_exp( + inner_name, inner_field, inner_data, root=name + ) + index_fields.update({inner_name: filter_exp}) + + return index_fields + + @classmethod + def get_filter_exp(mcs, name, field, data=None, root=None): + """ + Initialize filter + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + :param root: root name + """ + field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} + filter_class = field_data.get("filter_class") + + kwargs = copy.deepcopy(data) if data is not None else {} + + kwargs["field_name"], kwargs["field_name_es"] = mcs.get_name(name, root, data) + + return filter_class(**kwargs) + + @staticmethod + def get_name(name, root, data): + """ + Get names of the field and the path to resolve it + :param name: name of the field + :param data: lookup_expr + :param root: root name + """ + field_name = data.get("field_name", None) if data else None + field_name_es = data.get("field_name_es", None) if data else None + if not field_name: + field_name = "{root}_{name}".format(root=root, name=name) if root else name + if not field_name_es: + field_name_es = ( + "{root}.{name}".format(root=root, name=name) if root else name + ) + return field_name, field_name_es + + @staticmethod + def create_sort_enum(name, sort_fields): + """ + Create enum to sort by fields. 
+        As graphene is typed, it is necessary to generate an Enum by Field
+        to have inside, the document fields allowed to be ordered
+        :param name: name of the field
+        :param sort_fields: Field allowed to be ordered
+        """
+
+        sort_enum_name = "{}SortFields".format(name)
+        sort_descriptions = {
+            field: "Sort by {field}".format(field=field) for field in sort_fields.keys()
+        }
+        sort_fields = [(field, field) for field in sort_fields.keys()]
+
+        class EnumWithDescriptionsType(object):
+            """Set description to enum fields"""
+
+            @property
+            def description(self):
+                """Description to EnumSort"""
+                return sort_descriptions[self.name]
+
+        enum = Enum(sort_enum_name, sort_fields, type=EnumWithDescriptionsType)
+
+        class SortType(InputObjectType):
+            """Sort Type"""
+
+            order = Field(OrderEnum)
+            field = Field(enum, required=True)
+
+        sort_name = "{}Sort".format(name)
+        sort_type = type(sort_name, (SortType,), {})
+        return sort_type
+
+    @staticmethod
+    def generate_sort_field(order_by):
+        """
+        To normalize the sort field data
+        :param order_by: Sort data
+        """
+        if isinstance(order_by, dict):
+            sort_fields = order_by.copy()
+        else:
+            sort_fields = {field: field for field in order_by}
+        return sort_fields
+
+
+class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)):
+    """FilterSet specific for ElasticSearch."""
+
+    def __init__(self, data, queryset, request):
+        """
+        Receiving params necessary to resolve the data
+        :param data: argument passed to query
+        :param queryset: a ES queryset
+        :param request: the context of request
+        """
+        self.data = data
+        self.es_query = queryset
+        self.request = request
+
+    @property
+    def qs(self):
+        """Returning ES queryset as QS"""
+        query_base = self.generate_es_query()
+        self.es_query.apply_query("query", query_base)
+        # Only the ids are needed: the final objects are fetched from the ORM.
+        self.es_query.apply_query("source", ["id"])
+
+        if "sort" in self.data:
+            sort_data = self.data["sort"].copy()
+            field_name = self.sort_fields[sort_data.pop("field")]
+            self.es_query.apply_query("sort", {field_name: sort_data})
+
+        return self.es_query
+
+    def generate_es_query(self):
+        """
+        Generate a query for each filter.
+        :return: Generates a super query with bool as root, and combines all sub-queries from each argument.
+        """
+        query_base = Q("bool")
+        # if the query has data
+        if len(self.data):
+            # for each field passed to the query
+            for name, value in six.iteritems(self.data):
+                # ignore sort field
+                if name == "sort":
+                    continue
+
+                # dispatch observable resolve; resolve() returns None for a
+                # field with no attached processor, which must not be combined
+                resolve = self.observable.resolve(name, value)
+                if resolve is not None:
+                    query_base += resolve
+
+        return query_base
diff --git a/graphene_django/elasticsearch/filter/observable.py b/graphene_django/elasticsearch/filter/observable.py
new file mode 100644
index 000000000..e3c4d32c0
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/observable.py
@@ -0,0 +1,17 @@
+class FieldResolverObservable(object):
+    """Observable to attach processor by field and resolve it with the field value"""
+
+    def __init__(self):
+        """A new Observable by filterset"""
+        super(FieldResolverObservable, self).__init__()
+        self._fields = {}
+
+    def attach(self, field, processor):
+        """Add processor to fields"""
+        self._fields[field] = processor
+
+    def resolve(self, field, value):
+        """Execute processor of the specific field with the value"""
+        if field in self._fields:
+            processor = self._fields[field]
+            return processor.build_query(value)
diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py
new file mode 100644
index 000000000..8f8bd3d13
--- /dev/null
+++ b/graphene_django/elasticsearch/filter/processors.py
@@ -0,0 +1,265 @@
+from collections import OrderedDict
+
+from elasticsearch_dsl import Q
+from graphene import List, Boolean
+
+
+class Processor(object):
+    suffix_expr = "term"
+
+    def __init__(self, filter_es, parent_processor=None):
+        """
+        Abstract processor to generate graphene field and ES query to lookups
+        :param filter_es: A FilterES target
+        :param parent_processor: Next Processor to the generate field chain
+        """
+        self.filter_es = filter_es
+        self.parent_processor = parent_processor
+        self.variant_name = self._get_variant_name()
+
+    def generate_field(self):
+        """Field Decorator"""
+        self_field = self._build_field()
+
+        if self.parent_processor is not None:
+            parent_fields = self.parent_processor.generate_field()
+            parent_fields.update(self_field)
+            return parent_fields
+
+        else:
+            return self_field
+
+    def get_type(self):
+        """Define the argument for graphene field"""
+        return self.filter_es.argument
+
+    def to_attach(self, observer):
+        """
+        Add this processor to FieldResolverObservable
+        :param observer: observer to attach the processors.
+        """
+        observer.attach(self.variant_name, self)
+
+        if self.parent_processor is not None:
+            self.parent_processor.to_attach(observer)
+
+    def _build_field(self):
+        """
+        Specific detail about field creation to be overridden if necessary.
+        :return: A field
+        """
+        variant_name = self.variant_name
+
+        return OrderedDict({variant_name: self.get_type()})
+
+    def _get_variant_name(self):
+        """
+        Make a variant based on filter name and processor suffix
+        :return: A variant name
+        """
+        # The default processor keeps the bare field name; other lookups get a
+        # "<field>_<lookup>" variant.
+        if self.suffix_expr == self.filter_es.default_filter_processor:
+            variant_name = self.filter_es.field_name
+
+        else:
+            variant_name = "%s_%s" % (self.filter_es.field_name, self.suffix_expr)
+
+        return variant_name
+
+    def build_query(self, value):
+        """
+        Make a query based on specific processor query
+        :param value: Value passed to this processor
+        :return: A elasticsearch Query
+        """
+        es_field_count = len(self.filter_es.field_name_es)
+
+        # Several ES paths for one filter are OR-ed together ("should").
+        if es_field_count > 1:
+            queries = [
+                self._get_query(name, value) for name in self.filter_es.field_name_es
+            ]
+            return Q("bool", must={"bool": {"should": queries}})
+
+        return Q("bool", must=self._get_query(self.filter_es.field_name_es[0], value))
+
+    @staticmethod
+    def _get_query(name, value):
+        """
+        Specific detail about query creation to be overridden if necessary.
+ :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("term", **{name: value}) + + +class TermProcessor(Processor): + """Have a same behavior of parent this is only with semantic proposal""" + + pass + + +class ContainsProcessor(Processor): + """fuzzy search""" + + suffix_expr = "contains" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("match", **{name: {"query": value, "fuzziness": "auto"}}) + + +class RegexProcessor(Processor): + """Search based on regular expressions""" + + suffix_expr = "regex" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("wildcard", **{name: value}) + + +class PhraseProcessor(Processor): + """Search by the union of many terms""" + + suffix_expr = "phrase" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("match_phrase", **{name: {"query": value}}) + + +class PrefixProcessor(Processor): + """Search by the prefix of the terms""" + + suffix_expr = "prefix" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("match_phrase_prefix", **{name: {"query": value}}) + + +class InProcessor(Processor): + """Search by many value for a field""" + + suffix_expr = "in" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: 
elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("terms", **{name: value}) + + def get_type(self): + """Change base argument by a list of base argument""" + return List(self.filter_es.argument.Argument().type) + + +class ExitsProcessor(Processor): + """Search by if the field is in the document""" + + suffix_expr = "exits" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q( + "bool", **{"must" if value else "must_not": {"exists": {"field": name}}} + ) + + def get_type(self): + return Boolean() + + +class LteProcessor(Processor): + """Search by range less than""" + + suffix_expr = "lte" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("range", **{name: {"lte": value}}) + + +class GteProcessor(Processor): + """Search by range greater than""" + + suffix_expr = "gte" + + @staticmethod + def _get_query(name, value): + """ + Overwrite query creation + :param name: elasticsearch document field name + :param value: Value passed to this processor + :return: A elasticsearch Query + """ + return Q("range", **{name: {"gte": value}}) + + +class ProcessorFactory(object): + processors = { + "contains": ContainsProcessor, + "term": TermProcessor, + "regex": RegexProcessor, + "phrase": PhraseProcessor, + "prefix": PrefixProcessor, + "in": InProcessor, + "exits": ExitsProcessor, + "lte": LteProcessor, + "gte": GteProcessor, + } + + @classmethod + def make_processor(cls, variant, filter_es, parent_processor): + """ + Create a new processor based on the name + :param variant: Processor name + :param filter_es: Target filter + :param parent_processor: 
Parent in the chain + :return: Returns a Processor instance + """ + if variant in cls.processors: + processor_class = cls.processors[variant] + return processor_class(filter_es, parent_processor) + + else: + raise ValueError("We do not have processor: %s." % variant) diff --git a/graphene_django/elasticsearch/filter/proxy.py b/graphene_django/elasticsearch/filter/proxy.py new file mode 100644 index 000000000..bada412f0 --- /dev/null +++ b/graphene_django/elasticsearch/filter/proxy.py @@ -0,0 +1,32 @@ +class QuerysetProxy(object): + """Bridge to Queryset through ES query""" + + def __init__(self, search): + """Taking as search, the ES search resolved by DjangoESFilterConnectionField""" + self.search = search + + def apply_query(self, method, *args, **kwargs): + """Helper method to apply mutation to ES Query""" + if hasattr(self.search, method): + self.search = getattr(self.search, method)(*args, **kwargs) + + def __len__(self): + """Bridget method to response the ES count as QS len""" + return self.search.count() + + def __getitem__(self, k): + """Applying slice to ES and generating a QS from that""" + _slice = self.search.__getitem__(k) + return _slice.to_queryset() + + +class ManagerProxy(object): + """Bridge to Queryset through ES query""" + + def __init__(self, search_manager): + """Taking as search, the ES search resolved by DjangoESFilterConnectionField""" + self.search_manager = search_manager + + def get_queryset(self): + """Returning self as Queryset to be the bridge""" + return QuerysetProxy(search=self.search_manager()) diff --git a/graphene_django/elasticsearch/tests/__init__.py b/graphene_django/elasticsearch/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/graphene_django/elasticsearch/tests/commons.py b/graphene_django/elasticsearch/tests/commons.py new file mode 100644 index 000000000..f2e817f38 --- /dev/null +++ b/graphene_django/elasticsearch/tests/commons.py @@ -0,0 +1,79 @@ +from datetime import datetime + +from mock 
import mock + +from graphene import Schema + +from graphene_django.tests.models import Article, Reporter +from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument + + +def fake_data(): + r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com") + a1 = Article.objects.create( + headline="a1", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + a2 = Article.objects.create( + headline="a2", + pub_date=datetime.now(), + pub_date_time=datetime.now(), + reporter=r1, + editor=r1, + ) + return a1, a2 + + +def generate_query(field, query_str): + query = """ + query { + %s(%s) { + edges { + node { + headline + } + } + } + } + """ % ( + field, + query_str, + ) + return query + + +def filter_generation(field, query_str, verify_arguments, method_to_mock="query"): + a1, a2 = fake_data() + + query = generate_query(field, query_str) + + mock_count = mock.Mock(return_value=3) + mock_slice = mock.Mock( + return_value=mock.Mock( + to_queryset=mock.Mock( + return_value=Article.objects.filter(pk__in=[a1.id, a2.id]) + ) + ) + ) + mock_query = mock.Mock(return_value=ArticleDocument.search()) + + with mock.patch( + "django_elasticsearch_dsl.search.Search.count", mock_count + ), mock.patch( + "django_elasticsearch_dsl.search.Search.__getitem__", mock_slice + ), mock.patch( + "elasticsearch_dsl.Search.%s" % method_to_mock, mock_query + ): + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert not result.errors + + verify_arguments(mock_query) + + assert len(result.data[field]["edges"]) == 2 + assert result.data[field]["edges"][0]["node"]["headline"] == "a1" + assert result.data[field]["edges"][1]["node"]["headline"] == "a2" diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py new file mode 100644 index 000000000..bf1c4a359 --- /dev/null +++ b/graphene_django/elasticsearch/tests/filters.py @@ -0,0 +1,140 @@ +from 
graphene import ObjectType +from django_elasticsearch_dsl import DocType, Index, fields + +from graphene_django.tests.models import Article, Reporter +from graphene_django.filter.tests.test_fields import ArticleNode +from graphene_django.elasticsearch.filter import filters +from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField +from graphene_django.elasticsearch.filter.filterset import FilterSetES + +ads_index = Index("articles") + + +@ads_index.doc_type +class ArticleDocument(DocType): + """Article document describing Index""" + + class Meta(object): + """Metaclass config""" + + model = Article + fields = ["id", "headline", "pub_date", "pub_date_time", "lang", "importance"] + related_models = (Reporter,) + + reporter = fields.ObjectField( + properties={ + "id": fields.IntegerField(), + "first_name": fields.KeywordField(), + "email": fields.KeywordField(), + } + ) + + +class ArticleFilterESAsField(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + includes = [] + order_by = ["id"] + + headline = filters.StringFilterES( + field_name="headline", lookup_expressions=["term", "contains"] + ) + + +class ArticleFilterESInMeta(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + includes = ["id", "headline"] + order_by = {"id": "es_id"} + + +class ArticleFilterESInMetaDict(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + includes = { + "headline": { + "lookup_expressions": [ + "term", + "contains", + "regex", + "phrase", + "prefix", + "in", + "exits", + "lte", + "gte", + ] + }, + "reporter": {}, + } + + +class ArticleFilterMultiField(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + includes = [] + + headline = filters.StringFilterES( + field_name="contain", + 
field_name_es=["headline", "lang"], + lookup_expressions=["contains"], + ) + + +class ArticleFilterGenerateAll(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + excludes = [] + + +class ArticleFilterExcludes(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + excludes = ["headline"] + + +class ESFilterQuery(ObjectType): + """A query for ES fields""" + + articles_as_field = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESAsField + ) + articles_in_meta = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESInMeta + ) + articles_in_meta_dict = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterESInMetaDict + ) + articles_in_multi_field = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterMultiField + ) + articles_in_generate_all = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterGenerateAll + ) + articles_in_excludes = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterExcludes + ) diff --git a/graphene_django/elasticsearch/tests/test_filter_fields.py b/graphene_django/elasticsearch/tests/test_filter_fields.py new file mode 100644 index 000000000..7dbd73014 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_fields.py @@ -0,0 +1,95 @@ +import pytest +from py.test import raises + +from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField +from graphene_django.elasticsearch.filter.filterset import FilterSetES +from graphene_django.filter.tests.test_fields import ArticleNode +from graphene_django.elasticsearch.tests.filters import ArticleDocument +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) + +pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not 
DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_filter_bad_processor(): + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + includes = {"headline": {"lookup_expressions": ["bad_processor"]}} + + with raises(ValueError) as error_info: + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "bad_processor" in str(error_info.value) + + +def test_filter_field_without_filterset_class(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField(ArticleNode) + + assert "filterset_class" in str(error_info.value) + + +def test_filter_field_with_fields(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField(ArticleNode, fields=["headline"]) + + assert "fields" in str(error_info.value) + + +def test_filter_field_with_order_by(): + with raises(ValueError) as error_info: + DjangoESFilterConnectionField(ArticleNode, order_by=["headline"]) + + assert "order_by" in str(error_info.value) + + +def test_filter_filterset_without_index(): + with raises(ValueError) as error_info: + + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "Index in Meta" in str(error_info.value) + + +def test_filter_filterset_without_xcludes(): + with raises(ValueError) as error_info: + + class ArticleFilterBadProcessor(FilterSetES): + """Article Filter for ES""" + + class Meta(object): + """Metaclass data""" + + index = ArticleDocument + + DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterBadProcessor + ) + + assert "includes or excludes field in Meta" in 
str(error_info.value) diff --git a/graphene_django/elasticsearch/tests/test_filter_filters.py b/graphene_django/elasticsearch/tests/test_filter_filters.py new file mode 100644 index 000000000..4c10a3248 --- /dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_filters.py @@ -0,0 +1,149 @@ +import pytest +from elasticsearch_dsl.query import Bool, Match, Term +from graphene import Schema + +from graphene_django.elasticsearch.tests.commons import ( + filter_generation, + generate_query, +) +from graphene_django.elasticsearch.tests.filters import ESFilterQuery +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) + +pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_filter_string(): + filter_generation( + "articlesAsField", + 'headline: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), + ) + + +def test_filter_string_date(): + filter_generation( + "articlesAsField", + 'headline: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), + ) + + +def test_filter_as_field_order_by(): + filter_generation( + "articlesAsField", + 'headline: "A text", sort:{order:desc, field:id}', + lambda mock: mock.assert_called_with({"id": {"order": "desc"}}), + "sort", + ) + + +def test_filter_as_field_order_by_dict(): + filter_generation( + "articlesInMeta", + 'headline: "A text", sort:{order:desc, field:id}', + lambda mock: mock.assert_called_with({"es_id": {"order": "desc"}}), + "sort", + ) + + +def test_filter_in_meta(): + filter_generation( + "articlesInMeta", + 'headline: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A 
text", "fuzziness": "auto"})]) + ), + ) + + +def test_filter_in_meta_dict(): + filter_generation( + "articlesInMetaDict", + 'headline: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]) + ), + ) + + +def test_filter_in_meta_dict_foreign(): + filter_generation( + "articlesInMetaDict", + 'reporterEmail: "A mail"', + lambda mock: mock.assert_called_with( + Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})]) + ), + ) + + +def test_filter_in_multi_field(): + filter_generation( + "articlesInMultiField", + 'contain: "A text"', + lambda mock: mock.assert_called_with( + Bool( + must=[ + Bool( + should=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(lang={"query": "A text", "fuzziness": "auto"}), + ] + ) + ] + ) + ), + ) + + +def compare_must_array(must, other_must): + assert len(must) == len(other_must) + + for target in must: + assert target in other_must + + +def test_filter_generating_all(): + spected_query = Bool( + must=[ + Match(headline={"query": "A text", "fuzziness": "auto"}), + Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}), + Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}), + Match(lang={"query": "es", "fuzziness": "auto"}), + Term(importance=1), + ] + ) + + filter_generation( + "articlesInGenerateAll", + 'headline: "A text", ' + 'pubDate: "0000-00-00", ' + 'pubDateTime: "00:00:00", ' + 'lang: "es", ' + "importance: 1, ", + lambda mock: compare_must_array(mock.call_args[0][0].must, spected_query.must), + ) + + +def test_filter_generating_exclude(): + query = generate_query("articlesInExcludes", 'headline: "A text", ') + + schema = Schema(query=ESFilterQuery) + result = schema.execute(query) + + assert len(result.errors) > 0 diff --git a/graphene_django/elasticsearch/tests/test_filter_processor.py b/graphene_django/elasticsearch/tests/test_filter_processor.py new file mode 100644 index 000000000..04d808473 --- 
/dev/null +++ b/graphene_django/elasticsearch/tests/test_filter_processor.py @@ -0,0 +1,104 @@ +import pytest +from elasticsearch_dsl.query import ( + Bool, + Term, + Wildcard, + MatchPhrase, + MatchPhrasePrefix, + Range, + Terms, + Exists, +) + +from graphene_django.elasticsearch.tests.commons import filter_generation +from graphene_django.utils import ( + DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, +) + +pytestmark = [] + +if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED: + pytestmark.append( + pytest.mark.skipif( + True, reason="django_filters not installed or not compatible" + ) + ) + +pytestmark.append(pytest.mark.django_db) + + +def test_processor_term(): + filter_generation( + "articlesInMetaDict", + 'headlineTerm: "A text"', + lambda mock: mock.assert_called_with(Bool(must=[Term(headline="A text")])), + ) + + +def test_processor_regex(): + filter_generation( + "articlesInMetaDict", + 'headlineRegex: "A text"', + lambda mock: mock.assert_called_with(Bool(must=[Wildcard(headline="A text")])), + ) + + +def test_processor_phrase(): + filter_generation( + "articlesInMetaDict", + 'headlinePhrase: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[MatchPhrase(headline={"query": "A text"})]) + ), + ) + + +def test_processor_prefix(): + filter_generation( + "articlesInMetaDict", + 'headlinePrefix: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})]) + ), + ) + + +def test_processor_in(): + filter_generation( + "articlesInMetaDict", + 'headlineIn: ["A text 1", "A text 2"]', + lambda mock: mock.assert_called_with( + Bool(must=[Terms(headline=["A text 1", "A text 2"])]) + ), + ) + + +def test_processor_exits(): + filter_generation( + "articlesInMetaDict", + "headlineExits: true", + lambda mock: mock.assert_called_with( + Bool(must=[Bool(must=[Exists(field="headline")])]) + ), + ) + + +def test_processor_lte(): + filter_generation( + "articlesInMetaDict", 
+ 'headlineLte: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=Range(headline={"lte": "A text"})) + ), + ) + + +def test_processor_gte(): + filter_generation( + "articlesInMetaDict", + 'headlineGte: "A text"', + lambda mock: mock.assert_called_with( + Bool(must=Range(headline={"gte": "A text"})) + ), + ) diff --git a/graphene_django/filter/utils.py b/graphene_django/filter/utils.py index cfa5621a1..ef1310fab 100644 --- a/graphene_django/filter/utils.py +++ b/graphene_django/filter/utils.py @@ -1,4 +1,5 @@ import six +from django_filters import Filter from .filterset import custom_filterset_factory, setup_filterset @@ -12,8 +13,13 @@ def get_filtering_args_from_filterset(filterset_class, type): args = {} for name, filter_field in six.iteritems(filterset_class.base_filters): - field_type = convert_form_field(filter_field.field).Argument() - field_type.description = filter_field.label + + if isinstance(filter_field, Filter): + field_type = convert_form_field(filter_field.field).Argument() + field_type.description = filter_field.label + else: + field_type = filter_field.Argument() + args[name] = field_type return args diff --git a/graphene_django/utils/__init__.py b/graphene_django/utils/__init__.py index f9c388dc6..73c871deb 100644 --- a/graphene_django/utils/__init__.py +++ b/graphene_django/utils/__init__.py @@ -1,5 +1,6 @@ from .utils import ( DJANGO_FILTER_INSTALLED, + DJANGO_ELASTICSEARCH_DSL_INSTALLED, get_reverse_fields, maybe_queryset, get_model_fields, @@ -10,6 +11,7 @@ __all__ = [ "DJANGO_FILTER_INSTALLED", + "DJANGO_ELASTICSEARCH_DSL_INSTALLED", "get_reverse_fields", "maybe_queryset", "get_model_fields", diff --git a/graphene_django/utils/utils.py b/graphene_django/utils/utils.py index b8aaba0a8..16370f991 100644 --- a/graphene_django/utils/utils.py +++ b/graphene_django/utils/utils.py @@ -12,6 +12,14 @@ DJANGO_FILTER_INSTALLED = False +try: + import django_elasticsearch_dsl # noqa + + DJANGO_ELASTICSEARCH_DSL_INSTALLED = True +except 
ImportError: + DJANGO_ELASTICSEARCH_DSL_INSTALLED = False + + def get_reverse_fields(model, local_field_names): for name, attr in model.__dict__.items(): # Don't duplicate any local fields diff --git a/setup.py b/setup.py index e622a718b..c5a21232f 100644 --- a/setup.py +++ b/setup.py @@ -22,6 +22,7 @@ "django-filter<2;python_version<'3'", "django-filter>=2;python_version>='3'", "pytest-django>=3.3.2", + "django_elasticsearch_dsl", ] + rest_framework_require