
Commit

add more examples
barseghyanartur committed Sep 6, 2019
1 parent 24d1b7e commit 9f4772f
Showing 9 changed files with 423 additions and 0 deletions.
45 changes: 45 additions & 0 deletions examples/factories/farm_animal.py
@@ -0,0 +1,45 @@
from faker import Faker as OriginalFaker
from factory.base import Factory
from factory import Faker, LazyAttribute

from search_index.documents import Animal

FAKER = OriginalFaker()

__all__ = (
    'AnimalFactory',
)


class AnimalFactory(Factory):
    """Animal factory."""

    class Meta(object):
        model = Animal

    scope = LazyAttribute(lambda x: {
        'farm_id': FAKER.uuid4(),
        'holding_id': FAKER.uuid4(),
    })
    action = Faker('word')
    entity = 'animal'
    id = Faker('uuid4')
    app = "s4farm-api"
    message_id = Faker('uuid4')
    publish_date = Faker('date')
    data = LazyAttribute(lambda x: {
        'id': FAKER.pyint(),
        'genetic': {
            'id': FAKER.pyint(),
            'name': FAKER.sentence(nb_words=3),
        },
        'animal_type': {
            'id': FAKER.pyint(),
            'name': FAKER.word(),
            'gender': {
                'id': FAKER.pyint(),
                # random_element picks a single value directly
                'name': FAKER.random_element(['Male', 'Female']),
            },
        },
        'uuid': FAKER.uuid4(),
    })
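
A minimal usage sketch (hypothetical, not part of the commit): calling the factory builds an unsaved Animal document whose Faker and LazyAttribute declarations are evaluated per instance; persisting is a separate .save() call, which the generate.py change below does in batch.

animal = AnimalFactory()   # unsaved Animal document with randomized fields
print(animal.entity, animal.message_id)
animal.save()              # index the document into Elasticsearch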
6 changes: 6 additions & 0 deletions examples/factories/generate.py
@@ -13,6 +13,7 @@ def project_dir(base):

from factories.blog_post import PostFactory
from factories.site_user import UserFactory
from factories.farm_animal import AnimalFactory


def generate(num_items=100):
@@ -32,6 +33,11 @@ def generate(num_items=100):
    for user in users:
        user.save()

    animals = AnimalFactory.create_batch(num_items)

    for animal in animals:
        animal.save()


if __name__ == '__main__':
    generate()
4 changes: 4 additions & 0 deletions examples/schema/__init__.py
@@ -1,5 +1,7 @@
import graphene
from .animal import Query as AnimalQuery
from .post import Query as PostQuery
from .read_only_animal import Query as ReadOnlyAnimalQuery
from .user import Query as UserQuery

__all__ = (
@@ -9,6 +11,8 @@


class Query(
    AnimalQuery,
    ReadOnlyAnimalQuery,
    PostQuery,
    UserQuery,
    graphene.ObjectType,
84 changes: 84 additions & 0 deletions examples/schema/animal.py
@@ -0,0 +1,84 @@
import graphene
from graphene import Node
from graphene_elastic import (
    ElasticsearchObjectType,
    ElasticsearchConnectionField,
)
from graphene_elastic.filter_backends import (
    FilteringFilterBackend,
    SearchFilterBackend,
    OrderingFilterBackend,
    DefaultOrderingFilterBackend,
)
from graphene_elastic.constants import (
    LOOKUP_FILTER_PREFIX,
    LOOKUP_FILTER_TERM,
    LOOKUP_FILTER_TERMS,
    LOOKUP_FILTER_WILDCARD,
    LOOKUP_QUERY_EXCLUDE,
    LOOKUP_QUERY_IN,
)

from search_index.documents import Animal as AnimalDocument


__all__ = (
    'Animal',
    'Query',
    'schema',
)


class Animal(ElasticsearchObjectType):
    """Animal."""

    class Meta:

        document = AnimalDocument
        interfaces = (Node,)
        filter_backends = [
            FilteringFilterBackend,
            SearchFilterBackend,
            OrderingFilterBackend,
            DefaultOrderingFilterBackend,
        ]
        filter_fields = {
            'id': {
                'field': 'id.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
            'action': {
                'field': 'action.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
            'entity': {
                'field': 'entity.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
        }
        search_fields = {
            'action': None,
            'entity': None,
        }
        ordering_fields = {
            'id': 'id.raw',
            'publish_date': 'publish_date',
            'action': 'action.raw',
            'entity': 'entity.raw',
        }

        ordering_defaults = (
            'id.raw',
            'publish_date',
        )


class Query(graphene.ObjectType):
    """Animal query."""

    animals = ElasticsearchConnectionField(Animal)


schema = graphene.Schema(
    query=Query
)
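
A hedged smoke test for the schema above (hypothetical, not part of the commit; assumes a running local Elasticsearch with Animal documents indexed, and that the examples directory is on the import path as in generate.py):

from schema.animal import schema

result = schema.execute('''
    query {
      animals(first: 3) {
        edges {
          node {
            id
            action
            entity
          }
        }
      }
    }
''')
print(result.errors)
print(result.data)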
84 changes: 84 additions & 0 deletions examples/schema/read_only_animal.py
@@ -0,0 +1,84 @@
import graphene
from graphene import Node
from graphene_elastic import (
    ElasticsearchObjectType,
    ElasticsearchConnectionField,
)
from graphene_elastic.filter_backends import (
    FilteringFilterBackend,
    SearchFilterBackend,
    OrderingFilterBackend,
    DefaultOrderingFilterBackend,
)
from graphene_elastic.constants import (
    LOOKUP_FILTER_PREFIX,
    LOOKUP_FILTER_TERM,
    LOOKUP_FILTER_TERMS,
    LOOKUP_FILTER_WILDCARD,
    LOOKUP_QUERY_EXCLUDE,
    LOOKUP_QUERY_IN,
)

from search_index.documents import ReadOnlyAnimal as ReadOnlyAnimalDocument


__all__ = (
    'ReadOnlyAnimal',
    'Query',
    'schema',
)


class ReadOnlyAnimal(ElasticsearchObjectType):
    """Read-only animal."""

    class Meta:

        document = ReadOnlyAnimalDocument
        interfaces = (Node,)
        filter_backends = [
            FilteringFilterBackend,
            SearchFilterBackend,
            OrderingFilterBackend,
            DefaultOrderingFilterBackend,
        ]
        filter_fields = {
            'id': {
                'field': 'id.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
            'action': {
                'field': 'action.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
            'entity': {
                'field': 'entity.raw',
                'default_lookup': LOOKUP_FILTER_TERM,
            },
        }
        search_fields = {
            'action': None,
            'entity': None,
        }
        ordering_fields = {
            'id': 'id.raw',
            'publish_date': 'publish_date',
            'action': 'action.raw',
            'entity': 'entity.raw',
        }

        ordering_defaults = (
            'id.raw',
            'publish_date',
        )


class Query(graphene.ObjectType):
    """Read-only animal query."""

    read_only_animals = ElasticsearchConnectionField(ReadOnlyAnimal)


schema = graphene.Schema(
    query=Query
)
1 change: 1 addition & 0 deletions examples/search_index/documents/__init__.py
@@ -1,2 +1,3 @@
from .animal import *
from .post import *
from .user import *
142 changes: 142 additions & 0 deletions examples/search_index/documents/animal.py
@@ -0,0 +1,142 @@
import datetime
from elasticsearch_dsl import connections
from elasticsearch_dsl import (
    analyzer,
    Integer,
    Boolean,
    Completion,
    Date,
    Document,
    InnerDoc,
    Keyword,
    Nested,
    Object,
    Text,
)
from .read_only import ReadOnlyDocument
from .settings import FARM_ANIMAL_DOCUMENT_NAME

try:
    from elasticsearch import logger
except ImportError:
    import logging
    logger = logging.getLogger(__name__)

__all__ = (
    'Animal',
    'ReadOnlyAnimal',
)

connections.create_connection(hosts=['localhost'], timeout=20)


html_strip = analyzer(
    'html_strip',
    tokenizer="standard",
    filter=["lowercase", "stop", "snowball"],
    char_filter=["html_strip"]
)


class Animal(Document):

    scope = Object(
        properties={
            'farm_id': Text(),
            'holding_id': Text(),
        }
    )
    action = Text(analyzer=html_strip, fields={'raw': Keyword()})
    entity = Text(analyzer=html_strip, fields={'raw': Keyword()})
    # This is not the internal Elasticsearch ID.
    id = Text(analyzer=html_strip, fields={'raw': Keyword()})
    app = Text()
    message_id = Text()
    publish_date = Date()
    data = Object(
        properties={
            'id': Integer(),
            'genetic': Object(
                properties={
                    'id': Integer(),
                    'name': Text(),
                }
            ),
            'animal_type': Object(
                properties={
                    'id': Integer(),
                    'name': Text(),
                    'gender': Object(
                        properties={
                            'id': Integer(),
                            'name': Text(),
                        }
                    ),
                }
            ),
        }
    )
    uuid = Text()

    class Index:
        name = FARM_ANIMAL_DOCUMENT_NAME
        settings = {
            'number_of_shards': 1,
            'number_of_replicas': 1,
            'blocks': {'read_only_allow_delete': None},
        }


class ReadOnlyAnimal(ReadOnlyDocument):

    scope = Object(
        properties={
            'farm_id': Text(),
            'holding_id': Text(),
        }
    )
    action = Text(analyzer=html_strip, fields={'raw': Keyword()})
    entity = Text(analyzer=html_strip, fields={'raw': Keyword()})
    # This is not the internal Elasticsearch ID.
    id = Text(analyzer=html_strip, fields={'raw': Keyword()})
    app = Text()
    message_id = Text()
    publish_date = Date()
    data = Object(
        properties={
            'id': Integer(),
            'genetic': Object(
                properties={
                    'id': Integer(),
                    'name': Text(),
                }
            ),
            'animal_type': Object(
                properties={
                    'id': Integer(),
                    'name': Text(),
                    'gender': Object(
                        properties={
                            'id': Integer(),
                            'name': Text(),
                        }
                    ),
                }
            ),
        }
    )
    uuid = Text()

    class Index:
        name = FARM_ANIMAL_DOCUMENT_NAME
        settings = {
            'number_of_shards': 1,
            'number_of_replicas': 1,
            'blocks': {'read_only_allow_delete': None},
        }


try:
    # Create the mappings in Elasticsearch.
    Animal.init()
except Exception as err:
    logger.error(err)
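
A small indexing sketch (hypothetical values, not part of the commit; assumes the local connection created above):

example = Animal(
    id='a1b2c3d4',                 # application-level id, not the Elasticsearch _id
    action='register',
    entity='animal',
    app='s4farm-api',
    message_id='m-0001',
    publish_date=datetime.datetime.now(),
    scope={'farm_id': 'farm-1', 'holding_id': 'holding-1'},
    data={'id': 1, 'animal_type': {'id': 2, 'name': 'cow'}},
)
example.save()                     # writes to the FARM_ANIMAL_DOCUMENT_NAME index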
