[bug 768912, bug 768232] Part 1: Add document_{topic, product} to index.
rlr committed Aug 1, 2012
1 parent 5a1752b commit 599551e
Showing 4 changed files with 82 additions and 16 deletions.
22 changes: 14 additions & 8 deletions apps/search/models.py
@@ -5,7 +5,7 @@
from django.conf import settings
from django.core import signals
from django.db import models
-from django.db.models.signals import pre_delete, post_save
+from django.db.models.signals import pre_delete, post_save, m2m_changed
from django.dispatch import receiver

from search.tasks import index_task, unindex_task
@@ -172,7 +172,8 @@ def unindex(cls, id_, es=None):

def register_for_indexing(sender_class,
                          app,
-                         instance_to_indexee=_identity):
+                         instance_to_indexee=_identity,
+                         m2m=False):
"""Register a model whose changes might invalidate ElasticSearch
indexes.
@@ -230,12 +231,17 @@ def indexing_receiver(signal, signal_name):
    _search_models[sender_class._meta.db_table] = sender_class

    # Register signal listeners to keep indexes up to date:
-   indexing_receiver(post_save, 'post_save')(update)
-   indexing_receiver(pre_delete, 'pre_delete')(
-       # If it's the indexed instance that's been deleted, go ahead
-       # and delete it from the index. Otherwise, we just want to
-       # update whatever model it's related to.
-       delete if instance_to_indexee is _identity else update)
+   # TODO: Untangle this mess - Bug 778753
+   if m2m:
+       indexing_receiver(m2m_changed, 'm2m_changed')(update)
+   else:
+       indexing_receiver(post_save, 'post_save')(update)
+
+   indexing_receiver(pre_delete, 'pre_delete')(
+       # If it's the indexed instance that's been deleted, go ahead
+       # and delete it from the index. Otherwise, we just want to
+       # update whatever model it's related to.
+       delete if instance_to_indexee is _identity else update)


def generate_tasks(**kwargs):
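For context on the new m2m=True path: Django's m2m_changed signal fires on the auto-generated through model, and its instance argument is the object whose relation changed, which is why the registrations in apps/wiki/models.py below can pass instance_to_indexee=lambda i: i. A minimal sketch of the signal's behavior, illustrative only and not part of this commit (on_topics_changed and reindex are made-up names):

from django.db.models.signals import m2m_changed

def on_topics_changed(sender, instance, action, **kwargs):
    # sender is Document.topics.through; instance is the Document
    # whose topics changed (when reverse=False). Reindex only after
    # the change has actually been written.
    if action in ('post_add', 'post_remove', 'post_clear'):
        reindex(instance)  # made-up helper standing in for the index task

m2m_changed.connect(on_topics_changed, sender=Document.topics.through)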
48 changes: 41 additions & 7 deletions apps/wiki/models.py
@@ -494,6 +494,32 @@ def is_watched_by(self, user):
        from wiki.events import EditDocumentEvent
        return EditDocumentEvent.is_notifying(user, self)

    def get_topics(self, uncached=False):
        """Return the list of topics that apply to this document.
        If the document has a parent, it inherits the parent's topics.
        """
        if self.parent:
            return self.parent.get_topics()
        if uncached:
            q = Topic.uncached
        else:
            q = Topic.objects
        return q.filter(document=self)

    def get_products(self, uncached=False):
        """Return the list of products that apply to this document.
        If the document has a parent, it inherits the parent's products.
        """
        if self.parent:
            return self.parent.get_products()
        if uncached:
            q = Product.uncached
        else:
            q = Product.objects
        return q.filter(document=self)

    @classmethod
    def get_query_fields(cls):
        return ['document_title__text',
@@ -522,7 +548,8 @@ def get_mapping(cls):
            'document_is_archived': {'type': 'boolean'},
            'document_summary': {'type': 'string', 'analyzer': 'snowball'},
            'document_keywords': {'type': 'string', 'analyzer': 'snowball'},
-           'document_tag': {'type': 'string', 'index': 'not_analyzed'}}
+           'document_product': {'type': 'string', 'index': 'not_analyzed'},
+           'document_topic': {'type': 'string', 'index': 'not_analyzed'}}

    @classmethod
    def extract_document(cls, obj_id):
@@ -543,12 +570,9 @@ def extract_document(cls, obj_id):
        d['document_slug'] = obj.slug
        d['document_is_archived'] = obj.is_archived

-       if obj.parent is None:
-           tags = [tag['name'] for tag in obj.tags.values()]
-       else:
-           # Translations inherit tags from their parents.
-           tags = [tag['name'] for tag in obj.parent.tags.values()]
-       d['document_tag'] = tags
+       d['document_topic'] = [t.slug for t in obj.get_topics(True)]
+       d['document_product'] = [p.slug for p in obj.get_products(True)]

        if obj.current_revision:
            d['document_summary'] = obj.current_revision.summary
            d['document_keywords'] = obj.current_revision.keywords
@@ -592,6 +616,16 @@ def search(cls):


register_for_indexing(Document, 'wiki')
register_for_indexing(
    Document.topics.through,
    'wiki',
    instance_to_indexee=lambda i: i,
    m2m=True)
register_for_indexing(
    Document.products.through,
    'wiki',
    instance_to_indexee=lambda i: i,
    m2m=True)
register_for_indexing(
    TaggedItem,
    'wiki',
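The not_analyzed mapping entries above mean topic and product slugs are stored verbatim, so searches can filter on them as exact values. A hedged sketch of the kind of query this enables, assuming Document.search() returns an elasticutils-style S object (the slug values here are invented):

# Non-archived documents with a given topic and product.
docs = (Document.search()
                .filter(document_topic='bookmarks',
                        document_product='firefox',
                        document_is_archived=False))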
26 changes: 26 additions & 0 deletions apps/wiki/tests/test_models.py
@@ -5,8 +5,10 @@

from django.core.exceptions import ValidationError

from products.tests import product
from sumo import ProgrammingError
from sumo.tests import TestCase
from topics.tests import topic
from wiki.cron import calculate_related_documents
from wiki.models import Document, RelatedDocument
from wiki.config import (REDIRECT_SLUG, REDIRECT_TITLE, REDIRECT_HTML,
@@ -285,6 +287,30 @@ def test_redirect_nondefault_locales(self):
        eq_(redirect_to.document.get_absolute_url(),
            redirector.document.redirect_url())

    def test_get_topics(self):
        """Test the get_topics() method."""
        en_us = document(save=True)
        en_us.topics.add(topic(save=True))
        en_us.topics.add(topic(save=True))

        eq_(2, len(en_us.get_topics()))

        # Localized document inherits parent's topics.
        l10n = document(parent=en_us, save=True)
        eq_(2, len(l10n.get_topics()))

    def test_get_products(self):
        """Test the get_products() method."""
        en_us = document(save=True)
        en_us.products.add(product(save=True))
        en_us.products.add(product(save=True))

        eq_(2, len(en_us.get_products()))

        # Localized document inherits parent's products.
        l10n = document(parent=en_us, save=True)
        eq_(2, len(l10n.get_products()))


class LocalizableOrLatestRevisionTests(TestCase):
"""Tests for Document.localizable_or_latest_revision()"""
2 changes: 1 addition & 1 deletion settings.py
@@ -710,7 +710,7 @@ def JINJA_CONFIG():
ES_INDEXES = {'default': 'sumo-20120622'}
# Indexes for indexing--set this to ES_INDEXES if you want to read to
# and write to the same index.
-ES_WRITE_INDEXES = ES_INDEXES
+ES_WRITE_INDEXES = {'default': 'sumo-20120731'}
# This is prepended to index names to get the final read/write index
# names used by kitsune. This is so that you can have multiple environments
# pointed at the same ElasticSearch cluster and not have them bump into
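Pointing ES_WRITE_INDEXES at a new index while ES_INDEXES keeps its old value is what makes a mapping change like this deployable without downtime: reindexing writes documents carrying the new document_topic/document_product fields into the fresh index while searches keep reading the old one. A sketch of the lifecycle, using the values from this commit:

# During the reindex:
ES_INDEXES = {'default': 'sumo-20120622'}         # searches read here
ES_WRITE_INDEXES = {'default': 'sumo-20120731'}   # indexing writes here

# After the new index is fully populated, flip reads over:
# ES_INDEXES = {'default': 'sumo-20120731'}
# ES_WRITE_INDEXES = ES_INDEXES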
