[bug 731558, 729103] Handle ES errors in suggestions

This adds handling for ES-related issues such as max-retry errors, timeouts,
and other exceptions. In these cases, _search_suggestions returns an empty
result set, which seems like the right thing to do.
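For context, the guard described above boils down to the pattern sketched below: run the search query, count which backend and error type failed, and fall back to an empty suggestion list. This is a minimal, self-contained sketch, not the project code; the exception classes, the incr helper, and search_suggestions here are stand-ins for the real SearchError, ESTimeoutError, ESMaxRetryError, ESException, statsd.incr, and _search_suggestions that appear in the diff.

# Stand-in exception classes mirroring the ones imported in the diff.
class SearchError(Exception):
    pass

class ESTimeoutError(Exception):
    pass

class ESMaxRetryError(Exception):
    pass

class ESException(Exception):
    pass


def incr(key):
    """Stand-in for statsd.incr(): print the counter key instead of sending it."""
    print('incr %s' % key)


def search_suggestions(run_query, engine='elastic'):
    """Run run_query() and return its results, or [] when the backend fails."""
    try:
        return run_query()
    except (SearchError, ESTimeoutError, ESMaxRetryError, ESException) as exc:
        # Record which engine and which error type failed, then degrade
        # gracefully by returning no suggestions.
        if isinstance(exc, SearchError):
            incr('questions.suggestions.%s.searcherror' % engine)
        elif isinstance(exc, ESTimeoutError):
            incr('questions.suggestions.%s.timeouterror' % engine)
        elif isinstance(exc, ESMaxRetryError):
            incr('questions.suggestions.%s.maxretryerror' % engine)
        else:
            incr('questions.suggestions.%s.elasticsearchexception' % engine)
        return []


# Usage example: a query that times out bumps a counter and yields an empty list.
def failing_query():
    raise ESTimeoutError('backend took too long')

assert search_suggestions(failing_query) == []

Counting failures per engine mirrors the waffle-flag check added at the top of the function in the diff, so the metrics distinguish whether the elastic or the sphinx backend is misbehaving.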
commit f788c7721dc782cd49282deaf0dbca65aa2c14c8 (1 parent: 40f0deb)
authored by @willkg
Showing with 58 additions and 38 deletions.
  1. +58 −38 apps/questions/views.py
apps/questions/views.py
@@ -46,6 +46,7 @@
question_searcher)
from questions.question_config import products
from search.utils import locale_or_default
+from search.es_utils import ESTimeoutError, ESMaxRetryError, ESException
from search import SearchError
from sumo.helpers import urlparams
from sumo.urlresolvers import reverse
@@ -921,6 +922,11 @@ def _search_suggestions(request, query, locale, category_tags):
Returns up to 3 wiki pages, then up to 3 questions.
"""
+ if waffle.flag_is_active(request, 'elasticsearch'):
+ engine = 'elastic'
+ else:
+ engine = 'sphinx'
+
my_question_search = question_searcher(request)
my_wiki_search = wiki_searcher(request)
@@ -932,44 +938,58 @@ def _search_suggestions(request, query, locale, category_tags):
my_question_search = my_question_search.filter(tag__in=category_tags)
my_wiki_search = my_wiki_search.filter(tag__in=category_tags)
- raw_results = (
- my_wiki_search.filter(locale=locale,
- category__in=settings.SEARCH_DEFAULT_CATEGORIES)
- .query(query)
- .values_dict('id')[:WIKI_RESULTS])
-
- # Lazily build excerpts from results. Stop when we have enough:
- results = []
- for r in raw_results:
- try:
- doc = (Document.objects.select_related('current_revision').
- get(pk=r['id']))
- results.append({
- 'search_summary': doc.current_revision.summary,
- 'url': doc.get_absolute_url(),
- 'title': doc.title,
- 'type': 'document',
- 'object': doc,
- })
- except Document.DoesNotExist:
- pass
-
- # Questions app is en-US only.
- raw_results = (my_question_search.query(query)
- .values_dict('id')[:QUESTIONS_RESULTS])
-
- for r in raw_results:
- try:
- q = Question.objects.get(pk=r['id'])
- results.append({
- 'search_summary': q.content[0:500],
- 'url': q.get_absolute_url(),
- 'title': q.title,
- 'type': 'question',
- 'object': q
- })
- except Question.DoesNotExist:
- pass
+ try:
+ raw_results = (
+ my_wiki_search.filter(locale=locale,
+ category__in=settings.SEARCH_DEFAULT_CATEGORIES)
+ .query(query)
+ .values_dict('id')[:WIKI_RESULTS])
+
+ # Lazily build excerpts from results. Stop when we have enough:
+ results = []
+ for r in raw_results:
+ try:
+ doc = (Document.objects.select_related('current_revision').
+ get(pk=r['id']))
+ results.append({
+ 'search_summary': doc.current_revision.summary,
+ 'url': doc.get_absolute_url(),
+ 'title': doc.title,
+ 'type': 'document',
+ 'object': doc,
+ })
+ except Document.DoesNotExist:
+ pass
+
+ # Questions app is en-US only.
+ raw_results = (my_question_search.query(query)
+ .values_dict('id')[:QUESTIONS_RESULTS])
+
+ for r in raw_results:
+ try:
+ q = Question.objects.get(pk=r['id'])
+ results.append({
+ 'search_summary': q.content[0:500],
+ 'url': q.get_absolute_url(),
+ 'title': q.title,
+ 'type': 'question',
+ 'object': q
+ })
+ except Question.DoesNotExist:
+ pass
+
+ except (SearchError, ESTimeoutError, ESMaxRetryError, ESException), exc:
+ if isinstance(exc, SearchError):
+ statsd.incr('questions.suggestions.%s.searcherror' % engine)
+ elif isinstance(exc, ESTimeoutError):
+ statsd.incr('questions.suggestions.%s.timeouterror' % engine)
+ elif isinstance(exc, ESMaxRetryError):
+ statsd.incr('questions.suggestions.%s.maxretryerror' % engine)
+ elif isinstance(exc, ESException):
+ statsd.incr('questions.suggestions.%s.elasticsearchexception' %
+ engine)
+
+ return []
return results