Skip to content
This repository has been archived by the owner on Nov 9, 2017. It is now read-only.

Search providers #1288

Closed
wants to merge 16 commits into from
4 changes: 4 additions & 0 deletions r2/example.ini
Expand Up @@ -383,6 +383,9 @@ wiki_max_page_separators = 3


############################################ SEARCH
# search provider name
search_provider = cloudsearch

# endpoint for link search
CLOUDSEARCH_SEARCH_API =
# endpoint for link upload
Expand Down Expand Up @@ -756,3 +759,4 @@ feature_require_https = off
# as well.
feature_give_hsts_grants = off
feature_multireddit_customizations = off

46 changes: 15 additions & 31 deletions r2/r2/controllers/front.py
Expand Up @@ -53,8 +53,6 @@
from r2.lib.db import queries
from r2.lib.db.tdb_cassandra import MultiColumnQuery
from r2.lib.strings import strings
from r2.lib.search import (SearchQuery, SubredditSearchQuery, SearchException,
InvalidQuery)
from r2.lib.validator import *
from r2.lib import jsontemplates
from r2.lib import sup
Expand Down Expand Up @@ -255,13 +253,13 @@ def GET_comments(
# Determine if we should show the embed link for comments
c.can_embed = feature.is_enabled("comment_embeds") and bool(comment)

is_embed = embeds.prepare_embed_request(sr)
embed_key = embeds.prepare_embed_request(sr)

# check for 304
self.check_modified(article, 'comments')

if is_embed:
embeds.set_up_embed(sr, comment, showedits=showedits)
if embed_key:
embeds.set_up_embed(embed_key, sr, comment, showedits=showedits)

# Temporary hook until IAMA app "OP filter" is moved from partners
# Not to be open-sourced
Expand Down Expand Up @@ -380,16 +378,12 @@ def GET_comments(
suggested_sort = article.sort_if_suggested() if feature.is_enabled('default_sort') else None
if article.contest_mode:
if c.user_is_loggedin and sr.is_moderator(c.user):
# Default to top for contest mode to make determining winners
# easier, but allow them to override it for moderation
# purposes.
if 'sort' not in request.params:
sort = "top"
sort = "top"
else:
sort = "random"
elif suggested_sort and 'sort' not in request.params:
sort = suggested_sort
suggested_sort_active = True
sort = suggested_sort
suggested_sort_active = True

# finally add the comment listing
displayPane.append(CommentPane(article, CommentSortMenu.operator(sort),
Expand Down Expand Up @@ -572,16 +566,6 @@ def GET_moderationlog(self, num, after, reverse, count, mod, action):
mod = mods[mod_id]
mod_buttons.append(QueryButton(mod.name, mod.name,
query_param='mod'))
# add a choice for the automoderator account if it's not a mod
if (g.automoderator_account and
all(mod.name != g.automoderator_account
for mod in mods.values())):
automod_button = QueryButton(
g.automoderator_account,
g.automoderator_account,
query_param="mod",
)
mod_buttons.append(automod_button)
mod_buttons.append(QueryButton(_('admins*'), 'a', query_param='mod'))
base_path = request.path
menus = [NavMenu(action_buttons, base_path=base_path,
Expand Down Expand Up @@ -866,7 +850,7 @@ def GET_related(self, num, article, after, reverse, count):
end = int(time_module.mktime((article._date + rel_range).utctimetuple()))
nsfw = u"nsfw:0" if not (article.over_18 or article._nsfw.findall(article.title)) else u""
query = u"(and %s timestamp:%s..%s %s)" % (query, start, end, nsfw)
q = SearchQuery(query, raw_sort="-text_relevance",
q = g.search.SearchQuery(query, raw_sort="-text_relevance",
syntax="cloudsearch")
pane = self._search(q, num=num, after=after, reverse=reverse,
count=count)[2]
Expand Down Expand Up @@ -912,7 +896,7 @@ def GET_duplicates(self, article, num, after, reverse, count):
@api_doc(api_section.subreddits, uri='/subreddits/search', supports_rss=True)
def GET_search_reddits(self, query, reverse, after, count, num):
"""Search subreddits by title and description."""
q = SubredditSearchQuery(query)
q = g.search.SubredditSearchQuery(query)

results, etime, spane = self._search(q, num=num, reverse=reverse,
after=after, count=count,
Expand All @@ -935,7 +919,7 @@ def GET_search_reddits(self, query, reverse, after, count, num):
sort=VMenu('sort', SearchSortMenu, remember=False),
recent=VMenu('t', TimeMenu, remember=False),
restrict_sr=VBoolean('restrict_sr', default=False),
syntax=VOneOf('syntax', options=SearchQuery.known_syntaxes))
syntax=VOneOf('syntax', options=g.search.SearchQuery.known_syntaxes))
@api_doc(api_section.search, supports_rss=True, uses_site=True)
def GET_search(self, query, num, reverse, after, count, sort, recent,
restrict_sr, syntax):
Expand All @@ -951,25 +935,25 @@ def GET_search(self, query, num, reverse, after, count, sort, recent,
site = c.site

if not syntax:
syntax = SearchQuery.default_syntax
syntax = g.search.SearchQuery.default_syntax

try:
cleanup_message = None
try:
q = SearchQuery(query, site, sort,
q = g.search.SearchQuery(query, site, sort,
recent=recent, syntax=syntax)
results, etime, spane = self._search(q, num=num, after=after,
reverse=reverse,
count=count)
except InvalidQuery:
except g.search.InvalidQuery:
# Clean the search of characters that might be causing the
# InvalidQuery exception. If the cleaned search boils down
# to an empty string, the search code is expected to bail
# out early with an empty result set.
cleaned = re.sub("[^\w\s]+", " ", query)
cleaned = cleaned.lower().strip()

q = SearchQuery(cleaned, site, sort, recent=recent)
q = g.search.SearchQuery(cleaned, site, sort, recent=recent)
results, etime, spane = self._search(q, num=num,
after=after,
reverse=reverse,
Expand Down Expand Up @@ -1002,7 +986,7 @@ def GET_search(self, query, num, reverse, after, count, sort, recent,
).render()

return res
except SearchException + (socket.error,) as e:
except g.search.SearchException + (socket.error,) as e:
return self.search_fail(e)

def _search(self, query_obj, num, after, reverse, count=0,
Expand All @@ -1025,7 +1009,7 @@ def _search(self, query_obj, num, after, reverse, count=0,
# computed after fetch_more
try:
res = listing.listing()
except SearchException + (socket.error,) as e:
except g.search.SearchException + (socket.error,) as e:
return self.search_fail(e)
timing = time_module.time() - builder.start_time

Expand Down
39 changes: 13 additions & 26 deletions r2/r2/controllers/listingcontroller.py
Expand Up @@ -40,7 +40,6 @@
from r2.lib.db.thing import Query, Merge, Relations
from r2.lib.db import queries
from r2.lib.strings import Score
import r2.lib.search as search
from r2.lib.template_helpers import add_sr
from r2.lib.admin_utils import check_cheating
from r2.lib.csrf import csrf_exempt
Expand All @@ -59,6 +58,7 @@
from api_docs import api_doc, api_section

from pylons.i18n import _
from pylons import g

from datetime import timedelta
import random
Expand All @@ -67,6 +67,7 @@
class ListingController(RedditController):
"""Generalized controller for pages with lists of links."""


# toggle skipping of links based on the users' save/hide/vote preferences
skip = True

Expand Down Expand Up @@ -163,7 +164,7 @@ def builder(self):
builder_cls = self.builder_cls
elif isinstance(self.query_obj, Query):
builder_cls = QueryBuilder
elif isinstance(self.query_obj, search.SearchQuery):
elif isinstance(self.query_obj, g.search.SearchQuery):
builder_cls = SearchBuilder
elif isinstance(self.query_obj, iters):
builder_cls = IDBuilder
Expand Down Expand Up @@ -457,32 +458,18 @@ def trending_info(cls):

def content(self):
content = super(HotController, self).content()

if c.render_style == "html":
stack = None
if isinstance(c.site, DefaultSR) and not self.listing_obj.prev:
trending_info = self.trending_info()
if trending_info:
stack = [
self.spotlight,
TrendingSubredditsBar(**trending_info),
self.listing_obj,
]
else:
hot_hook = hooks.get_hook("hot.get_content")
hot_pane = hot_hook.call_until_return(controller=self)
if hot_pane:
stack = [
self.spotlight,
hot_pane,
self.listing_obj
]

if stack:
return PaneStack(filter(None, stack), css_class='spacer')

if (c.render_style == "html" and isinstance(c.site, DefaultSR) and
not self.listing_obj.prev):
trending_info = self.trending_info()
if trending_info:
return PaneStack(filter(None, [
self.spotlight,
TrendingSubredditsBar(**trending_info),
self.listing_obj,
]), css_class='spacer')
return content


def title(self):
return c.site.title

Expand Down
12 changes: 5 additions & 7 deletions r2/r2/controllers/reddit_base.py
Expand Up @@ -477,7 +477,7 @@ def set_multireddit():
# Only supported via API as we don't have a valid non-query
# parameter equivalent for cross-user multis, which means
# we can't generate proper links to /new, /top, etc in HTML
multi_ids = [m.lower() for m in request.GET.getall("m")]
multi_ids = request.GET.getall("m")
multiurl = ""

if multi_ids is not None:
Expand All @@ -488,14 +488,13 @@ def set_multireddit():
elif len(multis) == 1:
c.site = multis[0]
else:
sr_ids = Subreddit.random_reddits(
srs = Subreddit.random_reddits(
logged_in_username,
list(set(itertools.chain.from_iterable(
multi.sr_ids for multi in multis
multi.srs for multi in multis
))),
LabeledMulti.MAX_SR_COUNT,
LabeledMulti.MAX_SR_COUNT
)
srs = Subreddit._byID(sr_ids, data=True, return_dict=False)
c.site = MultiReddit(multiurl, srs)
if any(m.weighting_scheme == "fresh" for m in multis):
c.site.weighting_scheme = "fresh"
Expand Down Expand Up @@ -1743,8 +1742,7 @@ def abort_if_not_modified(self, last_modified, private=True,
abort(304, 'not modified')

def search_fail(self, exception):
from r2.lib.search import SearchException
if isinstance(exception, SearchException + (socket.error,)):
if isinstance(exception, g.search.SearchException + (socket.error,)):
g.log.error("Search Error: %s" % repr(exception))

errpage = pages.RedditError(_("search failed"),
Expand Down
17 changes: 7 additions & 10 deletions r2/r2/lib/menus.py
Expand Up @@ -20,13 +20,12 @@
# Inc. All Rights Reserved.
###############################################################################

from pylons import c, request
from pylons import g, c, request
from pylons.i18n import _, N_

from r2.config import feature
from r2.lib.db import operators
from r2.lib.filters import _force_unicode
from r2.lib.search import sorts as search_sorts
from r2.lib.strings import StringHandler, plurals
from r2.lib.utils import class_property, query_string, timeago
from r2.lib.wrapped import Styled
Expand Down Expand Up @@ -57,7 +56,7 @@ def __getattr__(self, attr):
gilded = _('gilded'),
confidence = _('best'),
random = _('random'),
qa = _('q&a'),
qa = _('q & a'),
saved = _('saved {toolbar}'),
recommended = _('recommended'),
rising = _('rising'),
Expand Down Expand Up @@ -591,12 +590,10 @@ def visible_options(cls):

@class_property
def hidden_options(cls):
sorts = ['random']
if not feature.is_enabled('qa_sort'):
sorts.append('qa')
if feature.is_enabled('remove_hot_comments'):
sorts.append('hot')
return sorts
if feature.is_enabled('qa_sort'):
return ('random',)
else:
return ('random', 'qa',)

def make_title(self, attr):
title = super(CommentSortMenu, self).make_title(attr)
Expand All @@ -609,7 +606,7 @@ def make_title(self, attr):
class SearchSortMenu(SortMenu):
"""Sort menu for search pages."""
_default = 'relevance'
mapping = search_sorts
mapping = g.search.sorts
_options = mapping.keys()

@classmethod
Expand Down
24 changes: 18 additions & 6 deletions r2/r2/lib/search.py → r2/r2/lib/providers/search/__init__.py
Expand Up @@ -20,13 +20,25 @@
# Inc. All Rights Reserved.
###############################################################################

import r2.lib.cloudsearch as cloudsearch

class SearchProvider(object):
    """Abstract interface for pluggable search backends.

    Concrete providers (e.g. a cloudsearch implementation) are expected
    to supply real query classes and helpers under these names; callers
    access them as ``g.search.SearchQuery``, ``g.search.sorts`` etc.

    Every stub here *raises* ``NotImplementedError`` so that a
    misconfigured or incomplete provider fails loudly at the call site.
    (The original code did ``return NotImplementedError``, which hands
    the caller the exception class itself instead of signalling an
    error — a silent bug.)
    """

    def InvalidQuery(self):
        # Exception type(s) raised for malformed queries.
        raise NotImplementedError

    def SearchException(self):
        # Exception type(s) raised for backend/transport failures.
        raise NotImplementedError

    def Query(self):
        # Link search query class.
        raise NotImplementedError

    def SubredditSearchQuery(self):
        # Subreddit search query class.
        raise NotImplementedError

    def sorts(self):
        # Mapping of sort names used to build the search sort menu.
        raise NotImplementedError

    def run_changed(self):
        # Hook to push changed things to the search index.
        raise NotImplementedError