
Exclude items hidden to the user from 'other discussions'

Exclude them from both the listing and the count shown in the tab.
commit 191c249934b2540e33c40990dbb3f01e2f7ebdd5 (parent: 4907bc4)
Authored by @Deimos
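
For context, the rule this commit enforces: a removed (spam-filtered) link should only appear under 'other discussions' for its author, a site admin, or a moderator of its subreddit. The sketch below restates that rule with hypothetical stand-in classes so it can run on its own; the real check is the include_link() function added to utils.py further down, and only the attribute names (_spam, author_id, and so on) are taken from the diff.

# Illustrative stand-ins only; reddit's real models live in r2.models.
class FakeUser:
    def __init__(self, user_id, is_admin=False):
        self._id = user_id
        self.is_admin = is_admin

class FakeLink:
    def __init__(self, spam, author_id, moderator_ids=()):
        self._spam = spam
        self.author_id = author_id
        self.moderator_ids = set(moderator_ids)

def visible_in_duplicates(link, user, logged_in=True):
    # Mirrors the include_link() rule added to utils.py in this commit.
    if not link._spam:
        return True
    return bool(logged_in and
                (link.author_id == user._id or
                 user.is_admin or
                 user._id in link.moderator_ids))

removed = FakeLink(spam=True, author_id=1, moderator_ids=[7])
print(visible_in_duplicates(removed, FakeUser(1)))                 # True: author
print(visible_in_duplicates(removed, FakeUser(7)))                 # True: moderator
print(visible_in_duplicates(removed, FakeUser(9, is_admin=True)))  # True: admin
print(visible_in_duplicates(removed, FakeUser(9)))                 # False: anyone else
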
r2/r2/controllers/front.py (19 lines changed)
@@ -31,7 +31,7 @@
from r2.lib.pages import trafficpages
from r2.lib.menus import *
from r2.lib.utils import to36, sanitize_url, check_cheating, title_to_url
-from r2.lib.utils import query_string, UrlParser, link_from_url, link_duplicates
+from r2.lib.utils import query_string, UrlParser, link_from_url, url_links_builder
from r2.lib.template_helpers import get_domain
from r2.lib.filters import unsafe, _force_unicode, _force_utf8
from r2.lib.emailer import has_opted_out, Email
@@ -760,16 +760,19 @@ def GET_duplicates(self, article, num, after, reverse, count):
        if not can_view_link_comments(article):
            abort(403, 'forbidden')

-        links = link_duplicates(article)
-        links.sort(key=attrgetter('num_comments'), reverse=True)
-        builder = IDBuilder([ link._fullname for link in links ],
-                            num = num, after = after, reverse = reverse,
-                            count = count, skip = False)
-        listing = LinkListing(builder).listing()
+        # only look up duplicates if it's not a self-post
+        if not getattr(article, 'is_self', False):
+            builder = url_links_builder(article.url,
+                                        exclude=article._fullname)
+            num_duplicates = len(builder.get_items()[0])
+            listing = LinkListing(builder).listing()
+        else:
+            num_duplicates = 0
+            listing = None

        res = LinkInfoPage(link=article,
                           comment=None,
-                           duplicates=links,
+                           num_duplicates=num_duplicates,
                           content=listing,
                           page_classes=['other-discussions-page'],
                           subtitle=_('other discussions')).render()
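
A consequence of the rewrite above is that num_duplicates is derived from the same builder that produces the listing, so the tab count can no longer drift from what is actually rendered. A small self-contained illustration of that invariant; plain lists stand in for reddit's Link objects and builder, and none of the names below are reddit API:

# Toy illustration: the count and the listing come from one filtered collection.
all_duplicates = ['t3_aaa', 't3_bbb_removed', 't3_ccc']

def viewer_can_see(fullname):
    # stand-in for the keep_fn visibility check added in utils.py below
    return not fullname.endswith('_removed')

visible = [fn for fn in all_duplicates if viewer_can_see(fn)]
num_duplicates = len(visible)   # what the 'other discussions' tab reports
listing = visible               # what the page body renders

print(num_duplicates, listing)  # 2 ['t3_aaa', 't3_ccc']
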
r2/r2/controllers/wiki.py (7 lines changed)
@@ -23,7 +23,7 @@
from pylons import request, g, c
from pylons.controllers.util import redirect_to
from reddit_base import RedditController
-from r2.lib.utils import url_links
+from r2.lib.utils import url_links_builder
from reddit_base import paginated_listing
from r2.models.wiki import (WikiPage, WikiRevision, ContentLengthError,
modactions)
@@ -207,10 +207,7 @@ def GET_wiki_redirect(self, page='index'):
    @validate(page=VWikiPage('page', restricted=True))
    def GET_wiki_discussions(self, page, num, after, reverse, count):
        page_url = add_sr("%s/%s" % (c.wiki_base_url, page.name))
-        links = url_links(page_url)
-        builder = IDBuilder([link._fullname for link in links],
-                            num=num, after=after, reverse=reverse,
-                            count=count, skip=False)
+        builder = url_links_builder(page_url)
        listing = LinkListing(builder).listing()
        return WikiDiscussions(listing, page=page.name,
                               may_revise=this_may_revise(page)).render()
r2/r2/lib/pages/pages.py (17 lines changed)
@@ -49,7 +49,7 @@
from r2.lib.menus import OffsiteButton, menu, JsNavMenu
from r2.lib.strings import plurals, rand_strings, strings, Score
from r2.lib.utils import title_to_url, query_string, UrlParser, vote_hash
-from r2.lib.utils import link_duplicates, make_offset_date, median, to36
+from r2.lib.utils import url_links_builder, make_offset_date, median, to36
from r2.lib.utils import trunc_time, timesince, timeuntil, weighted_lottery
from r2.lib.template_helpers import add_sr, get_domain, format_number
from r2.lib.subreddit_search import popular_searches
@@ -1034,7 +1034,7 @@ class LinkInfoPage(Reddit):
    extra_page_classes = ['single-page']

    def __init__(self, link = None, comment = None,
-                 link_title = '', subtitle = None, duplicates = None,
+                 link_title = '', subtitle = None, num_duplicates = None,
                 *a, **kw):
        c.permalink_page = True
@@ -1076,10 +1076,12 @@ def __init__(self, link = None, comment = None,
        # if we're already looking at the 'duplicates' page, we can
        # avoid doing this lookup twice
-        if duplicates is None:
-            self.duplicates = link_duplicates(self.link)
+        if num_duplicates is None:
+            builder = url_links_builder(self.link.url,
+                                        exclude=self.link._fullname)
+            self.num_duplicates = len(builder.get_items()[0])
        else:
-            self.duplicates = duplicates
+            self.num_duplicates = num_duplicates

        robots = "noindex,nofollow" if link._deleted else None
        Reddit.__init__(self, title = title, short_description=short_description, robots=robots, *a, **kw)
@@ -1098,9 +1100,8 @@ def info_button(name, **fmt_args):
        buttons.extend([info_button('comments'),
                        info_button('related')])

-        if not self.link.is_self and self.duplicates:
-            buttons.append(info_button('duplicates',
-                                       num = len(self.duplicates)))
+        if not self.link.is_self and self.num_duplicates > 0:
+            buttons.append(info_button('duplicates', num=self.num_duplicates))

        if c.user_is_admin:
            buttons.append(NamedButton("details", dest="/details/"+self.link._fullname))
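
Because the count is now computed after filtering, the 'other discussions' tab disappears entirely when every duplicate is hidden from the viewer. A tiny runnable restatement of the button condition above (the function name is a stand-in; the logic mirrors the diff):

# Mirrors `not self.link.is_self and self.num_duplicates > 0` from pages.py above.
def show_duplicates_tab(is_self, num_duplicates):
    return (not is_self) and num_duplicates > 0

assert show_duplicates_tab(False, 3)        # visible duplicates -> tab with a count
assert not show_duplicates_tab(False, 0)    # every duplicate hidden -> no tab at all
assert not show_duplicates_tab(True, 0)     # self-posts never get the tab
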
r2/r2/lib/utils/utils.py (30 lines changed)
@@ -40,7 +40,7 @@
from time import sleep
from datetime import datetime, timedelta
-from pylons import g
+from pylons import c, g
from pylons.i18n import ungettext, _
from r2.lib.filters import _force_unicode, _force_utf8
from mako.filters import url_escape
@@ -1023,15 +1023,9 @@ def cmp_links(a, b):
    # among those, show them the hottest one
    return links if multiple else links[0]

-def link_duplicates(article):
-    # don't bother looking it up if the link doesn't have a URL anyway
-    if getattr(article, 'is_self', False):
-        return []
-
-    return url_links(article.url, exclude = article._fullname)
-
-def url_links(url, exclude=None):
-    from r2.models import Link, NotFound
+def url_links_builder(url, exclude=None):
+    from r2.models import IDBuilder, Link, NotFound
+    from operator import attrgetter

    try:
        links = tup(Link._by_url(url, None))
@@ -1040,7 +1034,21 @@ def url_links(url, exclude=None):
    links = [ link for link in links
              if link._fullname != exclude ]
-    return links
+    links.sort(key=attrgetter('num_comments'), reverse=True)
+
+    # don't show removed links in duplicates unless admin or mod
+    # or unless it's your own post
+    def include_link(link):
+        return (not link._spam or
+                (c.user_is_loggedin and
+                 (link.author_id == c.user._id or
+                  c.user_is_admin or
+                  link.subreddit.is_moderator(c.user))))
+
+    builder = IDBuilder([link._fullname for link in links],
+                        skip=True, keep_fn=include_link)
+
+    return builder

class TimeoutFunctionException(Exception):
    pass
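
The filtering itself is delegated to IDBuilder: with skip=True, items whose keep_fn returns False are dropped while the builder assembles its results, so a removed link never reaches the listing or the count. Reddit's real IDBuilder (r2.models.builder) also handles paging, wrapping, and batched lookups; the toy class below only imitates the skip/keep_fn behaviour and is not the actual API:

# Toy analogue of IDBuilder(skip=True, keep_fn=...), for illustration only.
class ToyBuilder:
    def __init__(self, fullnames, skip=False, keep_fn=None):
        self.fullnames = fullnames
        self.skip = skip
        self.keep_fn = keep_fn or (lambda item: True)

    def get_items(self):
        if self.skip:
            kept = [fn for fn in self.fullnames if self.keep_fn(fn)]
        else:
            kept = list(self.fullnames)
        return kept, None   # the real builder also returns paging information

builder = ToyBuilder(['t3_aaa', 't3_spam', 't3_ccc'],
                     skip=True, keep_fn=lambda fn: fn != 't3_spam')
items = builder.get_items()[0]
print(len(items), items)    # 2 ['t3_aaa', 't3_ccc'] -- hidden item gone everywhere
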