Bugfixes:
     * no-repeat on some background images in the sprite (reddit-archive#797)
     * fix deleted comments on the mobile site -- preserve author anonymity (#624)
     * faulty permalinks on pages with all-unicode titles (#776)
     * no more spreadshirt
     * reorganize comment_tree and _builder.pyx to clean up cache handling
     * fix styling for Bug #78
     * Improve handling of /r/all+all (bug reddit-archive#699), etc (not always a 400 now)
     * Fix the time listings: sometimes we get passed unicode for self.time
       (see the sketch after this list)
     * Don't allow private reddits to show in /r/random
     * Try to allow remote sites to show their own favicons in the reddit
        toolbar
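
     (Sketch, not part of this commit: the time-listing fix above implies
     normalizing self.time before it is used, since query parameters can
     arrive as unicode. normalize_time and VALID_TIMES below are hypothetical
     names, not the actual fix.)

     VALID_TIMES = ('hour', 'day', 'week', 'month', 'year', 'all')

     def normalize_time(value, default='all'):
         # Python 2: request params may arrive as unicode; coerce to str
         if isinstance(value, unicode):
             value = value.encode('utf-8')
         return value if value in VALID_TIMES else default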

  Speed improvements:
  * CommentBuilder refactor round 1:
     * create new sort and parent permacache entries so that we don't need to
       load every comment just to build the tree
     * update sorts and parents when a new comment comes in
     * update non-date sorts when a new comment vote comes in
     * add more trace info to timeouts in CommentBuilder
  * Some misc. performance hacks (incl. adding _utils.pyx)
  * Increase SR description box limit from 1k to 5k
  * Fix a bug where we weren't properly allowing hidden items to be hidden
    on time-filtered listings
  * Make Subreddit._by_name take a list of names like byID
  * Upload thumbs to S3 with reduced_redundancy==True (see the boto sketch after this list)
  * make rss feeds without a 'feed' get parameter act as if the user is not logged in
  * Require a boto that knows about reduced_redundancy
  * remove fast_queries from Message.add_props and put the onus on fetching the unread messages from the permacache
  * Store the list of popular subreddits in the permacache
    * make SubredditTopBar cacheable per-user.
  * add (as safe as we can make it) annotation for sql selects to track down requests hitting the db when they shouldn't be.
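
  (Sketch, not part of this commit: a minimal boto upload using S3
  reduced-redundancy storage, to illustrate the thumbnail items above. The
  bucket name and key path are made up; the real upload helper in r2.lib is
  not part of this page's diff.)

  import boto
  from boto.s3.key import Key

  conn = boto.connect_s3()                     # credentials come from the boto config
  bucket = conn.get_bucket('reddit-thumbs')    # illustrative bucket name
  key = Key(bucket, 'thumbs/t3_abc123.png')    # illustrative key path
  # reduced_redundancy needs a boto release that accepts the keyword,
  # hence the "Require a boto that knows about reduced_redundancy" item above
  key.set_contents_from_filename('/tmp/t3_abc123.png',
                                 reduced_redundancy=True,
                                 policy='public-read')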

 User submitted features:
  * Merge and clean up Phire's selfs-only/no-selfs patch.

  * Don't show expired items on time-filtered controversy listings

  * Also add the 'hide' button to the toolbar just for breakneckridge.
    Next time someone calls me prickly, breakneckridge had better step
    up to the plate for me or I'm rolling it back.
KeyserSosa committed Jun 26, 2010
1 parent bff6185 commit ea1aa01
Showing 44 changed files with 2,646 additions and 830 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -37,6 +37,7 @@ r2/_sorts.egg-info/
r2/r2/lib/_normalized_hot.c
r2/r2/lib/db/_sorts.c
r2/r2/lib/sgm.c
r2/r2/lib/utils/_utils.c
r2/r2/lib/wrapped.c
r2/r2/models/_builder.c
r2/sgm.egg-info/
2 changes: 0 additions & 2 deletions r2/r2/config/routing.py
@@ -121,8 +121,6 @@ def make_map(global_conf={}, app_conf={}):
action = 'details', title=None)
mc('/traffic/:article/:title', controller='front',
action = 'traffic', title=None)
mc('/shirt/:article/:title', controller='front',
action = 'shirt', title=None)
mc('/comments/:article/:title/:comment', controller='front',
action = 'comments', title=None, comment = None)
mc('/duplicates/:article/:title', controller = 'front',
43 changes: 30 additions & 13 deletions r2/r2/controllers/api.py
@@ -189,12 +189,12 @@ def POST_compose(self, form, jquery, to, subject, body, ip):
VRatelimit(rate_user = True, rate_ip = True,
prefix = "rate_submit_"),
ip = ValidIP(),
sr = VSubmitSR('sr'),
sr = VSubmitSR('sr', 'kind'),
url = VUrl(['url', 'sr']),
title = VTitle('title'),
save = VBoolean('save'),
selftext = VMarkdown('text'),
kind = VOneOf('kind', ['link', 'self', 'poll']),
kind = VOneOf('kind', ['link', 'self']),
then = VOneOf('then', ('tb', 'comments'),
default='comments'),
extension = VLength("extension", 20))
@@ -210,23 +210,39 @@ def POST_submit(self, form, jquery, url, selftext, kind, title,
# VUrl may have replaced 'url' by adding 'http://'
form.set_inputs(url = url)

if not kind:
if not kind or form.has_errors('sr', errors.INVALID_OPTION):
# this should only happen if somebody is trying to post
# links in some automated manner outside of the regular
# submission page, and hasn't updated their script
return

if form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
errors.SUBREDDIT_NOTALLOWED,
errors.SUBREDDIT_REQUIRED):
if (form.has_errors('sr',
errors.SUBREDDIT_NOEXIST,
errors.SUBREDDIT_NOTALLOWED,
errors.SUBREDDIT_REQUIRED,
errors.NO_SELFS,
errors.NO_LINKS)
or not sr):
# checking to get the error set in the form, but we can't
# check for rate-limiting if there's no subreddit
return
else:
should_ratelimit = sr.should_ratelimit(c.user, 'link')
#remove the ratelimit error if the user's karma is high
if not should_ratelimit:
c.errors.remove((errors.RATELIMIT, 'ratelimit'))

if sr.link_type == 'link' and kind == 'self':
# this could happen if they actually typed "self" into the
# URL box and we helpfully translated it for them
c.errors.add(errors.NO_SELFS, field='sr')

# and trigger that by hand for the form
form.has_errors('sr', errors.NO_SELFS)

return

should_ratelimit = sr.should_ratelimit(c.user, 'link')
#remove the ratelimit error if the user's karma is high
if not should_ratelimit:
c.errors.remove((errors.RATELIMIT, 'ratelimit'))

banmsg = None

banmsg = None

@@ -1139,12 +1155,13 @@ def POST_upload_sr_img(self, file, header, sponsor, name, form_id):
name = VSubredditName("name"),
title = VLength("title", max_length = 100),
domain = VCnameDomain("domain"),
description = VMarkdown("description", max_length = 1000),
description = VMarkdown("description", max_length = 5120),
lang = VLang("lang"),
over_18 = VBoolean('over_18'),
allow_top = VBoolean('allow_top'),
show_media = VBoolean('show_media'),
type = VOneOf('type', ('public', 'private', 'restricted')),
link_type = VOneOf('link_type', ('any', 'link', 'self')),
ip = ValidIP(),
sponsor_text =VLength('sponsorship-text', max_length = 500),
sponsor_name =VLength('sponsorship-name', max_length = 500),
@@ -1159,7 +1176,7 @@ def POST_site_admin(self, form, jquery, name, ip, sr,
redir = False
kw = dict((k, v) for k, v in kw.iteritems()
if k in ('name', 'title', 'domain', 'description', 'over_18',
'show_media', 'type', 'lang', "css_on_cname",
'show_media', 'type', 'link_type', 'lang', "css_on_cname",
'allow_top'))

#if a user is banned, return rate-limit errors
3 changes: 3 additions & 0 deletions r2/r2/controllers/errors.py
@@ -76,6 +76,9 @@
('BAD_CARD', _('card problem: %(message)s')),
('TOO_LONG', _("this is too long (max: %(max_length)s)")),
('NO_TEXT', _('we need something here')),

('NO_SELFS', _("that reddit doesn't allow text posts")),
('NO_LINKS', _("that reddit only allows text posts")),
))
errors = Storage([(e, e) for e in error_list.keys()])

46 changes: 12 additions & 34 deletions r2/r2/controllers/front.py
@@ -83,7 +83,7 @@ def GET_oldinfo(self, article, type, dest, rest=None, comment=''):

def GET_random(self):
"""The Serendipity button"""
sort = 'new' if rand.choice((True,False)) else 'hot'
sort = rand.choice(('new','hot'))
links = c.site.get_links(sort, 'all')
if isinstance(links, thing.Query):
links._limit = g.num_serendipity
@@ -318,28 +318,6 @@ def _edit_modcontrib_reddit(self, location, num, after, reverse, count, created)
extension_handling = "private"
else:
return self.abort404()
if isinstance(c.site, ModSR):
level = 'mod'
elif isinstance(c.site, ContribSR):
level = 'contrib'
elif isinstance(c.site, AllSR):
level = 'all'
else:
raise ValueError

if ((level == 'mod' and
location in ('reports', 'spam', 'trials', 'modqueue'))
or
(level == 'all' and
location == 'trials')):
pane = self._make_spamlisting(location, num, after, reverse, count)
if c.user.pref_private_feeds:
extension_handling = "private"
else:
return self.abort404()

return EditReddit(content = pane,
extension_handling = extension_handling).render()

return EditReddit(content = pane,
extension_handling = extension_handling).render()
@@ -617,17 +595,6 @@ def GET_submit(self, url, title, then):
captcha=captcha,
then = then)).render()

def _render_opt_in_out(self, msg_hash, leave):
"""Generates the form for an optin/optout page"""
email = Email.handler.get_recipient(msg_hash)
if not email:
return self.abort404()
sent = (has_opted_out(email) == leave)
return BoringPage(_("opt out") if leave else _("welcome back"),
content = OptOut(email = email, leave = leave,
sent = sent,
msg_hash = msg_hash)).render()

def GET_frame(self):
"""used for cname support. makes a frame and
puts the proper url as the frame source"""
@@ -914,6 +881,17 @@ def GET_validuser(self):
c.response.content = ''
return c.response

def _render_opt_in_out(self, msg_hash, leave):
"""Generates the form for an optin/optout page"""
email = Email.handler.get_recipient(msg_hash)
if not email:
return self.abort404()
sent = (has_opted_out(email) == leave)
return BoringPage(_("opt out") if leave else _("welcome back"),
content = OptOut(email = email, leave = leave,
sent = sent,
msg_hash = msg_hash)).render()

@validate(msg_hash = nop('x'))
def GET_optout(self, msg_hash):
"""handles /mail/optout to add an email to the optout mailing
59 changes: 41 additions & 18 deletions r2/r2/controllers/listingcontroller.py
@@ -259,21 +259,25 @@ def spotlight(self):
left_side = max(-1, min(num_tl - 3, 8))
disp_links = [spotlight_links[(i + pos) % num_tl]
for i in xrange(-2, left_side)]
def keep_fn(item):

def trial_keep_fn(item):
if trial and trial._fullname == item._fullname:
return True
return organic.keep_fresh_links(item)

def wrap(item):
def trial_wrap(item):
if item is trial:
w = Wrapped(item)
w.trial_mode = True
w.render_class = LinkOnTrial
return w
return self.builder_wrapper(item)
b = IDBuilder(disp_links, wrap = wrap,

b = IDBuilder(disp_links,
wrap = trial_wrap if trial else self.builder_wrapper,
num = organic.organic_length,
skip = True, keep_fn = keep_fn)
skip = True,
keep_fn = trial_keep_fn if trial else organic.keep_fresh_links)

try:
vislink = spotlight_links[pos]
@@ -282,11 +286,10 @@ def wrap(item):
g.log.error("pos = %d" % pos)
raise

s = SpotlightListing(b,
spotlight_links = spotlight_links,
visible_link = vislink,
max_num = self.listing_obj.max_num,
max_score = self.listing_obj.max_score).listing()
s = SpotlightListing(b, spotlight_links = spotlight_links,
visible_link = vislink,
max_num = self.listing_obj.max_num,
max_score = self.listing_obj.max_score).listing()

if len(s.things) > 0:
# only pass through a listing if the links made it
@@ -304,15 +307,16 @@ def wrap(item):
if res.things:
return res



def query(self):
#no need to worry when working from the cache
if g.use_query_cache or c.site == Default:
self.fix_listing = False

if c.site == Default:
sr_ids = Subreddit.user_subreddits(c.user)
sr_ids = Subreddit.user_subreddits(c.user,
limit=(Subreddit.sr_limit
if c.user_is_loggedin
else g.num_default_reddits))
return normalized_hot(sr_ids)
#if not using the query_cache we still want cached front pages
elif (not g.use_query_cache
@@ -394,6 +398,17 @@ def GET_listing(self, sort, **env):
class BrowseController(ListingController):
where = 'browse'

def keep_fn(self):
"""For merged time-listings, don't show items that are too old
(this can happen when mr_top hasn't run in a while)"""
if self.time != 'all' and c.default_sr:
oldest = timeago('1 %s' % (str(self.time),))
def keep(item):
return item._date > oldest and item.keep_item(item)
return keep
else:
return ListingController.keep_fn(self)

@property
def menus(self):
return [ControversyTimeMenu(default = self.time)]
@@ -726,20 +741,28 @@ def title(self):
def query(self):
if self.where == 'banned' and c.user_is_admin:
reddits = Subreddit._query(Subreddit.c._spam == True,
sort = desc('_date'))
sort = desc('_date'),
write_cache = True,
read_cache = True,
cache_time = 5 * 60)
else:
reddits = Subreddit._query()
reddits = None
if self.where == 'new':
reddits = Subreddit._query( write_cache = True,
read_cache = True,
cache_time = 5 * 60)
reddits._sort = desc('_date')
else:
reddits = Subreddit._query( write_cache = True,
read_cache = True,
cache_time = 60 * 60)
reddits._sort = desc('_downs')
if c.content_langs != 'all':
reddits._filter(Subreddit.c.lang == c.content_langs)
# Consider resurrecting when it is not the World Cup
#if c.content_langs != 'all':
# reddits._filter(Subreddit.c.lang == c.content_langs)
if not c.over18:
reddits._filter(Subreddit.c.over_18 == False)

reddits._filter(Subreddit.c.author_id != -1)

return reddits
def GET_listing(self, where, **env):
self.where = where
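
(Sketch, not part of this commit: the keep_fn hook added to BrowseController
above is consumed by the builders, which wrap each thing and drop the ones the
keep function rejects -- build_listing below is a made-up name standing in for
that builder loop, not the real Builder class.)

def build_listing(things, wrap, keep_fn, num):
    # simplified stand-in for the builder behaviour: filter, wrap, stop at num
    kept = []
    for thing in things:
        if not keep_fn(thing):
            continue
        kept.append(wrap(thing))
        if len(kept) >= num:
            break
    return kept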
2 changes: 1 addition & 1 deletion r2/r2/controllers/promotecontroller.py
@@ -32,7 +32,7 @@

from r2.controllers.reddit_base import RedditController

from r2.lib.utils import timetext, make_offset_date
from r2.lib.utils import make_offset_date
from r2.lib.media import force_thumbnail, thumbnail_url
from r2.lib import cssfilter
from datetime import datetime
41 changes: 27 additions & 14 deletions r2/r2/controllers/reddit_base.py
@@ -254,13 +254,20 @@ def set_subreddit():
srs = set()
sr_names = sr_name.split('+')
real_path = sr_name
for sr_name in sr_names:
sr = Subreddit._by_name(sr_name)
if isinstance(sr, FakeSubreddit):
srs = Subreddit._by_name(sr_names).values()
if len(srs) != len(sr_names):
abort(404)
elif any(isinstance(sr, FakeSubreddit)
for sr in srs):
if All in srs:
c.site = All
elif Friend in srs:
c.site = Friend
else:
abort(400)
srs.add(sr)
sr_ids = [sr._id for sr in srs]
c.site = MultiReddit(sr_ids, real_path)
else:
sr_ids = [sr._id for sr in srs]
c.site = MultiReddit(sr_ids, real_path)
else:
c.site = Subreddit._by_name(sr_name)
except NotFound:
@@ -483,6 +490,7 @@ def cached_response(self):
return c.response

def pre(self):

c.start_time = datetime.now(g.tz)
g.reset_caches()

@@ -499,6 +507,8 @@ def pre(self):
set_subreddit()
c.errors = ErrorSet()
c.cookies = Cookies()
# if an rss feed, this will also log the user in if a feed=
# GET param is included
set_content_type()

def try_pagecache(self):
Expand Down Expand Up @@ -656,15 +666,18 @@ def pre(self):

# the user could have been logged in via one of the feeds
maybe_admin = False

# no logins for RSS feed unless valid_feed has already been called
if not c.user_is_loggedin:
(c.user, maybe_admin) = \
valid_cookie(c.cookies[g.login_cookie].value
if g.login_cookie in c.cookies
else '')

if c.user:
c.user_is_loggedin = True
else:
if c.extension != "rss":
(c.user, maybe_admin) = \
valid_cookie(c.cookies[g.login_cookie].value
if g.login_cookie in c.cookies
else '')
if c.user:
c.user_is_loggedin = True

if not c.user_is_loggedin:
c.user = UnloggedUser(get_browser_langs())
# patch for fixing mangled language preferences
if (not isinstance(c.user.pref_lang, basestring) or
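
(Sketch, not part of this commit: set_subreddit() above uses the batched
Subreddit._by_name from the "list of names" change; this assumes the batched
form returns a dict keyed by the names it actually found, which is how the
code above consumes it.)

from r2.models import Subreddit

names = ['pics', 'funny', 'doesnotexist']
found = Subreddit._by_name(names)   # e.g. {'pics': <Subreddit>, 'funny': <Subreddit>}
if len(found) != len(names):
    missing = [name for name in names if name not in found]
    print "unknown subreddits: %s" % ', '.join(missing)   # set_subreddit() 404s instead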