Permalink
Browse files

Bugfixes:

     * no-repeat on some background images in the sprite (#797)
     * fix deleted comments on the mobile site -- preserve author anonymity (#624)
     * faulty permalinks on pages with all unicode title (#776)
     * no more spreadshirt
     * reorganize comment_tree and _builder.pyx to clean up cache handling
     * fix styling for Bug #78
     * Improve handling of /r/all+all (bug #699), etc (not always a 400 now)
     * Fix the time listings: sometimes we get passed unicode for self.time
     * Don't allow private reddits to show in /r/random
     * Try to allow remote sites to show their own favicons in the reddit
        toolbar

  Speed improvements:
  * CommentBuilder refactor round 1:
     * create new sort and parent permacache entries so that we don't need to load
     * update sorts and parents when a new comment comes in
     * update non-date sorts when a new comment vote comes in
     * add more trace info to timeouts in CommentBuilder
  * Some misc. performance hacks (incl. adding _utils.pyx)
  * Increase SR description box limit from 1k to 5k
  * Fix a bug where we weren't properly allowing hidden items to be hidden
    on time-filtered listings
  * Make Subreddit._by_name take a list of names like byID
  * Upload thumbs to S3 with reduced_redundancy==True
  * make rss feeds without a 'feed' get parameter act as if the user is not logged in
  * Require a boto that knows about reduced_redundancy
  * remove fast_queries from Message.add_props and put the onus on fetching the unread messages from the permacache
  * Store the list of popular subreddits in the permacache
    * make SubredditTopBar cacheable per-user.
  * add (as safe as we can make it) annotation for sql selects to track down requests hitting the db when they shouldn't be.

 User submitted features:
  * Merge and clean up Phire's selfs-only/no-selfs patch.

  * Don't show expired items on time-filtered controversy listings

  * Also add the 'hide' button to the toolbar just for breakneckridge.
    Next time someone calls me prickly, breakneckridge had better step
    up to the plate for me or I'm rolling it back.
  • Loading branch information...
1 parent bff6185 commit ea1aa0110bd8a000cbc41260f6fa5b0d03ea1437 @KeyserSosa KeyserSosa committed Jun 26, 2010
Showing with 2,646 additions and 830 deletions.
  1. +1 −0 .gitignore
  2. +0 −2 r2/r2/config/routing.py
  3. +30 −13 r2/r2/controllers/api.py
  4. +3 −0 r2/r2/controllers/errors.py
  5. +12 −34 r2/r2/controllers/front.py
  6. +41 −18 r2/r2/controllers/listingcontroller.py
  7. +1 −1 r2/r2/controllers/promotecontroller.py
  8. +27 −14 r2/r2/controllers/reddit_base.py
  9. +27 −6 r2/r2/controllers/validator/validator.py
  10. +5 −19 r2/r2/lib/cache.py
  11. +138 −6 r2/r2/lib/comment_tree.py
  12. +1 −0 r2/r2/lib/count.py
  13. +6 −2 r2/r2/lib/db/queries.py
  14. +1 −1 r2/r2/lib/db/tdb_cassandra.py
  15. +33 −5 r2/r2/lib/db/tdb_sql.py
  16. +46 −49 r2/r2/lib/indextankupdate.py
  17. +2 −2 r2/r2/lib/lock.py
  18. +3 −2 r2/r2/lib/media.py
  19. +43 −27 r2/r2/lib/pages/pages.py
  20. +2 −1 r2/r2/lib/s3cp.py
  21. +0 −35 r2/r2/lib/set_reddit_pops.py
  22. +16 −14 r2/r2/lib/sgm.pyx
  23. +123 −0 r2/r2/lib/sr_pops.py
  24. +243 −0 r2/r2/lib/utils/_utils.pyx
  25. +18 −168 r2/r2/lib/utils/utils.py
  26. +104 −109 r2/r2/models/_builder.pyx
  27. +2 −7 r2/r2/models/account.py
  28. +5 −6 r2/r2/models/builder.py
  29. +26 −41 r2/r2/models/link.py
  30. +86 −104 r2/r2/models/subreddit.py
  31. +2 −0 r2/r2/models/vote.py
  32. +1,457 −101 r2/r2/public/static/css/reddit.css
  33. BIN r2/r2/public/static/nsfw.png
  34. +2 −8 r2/r2/templates/comscore.html
  35. +31 −12 r2/r2/templates/createsubreddit.html
  36. +8 −1 r2/r2/templates/frame.html
  37. +22 −9 r2/r2/templates/frametoolbar.html
  38. +4 −1 r2/r2/templates/morechildren.compact
  39. +4 −2 r2/r2/templates/morechildren.html
  40. +26 −2 r2/r2/templates/newlink.compact
  41. +26 −3 r2/r2/templates/newlink.html
  42. +4 −1 r2/r2/templates/printable.mobile
  43. +11 −4 r2/setup.py
  44. +4 −0 scripts/update_reddits.sh
View
@@ -37,6 +37,7 @@ r2/_sorts.egg-info/
r2/r2/lib/_normalized_hot.c
r2/r2/lib/db/_sorts.c
r2/r2/lib/sgm.c
+r2/r2/lib/utils/_utils.c
r2/r2/lib/wrapped.c
r2/r2/models/_builder.c
r2/sgm.egg-info/
View
@@ -121,8 +121,6 @@ def make_map(global_conf={}, app_conf={}):
action = 'details', title=None)
mc('/traffic/:article/:title', controller='front',
action = 'traffic', title=None)
- mc('/shirt/:article/:title', controller='front',
- action = 'shirt', title=None)
mc('/comments/:article/:title/:comment', controller='front',
action = 'comments', title=None, comment = None)
mc('/duplicates/:article/:title', controller = 'front',
View
@@ -189,12 +189,12 @@ def POST_compose(self, form, jquery, to, subject, body, ip):
VRatelimit(rate_user = True, rate_ip = True,
prefix = "rate_submit_"),
ip = ValidIP(),
- sr = VSubmitSR('sr'),
+ sr = VSubmitSR('sr', 'kind'),
url = VUrl(['url', 'sr']),
title = VTitle('title'),
save = VBoolean('save'),
selftext = VMarkdown('text'),
- kind = VOneOf('kind', ['link', 'self', 'poll']),
+ kind = VOneOf('kind', ['link', 'self']),
then = VOneOf('then', ('tb', 'comments'),
default='comments'),
extension = VLength("extension", 20))
@@ -210,23 +210,39 @@ def POST_submit(self, form, jquery, url, selftext, kind, title,
# VUrl may have replaced 'url' by adding 'http://'
form.set_inputs(url = url)
- if not kind:
+ if not kind or form.has_errors('sr', errors.INVALID_OPTION):
# this should only happen if somebody is trying to post
# links in some automated manner outside of the regular
# submission page, and hasn't updated their script
return
- if form.has_errors('sr', errors.SUBREDDIT_NOEXIST,
- errors.SUBREDDIT_NOTALLOWED,
- errors.SUBREDDIT_REQUIRED):
+ if (form.has_errors('sr',
+ errors.SUBREDDIT_NOEXIST,
+ errors.SUBREDDIT_NOTALLOWED,
+ errors.SUBREDDIT_REQUIRED,
+ errors.NO_SELFS,
+ errors.NO_LINKS)
+ or not sr):
# checking to get the error set in the form, but we can't
# check for rate-limiting if there's no subreddit
return
- else:
- should_ratelimit = sr.should_ratelimit(c.user, 'link')
- #remove the ratelimit error if the user's karma is high
- if not should_ratelimit:
- c.errors.remove((errors.RATELIMIT, 'ratelimit'))
+
+ if sr.link_type == 'link' and kind == 'self':
+ # this could happen if they actually typed "self" into the
+ # URL box and we helpfully translated it for them
+ c.errors.add(errors.NO_SELFS, field='sr')
+
+ # and trigger that by hand for the form
+ form.has_errors('sr', errors.NO_SELFS)
+
+ return
+
+ should_ratelimit = sr.should_ratelimit(c.user, 'link')
+ #remove the ratelimit error if the user's karma is high
+ if not should_ratelimit:
+ c.errors.remove((errors.RATELIMIT, 'ratelimit'))
+
+ banmsg = None
banmsg = None
@@ -1139,12 +1155,13 @@ def POST_upload_sr_img(self, file, header, sponsor, name, form_id):
name = VSubredditName("name"),
title = VLength("title", max_length = 100),
domain = VCnameDomain("domain"),
- description = VMarkdown("description", max_length = 1000),
+ description = VMarkdown("description", max_length = 5120),
lang = VLang("lang"),
over_18 = VBoolean('over_18'),
allow_top = VBoolean('allow_top'),
show_media = VBoolean('show_media'),
type = VOneOf('type', ('public', 'private', 'restricted')),
+ link_type = VOneOf('link_type', ('any', 'link', 'self')),
ip = ValidIP(),
sponsor_text =VLength('sponsorship-text', max_length = 500),
sponsor_name =VLength('sponsorship-name', max_length = 500),
@@ -1159,7 +1176,7 @@ def POST_site_admin(self, form, jquery, name, ip, sr,
redir = False
kw = dict((k, v) for k, v in kw.iteritems()
if k in ('name', 'title', 'domain', 'description', 'over_18',
- 'show_media', 'type', 'lang', "css_on_cname",
+ 'show_media', 'type', 'link_type', 'lang', "css_on_cname",
'allow_top'))
#if a user is banned, return rate-limit errors
@@ -76,6 +76,9 @@
('BAD_CARD', _('card problem: %(message)s')),
('TOO_LONG', _("this is too long (max: %(max_length)s)")),
('NO_TEXT', _('we need something here')),
+
+ ('NO_SELFS', _("that reddit doesn't allow text posts")),
+ ('NO_LINKS', _("that reddit only allows text posts")),
))
errors = Storage([(e, e) for e in error_list.keys()])
View
@@ -83,7 +83,7 @@ def GET_oldinfo(self, article, type, dest, rest=None, comment=''):
def GET_random(self):
"""The Serendipity button"""
- sort = 'new' if rand.choice((True,False)) else 'hot'
+ sort = rand.choice(('new','hot'))
links = c.site.get_links(sort, 'all')
if isinstance(links, thing.Query):
links._limit = g.num_serendipity
@@ -318,28 +318,6 @@ def _edit_modcontrib_reddit(self, location, num, after, reverse, count, created)
extension_handling = "private"
else:
return self.abort404()
- if isinstance(c.site, ModSR):
- level = 'mod'
- elif isinstance(c.site, ContribSR):
- level = 'contrib'
- elif isinstance(c.site, AllSR):
- level = 'all'
- else:
- raise ValueError
-
- if ((level == 'mod' and
- location in ('reports', 'spam', 'trials', 'modqueue'))
- or
- (level == 'all' and
- location == 'trials')):
- pane = self._make_spamlisting(location, num, after, reverse, count)
- if c.user.pref_private_feeds:
- extension_handling = "private"
- else:
- return self.abort404()
-
- return EditReddit(content = pane,
- extension_handling = extension_handling).render()
return EditReddit(content = pane,
extension_handling = extension_handling).render()
@@ -617,17 +595,6 @@ def GET_submit(self, url, title, then):
captcha=captcha,
then = then)).render()
- def _render_opt_in_out(self, msg_hash, leave):
- """Generates the form for an optin/optout page"""
- email = Email.handler.get_recipient(msg_hash)
- if not email:
- return self.abort404()
- sent = (has_opted_out(email) == leave)
- return BoringPage(_("opt out") if leave else _("welcome back"),
- content = OptOut(email = email, leave = leave,
- sent = sent,
- msg_hash = msg_hash)).render()
-
def GET_frame(self):
"""used for cname support. makes a frame and
puts the proper url as the frame source"""
@@ -914,6 +881,17 @@ def GET_validuser(self):
c.response.content = ''
return c.response
+ def _render_opt_in_out(self, msg_hash, leave):
+ """Generates the form for an optin/optout page"""
+ email = Email.handler.get_recipient(msg_hash)
+ if not email:
+ return self.abort404()
+ sent = (has_opted_out(email) == leave)
+ return BoringPage(_("opt out") if leave else _("welcome back"),
+ content = OptOut(email = email, leave = leave,
+ sent = sent,
+ msg_hash = msg_hash)).render()
+
@validate(msg_hash = nop('x'))
def GET_optout(self, msg_hash):
"""handles /mail/optout to add an email to the optout mailing
@@ -259,21 +259,25 @@ def spotlight(self):
left_side = max(-1, min(num_tl - 3, 8))
disp_links = [spotlight_links[(i + pos) % num_tl]
for i in xrange(-2, left_side)]
- def keep_fn(item):
+
+ def trial_keep_fn(item):
if trial and trial._fullname == item._fullname:
return True
return organic.keep_fresh_links(item)
- def wrap(item):
+ def trial_wrap(item):
if item is trial:
w = Wrapped(item)
w.trial_mode = True
w.render_class = LinkOnTrial
return w
return self.builder_wrapper(item)
- b = IDBuilder(disp_links, wrap = wrap,
+
+ b = IDBuilder(disp_links,
+ wrap = trial_wrap if trial else self.builder_wrapper,
num = organic.organic_length,
- skip = True, keep_fn = keep_fn)
+ skip = True,
+ keep_fn = trial_keep_fn if trial else organic.keep_fresh_links)
try:
vislink = spotlight_links[pos]
@@ -282,11 +286,10 @@ def wrap(item):
g.log.error("pos = %d" % pos)
raise
- s = SpotlightListing(b,
- spotlight_links = spotlight_links,
- visible_link = vislink,
- max_num = self.listing_obj.max_num,
- max_score = self.listing_obj.max_score).listing()
+ s = SpotlightListing(b, spotlight_links = spotlight_links,
+ visible_link = vislink,
+ max_num = self.listing_obj.max_num,
+ max_score = self.listing_obj.max_score).listing()
if len(s.things) > 0:
# only pass through a listing if the links made it
@@ -304,15 +307,16 @@ def wrap(item):
if res.things:
return res
-
-
def query(self):
#no need to worry when working from the cache
if g.use_query_cache or c.site == Default:
self.fix_listing = False
if c.site == Default:
- sr_ids = Subreddit.user_subreddits(c.user)
+ sr_ids = Subreddit.user_subreddits(c.user,
+ limit=(Subreddit.sr_limit
+ if c.user_is_loggedin
+ else g.num_default_reddits))
return normalized_hot(sr_ids)
#if not using the query_cache we still want cached front pages
elif (not g.use_query_cache
@@ -394,6 +398,17 @@ def GET_listing(self, sort, **env):
class BrowseController(ListingController):
where = 'browse'
+ def keep_fn(self):
+ """For merged time-listings, don't show items that are too old
+ (this can happen when mr_top hasn't run in a while)"""
+ if self.time != 'all' and c.default_sr:
+ oldest = timeago('1 %s' % (str(self.time),))
+ def keep(item):
+ return item._date > oldest and item.keep_item(item)
+ return keep
+ else:
+ return ListingController.keep_fn(self)
+
@property
def menus(self):
return [ControversyTimeMenu(default = self.time)]
@@ -726,20 +741,28 @@ def title(self):
def query(self):
if self.where == 'banned' and c.user_is_admin:
reddits = Subreddit._query(Subreddit.c._spam == True,
- sort = desc('_date'))
+ sort = desc('_date'),
+ write_cache = True,
+ read_cache = True,
+ cache_time = 5 * 60)
else:
- reddits = Subreddit._query()
+ reddits = None
if self.where == 'new':
+ reddits = Subreddit._query( write_cache = True,
+ read_cache = True,
+ cache_time = 5 * 60)
reddits._sort = desc('_date')
else:
+ reddits = Subreddit._query( write_cache = True,
+ read_cache = True,
+ cache_time = 60 * 60)
reddits._sort = desc('_downs')
- if c.content_langs != 'all':
- reddits._filter(Subreddit.c.lang == c.content_langs)
+ # Consider resurrecting when it is not the World Cup
+ #if c.content_langs != 'all':
+ # reddits._filter(Subreddit.c.lang == c.content_langs)
if not c.over18:
reddits._filter(Subreddit.c.over_18 == False)
- reddits._filter(Subreddit.c.author_id != -1)
-
return reddits
def GET_listing(self, where, **env):
self.where = where
@@ -32,7 +32,7 @@
from r2.controllers.reddit_base import RedditController
-from r2.lib.utils import timetext, make_offset_date
+from r2.lib.utils import make_offset_date
from r2.lib.media import force_thumbnail, thumbnail_url
from r2.lib import cssfilter
from datetime import datetime
@@ -254,13 +254,20 @@ def set_subreddit():
srs = set()
sr_names = sr_name.split('+')
real_path = sr_name
- for sr_name in sr_names:
- sr = Subreddit._by_name(sr_name)
- if isinstance(sr, FakeSubreddit):
+ srs = Subreddit._by_name(sr_names).values()
+ if len(srs) != len(sr_names):
+ abort(404)
+ elif any(isinstance(sr, FakeSubreddit)
+ for sr in srs):
+ if All in srs:
+ c.site = All
+ elif Friend in srs:
+ c.site = Friend
+ else:
abort(400)
- srs.add(sr)
- sr_ids = [sr._id for sr in srs]
- c.site = MultiReddit(sr_ids, real_path)
+ else:
+ sr_ids = [sr._id for sr in srs]
+ c.site = MultiReddit(sr_ids, real_path)
else:
c.site = Subreddit._by_name(sr_name)
except NotFound:
@@ -483,6 +490,7 @@ def cached_response(self):
return c.response
def pre(self):
+
c.start_time = datetime.now(g.tz)
g.reset_caches()
@@ -499,6 +507,8 @@ def pre(self):
set_subreddit()
c.errors = ErrorSet()
c.cookies = Cookies()
+ # if an rss feed, this will also log the user in if a feed=
+ # GET param is included
set_content_type()
def try_pagecache(self):
@@ -656,15 +666,18 @@ def pre(self):
# the user could have been logged in via one of the feeds
maybe_admin = False
+
+ # no logins for RSS feed unless valid_feed has already been called
if not c.user_is_loggedin:
- (c.user, maybe_admin) = \
- valid_cookie(c.cookies[g.login_cookie].value
- if g.login_cookie in c.cookies
- else '')
-
- if c.user:
- c.user_is_loggedin = True
- else:
+ if c.extension != "rss":
+ (c.user, maybe_admin) = \
+ valid_cookie(c.cookies[g.login_cookie].value
+ if g.login_cookie in c.cookies
+ else '')
+ if c.user:
+ c.user_is_loggedin = True
+
+ if not c.user_is_loggedin:
c.user = UnloggedUser(get_browser_langs())
# patch for fixing mangled language preferences
if (not isinstance(c.user.pref_lang, basestring) or
Oops, something went wrong.

0 comments on commit ea1aa01

Please sign in to comment.