New features:

    * self-serve sponsored links (initial version with no targeting)
      * credit card processing with authorize.net (and the corresponding interaction code)
    * hardcache -- persistent cache keys with expiration
    * awards code
    * messaging overhaul, round 1: added mark-as-unread and message sorting

Additions:
    * add TedScraper and improve the youtube scraper (a la tritelife); move the traffic link on sponsored links and keep it visible after the promotion is over
    * compress all of our pngs, and set things up so thumbs, sr images, and the sprite get compressed every time they are generated
    * rate-limit logins (naive way)
    * add a limit to the length of the moderator sidebox
    * add over-18 thumbnail handling and an NSFW label on NSFW content
    * make the NSFW label optional, and make the operation of the pref checkboxes sensible
    * add the option (for moderators) to remove a subreddit from the default set
    * combine the queue-handling code so it is easily plumbed, and use it to precompute /comments

Bugfixes:
    * trap the duplicate-vote error
    * fix the new-mail mouseover on the toolbar
    * spammers can no longer send empty-bodied emails (they get rejected, as they should)
commit 5ef76b96c7845366b8234e530282d9428336fb4a 1 parent bf9f43c
@KeyserSosa authored
Showing with 1,719 additions and 596 deletions.
  1. +1 −1  r2/Makefile
  2. +6 −2 r2/example.ini
  3. +102 −98 r2/r2/controllers/api.py
  4. +1 −0  r2/r2/controllers/buttons.py
  5. +1 −4 r2/r2/controllers/feedback.py
  6. +6 −2 r2/r2/controllers/front.py
  7. +1 −1  r2/r2/controllers/health.py
  8. +28 −11 r2/r2/controllers/listingcontroller.py
  9. +9 −0 r2/r2/controllers/post.py
  10. +27 −22 r2/r2/controllers/promotecontroller.py
  11. +0 −3  r2/r2/controllers/reddit_base.py
  12. +4 −13 r2/r2/controllers/validator/validator.py
  13. +65 −65 r2/r2/lib/amqp.py
  14. +44 −10 r2/r2/lib/app_globals.py
  15. +32 −0 r2/r2/lib/cache.py
  16. +2 −2 r2/r2/lib/comment_tree.py
  17. +15 −3 r2/r2/lib/contrib/nymph.py
  18. +7 −6 r2/r2/lib/cssfilter.py
  19. +217 −25 r2/r2/lib/db/queries.py
  20. +9 −7 r2/r2/lib/db/query_queue.py
  21. +84 −0 r2/r2/lib/db/tdb_lite.py
  22. +18 −6 r2/r2/lib/db/tdb_sql.py
  23. +4 −0 r2/r2/lib/db/thing.py
  24. +4 −3 r2/r2/lib/emailer.py
  25. +17 −0 r2/r2/lib/filters.py
  26. +132 −0 r2/r2/lib/hardcachebackend.py
  27. +5 −7 r2/r2/lib/jsontemplates.py
  28. +8 −3 r2/r2/lib/manager/db_manager.py
  29. +5 −5 r2/r2/lib/media.py
  30. +4 −3 r2/r2/lib/menus.py
  31. +1 −1  r2/r2/lib/organic.py
  32. +78 −29 r2/r2/lib/pages/pages.py
  33. +2 −0  r2/r2/lib/pages/things.py
  34. +4 −3 r2/r2/lib/promote.py
  35. +97 −0 r2/r2/lib/queues.py
  36. +40 −1 r2/r2/lib/scraper.py
  37. +1 −2  r2/r2/lib/solrsearch.py
  38. +49 −64 r2/r2/lib/utils/utils.py
  39. +18 −9 r2/r2/lib/workqueue.py
  40. +0 −2  r2/r2/models/__init__.py
  41. +6 −1 r2/r2/models/account.py
  42. +4 −7 r2/r2/models/admintools.py
  43. +58 −3 r2/r2/models/award.py
  44. +9 −21 r2/r2/models/builder.py
  45. +61 −20 r2/r2/models/link.py
  46. +12 −4 r2/r2/models/subreddit.py
  47. +0 −34 r2/r2/models/thing_changes.py
  48. +6 −3 r2/r2/models/vote.py
  49. BIN  r2/r2/public/static/adowngray.png
  50. BIN  r2/r2/public/static/adownmod.png
  51. BIN  r2/r2/public/static/alien-clippy.png
  52. BIN  r2/r2/public/static/apple-touch-icon.png
  53. BIN  r2/r2/public/static/aupgray.png
  54. BIN  r2/r2/public/static/aupmod.png
  55. BIN  r2/r2/public/static/award.png
  56. BIN  r2/r2/public/static/base.reddit.com.header.png
  57. BIN  r2/r2/public/static/beta.reddit1.png
  58. BIN  r2/r2/public/static/beta.reddit2.png
  59. BIN  r2/r2/public/static/beta.reddit3.png
  60. BIN  r2/r2/public/static/beta.reddit4.png
  61. BIN  r2/r2/public/static/blog-collapsed-hover.png
  62. BIN  r2/r2/public/static/blog-collapsed.png
  63. BIN  r2/r2/public/static/blog-expanded-hover.png
  64. BIN  r2/r2/public/static/blog-expanded.png
  65. BIN  r2/r2/public/static/blog_head.png
  66. BIN  r2/r2/public/static/blog_snoo.png
  67. BIN  r2/r2/public/static/blued.png
  68. BIN  r2/r2/public/static/bluer.png
  69. BIN  r2/r2/public/static/breakout.png
  70. BIN  r2/r2/public/static/button-normal.png
  71. BIN  r2/r2/public/static/button-pressed.png
  72. BIN  r2/r2/public/static/cclogo.png
  73. BIN  r2/r2/public/static/clippy-bullet.png
  74. BIN  r2/r2/public/static/continue-thread.png
  75. BIN  r2/r2/public/static/create-a-reddit.png
  76. +99 −8 r2/r2/public/static/css/reddit.css
  77. BIN  r2/r2/public/static/css_disliked.png
  78. BIN  r2/r2/public/static/css_liked.png
  79. BIN  r2/r2/public/static/css_login.png
  80. BIN  r2/r2/public/static/css_saved.png
  81. BIN  r2/r2/public/static/css_submit.png
  82. BIN  r2/r2/public/static/css_update.png
  83. BIN  r2/r2/public/static/dislike_firefox.png
  84. BIN  r2/r2/public/static/dislike_ie.png
  85. BIN  r2/r2/public/static/dislike_safari.png
  86. BIN  r2/r2/public/static/dorks-toolbar.png
  87. BIN  r2/r2/public/static/find.png
  88. BIN  r2/r2/public/static/firefox.png
  89. BIN  r2/r2/public/static/gagged-alien.png
  90. BIN  r2/r2/public/static/gradient-button-hover.png
  91. BIN  r2/r2/public/static/gradient-button.png
  92. BIN  r2/r2/public/static/green-check.png
  93. BIN  r2/r2/public/static/help.png
  94. BIN  r2/r2/public/static/ie.png
  95. +16 −6 r2/r2/public/static/js/jquery.reddit.js
  96. +49 −6 r2/r2/public/static/js/reddit.js
  97. +1 −1  r2/r2/public/static/js/sponsored.js
  98. BIN  r2/r2/public/static/kill.png
  99. BIN  r2/r2/public/static/like_firefox.png
  100. BIN  r2/r2/public/static/like_ie.png
  101. BIN  r2/r2/public/static/like_safari.png
  102. BIN  r2/r2/public/static/link-active.png
  103. BIN  r2/r2/public/static/link.png
  104. BIN  r2/r2/public/static/littlehead.png
  105. BIN  r2/r2/public/static/logo-toolbar.png
  106. BIN  r2/r2/public/static/mail.png
  107. BIN  r2/r2/public/static/mailgray.png
  108. BIN  r2/r2/public/static/next_organic.png
  109. BIN  r2/r2/public/static/noimage.png
  110. BIN  r2/r2/public/static/over18.png
  111. BIN  r2/r2/public/static/over18_icon.png
  112. BIN  r2/r2/public/static/pencil.png
  113. BIN  r2/r2/public/static/permalink-arrow.png
  114. BIN  r2/r2/public/static/pixel.png
  115. BIN  r2/r2/public/static/poll-collapsed-hover.png
  116. BIN  r2/r2/public/static/poll-collapsed.png
  117. BIN  r2/r2/public/static/poll-expanded-hover.png
  118. BIN  r2/r2/public/static/poll-expanded.png
  119. BIN  r2/r2/public/static/prev_organic.png
  120. BIN  r2/r2/public/static/reddit-is-down-brb.png
  121. BIN  r2/r2/public/static/reddit.com.header.png
  122. BIN  r2/r2/public/static/reddit404a.png
  123. BIN  r2/r2/public/static/reddit404b.png
  124. BIN  r2/r2/public/static/reddit404c.png
  125. BIN  r2/r2/public/static/reddit404d.png
  126. BIN  r2/r2/public/static/reddit404e.png
  127. BIN  r2/r2/public/static/reddit500.png
  128. BIN  r2/r2/public/static/reddit_alien.png
  129. BIN  r2/r2/public/static/reddit_firefox.png
  130. BIN  r2/r2/public/static/reddit_ie.png
  131. BIN  r2/r2/public/static/reddit_safari.png
  132. BIN  r2/r2/public/static/redditheaderScience.png
  133. BIN  r2/r2/public/static/rightarrow.png
  134. BIN  r2/r2/public/static/safari.png
  135. BIN  r2/r2/public/static/save_firefox.png
  136. BIN  r2/r2/public/static/save_ie.png
  137. BIN  r2/r2/public/static/save_safari.png
  138. BIN  r2/r2/public/static/serendipity!_firefox.png
  139. BIN  r2/r2/public/static/serendipity!_ie.png
  140. BIN  r2/r2/public/static/serendipity!_safari.png
  141. BIN  r2/r2/public/static/spreddit_firefox.png
  142. BIN  r2/r2/public/static/spreddit_ie.png
  143. BIN  r2/r2/public/static/spreddit_safari.png
  144. BIN  r2/r2/public/static/sr-add-button.png
  145. BIN  r2/r2/public/static/sr-remove-button.png
  146. BIN  r2/r2/public/static/submit-alien.png
  147. BIN  r2/r2/public/static/submit-hope.png
  148. BIN  r2/r2/public/static/transpLOGO.png
  149. BIN  r2/r2/public/static/vid-collapsed-hover.png
  150. BIN  r2/r2/public/static/vid-collapsed.png
  151. BIN  r2/r2/public/static/vid-expanded-hover.png
  152. BIN  r2/r2/public/static/vid-expanded.png
  153. BIN  r2/r2/public/static/widget_arrows.png
  154. BIN  r2/r2/public/static/widget_arrows_down.png
  155. BIN  r2/r2/public/static/widget_arrows_up.png
  156. BIN  r2/r2/public/static/wired_w.png
  157. BIN  r2/r2/public/static/youbrokeit.png
  158. +22 −2 r2/r2/templates/adminawards.html
  159. +1 −1  r2/r2/templates/appservicemonitor.html
  160. +8 −0 r2/r2/templates/createsubreddit.html
  161. +1 −2  r2/r2/templates/dart_ad.html
  162. +1 −1  r2/r2/templates/frametoolbar.html
  163. +9 −0 r2/r2/templates/link.html
  164. +10 −8 r2/r2/templates/message.html
  165. +30 −12 r2/r2/templates/organiclisting.html
  166. +32 −7 r2/r2/templates/prefoptions.html
  167. +2 −1  r2/r2/templates/printable.html
  168. +12 −3 r2/r2/templates/printablebuttons.html
  169. +6 −10 r2/r2/templates/promotelinkform.html
  170. +2 −1  r2/r2/templates/searchbar.html
  171. +3 −0  r2/r2/templates/searchform.html
  172. +34 −6 r2/r2/templates/selfserveblurb.html
  173. +8 −0 r2/r2/templates/sidecontentbox.html
  174. +40 −14 r2/r2/templates/trophycase.html
  175. +29 −1 r2/r2/templates/userawards.html
2  r2/Makefile
@@ -48,7 +48,7 @@ MAINCSS := $(foreach css, $(main_css), $(static_dir)/$(css))
RTLCSS = $(CSSTARGETS:.css=-rtl.css) $(MAINCSS:.css=-rtl.css)
-MD5S = $(JSTARGETS:=.md5) $(CSSTARGETS:=.md5)
+MD5S = $(JSTARGETS:=.md5) $(CSSTARGETS:=.md5) $(MAINCSS:=.md5) $(RTLCSS:=.md5)
ifdef PRIVATEREPOS
INIUPDATE = $(wildcard *.update)
8 r2/example.ini
@@ -10,6 +10,8 @@ uncompressedJS = true
translator = true
sqlprinting = false
+log_start = true
+
proxy_addr =
log_path =
@@ -61,6 +63,7 @@ db_create_tables = True
type_db = main
rel_type_db = main
+hardcache_db = main
db_table_link = thing, main, main
db_table_account = thing, main
@@ -116,6 +119,7 @@ max_sr_images = 20
show_awards = False
+takedown_sr = _takedowns
login_cookie = reddit_session
domain = localhost
domain_prefix =
@@ -130,6 +134,7 @@ page_cache_time = 30
static_path = /static/
useragent = Mozilla/5.0 (compatible; bot/1.0; ChangeMe)
allow_shutdown = False
+profanity_wordlist =
solr_url =
solr_cache_time = 300
@@ -157,7 +162,6 @@ HOT_PAGE_AGE = 1
#
rising_period = 12 hours
-new_incubation = 90 seconds
# time of ratelimit purgatory (min)
RATELIMIT = 10
@@ -188,7 +192,7 @@ spreadshirt_test_font =
[server:main]
use = egg:Paste#http
host = 0.0.0.0
-port = %(port)s
+port = %(scgi_port)s
[app:main]
use = egg:r2
200 r2/r2/controllers/api.py
@@ -28,10 +28,9 @@
from r2.models import *
from r2.models.subreddit import Default as DefaultSR
-import r2.models.thing_changes as tc
from r2.lib.utils import get_title, sanitize_url, timeuntil, set_last_modified
-from r2.lib.utils import query_string, link_from_url, timefromnow, worker
+from r2.lib.utils import query_string, link_from_url, timefromnow
from r2.lib.utils import timeago
from r2.lib.pages import FriendList, ContributorList, ModList, \
BannedList, BoringPage, FormPage, CssError, UploadedImage, \
@@ -45,10 +44,11 @@
from r2.lib.strings import strings
from r2.lib.filters import _force_unicode, websafe_json, websafe, spaceCompress
from r2.lib.db import queries
-from r2.lib import amqp, promote
+from r2.lib.db.queries import changed
+from r2.lib import promote
from r2.lib.media import force_thumbnail, thumbnail_url
from r2.lib.comment_tree import add_comment, delete_comment
-from r2.lib import tracking, sup, cssfilter, emailer
+from r2.lib import tracking, cssfilter, emailer
from r2.lib.subreddit_search import search_reddits
from datetime import datetime, timedelta
@@ -124,8 +124,7 @@ def POST_compose(self, form, jquery, to, subject, body, ip):
form.set_html(".status", _("your message has been delivered"))
form.set_inputs(to = "", subject = "", text = "", captcha="")
- if g.write_query_queue:
- queries.new_message(m, inbox_rel)
+ queries.new_message(m, inbox_rel)
@validatedForm(VUser(),
VCaptcha(),
@@ -203,15 +202,10 @@ def POST_submit(self, form, jquery, url, selftext, kind, title, save,
l._commit()
l.set_url_cache()
- v = Vote.vote(c.user, l, True, ip)
+ queries.queue_vote(c.user, l, True, ip)
if save:
r = l._save(c.user)
- if g.write_query_queue:
- queries.new_savehide(r)
-
- #reset the hot page
- if v.valid_thing:
- expire_hot(sr)
+ queries.new_savehide(r)
#set the ratelimiter
if should_ratelimit:
@@ -219,21 +213,8 @@ def POST_submit(self, form, jquery, url, selftext, kind, title, save,
prefix = "rate_submit_")
#update the queries
- if g.write_query_queue:
- queries.new_link(l)
- queries.new_vote(v)
-
- # also notifies the searchchanges
- worker.do(lambda: amqp.add_item('new_link', l._fullname))
+ queries.new_link(l)
- #update the modified flags
- set_last_modified(c.user, 'overview')
- set_last_modified(c.user, 'submitted')
- set_last_modified(c.user, 'liked')
-
- #update sup listings
- sup.add_update(c.user, 'submitted')
-
if then == 'comments':
path = add_sr(l.make_permalink_slow())
elif then == 'tb':
@@ -283,8 +264,16 @@ def _login(self, form, user, dest='', rem = None):
def POST_login(self, form, jquery, user, dest, rem, reason):
if reason and reason[0] == 'redirect':
dest = reason[1]
- if form.has_errors("passwd", errors.WRONG_PASSWORD):
+
+ hc_key = "login_attempts-%s" % request.ip
+
+ recent_attempts = g.hardcache.get(hc_key, 0)
+
+ if recent_attempts >= 25:
+ raise NotImplementedError("need proper fail msg")
+ elif form.has_errors("passwd", errors.WRONG_PASSWORD):
VRatelimit.ratelimit(rate_ip = True, prefix = 'login_', seconds=1)
+ g.hardcache.set(hc_key, recent_attempts + 1, 3600 * 8)
else:
self._login(form, user, dest, rem)
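The login change above is the naive rate limit from the changelog: failed attempts are tallied per IP in the hardcache with an 8-hour TTL, and after 25 recent failures the login is refused (currently via a placeholder NotImplementedError). A minimal standalone sketch of the same pattern -- the names and helpers here are illustrative, not part of the diff:

    # per-IP failed-login counter, assuming a cache object with
    # get(key, default) and set(key, value, ttl) like g.hardcache
    MAX_ATTEMPTS = 25        # matches the threshold above
    WINDOW = 3600 * 8        # matches the 8-hour TTL above

    def login_allowed(cache, ip):
        return cache.get("login_attempts-%s" % ip, 0) < MAX_ATTEMPTS

    def record_failure(cache, ip):
        key = "login_attempts-%s" % ip
        cache.set(key, cache.get(key, 0) + 1, WINDOW)

The get-then-set is racy under concurrent failures, which is presumably part of what makes this the "naive way".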
@@ -406,7 +395,7 @@ def POST_friend(self, form, jquery, ip, friend,
# The user who made the request must be an admin or a moderator
# for the privilege change to succeed.
if (not c.user_is_admin
- and (type in ('moderator','contributer','banned')
+ and (type in ('moderator','contributer', 'banned')
and not c.site.is_moderator(c.user))):
abort(403,'forbidden')
@@ -442,8 +431,7 @@ def POST_friend(self, form, jquery, ip, friend,
item, inbox_rel = Message._new(c.user, friend,
subj, msg, ip)
- if g.write_query_queue:
- queries.new_message(item, inbox_rel)
+ queries.new_message(item, inbox_rel)
@validatedForm(VUser('curpass', default = ''),
@@ -470,6 +458,7 @@ def POST_update(self, form, jquery, email, password, verify):
# unverified email for now
c.user.email_verified = None
c.user._commit()
+ Award.take_away("verified_email", c.user)
updated = True
if verify:
# TODO: rate limit this?
@@ -523,21 +512,19 @@ def POST_del(self, thing):
thing._commit()
# flag search indexer that something has changed
- tc.changed(thing)
+ changed(thing)
#expire the item from the sr cache
if isinstance(thing, Link):
sr = thing.subreddit_slow
expire_hot(sr)
- if g.use_query_cache:
- queries.new_link(thing)
+ queries.new_link(thing)
#comments have special delete tasks
elif isinstance(thing, Comment):
thing._delete()
delete_comment(thing)
- if g.use_query_cache:
- queries.new_comment(thing, None)
+ queries.new_comment(thing, None)
@noresponse(VUser(), VModhash(),
thing = VByName('id'))
@@ -545,10 +532,24 @@ def POST_report(self, thing):
'''for reporting...'''
if not thing or thing._deleted:
return
- elif c.user._spam or c.user.ignorereports:
- return
elif getattr(thing, 'promoted', False):
return
+
+ # if it is a message that is being reported, ban it.
+ # every user is admin over their own personal inbox
+ if isinstance(thing, Message):
+ admintools.spam(thing, False, True, c.user.name)
+ # auto-hide links that are reported
+ elif isinstance(thing, Link):
+ r = thing._hide(c.user)
+ queries.new_savehide(r)
+ # TODO: be nice to be able to remove comments that are reported
+ # from a user's inbox so they don't have to look at them.
+ elif isinstance(thing, Comment):
+ pass
+
+ if c.user._spam or c.user.ignorereports:
+ return
Report.new(c.user, thing)
@validatedForm(VUser(),
@@ -573,7 +574,7 @@ def POST_editusertext(self, form, jquery, item, text):
item._commit()
- tc.changed(item)
+ changed(item)
if kind == 'link':
set_last_modified(item, 'comments')
@@ -629,27 +630,19 @@ def POST_comment(self, commentform, jquery, parent, comment, ip):
if not subject.startswith(re):
subject = re + subject
item, inbox_rel = Message._new(c.user, to, subject,
- comment, ip)
+ comment, ip, parent = parent)
item.parent_id = parent._id
else:
item, inbox_rel = Comment._new(c.user, link, parent_comment,
comment, ip)
- Vote.vote(c.user, item, True, ip)
-
- # will also update searchchanges as appropriate
- worker.do(lambda: amqp.add_item('new_comment', item._fullname))
+ queries.queue_vote(c.user, item, True, ip)
#update last modified
- set_last_modified(c.user, 'overview')
- set_last_modified(c.user, 'commented')
set_last_modified(link, 'comments')
- #update sup listings
- sup.add_update(c.user, 'commented')
-
#update the comment cache
add_comment(item)
-
+
# clean up the submission form and remove it from the DOM (if reply)
t = commentform.find("textarea")
t.attr('rows', 3).html("").attr("value", "")
@@ -657,25 +650,22 @@ def POST_comment(self, commentform, jquery, parent, comment, ip):
commentform.remove()
jquery.things(parent._fullname).set_html(".reply-button:first",
_("replied"))
-
+
# insert the new comment
jquery.insert_things(item)
# remove any null listings that may be present
jquery("#noresults").hide()
-
+
#update the queries
- if g.write_query_queue:
- if is_message:
- queries.new_message(item, inbox_rel)
- else:
- queries.new_comment(item, inbox_rel)
-
+ if is_message:
+ queries.new_message(item, inbox_rel)
+ else:
+ queries.new_comment(item, inbox_rel)
+
#set the ratelimiter
if should_ratelimit:
VRatelimit.ratelimit(rate_user=True, rate_ip = True,
prefix = "rate_comment_")
-
-
@validatedForm(VUser(),
VModhash(),
@@ -756,28 +746,15 @@ def POST_vote(self, dir, thing, ip, vote_type):
else False if dir < 0
else None)
organic = vote_type == 'organic'
- v = Vote.vote(user, thing, dir, ip, organic)
+ queries.queue_vote(user, thing, dir, ip, organic)
#update relevant caches
if isinstance(thing, Link):
- sr = thing.subreddit_slow
set_last_modified(c.user, 'liked')
set_last_modified(c.user, 'disliked')
- #update sup listings
- if dir:
- sup.add_update(c.user, 'liked')
- elif dir is False:
- sup.add_update(c.user, 'disliked')
-
- if v.valid_thing:
- expire_hot(sr)
-
- if g.write_query_queue:
- queries.new_vote(v)
-
# flag search indexer that something has changed
- tc.changed(thing)
+ changed(thing)
@validatedForm(VUser(),
VModhash(),
@@ -817,7 +794,7 @@ def POST_subreddit_stylesheet(self, form, jquery,
c.site.stylesheet_hash = md5(stylesheet_contents_parsed).hexdigest()
set_last_modified(c.site,'stylesheet_contents')
- tc.changed(c.site)
+ changed(c.site)
c.site._commit()
form.set_html(".status", _('saved'))
@@ -984,6 +961,7 @@ def POST_upload_sr_img(self, file, header, sponsor, name, form_id):
description = VLength("description", max_length = 1000),
lang = VLang("lang"),
over_18 = VBoolean('over_18'),
+ allow_top = VBoolean('allow_top'),
show_media = VBoolean('show_media'),
type = VOneOf('type', ('public', 'private', 'restricted')),
ip = ValidIP(),
@@ -1001,7 +979,8 @@ def POST_site_admin(self, form, jquery, name, ip, sr, ad_type, ad_file,
redir = False
kw = dict((k, v) for k, v in kw.iteritems()
if k in ('name', 'title', 'domain', 'description', 'over_18',
- 'show_media', 'type', 'lang', "css_on_cname"))
+ 'show_media', 'type', 'lang', "css_on_cname",
+ 'allow_top'))
#if a user is banned, return rate-limit errors
if c.user._spam:
@@ -1031,9 +1010,6 @@ def POST_site_admin(self, form, jquery, name, ip, sr, ad_type, ad_file,
sr = Subreddit._new(name = name, author_id = c.user._id, ip = ip,
**kw)
- # will also update search
- worker.do(lambda: amqp.add_item('new_subreddit', sr._fullname))
-
Subreddit.subscribe_defaults(c.user)
# make sure this user is on the admin list of that site!
if sr.add_subscriber(c.user):
@@ -1046,6 +1022,8 @@ def POST_site_admin(self, form, jquery, name, ip, sr, ad_type, ad_file,
rate_ip = True,
prefix = "create_reddit_")
+ queries.new_subreddit(sr)
+
#editting an existing reddit
elif sr.is_moderator(c.user) or c.user_is_admin:
@@ -1072,7 +1050,7 @@ def POST_site_admin(self, form, jquery, name, ip, sr, ad_type, ad_file,
Subreddit._by_domain(sr.domain, _update = True)
# flag search indexer that something has changed
- tc.changed(sr)
+ changed(sr)
form.parent().set_html('.status', _("saved"))
if redir:
@@ -1118,8 +1096,7 @@ def POST_distinguish(self, form, jquery, thing, how):
def POST_save(self, thing):
if not thing: return
r = thing._save(c.user)
- if g.write_query_queue:
- queries.new_savehide(r)
+ queries.new_savehide(r)
@noresponse(VUser(),
VModhash(),
@@ -1127,17 +1104,39 @@ def POST_save(self, thing):
def POST_unsave(self, thing):
if not thing: return
r = thing._unsave(c.user)
- if g.write_query_queue and r:
+ if r:
queries.new_savehide(r)
@noresponse(VUser(),
VModhash(),
thing = VByName('id'))
+ def POST_unread_message(self, thing):
+ if not thing:
+ return
+ if hasattr(thing, "to_id") and c.user._id != thing.to_id:
+ return
+ thing.new = True
+ thing._commit()
+
+ @noresponse(VUser(),
+ VModhash(),
+ thing = VByName('id'))
+ def POST_read_message(self, thing):
+ if not thing: return
+ if hasattr(thing, "to_id") and c.user._id != thing.to_id:
+ return
+ thing.new = False
+ thing._commit()
+
+
+
+ @noresponse(VUser(),
+ VModhash(),
+ thing = VByName('id'))
def POST_hide(self, thing):
if not thing: return
r = thing._hide(c.user)
- if g.write_query_queue:
- queries.new_savehide(r)
+ queries.new_savehide(r)
@noresponse(VUser(),
VModhash(),
@@ -1145,7 +1144,7 @@ def POST_hide(self, thing):
def POST_unhide(self, thing):
if not thing: return
r = thing._unhide(c.user)
- if g.write_query_queue and r:
+ if r:
queries.new_savehide(r)
@@ -1216,20 +1215,17 @@ def GET_bookmarklet(self, action, uh, links):
Subreddit.load_subreddits(links, return_dict = False)
user = c.user if c.user_is_loggedin else None
links = [l for l in links if l.subreddit_slow.can_view(user)]
-
+
if links:
if action in ['like', 'dislike']:
#vote up all of the links
for link in links:
- v = Vote.vote(c.user, link, action == 'like',
- request.ip)
- if g.write_query_queue:
- queries.new_vote(v)
+ queries.queue_vote(c.user, link,
+ action == 'like', request.ip)
elif action == 'save':
link = max(links, key = lambda x: x._score)
r = link._save(c.user)
- if g.write_query_queue:
- queries.new_savehide(r)
+ queries.new_savehide(r)
return self.redirect("/static/css_%sd.png" % action)
return self.redirect("/static/css_submit.png")
@@ -1315,7 +1311,7 @@ def _subscribe(self, sr, sub):
else:
if sr.remove_subscriber(c.user):
sr._incr('_ups', -1)
- tc.changed(sr)
+ changed(sr)
@noresponse(VAdmin(),
@@ -1337,12 +1333,19 @@ def POST_enable_lang(self, tr):
colliding_award=VAwardByCodename(("codename", "fullname")),
codename = VLength("codename", max_length = 100),
title = VLength("title", max_length = 100),
+ awardtype = VOneOf("awardtype",
+ ("regular", "manual", "invisible")),
imgurl = VLength("imgurl", max_length = 1000))
def POST_editaward(self, form, jquery, award, colliding_award, codename,
- title, imgurl):
- if form.has_errors(("codename", "title", "imgurl"), errors.NO_TEXT):
+ title, awardtype, imgurl):
+ if form.has_errors(("codename", "title", "awardtype", "imgurl"),
+ errors.NO_TEXT):
pass
+ if awardtype is None:
+ form.set_html(".status", "bad awardtype")
+ return
+
if form.has_errors(("codename"), errors.INVALID_OPTION):
form.set_html(".status", "some other award has that codename")
pass
@@ -1351,12 +1354,13 @@ def POST_editaward(self, form, jquery, award, colliding_award, codename,
return
if award is None:
- Award._new(codename, title, imgurl)
+ Award._new(codename, title, awardtype, imgurl)
form.set_html(".status", "saved. reload to see it.")
return
award.codename = codename
award.title = title
+ award.awardtype = awardtype
award.imgurl = imgurl
award._commit()
form.set_html(".status", _('saved'))
1  r2/r2/controllers/buttons.py
@@ -96,6 +96,7 @@ def GET_button_content(self, url, title, css, vote, newwindow, width, link):
if link:
url = link.url
+ title = link.title
wrapper = make_wrapper(Button if vote else ButtonNoBody,
url = url,
target = "_new" if newwindow else "_parent",
5 r2/r2/controllers/feedback.py
@@ -27,11 +27,8 @@
class FeedbackController(RedditController):
def GET_ad_inq(self):
- title = _("inquire about advertising on reddit")
return FormPage('advertise',
- content = PaneStack([SelfServeBlurb(),
- Feedback(title=title,
- action='ad_inq')]),
+ content = SelfServeBlurb(),
loginbox = False).render()
def GET_feedback(self):
8 r2/r2/controllers/front.py
@@ -139,6 +139,7 @@ def GET_verify_email(self, cache_evt, key, dest):
cache_evt.clear()
c.user.email_verified = True
c.user._commit()
+ Award.give_if_needed("verified_email", c.user)
return self.redirect(dest)
@validate(cache_evt = VCacheKey('reset', ('key',)),
@@ -414,14 +415,16 @@ def GET_search_reddits(self, query, reverse, after, count, num):
prev_search = query,
elapsed_time = t,
num_results = num,
+ # update if we ever add sorts
+ search_params = {},
title = _("search results")).render()
return res
verify_langs_regex = re.compile(r"^[a-z][a-z](,[a-z][a-z])*$")
@base_listing
@validate(query = nop('q'),
- time = VMenu('action', TimeMenu, remember = False),
- sort = VMenu('sort', SearchSortMenu, remember = False),
+ time = VMenu('action', TimeMenu),
+ sort = VMenu('sort', SearchSortMenu),
langs = nop('langs'))
def GET_search(self, query, num, time, reverse, after, count, langs, sort):
"""Search links page."""
@@ -464,6 +467,7 @@ def GET_search(self, query, num, time, reverse, after, count, langs, sort):
res = SearchPage(_('search results'), query, t, num, content=spane,
nav_menus = [TimeMenu(default = time),
SearchSortMenu(default=sort)],
+ search_params = dict(sort = sort, t = time),
infotext = infotext).render()
return res
2  r2/r2/controllers/health.py
@@ -6,7 +6,7 @@
from pylons import c, g
from reddit_base import RedditController
-from r2.lib.utils import worker
+from r2.lib.amqp import worker
class HealthController(RedditController):
def shutdown(self):
39 r2/r2/controllers/listingcontroller.py
@@ -295,9 +295,6 @@ def keep(item):
elif wouldkeep and c.user_is_loggedin and c.user._id == item.author_id:
# also let the author of the link see them
return True
- elif item._date > timeago(g.new_incubation):
- # it's too young to show yet
- return False
else:
# otherwise, fall back to the regular logic (don't
# show hidden links, etc)
@@ -484,6 +481,19 @@ class MessageController(ListingController):
show_sidebar = False
render_cls = MessagePage
+ @property
+ def menus(self):
+ if self.where in ('inbox', 'messages', 'comments', 'selfreply'):
+ buttons = (NavButton(_("all"), "inbox"),
+ NavButton(plurals.messages, "messages"),
+ NavButton(_("comment replies"), 'comments'),
+ NavButton(_("post replies"), 'selfreply'))
+
+ return [NavMenu(buttons, base_path = '/message/',
+ default = 'inbox', type = "flatlist")]
+ return []
+
+
def title(self):
return _('messages') + ': ' + _(self.where)
@@ -497,11 +507,23 @@ def builder_wrapper(thing):
w.to_id = c.user._id
w.was_comment = True
w.permalink, w._fullname = p, f
- return w
else:
- return ListingController.builder_wrapper(thing)
+ w = ListingController.builder_wrapper(thing)
+
+ if c.user.pref_mark_messages_read and thing.new:
+ w.new = True
+ thing.new = False
+ thing._commit()
+
+ return w
def query(self):
+ if self.where == 'messages':
+ q = queries.get_inbox_messages(c.user)
+ elif self.where == 'comments':
+ q = queries.get_inbox_comments(c.user)
+ elif self.where == 'selfreply':
+ q = queries.get_inbox_selfreply(c.user)
if self.where == 'inbox':
q = queries.get_inbox(c.user)
@@ -613,9 +635,4 @@ class CommentsController(ListingController):
title_text = _('comments')
def query(self):
- q = Comment._query(Comment.c._spam == (True,False),
- sort = desc('_date'))
- if not c.user_is_admin:
- q._filter(Comment.c._spam == False)
-
- return q
+ return queries.get_all_comments()
9 r2/r2/controllers/post.py
@@ -104,7 +104,10 @@ def POST_unlogged_options(self, all_langs, pref_lang):
pref_num_comments = VInt('num_comments', 1, g.max_comments,
default = g.num_comments),
pref_show_stylesheets = VBoolean('show_stylesheets'),
+ pref_no_profanity = VBoolean('no_profanity'),
+ pref_label_nsfw = VBoolean('label_nsfw'),
pref_show_promote = VBoolean('show_promote'),
+ pref_mark_messages_read = VBoolean("mark_messages_read"),
all_langs = nop('all-langs', default = 'all'))
def POST_options(self, all_langs, pref_lang, **kw):
#temporary. eventually we'll change pref_clickgadget to an
@@ -115,6 +118,12 @@ def POST_options(self, all_langs, pref_lang, **kw):
elif not kw.get('pref_show_promote'):
kw['pref_show_promote'] = False
+ if not kw.get("pref_over_18") or not c.user.pref_over_18:
+ kw['pref_no_profanity'] = True
+
+ if kw.get("pref_no_profanity") or c.user.pref_no_profanity:
+ kw['pref_label_nsfw'] = True
+
self.set_options(all_langs, pref_lang, **kw)
u = UrlParser(c.site.path + "prefs")
u.update_query(done = 'true')
49 r2/r2/controllers/promotecontroller.py
@@ -66,22 +66,22 @@ def query(self):
STATUS.rejected))
elif self.sort == "unpaid_promos":
q._filter(Link.c.promote_status == STATUS.unpaid)
+ elif self.sort == "rejected_promos":
+ q._filter(Link.c.promote_status == STATUS.rejected)
elif self.sort == "live_promos":
q._filter(Link.c.promote_status == STATUS.promoted)
return q
- @validate(VPaidSponsor(),
- VVerifiedUser())
def GET_listing(self, sort = "", **env):
+ if not c.user_is_loggedin or not c.user.email_verified:
+ return self.redirect("/ad_inq")
self.sort = sort
return ListingController.GET_listing(self, **env)
GET_index = GET_listing
-
- # To open up: VSponsor -> VVerifiedUser
- @validate(VPaidSponsor(),
- VVerifiedUser())
+
+ @validate(VVerifiedUser())
def GET_new_promo(self):
return PromotePage('content', content = PromoteLinkForm()).render()
@@ -103,8 +103,7 @@ def GET_edit_promo(self, link):
return page.render()
- @validate(VPaidSponsor(),
- VVerifiedUser())
+ @validate(VVerifiedUser())
def GET_graph(self):
content = Promote_Graph()
if c.user_is_sponsor and c.render_style == 'csv':
@@ -163,17 +162,19 @@ def POST_promote(self, thing):
reason = nop("reason"))
def POST_unpromote(self, thing, reason):
if thing:
+ # reject anything that hasn't yet been promoted
if (c.user_is_sponsor and
- (thing.promote_status in (promote.STATUS.unpaid,
- promote.STATUS.unseen,
- promote.STATUS.accepted,
- promote.STATUS.promoted)) ):
+ thing.promote_status < promote.STATUS.promoted):
+ promote.reject_promo(thing, reason = reason)
+ # also reject anything that is live but has a reason given
+ elif (c.user_is_sponsor and reason and
+                  thing.promote_status == promote.STATUS.promoted):
promote.reject_promo(thing, reason = reason)
+ # otherwise, mark it as "finished"
else:
promote.unpromote(thing)
- # TODO: when opening up, may have to refactor
- @validatedForm(VPaidSponsor('link_id'),
+ @validatedForm(VSponsor('link_id'),
VModhash(),
VRatelimit(rate_user = True,
rate_ip = True,
@@ -221,12 +222,21 @@ def POST_new_promo(self, form, jquery, l, ip, title, url, dates,
# check dates and date range
start, end = [x.date() for x in dates] if dates else (None, None)
- if not l or (l._date.date(), l.promote_until.date()) == (start,end):
+ if (not l or
+ (l.promote_status != promote.STATUS.promoted and
+ (l._date.date(), l.promote_until.date()) != (start,end))):
if (form.has_errors('startdate', errors.BAD_DATE,
errors.BAD_FUTURE_DATE) or
form.has_errors('enddate', errors.BAD_DATE,
errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)):
return
+ # if the dates have been updated, it is possible that the
+ # bid is no longer valid
+ duration = max((end - start).days, 1)
+ if float(bid) / duration < g.min_promote_bid:
+ c.errors.add(errors.BAD_BID, field = 'bid',
+ msg_params = {"min": g.min_promote_bid,
+ "max": g.max_promote_bid})
# dates have been validated at this point. Next validate title, etc.
if (form.has_errors('title', errors.NO_TEXT,
@@ -384,12 +394,6 @@ def POST_update_pay(self, form, jquery, bid, link, customer_id, pay_id,
form.set_html(".status",
_("that promotion is already over, so updating "
"your bid is kind of pointless, don't you think?"))
- # don't create or modify a transaction if no changes have been made.
- elif (link.promote_status > promote.STATUS.unpaid and
- not address_modified and
- getattr(link, "promote_bid", "") == bid):
- form.set_html(".status",
- _("no changes needed to be made"))
elif pay_id:
# valid bid and created or existing bid id.
# check if already a transaction
@@ -411,7 +415,8 @@ def GET_pay(self, article):
customer_id = data.customerProfileId,
profiles = data.paymentProfiles)
res = LinkInfoPage(link = article,
- content = content)
+ content = content,
+ show_sidebar = False)
return res.render()
def GET_link_thumb(self, *a, **kw):
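To make the re-validated bid concrete: duration is clamped to at least one day, so with an assumed g.min_promote_bid of 5, a bid of 20 spread over a 10-day range works out to 2/day and now raises BAD_BID, while the same bid over 4 days (5/day) still passes.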
3  r2/r2/controllers/reddit_base.py
@@ -520,9 +520,6 @@ def pre(self):
if not g.disallow_db_writes:
c.user.update_last_visit(c.start_time)
- #TODO: temporary
- c.user_is_paid_sponsor = c.user.name.lower() in g.paid_sponsors
-
c.over18 = over18()
#set_browser_langs()
17 r2/r2/controllers/validator/validator.py
@@ -47,8 +47,7 @@ def visible_promo(article):
# promos are visible only if comments are not disabled and the
# user is either the author or the link is live/previously live.
if is_promo:
- return (not article.disable_comments and
- (is_author or
+ return (is_author or (not article.disable_comments and
article.promote_status >= promote.STATUS.promoted))
# not a promo, therefore it is visible
return True
@@ -515,21 +514,13 @@ def run(self, link_id = None):
return
except (NotFound, ValueError):
pass
- abort(403, 'forbidden')
+ abort(403, 'forbidden')
class VTrafficViewer(VSponsor):
def user_test(self, thing):
return (VSponsor.user_test(self, thing) or
promote.is_traffic_viewer(thing, c.user))
-# TODO: temporary validator to be replaced with VUser once we get the
-# bugs worked out
-class VPaidSponsor(VSponsor):
- def run(self, link_id = None):
- if c.user_is_paid_sponsor:
- return
- VSponsor.run(self, link_id)
-
class VSrModerator(Validator):
def run(self):
if not (c.user_is_loggedin and c.site.is_moderator(c.user)
@@ -1100,10 +1091,10 @@ def run(self, date):
business_days = self.business_days)
if self.future is not None and date.date() < future.date():
self.set_error(errors.BAD_FUTURE_DATE,
- {"day": future.days})
+ {"day": self.future})
elif self.past is not None and date.date() > past.date():
self.set_error(errors.BAD_PAST_DATE,
- {"day": past.days})
+ {"day": self.past})
return date.replace(tzinfo=g.tz)
except (ValueError, TypeError):
self.set_error(errors.BAD_DATE)
130 r2/r2/lib/amqp.py
@@ -20,13 +20,15 @@
# CondeNet, Inc. All Rights Reserved.
################################################################################
-from threading import local
+from Queue import Queue
+from threading import local, Thread
from datetime import datetime
import os
import sys
import time
import errno
import socket
+import itertools
from amqplib import client_0_8 as amqp
@@ -44,9 +46,35 @@
have_init = False
#there are two ways of interacting with this module: add_item and
-#handle_items. add_item should only be called from the utils.worker
-#thread since it might block for an arbitrary amount of time while
-#trying to get a connection amqp.
+#handle_items. _add_item (the internal function for adding items to
+#amqp that are added using add_item) might block for an arbitrary
+#amount of time while trying to get a connection to amqp.
+
+class Worker:
+ def __init__(self):
+ self.q = Queue()
+ self.t = Thread(target=self._handle)
+ self.t.setDaemon(True)
+ self.t.start()
+
+ def _handle(self):
+ while True:
+ fn = self.q.get()
+ try:
+ fn()
+ self.q.task_done()
+ except:
+ import traceback
+ print traceback.format_exc()
+
+ def do(self, fn, *a, **kw):
+ fn1 = lambda: fn(*a, **kw)
+ self.q.put(fn1)
+
+ def join(self):
+ self.q.join()
+
+worker = Worker()
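Worker gives amqp its own in-process daemon thread: do() wraps the call in a thunk and queues it, the thread drains the queue, and join() blocks until everything queued has been handled. A usage sketch (the publish function is illustrative):

    # hand work to the background thread, then wait for the queue to drain
    def publish(routing_key, body):
        print "would publish %r to %r" % (body, routing_key)

    worker.do(publish, 'new_link', 't3_xxxxx')
    worker.join()

One caveat visible in the code: task_done() is only reached when fn() returns normally, so a thunk that raises will leave join() waiting forever.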
def get_connection():
global connection
@@ -63,9 +91,9 @@ def get_connection():
print 'error connecting to amqp'
time.sleep(1)
- #don't run init_queue until someone actually needs it. this allows
- #the app server to start and serve most pages if amqp isn't
- #running
+ # don't run init_queue until someone actually needs it. this
+ # allows the app server to start and serve most pages if amqp
+ # isn't running
if not have_init:
init_queue()
have_init = True
@@ -73,7 +101,6 @@ def get_connection():
def get_channel(reconnect = False):
global connection
global channel
- global log
# Periodic (and increasing with uptime) errors appearing when
# connection object is still present, but appears to have been
@@ -91,67 +118,22 @@ def get_channel(reconnect = False):
channel.chan = connection.channel()
return channel.chan
+
def init_queue():
- from r2.models import admintools
+ from r2.lib.queues import RedditQueueMap
exchange = 'reddit_exchange'
chan = get_channel()
- #we'll have one exchange for now
- chan.exchange_declare(exchange=exchange,
- type='direct',
- durable=True,
- auto_delete=False)
-
- #prec_links queue
- chan.queue_declare(queue='prec_links',
- durable=True,
- exclusive=False,
- auto_delete=False)
- chan.queue_bind(routing_key='prec_links',
- queue='prec_links',
- exchange=exchange)
-
- chan.queue_declare(queue='scraper_q',
- durable=True,
- exclusive=False,
- auto_delete=False)
-
- chan.queue_declare(queue='searchchanges_q',
- durable=True,
- exclusive=False,
- auto_delete=False)
-
- # new_link
- chan.queue_bind(routing_key='new_link',
- queue='scraper_q',
- exchange=exchange)
- chan.queue_bind(routing_key='new_link',
- queue='searchchanges_q',
- exchange=exchange)
-
- # new_subreddit
- chan.queue_bind(routing_key='new_subreddit',
- queue='searchchanges_q',
- exchange=exchange)
-
- # new_comment (nothing here for now)
-
- # while new items will be put here automatically, we also need a
- # way to specify that the item has changed by hand
- chan.queue_bind(routing_key='searchchanges_q',
- queue='searchchanges_q',
- exchange=exchange)
-
- admintools.admin_queues(chan, exchange)
+ RedditQueueMap(exchange, chan).init()
-def add_item(routing_key, body, message_id = None):
+def _add_item(routing_key, body, message_id = None):
"""adds an item onto a queue. If the connection to amqp is lost it
will try to reconnect and then call itself again."""
if not amqp_host:
- print "Ignoring amqp message %r to %r" % (body, routing_key)
+ log.error("Ignoring amqp message %r to %r" % (body, routing_key))
return
chan = get_channel()
@@ -172,14 +154,17 @@ def add_item(routing_key, body, message_id = None):
else:
raise
+def add_item(routing_key, body, message_id = None):
+ if amqp_host:
+ log.debug("amqp: adding item %r to %r" % (body, routing_key))
+
+ worker.do(_add_item, routing_key, body, message_id = message_id)
+
def handle_items(queue, callback, ack = True, limit = 1, drain = False):
"""Call callback() on every item in a particular queue. If the
connection to the queue is lost, it will die. Intended to be
used as a long-running process."""
- # debuffer stdout so that logging comes through more real-time
- sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0)
-
chan = get_channel()
while True:
msg = chan.basic_get(queue)
@@ -199,11 +184,26 @@ def handle_items(queue, callback, ack = True, limit = 1, drain = False):
break # the innermost loop only
msg = chan.basic_get(queue)
- callback(items)
-
- if ack:
+ try:
+ count_str = ''
+ if 'message_count' in items[-1].delivery_info:
+ # the count from the last message, if the count is
+ # available
+ count_str = '(%d remaining)' % items[-1].delivery_info['message_count']
+ print "%s: %d items %s" % (queue, len(items), count_str)
+ callback(items, chan)
+
+ if ack:
+ for item in items:
+ chan.basic_ack(item.delivery_tag)
+
+ # flush any log messages printed by the callback
+ sys.stdout.flush()
+ except:
for item in items:
- chan.basic_ack(item.delivery_tag)
+ # explicitly reject the items that we've not processed
+ chan.basic_reject(item.delivery_tag, requeue = True)
+ raise
def empty_queue(queue):
"""debug function to completely erase the contents of a queue"""
54 r2/r2/lib/app_globals.py
@@ -21,9 +21,9 @@
################################################################################
from __future__ import with_statement
from pylons import config
-import pytz, os, logging, sys, socket
-from datetime import timedelta
-from r2.lib.cache import LocalCache, Memcache, CacheChain
+import pytz, os, logging, sys, socket, re, subprocess
+from datetime import timedelta, datetime
+from r2.lib.cache import LocalCache, Memcache, HardCache, CacheChain
from r2.lib.db.stats import QueryStats
from r2.lib.translation import get_active_langs
from r2.lib.lock import make_lock_factory
@@ -55,7 +55,8 @@ class Globals(object):
'max_promote_bid',
]
- bool_props = ['debug', 'translator',
+ bool_props = ['debug', 'translator',
+ 'log_start',
'sqlprinting',
'template_debug',
'uncompressedJS',
@@ -74,8 +75,6 @@ class Globals(object):
'rendercaches',
'admins',
'sponsors',
- # TODO: temporary until we open it up to all users
- 'paid_sponsors',
'monitored_servers',
'automatic_reddits',
'agents',
@@ -120,8 +119,6 @@ def __init__(self, global_conf, app_conf, paths, **extra):
v = tuple(self.to_iter(v))
setattr(self, k, v)
- self.paid_sponsors = set(x.lower() for x in self.paid_sponsors)
-
# initialize caches
mc = Memcache(self.memcaches, pickleProtocol = 1)
self.memcache = mc
@@ -142,6 +139,9 @@ def __init__(self, global_conf, app_conf, paths, **extra):
#load the database info
self.dbm = self.load_db_params(global_conf)
+ # can't do this until load_db_params() has been called
+ self.hardcache = CacheChain((LocalCache(), mc, HardCache(self)))
+
#make a query cache
self.stats_collector = QueryStats()
@@ -190,6 +190,8 @@ def __init__(self, global_conf, app_conf, paths, **extra):
else:
self.log.setLevel(logging.WARNING)
+ if self.log_start:
+ self.log.error("reddit app started on %s" % datetime.now())
# set log level for pycountry which is chatty
logging.getLogger('pycountry.db').setLevel(logging.CRITICAL)
@@ -206,6 +208,16 @@ def __init__(self, global_conf, app_conf, paths, **extra):
with open(stylesheet_path) as s:
self.default_stylesheet = s.read()
+ self.profanities = None
+ if self.profanity_wordlist and os.path.exists(self.profanity_wordlist):
+ with open(self.profanity_wordlist, 'r') as handle:
+ words = []
+ for line in handle:
+ words.append(line.strip(' \n\r'))
+ if words:
+ self.profanities = re.compile(r"\b(%s)\b" % '|'.join(words),
+ re.I | re.U)
+
self.reddit_host = socket.gethostname()
self.reddit_pid = os.getpid()
@@ -216,6 +228,19 @@ def __init__(self, global_conf, app_conf, paths, **extra):
if self.write_query_queue and not self.amqp_host:
raise Exception("amqp_host must be defined to use the query queue")
+ # try to set the source control revision number
+ try:
+ popen = subprocess.Popen(["git", "log", "--date=short",
+ "--pretty=format:%H %h", '-n1'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+ resp, stderrdata = popen.communicate()
+ resp = resp.strip().split(' ')
+ self.version, self.short_version = resp
+ except object, e:
+ self.log.info("Couldn't read source revision (%r)" % e)
+ self.version = self.short_version = '(unknown)'
+
@staticmethod
def to_bool(x):
return (x.lower() == 'true') if x else None
@@ -241,6 +266,11 @@ def load_db_params(self, gc):
dbm.type_db = dbm.engines[gc['type_db']]
dbm.relation_type_db = dbm.engines[gc['rel_type_db']]
+ dbm.hardcache_db = dbm.engines[gc['hardcache_db']]
+
+ def split_flags(p):
+ return ([n for n in p if not n.startswith("!")],
+ dict((n.strip('!'), True) for n in p if n.startswith("!")))
prefix = 'db_table_'
for k, v in gc.iteritems():
@@ -249,10 +279,14 @@ def load_db_params(self, gc):
name = k[len(prefix):]
kind = params[0]
if kind == 'thing':
- dbm.add_thing(name, [dbm.engines[n] for n in params[1:]])
+ engines, flags = split_flags(params[1:])
+ dbm.add_thing(name, [dbm.engines[n] for n in engines],
+ **flags)
elif kind == 'relation':
+ engines, flags = split_flags(params[3:])
dbm.add_relation(name, params[1], params[2],
- [dbm.engines[n] for n in params[3:]])
+ [dbm.engines[n] for n in engines],
+ **flags)
return dbm
def __del__(self):
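g.profanities ends up as either None or a single case-insensitive alternation compiled over the wordlist. A one-line sketch of consulting it (the helper name is hypothetical):

    # True only when a wordlist is configured and matches the text
    def looks_profane(text):
        return bool(g.profanities and g.profanities.search(text))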
32 r2/r2/lib/cache.py
@@ -24,6 +24,8 @@
from utils import lstrips
from contrib import memcache
+from r2.lib.hardcachebackend import HardCacheBackend
+
class CacheUtils(object):
def incr_multi(self, keys, amt=1, prefix=''):
for k in keys:
@@ -75,6 +77,36 @@ def delete_multi(self, keys, prefix='', time=0):
memcache.Client.delete_multi(self, keys, time = time,
key_prefix = prefix)
+class HardCache(CacheUtils):
+ backend = None
+
+ def __init__(self, gc):
+ self.backend = HardCacheBackend(gc)
+
+ def _split_key(self, key):
+ tokens = key.split("-", 1)
+ if len(tokens) != 2:
+ raise ValueError("key %s has no dash" % key)
+
+ category, ids = tokens
+ return category, ids
+
+ def set(self, key, val, time=0):
+ category, ids = self._split_key(key)
+ if time <= 0:
+ raise ValueError ("HardCache.set() *must* have an expiration time")
+ self.backend.set(category, ids, val, time)
+
+ def get(self, key, default=None):
+ category, ids = self._split_key(key)
+ r = self.backend.get(category, ids)
+ if r is None: return default
+ return r
+
+ def delete(self, key, time=0):
+ category, ids = self._split_key(key)
+ self.backend.delete(category, ids)
+
class LocalCache(dict, CacheUtils):
def __init__(self, *a, **kw):
return dict.__init__(self, *a, **kw)
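HardCache keys carry a category before the first dash (that becomes the lookup bucket in HardCacheBackend), and set() insists on a positive expiration since entries are meant to age out of the backing store. A usage sketch, assuming the g.hardcache chain forwards these calls through to the HardCache layer:

    g.hardcache.set("login_attempts-10.1.2.3", 1, 3600 * 8)  # ok
    n = g.hardcache.get("login_attempts-10.1.2.3", 0)
    g.hardcache.delete("login_attempts-10.1.2.3")

    g.hardcache.set("nodash", 1, 60)   # ValueError: key has no dash
    g.hardcache.set("cat-ids", 1)      # ValueError: must have an expiration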
4 r2/r2/lib/comment_tree.py
@@ -37,7 +37,7 @@ def add_comment(comment):
def add_comment_nolock(comment):
cm_id = comment._id
- p_id = comment.parent_id if hasattr(comment, 'parent_id') else None
+ p_id = comment.parent_id
link_id = comment.link_id
cids, comment_tree, depth, num_children = link_comments(link_id)
@@ -108,7 +108,7 @@ def load_link_comments(link_id):
#make a tree
comment_tree = {}
for cm in comments:
- p_id = cm.parent_id if hasattr(cm, 'parent_id') else None
+ p_id = cm.parent_id
comment_tree.setdefault(p_id, []).append(cm._id)
#calculate the depths
18 r2/r2/lib/contrib/nymph.py
@@ -21,6 +21,11 @@
################################################################################
import re, sys, Image, os, hashlib, StringIO
+def optimize_png(fname, optimizer = "/usr/bin/env optipng"):
+ if os.path.exists(fname):
+ os.popen("%s %s" % (optimizer, fname))
+ return fname
+
class Spriter(object):
spritable = re.compile(r" *background-image: *url\((.*)\) *.*/\* *SPRITE *\*/")
@@ -31,7 +36,7 @@ def __init__(self, padding = (4, 4),
self.im_lookup = {}
self.ypos = [0]
self.padding = padding
-
+
self.css_path = css_path
self.actual_path = actual_path
@@ -65,12 +70,19 @@ def finish(self, out_file, out_string):
master.paste(image,
(self.padding[0], self.padding[1] + self.ypos[i]))
- master.save(os.path.join(self.actual_path, out_file))
+ f = os.path.join(self.actual_path, out_file)
+ master.save(f)
+
+ # optimize the file
+ optimize_png(f)
d = dict(('pos_' + str(i), -self.padding[1] - y)
for i, y in enumerate(self.ypos))
- h = hashlib.md5(master.tostring()).hexdigest()
+ # md5 the final contents
+ with open(f) as handle:
+ h = hashlib.md5(handle.read()).hexdigest()
+
d['sprite'] = os.path.join(self.css_path, "%s?v=%s" % (out_file, h))
return out_string % d
13 r2/r2/lib/cssfilter.py
@@ -29,6 +29,11 @@
from pylons import g, c
from pylons.i18n import _
+import tempfile
+from r2.lib import s3cp
+from md5 import md5
+from r2.lib.contrib.nymph import optimize_png
+
import re
import cssutils
@@ -372,10 +377,6 @@ def save_sr_image(sr, data, resource = None):
http:/${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash}
[Note: g.s3_thumb_bucket begins with a "/" so the above url is valid.]
"""
- import tempfile
- from r2.lib import s3cp
- from md5 import md5
-
hash = md5(data).hexdigest()
try:
@@ -389,8 +390,8 @@ def save_sr_image(sr, data, resource = None):
resource = ""
resource = g.s3_thumb_bucket + sr._fullname + resource + ".png"
- s3cp.send_file(f.name, resource, 'image/png', 'public-read',
- None, False)
+ s3cp.send_file(optimize_png(f.name, g.png_optimizer), resource,
+ 'image/png', 'public-read', None, False)
finally:
f.close()
242 r2/r2/lib/db/queries.py
@@ -3,9 +3,12 @@
from r2.lib.db.thing import Thing, Merge
from r2.lib.db.operators import asc, desc, timeago
from r2.lib.db import query_queue
+from r2.lib.normalized_hot import expire_hot
from r2.lib.db.sorts import epoch_seconds
-from r2.lib.utils import fetch_things2, worker, tup, UniqueIterator
+from r2.lib.utils import fetch_things2, tup, UniqueIterator, set_last_modified
from r2.lib.solrsearch import DomainSearchQuery
+from r2.lib import amqp, sup
+import cPickle as pickle
from datetime import datetime
import itertools
@@ -121,15 +124,15 @@ def insert(self, items):
def delete(self, items):
"""Deletes an item from the cached data."""
self.fetch()
- changed = False
+ did_change = False
for item in tup(items):
t = self.make_item_tuple(item)
while t in self.data:
self.data.remove(t)
- changed = True
+ did_change = True
- if changed:
+ if did_change:
query_cache.set(self.iden, self.data)
def update(self):
@@ -253,6 +256,11 @@ def user_query(kind, user, sort, time):
q._filter(db_times[time])
return make_results(q)
+def get_all_comments():
+ """the master /comments page"""
+ q = Comment._query(sort = desc('_date'))
+ return make_results(q)
+
def get_comments(user, sort, time):
return user_query(Comment, user, sort, time)
@@ -272,7 +280,7 @@ def user_rel_query(rel, user, name):
eager_load = True,
thing_data = not g.use_query_cache
)
-
+
return make_results(q, filter_thing2)
vote_rel = Vote.rel(Account, Link)
@@ -297,9 +305,13 @@ def get_inbox_messages(user):
def get_inbox_comments(user):
return user_rel_query(inbox_comment_rel, user, 'inbox')
+def get_inbox_selfreply(user):
+ return user_rel_query(inbox_comment_rel, user, 'selfreply')
+
def get_inbox(user):
return merge_results(get_inbox_comments(user),
- get_inbox_messages(user))
+ get_inbox_messages(user),
+ get_inbox_selfreply(user))
def get_sent(user):
q = Message._query(Message.c.author_id == user._id,
@@ -311,7 +323,10 @@ def add_queries(queries, insert_items = None, delete_items = None):
"""Adds multiple queries to the query queue. If insert_items or
delete_items is specified, the query may not need to be recomputed at
all."""
- log = g.log.debug
+ if not g.write_query_queue:
+ return
+
+ log = g.log
make_lock = g.make_lock
def _add_queries():
for q in queries:
@@ -320,16 +335,16 @@ def _add_queries():
with make_lock("add_query(%s)" % q.iden):
if insert_items and q.can_insert():
- log("Inserting %s into query %s" % (insert_items, q))
+ log.debug("Inserting %s into query %s" % (insert_items, q))
q.insert(insert_items)
elif delete_items and q.can_delete():
- log("Deleting %s from query %s" % (delete_items, q))
+ log.debug("Deleting %s from query %s" % (delete_items, q))
q.delete(delete_items)
else:
- log('Adding precomputed query %s' % q)
+ log.debug('Adding precomputed query %s' % q)
query_queue.add_query(q)
- worker.do(_add_queries)
-
+ # let the amqp worker handle this
+ amqp.worker.do(_add_queries)
#can be rewritten to be more efficient
def all_queries(fn, obj, *param_lists):
@@ -357,36 +372,60 @@ def display_jobs(jobs):
## The following functions should be called after their respective
## actions to update the correct listings.
def new_link(link):
+ "Called on the submission and deletion of links"
sr = Subreddit._byID(link.sr_id)
author = Account._byID(link.author_id)
results = all_queries(get_links, sr, ('hot', 'new', 'old'), ['all'])
- results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
+
+ results.extend(all_queries(get_links, sr, ('top', 'controversial'),
+ db_times.keys()))
results.append(get_submitted(author, 'new', 'all'))
#results.append(get_links(sr, 'toplinks', 'all'))
if link._spam:
results.append(get_spam_links(sr))
if link._deleted:
+ results.append(get_links(sr, 'new', 'all'))
add_queries(results, delete_items = link)
else:
+ # only 'new' qualifies for insertion, which will be done in
+ # run_new_links
add_queries(results, insert_items = link)
+ amqp.add_item('new_link', link._fullname)
+
+
def new_comment(comment, inbox_rel):
author = Account._byID(comment.author_id)
job = [get_comments(author, 'new', 'all')]
if comment._deleted:
+ job.append(get_all_comments())
add_queries(job, delete_items = comment)
else:
#if comment._spam:
# sr = Subreddit._byID(comment.sr_id)
# job.append(get_spam_comments(sr))
add_queries(job, insert_items = comment)
+ amqp.add_item('new_comment', comment._fullname)
+
+ # note that get_all_comments() is updated by the amqp process
+ # r2.lib.db.queries.run_new_comments
if inbox_rel:
inbox_owner = inbox_rel._thing1
- add_queries([get_inbox_comments(inbox_owner)],
- insert_items = inbox_rel)
+ if inbox_rel._name == "inbox":
+ add_queries([get_inbox_comments(inbox_owner)],
+ insert_items = inbox_rel)
+ else:
+ add_queries([get_inbox_selfreply(inbox_owner)],
+ insert_items = inbox_rel)
+
+
+def new_subreddit(sr):
+ "no precomputed queries here yet"
+ amqp.add_item('new_subreddit', sr._fullname)
+
def new_vote(vote):
user = vote._thing1
@@ -432,6 +471,13 @@ def new_savehide(rel):
elif name == 'unhide':
add_queries([get_hidden(user)], delete_items = rel)
+def changed(things):
+ """Indicate to solrsearch that a given item should be updated"""
+ things = tup(things)
+ for thing in things:
+ amqp.add_item('searchchanges_q', thing._fullname,
+ message_id = thing._fullname)
+
def _by_srid(things):
"""Takes a list of things and returns them in a dict separated by
sr_id, in addition to the looked-up subreddits"""
@@ -452,8 +498,8 @@ def ban(things):
for sr_id, things in by_srid.iteritems():
sr = srs[sr_id]
- links = [ x for x in things if isinstance(x, Link) ]
- #comments = [ x for x in things if isinstance(x, Comment) ]
+ links = [x for x in things if isinstance(x, Link)]
+ comments = [x for x in things if isinstance(x, Comment)]
if links:
add_queries([get_spam_links(sr)], insert_items = links)
@@ -462,11 +508,16 @@ def ban(things):
get_links(sr, 'new', 'all'),
get_links(sr, 'top', 'all'),
get_links(sr, 'controversial', 'all')]
- results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
+ results.extend(all_queries(get_links, sr,
+ ('top', 'controversial'),
+ db_times.keys()))
add_queries(results, delete_items = links)
- #if comments:
- # add_queries([get_spam_comments(sr)], insert_items = comments)
+ if comments:
+ # add_queries([get_spam_comments(sr)], insert_items = comments)
+ add_queries([get_all_comments()], delete_items = comments)
+
+ changed(things)
def unban(things):
by_srid, srs = _by_srid(things)
@@ -475,8 +526,8 @@ def unban(things):
for sr_id, things in by_srid.iteritems():
sr = srs[sr_id]
- links = [ x for x in things if isinstance(x, Link) ]
- #comments = [ x for x in things if isinstance(x, Comment) ]
+ links = [x for x in things if isinstance(x, Link)]
+ comments = [x for x in things if isinstance(x, Comment)]
if links:
add_queries([get_spam_links(sr)], delete_items = links)
@@ -485,11 +536,16 @@ def unban(things):
get_links(sr, 'new', 'all'),
get_links(sr, 'top', 'all'),
get_links(sr, 'controversial', 'all')]
- results.extend(all_queries(get_links, sr, ('top', 'controversial'), db_times.keys()))
+ results.extend(all_queries(get_links, sr,
+ ('top', 'controversial'),
+ db_times.keys()))
add_queries(results, insert_items = links)
- #if comments:
- # add_queries([get_spam_comments(sr)], delete_items = comments)
+ if comments:
+ #add_queries([get_spam_comments(sr)], delete_items = comments)
+ add_queries([get_all_comments()], insert_items = comments)
+
+ changed(things)
def new_report(thing):
if isinstance(thing, Link):
@@ -546,6 +602,7 @@ def update_user(user):
results = [get_inbox_messages(user),
get_inbox_comments(user),
+ get_inbox_selfreply(user),
get_sent(user),
get_liked(user),
get_disliked(user),
@@ -559,3 +616,138 @@ def add_all_users():
q = Account._query(sort = asc('_date'))
for user in fetch_things2(q):
update_user(user)
+
+
+# amqp queue processing functions
+
+def run_new_comments():
+
+ def _run_new_comments(msgs, chan):
+ fnames = [msg.body for msg in msgs]
+ comments = Comment._by_fullname(fnames, return_dict=False)
+ add_queries([get_all_comments()],
+ insert_items = comments)
+
+ amqp.handle_items('newcomments_q', _run_new_comments, limit=100)
+
+
+#def run_new_links():
+# """queue to add new links to the 'new' page. note that this isn't
+# in use until the spam_q plumbing is"""
+#
+# def _run_new_links(msgs, chan):
+# fnames = [ msg.body for msg in msgs ]
+# links = Link._by_fullname(fnames, data=True, return_dict=False)
+#
+# srs = Subreddit._byID([l.sr_id for l in links], return_dict=True)
+#
+# results = []
+#
+# _sr = lambda l: l.sr_id
+# for sr_id, sr_links in itertools.groupby(sorted(links, key=_sr),
+# key=_sr):
+# sr = srs[sr_id]
+# results = [get_links(sr, 'new', 'all')]
+# add_queries(results, insert_items = sr_links)
+#
+# amqp.handle_items('newpage_q', _run_new_links, limit=100)
+
+
+def queue_vote(user, thing, dir, ip, organic = False):
+ if g.amqp_host:
+ key = "registered_vote_%s_%s" % (user._id, thing._fullname)
+ g.cache.set(key, '1' if dir is True else '0' if dir is None else '-1')
+ amqp.add_item('register_vote_q',
+ pickle.dumps((user._id, thing._fullname, dir, ip, organic)))
+ else:
+ handle_vote(user, thing, dir, ip, organic)
+
+def get_likes(user, items):
+ if not user or not items:
+ return {}
+ keys = {}
+ res = {}
+ for i in items:
+ keys['registered_vote_%s_%s' % (user._id, i._fullname)] = (user, i)
+ r = g.cache.get_multi(keys.keys())
+
+ # populate the result set based on what we fetched from the cache first
+ for k, v in r.iteritems():
+ res[keys[k]] = v
+
+ # now hit the vote db with the remainder
+ likes = Vote.likes(user, [i for i in items if (user, i) not in res])
+
+ for k, v in likes.iteritems():
+ res[k] = v._name
+
+ # lastly, translate into boolean:
+ for k in res.keys():
+ res[k] = (True if res[k] == '1'
+ else False if res[k] == '-1' else None)
+
+ return res
+
+def handle_vote(user, thing, dir, ip, organic):
+ from r2.lib.db import tdb_sql
+ from sqlalchemy.exc import IntegrityError
+ try:
+ v = Vote.vote(user, thing, dir, ip, organic)
+ except (tdb_sql.CreationError, IntegrityError):
+ g.log.error("duplicate vote for: %s" % str((user, thing, dir)))
+ return
+
+ if isinstance(thing, Link):
+ new_vote(v)
+ if v.valid_thing:
+ expire_hot(thing.subreddit_slow)
+
+ #update the modified flags
+ set_last_modified(user, 'liked')
+ if user._id == thing.author_id:
+ set_last_modified(user, 'overview')
+ set_last_modified(user, 'submitted')
+ #update sup listings
+ sup.add_update(user, 'submitted')
+
+ #update sup listings
+ if dir:
+ sup.add_update(user, 'liked')
+ elif dir is False:
+ sup.add_update(user, 'disliked')
+
+ elif isinstance(thing, Comment):
+ #update last modified
+ if user._id == thing.author_id:
+ set_last_modified(user, 'overview')
+ set_last_modified(user, 'commented')
+ #update sup listings
+ sup.add_update(user, 'commented')
+
+
+def process_votes(drain = False, limit = 100):
+
+ def _handle_votes(msgs, chan):
+ to_do = []
+ uids = set()
+ tids = set()
+ for x in msgs:
+ uid, tid, dir, ip, organic = pickle.loads(x.body)
+ print (uid, tid, dir, ip, organic)
+ uids.add(uid)
+ tids.add(tid)
+ to_do.append((uid, tid, dir, ip, organic))
+
+ users = Account._byID(uids, data = True, return_dict = True)
+ things = Thing._by_fullname(tids, data = True, return_dict = True)
+
+ for uid, tid, dir, ip, organic in to_do:
+ handle_vote(users[uid], things[tid], dir, ip, organic)
+
+ amqp.handle_items('register_vote_q', _handle_votes, limit = limit,
+ drain = drain)
+
+try:
+ from r2admin.lib.admin_queries import *
+except ImportError:
+ pass
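The vote path is now split across the queue: queue_vote caches the user's intended direction (so get_likes can answer from the cache before the vote lands in the db) and pickles (user id, fullname, dir, ip, organic) onto register_vote_q, while process_votes drains that queue and applies each tuple via handle_vote, which now owns the expire_hot/sup/last-modified bookkeeping that used to live in the controllers. A sketch of the two ends (the objects are illustrative):

    # web tier: record intent and enqueue the real work
    queries.queue_vote(c.user, link, True, request.ip)

    # long-running consumer: drain register_vote_q in batches
    queries.process_votes(limit = 100)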
16 r2/r2/lib/db/query_queue.py
@@ -7,13 +7,13 @@
working_prefix = 'working_'
prefix = 'prec_link_'
-TIMEOUT = 120
+TIMEOUT = 600
def add_query(cached_results):
amqp.add_item('prec_links', pickle.dumps(cached_results, -1))
def run():
- def callback(msgs):
+ def callback(msgs, chan):
for msg in msgs: # will be len==1
# r2.lib.db.queries.CachedResults
cr = pickle.loads(msg.body)
@@ -36,16 +36,18 @@ def callback(msgs):
cr = pickle.loads(msg.body)
- print 'working: ', iden, cr.query._rules
+ print 'working: ', iden, cr.query._rules, cr.query._sort
start = datetime.now()
- cr.update()
+ try:
+ cr.update()
+ g.memcache.set(key, datetime.now())
+ finally:
+ g.memcache.delete(working_key)
+
done = datetime.now()
q_time_s = (done - msg.timestamp).seconds
proc_time_s = (done - start).seconds + ((done - start).microseconds/1000000.0)
print ('processed %s in %.6f seconds after %d seconds in queue'
% (iden, proc_time_s, q_time_s))
- g.memcache.set(key, datetime.now())
- g.memcache.delete(working_key)
-
amqp.handle_items('prec_links', callback, limit = 1)
84 r2/r2/lib/db/tdb_lite.py
@@ -0,0 +1,84 @@
+# The contents of this file are subject to the Common Public Attribution
+# License Version 1.0. (the "License"); you may not use this file except in
+# compliance with the License. You may obtain a copy of the License at
+# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
+# License Version 1.1, but Sections 14 and 15 have been added to cover use of
+# software over a computer network and provide for limited attribution for the
+# Original Developer. In addition, Exhibit A has been modified to be consistent
+# with Exhibit B.
+#
+# Software distributed under the License is distributed on an "AS IS" basis,
+# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
+# the specific language governing rights and limitations under the License.
+#
+# The Original Code is Reddit.
+#
+# The Original Developer is the Initial Developer. The Initial Developer of the
+# Original Code is CondeNet, Inc.
+#
+# All portions of the code written by CondeNet are Copyright (c) 2006-2009
+# CondeNet, Inc. All Rights Reserved.
+################################################################################
+
+import sqlalchemy as sa
+
+class tdb_lite(object):
+ def __init__(self, gc):
+ self.gc = gc
+
+ def make_metadata(self, engine):
+ metadata = sa.MetaData(engine)
+ metadata.bind.echo = self.gc.sqlprinting
+ return metadata
+
+ def index_str(self, table, name, on, where = None):
+ index_str = 'create index idx_%s_' % name
+ index_str += table.name
+ index_str += ' on '+ table.name + ' (%s)' % on
+ if where:
+ index_str += ' where %s' % where
+ return index_str
+
+ def create_table(self, table, index_commands=None):
+ t = table
+ if self.gc.db_create_tables:
+ #@@hackish?
+ if not t.bind.has_table(t.name):
+ t.create(checkfirst = False)
+ if index_commands:
+ for i in index_commands:
+ t.bind.execute(i)
+
+ def py2db(self, val, return_kind=False):
+ if isinstance(val, bool):
+ val = 't' if val else 'f'
+ kind = 'bool'
+ elif isinstance(val, (str, unicode)):