From e87f520d45e7106f5a365e25a59a5bdca0959fdf Mon Sep 17 00:00:00 2001
From: KeyserSosa
Date: Tue, 4 May 2010 12:12:48 -0700
Subject: [PATCH] New Features:
 * Make the new targeted self-serve promotion the default
 * add TheOatmeal's comic to our source. (http://www.theoatmeal.com)
 * add targeting
 * add rerunning or supplementing existing campaigns
 * move all sponsored link listings to be precomputed
 * deputy moderation
 * /about/trials and /about/modqueue
 * Have trials train the spam filter
 * Allow admins to see all trials, site-wide, at /r/all/about/trials
 * Allow trials to be ended while votes are still coming in when the jury opinion is near-unanimous
 * offload scraping to embed.ly (see the sketch after this list)
 * Use boto for s3cp instead of forking curl (see the sketch after this list)

Additions:
 * migrated all graphs to use Flot
 * Implement per-user suspiciousness quota and print over-quota trace
 * Keep track of how many questionable links a user has outstanding at any given time
 * add links to the FAQ and walkthrough
 * add a different thumbnail for self posts (thanks to licensplate)
 * Add some visual tracking for comment trees
 * Relabel: ban / unban => remove / approve
 * Use big remove / approve buttons when the listing relates to moderation, or when the item has reports or is on trial
 * Merge unban, ignore, and a new "approve this non-banned link" action into one
 * Be a little smarter about the way we allow caching of subreddit stylesheets
 * make cassandra's cache chain cache negative results (and yes, that commit message is bigger than the patch)
 * add All, friends, random, and mod to topbar
 * add a local render precache (optional)
 * better noimage.png from paradox460
 * Add a 'legacy' mode to CMemcache that will allow us to use it as a drop-in replacement for testing purposes. Modifies python-memcached to use the same 'crc' hashing algorithm that pylibmc does, and removes a feature in python-memcached that we aren't using (see the sketch after this list). Contributed by Joe Terranova
 * Everyone sees cake
 * Added g.system_user

Bugfixes:
 * improve validation of media embeds, and remove a todo from link.py by putting all of the child div stuff in pages
 * Feed/JSON updates
 * Add selftext to RSS feeds
 * Add subreddit names to Comments in JSON
 * add is_self param to indicate self posts
 * Don't shade the background of child comments of spam
 * shortened the cache lifetime for AllSR to 1 min
 * Tell everyone that we actually own the trademarks we claim to own (footer update)
 * You no longer get an orange alien when one of your reddits sends an automated message to a user
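A note on "offload scraping to embed.ly" above: rather than fetching and parsing pages in-process, the scraper can ask embed.ly's oEmbed endpoint to describe a URL. The snippet below is an illustrative sketch only, not the scraper.py code in this patch (fetch_embedly_oembed is a made-up name; the endpoint is embed.ly's public oEmbed API):

    import json
    import urllib
    import urllib2

    def fetch_embedly_oembed(url):
        # Ask embed.ly to describe the URL; the JSON reply carries
        # oEmbed fields such as 'type', 'html', and 'thumbnail_url'.
        params = urllib.urlencode({'url': url, 'format': 'json'})
        return json.loads(
            urllib2.urlopen('http://api.embed.ly/1/oembed?' + params).read())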
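Similarly, "Use boto for s3cp instead of forking curl" amounts to replacing a curl subprocess with boto's S3 API. A minimal sketch, reusing the placeholder credentials and bucket name from example.ini below (s3cp_sketch is a hypothetical name; the real change lives in r2/r2/lib/s3cp.py):

    import boto

    S3KEY_ID = 'ABCDEFGHIJKLMNOP1234'        # placeholder from example.ini
    S3SECRET_KEY = 'aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCd'

    def s3cp_sketch(data, key_name, mimetype='image/png'):
        # Upload in-process via boto rather than forking curl.
        conn = boto.connect_s3(S3KEY_ID, S3SECRET_KEY)
        bucket = conn.get_bucket('your.bucket.here')  # s3_thumb_bucket
        key = bucket.new_key(key_name)
        key.set_contents_from_string(data, headers={'Content-Type': mimetype})
        key.set_acl('public-read')  # thumbnails are served publicly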
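Finally, the CMemcache 'legacy' mode: for python-memcached to act as a drop-in stand-in for pylibmc, both clients must map a given key to the same server. libmemcached's 'crc' hasher is CRC32 folded down to 15 bits. A sketch of the idea follows; the serverHashFunction hook is an assumption for illustration, since the patch itself edits python-memcached directly in r2/r2/lib/contrib/memcache.py:

    import zlib
    import memcache

    def crc_hash(key):
        # libmemcached's "crc" distribution hash: CRC32 of the key,
        # shifted right 16 bits and masked down to 15 bits.
        return ((zlib.crc32(key) & 0xffffffff) >> 16) & 0x7fff

    # Route keys the way pylibmc would, so either client finds the same
    # data on a multi-server pool (hook name assumed for this sketch).
    memcache.serverHashFunction = crc_hash
    client = memcache.Client(['127.0.0.1:11211', '127.0.0.1:11212'])
    client.set('some_key', 'some_value')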
--- r2/Makefile | 2 +- r2/example.ini | 5 +- r2/r2/config/routing.py | 39 +- r2/r2/controllers/__init__.py | 2 +- r2/r2/controllers/ads.py | 6 +- r2/r2/controllers/api.py | 140 +- r2/r2/controllers/buttons.py | 54 +- r2/r2/controllers/errors.py | 1 + r2/r2/controllers/front.py | 575 +++--- r2/r2/controllers/health.py | 4 +- r2/r2/controllers/listingcontroller.py | 161 +- r2/r2/controllers/mediaembed.py | 6 +- r2/r2/controllers/promotecontroller.py | 422 ++-- r2/r2/controllers/reddit_base.py | 64 +- r2/r2/controllers/toolbar.py | 5 +- r2/r2/controllers/validator/validator.py | 50 +- r2/r2/lib/app_globals.py | 50 +- r2/r2/lib/authorize/interaction.py | 98 +- r2/r2/lib/cache.py | 186 +- r2/r2/lib/contrib/memcache.py | 133 +- r2/r2/lib/cssfilter.py | 24 +- r2/r2/lib/db/queries.py | 123 +- r2/r2/lib/db/tdb_sql.py | 2 +- r2/r2/lib/db/thing.py | 13 +- r2/r2/lib/db/userrel.py | 3 +- r2/r2/lib/emailer.py | 30 +- r2/r2/lib/jsontemplates.py | 26 +- r2/r2/lib/media.py | 26 +- r2/r2/lib/menus.py | 7 +- r2/r2/lib/migrate.py | 178 +- r2/r2/lib/organic.py | 104 +- r2/r2/lib/pages/pages.py | 496 +++-- r2/r2/lib/pages/things.py | 26 +- r2/r2/lib/promote.py | 1031 ++++++---- r2/r2/lib/s3cp.py | 85 +- r2/r2/lib/scraper.py | 601 +++++- r2/r2/lib/services.py | 16 +- r2/r2/lib/strings.py | 13 +- r2/r2/lib/tracking.py | 6 + r2/r2/lib/traffic.py | 9 +- r2/r2/lib/utils/trial_utils.py | 92 +- r2/r2/lib/utils/utils.py | 108 ++ r2/r2/lib/wrapped.py | 3 + r2/r2/models/account.py | 75 +- r2/r2/models/ad.py | 12 +- r2/r2/models/admintools.py | 95 +- r2/r2/models/award.py | 12 +- r2/r2/models/bidding.py | 143 +- r2/r2/models/builder.py | 29 +- r2/r2/models/jury.py | 12 +- r2/r2/models/link.py | 71 +- r2/r2/models/mail_queue.py | 30 +- r2/r2/models/subreddit.py | 189 +- r2/r2/models/trial.py | 150 +- r2/r2/models/vote.py | 16 +- r2/r2/public/static/cake.png | Bin 224 -> 220 bytes r2/r2/public/static/css/reddit.css | 192 +- r2/r2/public/static/green-check.png | Bin 834 -> 529 bytes r2/r2/public/static/js/jquery.flot.js | 2119 +++++++++++++++++++++ r2/r2/public/static/js/jquery.lazyload.js | 164 ++ r2/r2/public/static/js/jquery.reddit.js | 32 +- r2/r2/public/static/js/reddit.js | 29 +- r2/r2/public/static/js/sponsored.js | 271 ++- r2/r2/public/static/noimage.png | Bin 1249 -> 2736 bytes r2/r2/public/static/nothing.png | Bin 0 -> 241 bytes r2/r2/public/static/self_default.png | Bin 0 -> 2252 bytes r2/r2/public/static/vid-collapsed.png | Bin 319 -> 908 bytes r2/r2/public/static/vid-expanded.png | Bin 306 -> 902 bytes r2/r2/templates/buttondemopanel.html | 12 +- r2/r2/templates/link.html | 31 +- r2/r2/templates/link.xml | 4 + r2/r2/templates/linkontrial.html | 10 +- r2/r2/templates/linkpromoteinfobar.html | 30 - r2/r2/templates/messagecompose.html | 2 +- r2/r2/templates/morechildren.html | 2 +- r2/r2/templates/moremessages.html | 2 +- r2/r2/templates/newlink.html | 34 +- r2/r2/templates/paymentform.html | 33 +-
r2/r2/templates/printable.html | 12 +- r2/r2/templates/printablebuttons.html | 203 +- r2/r2/templates/promo_email.email | 18 +- r2/r2/templates/promote_graph.html | 37 +- r2/r2/templates/promotedlink.html | 48 +- r2/r2/templates/promotedtraffic.html | 17 +- r2/r2/templates/promotelinkform.html | 547 +++--- r2/r2/templates/reddit.html | 20 +- r2/r2/templates/redditfooter.html | 2 + r2/r2/templates/redditheader.html | 10 +- r2/r2/templates/reddittraffic.html | 100 +- r2/r2/templates/selfserveblurb.html | 2 +- r2/r2/templates/selfserviceoatmeal.html | 40 + r2/r2/templates/spotlightlisting.html | 8 +- r2/r2/templates/subredditinfobar.html | 8 +- r2/r2/templates/subredditstylesheet.html | 4 +- r2/r2/templates/trafficgraph.html | 74 + r2/r2/templates/upgradebuttons.html | 149 ++ r2/r2/templates/utils.html | 34 + r2/r2/tests/testfile | 1 + r2/setup.py | 5 +- 99 files changed, 7698 insertions(+), 2467 deletions(-) create mode 100644 r2/r2/public/static/js/jquery.flot.js create mode 100644 r2/r2/public/static/js/jquery.lazyload.js create mode 100644 r2/r2/public/static/nothing.png create mode 100644 r2/r2/public/static/self_default.png create mode 100644 r2/r2/templates/selfserviceoatmeal.html create mode 100644 r2/r2/templates/trafficgraph.html create mode 100644 r2/r2/templates/upgradebuttons.html create mode 100644 r2/r2/tests/testfile diff --git a/r2/Makefile b/r2/Makefile index f744460e14..770d847237 100644 --- a/r2/Makefile +++ b/r2/Makefile @@ -21,7 +21,7 @@ ################################################################################ # Jacascript files to be compressified -js_targets = jquery.js jquery.json.js jquery.reddit.js reddit.js ui.core.js ui.datepicker.js sponsored.js +js_targets = jquery.js jquery.json.js jquery.reddit.js reddit.js ui.core.js ui.datepicker.js sponsored.js jquery.flot.js jquery.lazyload.js # CSS targets main_css = reddit.css css_targets = reddit-ie6-hax.css reddit-ie7-hax.css mobile.css spreadshirt.css diff --git a/r2/example.ini b/r2/example.ini index b4b79804d9..8552c373bb 100644 --- a/r2/example.ini +++ b/r2/example.ini @@ -161,7 +161,7 @@ tracking_secret = abcdefghijklmnopqrstuvwxyz0123456789 ip_hash = S3KEY_ID = ABCDEFGHIJKLMNOP1234 S3SECRET_KEY = aBcDeFgHiJkLmNoPqRsTuVwXyZ1234567890AbCd -s3_thumb_bucket = /your.bucket.here/ +s3_thumb_bucket = your.bucket.here default_thumb = /static/noimage.png MIN_DOWN_LINK = 0 @@ -182,6 +182,8 @@ rising_period = 12 hours # time of ratelimit purgatory (min) RATELIMIT = 10 +QUOTA_THRESHOLD = 0 + num_comments = 200 max_comments = 500 num_default_reddits = 10 @@ -200,6 +202,7 @@ share_reply = noreply@yourdomain.com agents = feedback_email = abuse@localhost +system_user = reddit # t-shirt stuff spreadshirt_url = diff --git a/r2/r2/config/routing.py b/r2/r2/config/routing.py index a700965bb5..be6a6aa982 100644 --- a/r2/r2/config/routing.py +++ b/r2/r2/config/routing.py @@ -32,13 +32,13 @@ def make_map(global_conf={}, app_conf={}): admin_routes.add(mc) - mc('/login', controller='front', action='login') - mc('/logout', controller='front', action='logout') - mc('/verify', controller='front', action='verify') - mc('/adminon', controller='front', action='adminon') - mc('/adminoff', controller='front', action='adminoff') + mc('/login', controller='forms', action='login') + mc('/logout', controller='forms', action='logout') + mc('/verify', controller='forms', action='verify') + mc('/adminon', controller='forms', action='adminon') + mc('/adminoff', controller='forms', action='adminoff') mc('/submit', controller='front', 
action='submit') - mc('/validuser', controller='front', action='validuser') + mc('/validuser', controller='forms', action='validuser') mc('/over18', controller='post', action='over18') @@ -63,6 +63,7 @@ def make_map(global_conf={}, app_conf={}): requirements=dict(where='subscriber|contributor|moderator')) mc('/buttons', controller='buttons', action='button_demo_page') + mc('/upgradebuttons', controller='buttons', action='upgrade_buttons') #the frame mc('/button_content', controller='buttons', action='button_content') #/button.js and buttonlite.js - the embeds @@ -78,7 +79,6 @@ def make_map(global_conf={}, app_conf={}): mc('/feedback', controller='feedback', action='feedback') mc('/ad_inq', controller='feedback', action='ad_inq') - mc('/admin/i18n', controller='i18n', action='list') mc('/admin/i18n/:action', controller='i18n') mc('/admin/i18n/:action/:lang', controller='i18n') @@ -103,10 +103,10 @@ def make_map(global_conf={}, app_conf={}): mc('/user/:username/:where', controller='user', action='listing', where='overview') - mc('/prefs/:location', controller='front', + mc('/prefs/:location', controller='forms', action='prefs', location='options') - mc('/juryduty', controller='front', action='juryduty') + mc('/depmod', controller='forms', action='depmod') mc('/info/0:article/*rest', controller = 'front', action='oldinfo', dest='comments', type='ancient') @@ -126,8 +126,8 @@ def make_map(global_conf={}, app_conf={}): mc('/duplicates/:article/:title', controller = 'front', action = 'duplicates', title=None) - mc('/mail/optout', controller='front', action = 'optout') - mc('/mail/optin', controller='front', action = 'optin') + mc('/mail/optout', controller='forms', action = 'optout') + mc('/mail/optin', controller='forms', action = 'optin') mc('/stylesheet', controller = 'front', action = 'stylesheet') mc('/frame', controller='front', action = 'frame') mc('/framebuster/:blah', controller='front', action = 'framebuster') @@ -136,12 +136,12 @@ def make_map(global_conf={}, app_conf={}): mc('/promoted/edit_promo/:link', controller='promote', action = 'edit_promo') - mc('/promoted/pay/:link', + mc('/promoted/pay/:link/:indx', controller='promote', action = 'pay') mc('/promoted/graph', controller='promote', action = 'graph') mc('/promoted/:action', controller='promote', - requirements = dict(action = "new_promo")) + requirements = dict(action = "edit_promo|new_promo|roadblock")) mc('/promoted/:sort', controller='promote', action = "listing") mc('/promoted/', controller='promoted', action = "listing", sort = "") @@ -151,7 +151,7 @@ def make_map(global_conf={}, app_conf={}): mc('/', controller='hot', action='listing') - listing_controllers = "hot|saved|new|recommended|randomrising|comments" + listing_controllers = "hot|saved|new|randomrising|comments" mc('/:controller', action='listing', requirements=dict(controller=listing_controllers)) @@ -168,8 +168,9 @@ def make_map(global_conf={}, app_conf={}): mc('/message/moderator/:subwhere', controller='message', action='listing', where = 'moderator') + mc('/password', controller='forms', action="password") mc('/:action', controller='front', - requirements=dict(action="password|random|framebuster")) + requirements=dict(action="random|framebuster|selfserviceoatmeal")) mc('/:action', controller='embed', requirements=dict(action="help|blog")) mc('/help/*anything', controller='embed', action='help') @@ -187,11 +188,11 @@ def make_map(global_conf={}, app_conf={}): mc('/d/:what', controller='api', action='bookmarklet') - mc('/resetpassword/:key', controller='front', 
+ mc('/resetpassword/:key', controller='forms', action='resetpassword') - mc('/verification/:key', controller='front', + mc('/verification/:key', controller='forms', action='verify_email') - mc('/resetpassword', controller='front', + mc('/resetpassword', controller='forms', action='resetpassword') mc('/post/:action/:url_user', controller='post', @@ -205,7 +206,7 @@ def make_map(global_conf={}, app_conf={}): mc('/api/gadget/click/:ids', controller = 'api', action='gadget', type='click') mc('/api/gadget/:type', controller = 'api', action='gadget') mc('/api/:action', controller='promote', - requirements=dict(action="promote|unpromote|new_promo|link_thumb|freebie|promote_note|update_pay|refund|traffic_viewer|rm_traffic_viewer")) + requirements=dict(action="promote|unpromote|edit_promo|link_thumb|freebie|promote_note|update_pay|refund|traffic_viewer|rm_traffic_viewer|edit_campaign|delete_campaign|meta_promo|add_roadblock|rm_roadblock")) mc('/api/:action', controller='api') mc("/button_info", controller="api", action="info", limit = 1) diff --git a/r2/r2/controllers/__init__.py b/r2/r2/controllers/__init__.py index 90ca79f8dc..17452febb3 100644 --- a/r2/r2/controllers/__init__.py +++ b/r2/r2/controllers/__init__.py @@ -24,7 +24,6 @@ from listingcontroller import SavedController from listingcontroller import NewController from listingcontroller import BrowseController -from listingcontroller import RecommendedController from listingcontroller import MessageController from listingcontroller import RedditsController from listingcontroller import ByIDController as ByidController @@ -35,6 +34,7 @@ from listingcontroller import MyredditsController from feedback import FeedbackController +from front import FormsController from front import FrontController from health import HealthController from buttons import ButtonsController diff --git a/r2/r2/controllers/ads.py b/r2/r2/controllers/ads.py index 4d53f080eb..3aba5a5eea 100644 --- a/r2/r2/controllers/ads.py +++ b/r2/r2/controllers/ads.py @@ -26,14 +26,14 @@ class AdsController(RedditController): - @validate(VAdmin()) + @validate(VSponsorAdmin()) def GET_index(self): res = AdminPage(content = AdminAds(), show_sidebar = False, title = 'ads').render() return res - @validate(VAdmin(), + @validate(VSponsorAdmin(), ad = VAdByCodename('adcn')) def GET_assign(self, ad): if ad is None: @@ -44,7 +44,7 @@ def GET_assign(self, ad): title='assign an ad to a community').render() return res - @validate(VAdmin(), + @validate(VSponsorAdmin(), ad = VAdByCodename('adcn')) def GET_srs(self, ad): if ad is None: diff --git a/r2/r2/controllers/api.py b/r2/r2/controllers/api.py index 9816c63901..5b00a6f5b0 100644 --- a/r2/r2/controllers/api.py +++ b/r2/r2/controllers/api.py @@ -35,7 +35,7 @@ from r2.lib.pages import FriendList, ContributorList, ModList, \ BannedList, BoringPage, FormPage, CssError, UploadedImage, \ ClickGadget -from r2.lib.utils.trial_utils import indict, on_trial +from r2.lib.utils.trial_utils import indict, end_trial, trial_info from r2.lib.pages.things import wrap_links, default_thing_wrapper from r2.lib import spreadshirt @@ -52,6 +52,7 @@ from r2.lib import tracking, cssfilter, emailer from r2.lib.subreddit_search import search_reddits from r2.lib.log import log_text +from r2.lib.filters import safemarkdown from datetime import datetime, timedelta from md5 import md5 @@ -164,11 +165,12 @@ def POST_compose(self, form, jquery, to, subject, body, ip): default='comments')) def POST_submit(self, form, jquery, url, banmsg, selftext, kind, title, save, sr, ip, 
then): - #backwards compatability - if url == 'self': - kind = 'self' if isinstance(url, (unicode, str)): + #backwards compatability + if url.lower() == 'self': + url = kind = 'self' + # VUrl may have replaced 'url' by adding 'http://' form.set_inputs(url = url) @@ -217,6 +219,29 @@ def POST_submit(self, form, jquery, url, banmsg, selftext, kind, title, if form.has_error() or not title: return + if should_ratelimit: + filled_quota = c.user.quota_full('link') + if filled_quota is not None and not c.user._spam: + log_text ("over-quota", + "%s just went over their per-%s quota" % + (c.user.name, filled_quota), "info") + + compose_link = ("/message/compose?to=%23" + sr.name + + "&subject=Exemption+request") + + verify_link = "/verify?reason=submit" + + if c.user.email_verified: + msg = strings.verified_quota_msg % dict(link=compose_link) + else: + msg = strings.unverified_quota_msg % dict(link1=verify_link, + link2=compose_link) + + md = safemarkdown(msg) + form.set_html(".status", md) + return + + # well, nothing left to do but submit it l = Link._submit(request.post.title, url if kind == 'link' else 'self', c.user, sr, ip) @@ -240,6 +265,7 @@ def POST_submit(self, form, jquery, url, banmsg, selftext, kind, title, #set the ratelimiter if should_ratelimit: + c.user.clog_quota('link', l) VRatelimit.ratelimit(rate_user=True, rate_ip = True, prefix = "rate_submit_") @@ -363,6 +389,7 @@ def POST_leavemoderator(self, container): """ if container and container.is_moderator(c.user): container.remove_moderator(c.user) + Subreddit.special_reddits(c.user, "moderator", _update=True) @noresponse(VUser(), VModhash(), @@ -373,8 +400,8 @@ def POST_leavecontributor(self, container): """ if container and container.is_contributor(c.user): container.remove_contributor(c.user) + Subreddit.special_reddits(c.user, "contributor", _update=True) - @noresponse(VUser(), VModhash(), nuser = VExistingUname('name'), @@ -394,7 +421,7 @@ def POST_unfriend(self, nuser, iuser, container, type): # The user who made the request must be an admin or a moderator # for the privilege change to succeed. if (not c.user_is_admin - and (type in ('moderator','contributer','banned') + and (type in ('moderator','contributor','banned') and not c.site.is_moderator(c.user))): abort(403, 'forbidden') # if we are (strictly) unfriending, the container had better @@ -404,6 +431,9 @@ def POST_unfriend(self, nuser, iuser, container, type): fn = getattr(container, 'remove_' + type) fn(iuser or nuser) + if type in ("moderator", "contributor"): + Subreddit.special_reddits(iuser or nuser, type, _update=True) + @validatedForm(VUser(), @@ -424,7 +454,7 @@ def POST_friend(self, form, jquery, ip, friend, # The user who made the request must be an admin or a moderator # for the privilege change to succeed. 
if (not c.user_is_admin - and (type in ('moderator','contributer', 'banned') + and (type in ('moderator','contributor', 'banned') and not c.site.is_moderator(c.user))): abort(403,'forbidden') @@ -433,34 +463,39 @@ def POST_friend(self, form, jquery, ip, friend, if type == "friend" and container != c.user: abort(403,'forbidden') - elif not form.has_errors("name", - errors.USER_DOESNT_EXIST, errors.NO_USER): - new = fn(friend) - cls = dict(friend=FriendList, - moderator=ModList, - contributor=ContributorList, - banned=BannedList).get(type) - form.set_inputs(name = "") - form.set_html(".status:first", _("added")) - if new and cls: - user_row = cls().user_row(friend) - jquery("#" + type + "-table").show( - ).find("table").insert_table_rows(user_row) - - if type != 'friend': - msg = strings.msg_add_friend.get(type) - subj = strings.subj_add_friend.get(type) - if msg and subj and friend.name != c.user.name: - # fullpath with domain needed or the markdown link - # will break - d = dict(url = container.path, - title = container.title) - msg = msg % d - subj = subj % d - item, inbox_rel = Message._new(c.user, friend, - subj, msg, ip) - - queries.new_message(item, inbox_rel) + elif form.has_errors("name", errors.USER_DOESNT_EXIST, errors.NO_USER): + return + + new = fn(friend) + + if type in ("moderator", "contributor"): + Subreddit.special_reddits(friend, type, _update=True) + + cls = dict(friend=FriendList, + moderator=ModList, + contributor=ContributorList, + banned=BannedList).get(type) + form.set_inputs(name = "") + form.set_html(".status:first", _("added")) + if new and cls: + user_row = cls().user_row(friend) + jquery("#" + type + "-table").show( + ).find("table").insert_table_rows(user_row) + + if type != 'friend': + msg = strings.msg_add_friend.get(type) + subj = strings.subj_add_friend.get(type) + if msg and subj and friend.name != c.user.name: + # fullpath with domain needed or the markdown link + # will break + d = dict(url = container.path, + title = container.title) + msg = msg % d + subj = subj % d + item, inbox_rel = Message._new(c.user, friend, + subj, msg, ip) + + queries.new_message(item, inbox_rel) @validatedForm(VUser('curpass', default = ''), @@ -536,8 +571,9 @@ def POST_del(self, thing): if not thing: return '''for deleting all sorts of things''' thing._deleted = True - if getattr(thing, "promoted", None) is not None: - promote.delete_promo(thing) + if (getattr(thing, "promoted", None) is not None and + not promote.is_promoted(thing)): + promote.reject_promotion(thing) thing._commit() # flag search indexer that something has changed @@ -784,7 +820,7 @@ def POST_juryvote(self, dir, thing, ip): j = Jury.by_account_and_defendant(c.user, thing) - if not on_trial([thing]).get(thing._fullname,False): + if not trial_info([thing]).get(thing._fullname,False): log_text("juryvote: not on trial", level="warning") return @@ -793,7 +829,7 @@ def POST_juryvote(self, dir, thing, ip): return log_text("juryvote", - "%s cast a %d juryvote on %r" % (c.user.name, dir, thing), + "%s cast a %d juryvote on %s" % (c.user.name, dir, thing._id36), level="info") j._name = str(dir) @@ -1143,23 +1179,20 @@ def POST_site_admin(self, form, jquery, name, ip, sr, jquery.refresh() @noresponse(VUser(), VModhash(), - VSrCanBan('id'), + why = VSrCanBan('id'), thing = VByName('id')) - def POST_ban(self, thing): + def POST_remove(self, why, thing): + end_trial(thing, why + "-removed") admintools.spam(thing, False, not c.user_is_admin, c.user.name) @noresponse(VUser(), VModhash(), - VSrCanBan('id'), + why = 
VSrCanBan('id'), thing = VByName('id')) - def POST_unban(self, thing): - admintools.unspam(thing, c.user.name) - - @noresponse(VUser(), VModhash(), - VSrCanBan('id'), - thing = VByName('id')) - def POST_ignore(self, thing): + def POST_approve(self, why, thing): if not thing: return - Report.accept(thing, False) + + end_trial(thing, why + "-approved") + admintools.unspam(thing, c.user.name) @validatedForm(VUser(), VModhash(), VSrCanDistinguish('id'), @@ -1714,12 +1747,15 @@ def POST_tb_commentspanel_hide(self): sponsorships = VByName('ids', thing_cls = Subreddit, multiple = True)) def POST_onload(self, form, jquery, promoted, sponsorships, *a, **kw): + suffix = "" + if not isinstance(c.site, FakeSubreddit): + suffix = "-" + c.site.name def add_tracker(dest, where, what): jquery.set_tracker( where, - tracking.PromotedLinkInfo.gen_url(fullname=what, + tracking.PromotedLinkInfo.gen_url(fullname=what + suffix, ip = request.ip), - tracking.PromotedLinkClickInfo.gen_url(fullname = what, + tracking.PromotedLinkClickInfo.gen_url(fullname =what + suffix, dest = dest, ip = request.ip) ) diff --git a/r2/r2/controllers/buttons.py b/r2/r2/controllers/buttons.py index 98ed43547a..10304a258c 100644 --- a/r2/r2/controllers/buttons.py +++ b/r2/r2/controllers/buttons.py @@ -21,7 +21,7 @@ ################################################################################ from reddit_base import RedditController, MinimalController, make_key from r2.lib.pages import Button, ButtonNoBody, ButtonEmbed, ButtonLite, \ - ButtonDemoPanel, WidgetDemoPanel, Bookmarklets, BoringPage + ButtonDemoPanel, WidgetDemoPanel, Bookmarklets, BoringPage, UpgradeButtons from r2.lib.pages.things import wrap_links from r2.models import * from r2.lib.utils import tup, query_string @@ -124,40 +124,16 @@ def get_wrapped_link(self, url, link = None, wrapper = None): return wrapper(None) - @validate(url = VSanitizedUrl('url'), - title = nop('title'), - css = nop('css'), - vote = VBoolean('vote', default=True), - newwindow = VBoolean('newwindow'), - width = VInt('width', 0, 800), - l = VByName('id')) - def GET_button_content(self, url, title, css, vote, newwindow, width, l): - # no buttons on domain listings - if isinstance(c.site, DomainSR): - c.site = Default - return self.redirect(request.path + query_string(request.GET)) - - #disable css hack - if (css != 'http://blog.wired.com/css/redditsocial.css' and - css != 'http://www.wired.com/css/redditsocial.css'): - css = None - - if l: - url = l.url - title = l.title - kw = {} - if title: - kw = dict(title = title) - wrapper = make_wrapper(Button if vote else ButtonNoBody, - url = url, - target = "_new" if newwindow else "_parent", - vote = vote, bgcolor = c.bgcolor, - width = width, css = css, - button = self.buttontype(), **kw) - - l = self.get_wrapped_link(url, l, wrapper) - return l.render() - + def GET_button_content(self, *a, **kw): + return """ + + + + + upgrade + + + """ @validate(buttontype = VInt('t', 1, 5), url = VSanitizedUrl("url"), @@ -226,6 +202,14 @@ def GET_button_demo_page(self): show_sidebar = False, content=ButtonDemoPanel()).render() + def GET_upgrade_buttons(self): + # no buttons for domain listings -> redirect to top level + if isinstance(c.site, DomainSR): + return self.redirect('/buttons') + return BoringPage(_("reddit buttons"), + show_sidebar = False, + content=UpgradeButtons()).render() + def GET_widget_demo_page(self): return BoringPage(_("reddit widget"), diff --git a/r2/r2/controllers/errors.py b/r2/r2/controllers/errors.py index 28bcacf10e..1e4ba292eb 100644 --- 
a/r2/r2/controllers/errors.py +++ b/r2/r2/controllers/errors.py @@ -67,6 +67,7 @@ ('BAD_EMAILS', _('the following emails are invalid: %(emails)s')), ('NO_EMAILS', _('please enter at least one email address')), ('TOO_MANY_EMAILS', _('please only share to %(num)s emails at a time.')), + ('OVERSOLD', _('that reddit has already been oversold on %(start)s to %(end)s. Please pick another reddit or date.')), ('BAD_DATE', _('please provide a date of the form mm/dd/yyyy')), ('BAD_DATE_RANGE', _('the dates need to be in order and not identical')), ('BAD_FUTURE_DATE', _('please enter a date at least %(day)s days in the future')), diff --git a/r2/r2/controllers/front.py b/r2/r2/controllers/front.py index ef8fac2bad..b5cdb5f3a9 100644 --- a/r2/r2/controllers/front.py +++ b/r2/r2/controllers/front.py @@ -51,6 +51,8 @@ class FrontController(RedditController): + allow_stylesheets = True + @validate(article = VLink('article'), comment = VCommentID('comment')) def GET_oldinfo(self, article, type, dest, rest=None, comment=''): @@ -102,71 +104,6 @@ def GET_random(self): else: return self.redirect(add_sr('/')) - def GET_password(self): - """The 'what is my password' page""" - return BoringPage(_("password"), content=Password()).render() - - @validate(VUser(), - dest = VDestination()) - def GET_verify(self, dest): - if c.user.email_verified: - content = InfoBar(message = strings.email_verified) - if dest: - return self.redirect(dest) - else: - content = PaneStack( - [InfoBar(message = strings.verify_email), - PrefUpdate(email = True, verify = True, - password = False)]) - return BoringPage(_("verify email"), content = content).render() - - @validate(VUser(), - cache_evt = VCacheKey('email_verify', ('key',)), - key = nop('key'), - dest = VDestination(default = "/prefs/update")) - def GET_verify_email(self, cache_evt, key, dest): - if c.user_is_loggedin and c.user.email_verified: - cache_evt.clear() - return self.redirect(dest) - elif not (cache_evt.user and - key == passhash(cache_evt.user.name, cache_evt.user.email)): - content = PaneStack( - [InfoBar(message = strings.email_verify_failed), - PrefUpdate(email = True, verify = True, - password = False)]) - return BoringPage(_("verify email"), content = content).render() - elif c.user != cache_evt.user: - # wrong user. Log them out and try again. 
- self.logout() - return self.redirect(request.fullpath) - else: - cache_evt.clear() - c.user.email_verified = True - c.user._commit() - Award.give_if_needed("verified_email", c.user) - return self.redirect(dest) - - @validate(cache_evt = VCacheKey('reset', ('key',)), - key = nop('key')) - def GET_resetpassword(self, cache_evt, key): - """page hit once a user has been sent a password reset email - to verify their identity before allowing them to update their - password.""" - - #if another user is logged-in, log them out - if c.user_is_loggedin: - self.logout() - return self.redirect(request.path) - - done = False - if not key and request.referer: - referer_path = request.referer.split(g.domain)[-1] - done = referer_path.startswith(request.fullpath) - elif not getattr(cache_evt, "user", None): - return self.abort404() - return BoringPage(_("reset password"), - content=ResetPassword(key=key, done=done)).render() - @validate(VAdmin(), article = VLink('article')) def GET_details(self, article): @@ -176,6 +113,13 @@ def GET_details(self, article): return DetailsPage(link = article, expand_children=False).render() + def GET_selfserviceoatmeal(self +): + return BoringPage(_("self service help"), + show_sidebar = False, + content = SelfServiceOatmeal()).render() + + @validate(article = VLink('article')) def GET_shirt(self, article): if not can_view_link_comments(article): @@ -275,78 +219,6 @@ def GET_comments(self, article, comment, context, sort, num_comments, infotext = infotext).render() return res - @validate(VUser()) - def GET_juryduty(self): - displayPane = PaneStack() - - active_trials = {} - finished_trials = {} - - juries = Jury.by_account(c.user) - - trials = on_trial([j._thing2 for j in juries]) - - for j in juries: - defendant = j._thing2 - - if trials.get(defendant._fullname, False): - active_trials[defendant._fullname] = j._name - else: - finished_trials[defendant._fullname] = j._name - - if active_trials: - fullnames = sorted(active_trials.keys(), reverse=True) - - def my_wrap(thing): - w = Wrapped(thing) - w.hide_score = True - w.likes = None - w.trial_mode = True - w.render_class = LinkOnTrial - w.juryvote = active_trials[thing._fullname] - return w - - listing = wrap_links(fullnames, wrapper=my_wrap) - displayPane.append(InfoBar(strings.active_trials, - extra_class="mellow")) - displayPane.append(listing) - - if finished_trials: - fullnames = sorted(finished_trials.keys(), reverse=True) - listing = wrap_links(fullnames) - displayPane.append(InfoBar(strings.finished_trials, - extra_class="mellow")) - displayPane.append(listing) - - displayPane.append(InfoBar(strings.more_info_link % - dict(link="/help/juryduty"), - extra_class="mellow")) - - return Reddit(content = displayPane).render() - - @validate(VUser(), - location = nop("location")) - def GET_prefs(self, location=''): - """Preference page""" - content = None - infotext = None - if not location or location == 'options': - content = PrefOptions(done=request.get.get('done')) - elif location == 'friends': - content = PaneStack() - infotext = strings.friends % Friends.path - content.append(FriendList()) - elif location == 'update': - content = PrefUpdate() - elif location == 'feeds' and c.user.pref_private_feeds: - content = PrefFeeds() - elif location == 'delete': - content = PrefDelete() - else: - return self.abort404() - - return PrefsPage(content = content, infotext=infotext).render() - @validate(VUser(), name = nop('name')) def GET_newreddit(self, name): @@ -360,22 +232,98 @@ def GET_newreddit(self, name): def 
GET_stylesheet(self): if hasattr(c.site,'stylesheet_contents') and not g.css_killswitch: - self.check_modified(c.site,'stylesheet_contents') + c.allow_loggedin_cache = True + self.check_modified(c.site,'stylesheet_contents', + private=False, max_age=7*24*60*60, + must_revalidate=False) c.response_content_type = 'text/css' c.response.content = c.site.stylesheet_contents return c.response else: return self.abort404() - @base_listing - @validate(location = nop('location'), - created = VOneOf('created', ('true','false'), - default = 'false')) - def GET_editreddit(self, location, num, after, reverse, count, created): - """Edit reddit form.""" - if isinstance(c.site, FakeSubreddit): + def _make_spamlisting(self, location, num, after, reverse, count): + if location == 'reports': + query = c.site.get_reported() + elif location == 'spam': + query = c.site.get_spam() + elif location == 'trials': + query = c.site.get_trials() + elif location == 'modqueue': + query = c.site.get_modqueue() + else: + raise ValueError + + if isinstance(query, thing.Query): + builder_cls = QueryBuilder + elif isinstance (query, list): + builder_cls = QueryBuilder + else: + builder_cls = IDBuilder + + def keep_fn(x): + # no need to bother mods with banned users, or deleted content + if x.hidden or x._deleted: + return False + + if location == "reports": + return x.reported > 0 and not x._spam + elif location == "spam": + return x._spam + elif location == "trials": + return not getattr(x, "verdict", None) + elif location == "modqueue": + if x.reported > 0 and not x._spam: + return True # reported but not banned + verdict = getattr(x, "verdict", None) + if verdict is None: + return True # anything without a verdict (i.e., trials) + if x._spam and verdict != 'mod-removed': + return True # spam, unless banned by a moderator + return False + else: + raise ValueError + + builder = builder_cls(query, + skip = True, + num = num, after = after, + keep_fn = keep_fn, + count = count, reverse = reverse, + wrap = ListingController.builder_wrapper) + listing = LinkListing(builder) + pane = listing.listing() + + return pane + + def _edit_modcontrib_reddit(self, location, num, after, reverse, count, created): + extension_handling = False + + if not c.user_is_loggedin: + return self.abort404() + if isinstance(c.site, ModSR): + level = 'mod' + elif isinstance(c.site, ContribSR): + level = 'contrib' + elif isinstance(c.site, AllSR): + level = 'all' + else: + raise ValueError + + if ((level == 'mod' and + location in ('reports', 'spam', 'trials', 'modqueue')) + or + (level == 'all' and + location == 'trials')): + pane = self._make_spamlisting(location, num, after, reverse, count) + if c.user.pref_private_feeds: + extension_handling = "private" + else: return self.abort404() + return EditReddit(content = pane, + extension_handling = extension_handling).render() + + def _edit_normal_reddit(self, location, num, after, reverse, count, created): # moderator is either reddit's moderator or an admin is_moderator = c.user_is_loggedin and c.site.is_moderator(c.user) or c.user_is_admin extension_handling = False @@ -404,29 +352,8 @@ def GET_editreddit(self, location, num, after, reverse, count, created): stylesheet_contents = '' pane = SubredditStylesheet(site = c.site, stylesheet_contents = stylesheet_contents) - elif location in ('reports', 'spam') and is_moderator: - query = (c.site.get_reported() if location == 'reports' - else c.site.get_spam()) - builder_cls = (QueryBuilder if isinstance(query, thing.Query) - else IDBuilder) - def keep_fn(x): - # 
no need to bother mods with banned users, or deleted content - if x.hidden or x._deleted: - return False - if location == "reports" and not x._spam: - return (x.reported > 0) - if location == "spam": - return x._spam - return True - - builder = builder_cls(query, - skip = True, - num = num, after = after, - keep_fn = keep_fn, - count = count, reverse = reverse, - wrap = ListingController.builder_wrapper) - listing = LinkListing(builder) - pane = listing.listing() + elif location in ('reports', 'spam', 'trials', 'modqueue') and is_moderator: + pane = self._make_spamlisting(location, num, after, reverse, count) if c.user.pref_private_feeds: extension_handling = "private" elif is_moderator and location == 'traffic': @@ -439,6 +366,25 @@ def keep_fn(x): return EditReddit(content = pane, extension_handling = extension_handling).render() + @base_listing + @validate(location = nop('location'), + created = VOneOf('created', ('true','false'), + default = 'false')) + def GET_editreddit(self, location, num, after, reverse, count, created): + """Edit reddit form.""" + if isinstance(c.site, ModContribSR): + return self._edit_modcontrib_reddit(location, num, after, reverse, + count, created) + elif isinstance(c.site, AllSR) and c.user_is_admin: + return self._edit_modcontrib_reddit(location, num, after, reverse, + count, created) + elif isinstance(c.site, FakeSubreddit): + return self.abort404() + else: + return self._edit_normal_reddit(location, num, after, reverse, + count, created) + + def GET_awards(self): """The awards page.""" return BoringPage(_("awards"), content = UserAwards()).render() @@ -605,63 +551,6 @@ def _search(self, query_obj, num, after, reverse, count=0): return builder.total_num, timing, res - @validate(dest = VDestination()) - def GET_login(self, dest): - """The /login form. No link to this page exists any more on - the site (all actions invoking it now go through the login - cover). 
However, this page is still used for logging the user - in during submission or voting from the bookmarklets.""" - - if (c.user_is_loggedin and - not request.environ.get('extension') == 'embed'): - return self.redirect(dest) - return LoginPage(dest = dest).render() - - @validate(VUser(), - VModhash(), - dest = VDestination()) - def GET_logout(self, dest): - return self.redirect(dest) - - @validate(VUser(), - VModhash(), - dest = VDestination()) - def POST_logout(self, dest): - """wipe login cookie and redirect to referer.""" - self.logout() - return self.redirect(dest) - - - @validate(VUser(), - dest = VDestination()) - def GET_adminon(self, dest): - """Enable admin interaction with site""" - #check like this because c.user_is_admin is still false - if not c.user.name in g.admins: - return self.abort404() - self.login(c.user, admin = True) - return self.redirect(dest) - - @validate(VAdmin(), - dest = VDestination()) - def GET_adminoff(self, dest): - """disable admin interaction with site.""" - if not c.user.name in g.admins: - return self.abort404() - self.login(c.user, admin = False) - return self.redirect(dest) - - def GET_validuser(self): - """checks login cookie to verify that a user is logged in and - returns their user name""" - c.response_content_type = 'text/plain' - if c.user_is_loggedin: - perm = str(c.user.can_wiki()) - c.response.content = c.user.name + "," + perm - else: - c.response.content = '' - return c.response - @validate(VAdmin(), comment = VCommentByID('comment_id')) def GET_comment_by_id(self, comment): @@ -711,22 +600,6 @@ def _render_opt_in_out(self, msg_hash, leave): sent = sent, msg_hash = msg_hash)).render() - @validate(msg_hash = nop('x')) - def GET_optout(self, msg_hash): - """handles /mail/optout to add an email to the optout mailing - list. The actual email addition comes from the user posting - the subsequently rendered form and is handled in - ApiController.POST_optout.""" - return self._render_opt_in_out(msg_hash, True) - - @validate(msg_hash = nop('x')) - def GET_optin(self, msg_hash): - """handles /mail/optin to remove an email address from the - optout list. The actual email removal comes from the user - posting the subsequently rendered form and is handled in - ApiController.POST_optin.""" - return self._render_opt_in_out(msg_hash, False) - def GET_frame(self): """used for cname support. 
makes a frame and puts the proper url as the frame source""" @@ -801,3 +674,223 @@ def GET_traffic(self, article): def GET_site_traffic(self): return BoringPage("traffic", content = RedditTraffic()).render() + +class FormsController(RedditController): + + def GET_password(self): + """The 'what is my password' page""" + return BoringPage(_("password"), content=Password()).render() + + @validate(VUser(), + dest = VDestination(), + reason = nop('reason')) + def GET_verify(self, dest, reason): + if c.user.email_verified: + content = InfoBar(message = strings.email_verified) + if dest: + return self.redirect(dest) + else: + if reason == "submit": + infomsg = strings.verify_email_submit + else: + infomsg = strings.verify_email + + content = PaneStack( + [InfoBar(message = infomsg), + PrefUpdate(email = True, verify = True, + password = False)]) + return BoringPage(_("verify email"), content = content).render() + + @validate(VUser(), + cache_evt = VCacheKey('email_verify', ('key',)), + key = nop('key'), + dest = VDestination(default = "/prefs/update")) + def GET_verify_email(self, cache_evt, key, dest): + if c.user_is_loggedin and c.user.email_verified: + cache_evt.clear() + return self.redirect(dest) + elif not (cache_evt.user and + key == passhash(cache_evt.user.name, cache_evt.user.email)): + content = PaneStack( + [InfoBar(message = strings.email_verify_failed), + PrefUpdate(email = True, verify = True, + password = False)]) + return BoringPage(_("verify email"), content = content).render() + elif c.user != cache_evt.user: + # wrong user. Log them out and try again. + self.logout() + return self.redirect(request.fullpath) + else: + cache_evt.clear() + c.user.email_verified = True + c.user._commit() + Award.give_if_needed("verified_email", c.user) + return self.redirect(dest) + + @validate(cache_evt = VCacheKey('reset', ('key',)), + key = nop('key')) + def GET_resetpassword(self, cache_evt, key): + """page hit once a user has been sent a password reset email + to verify their identity before allowing them to update their + password.""" + + #if another user is logged-in, log them out + if c.user_is_loggedin: + self.logout() + return self.redirect(request.path) + + done = False + if not key and request.referer: + referer_path = request.referer.split(g.domain)[-1] + done = referer_path.startswith(request.fullpath) + elif not getattr(cache_evt, "user", None): + return self.abort404() + return BoringPage(_("reset password"), + content=ResetPassword(key=key, done=done)).render() + + @validate(VUser()) + def GET_depmod(self): + displayPane = PaneStack() + + active_trials = {} + finished_trials = {} + + juries = Jury.by_account(c.user) + + trials = trial_info([j._thing2 for j in juries]) + + for j in juries: + defendant = j._thing2 + + if trials.get(defendant._fullname, False): + active_trials[defendant._fullname] = j._name + else: + finished_trials[defendant._fullname] = j._name + + if active_trials: + fullnames = sorted(active_trials.keys(), reverse=True) + + def my_wrap(thing): + w = Wrapped(thing) + w.hide_score = True + w.likes = None + w.trial_mode = True + w.render_class = LinkOnTrial + w.juryvote = active_trials[thing._fullname] + return w + + listing = wrap_links(fullnames, wrapper=my_wrap) + displayPane.append(InfoBar(strings.active_trials, + extra_class="mellow")) + displayPane.append(listing) + + if finished_trials: + fullnames = sorted(finished_trials.keys(), reverse=True) + listing = wrap_links(fullnames) + displayPane.append(InfoBar(strings.finished_trials, + extra_class="mellow")) + 
displayPane.append(listing) + + displayPane.append(InfoBar(strings.more_info_link % + dict(link="/help/deputies"), + extra_class="mellow")) + + return Reddit(content = displayPane).render() + + @validate(VUser(), + location = nop("location")) + def GET_prefs(self, location=''): + """Preference page""" + content = None + infotext = None + if not location or location == 'options': + content = PrefOptions(done=request.get.get('done')) + elif location == 'friends': + content = PaneStack() + infotext = strings.friends % Friends.path + content.append(FriendList()) + elif location == 'update': + content = PrefUpdate() + elif location == 'feeds' and c.user.pref_private_feeds: + content = PrefFeeds() + elif location == 'delete': + content = PrefDelete() + else: + return self.abort404() + + return PrefsPage(content = content, infotext=infotext).render() + + + @validate(dest = VDestination()) + def GET_login(self, dest): + """The /login form. No link to this page exists any more on + the site (all actions invoking it now go through the login + cover). However, this page is still used for logging the user + in during submission or voting from the bookmarklets.""" + + if (c.user_is_loggedin and + not request.environ.get('extension') == 'embed'): + return self.redirect(dest) + return LoginPage(dest = dest).render() + + @validate(VUser(), + VModhash(), + dest = VDestination()) + def GET_logout(self, dest): + return self.redirect(dest) + + @validate(VUser(), + VModhash(), + dest = VDestination()) + def POST_logout(self, dest): + """wipe login cookie and redirect to referer.""" + self.logout() + return self.redirect(dest) + + + @validate(VUser(), + dest = VDestination()) + def GET_adminon(self, dest): + """Enable admin interaction with site""" + #check like this because c.user_is_admin is still false + if not c.user.name in g.admins: + return self.abort404() + self.login(c.user, admin = True) + return self.redirect(dest) + + @validate(VAdmin(), + dest = VDestination()) + def GET_adminoff(self, dest): + """disable admin interaction with site.""" + if not c.user.name in g.admins: + return self.abort404() + self.login(c.user, admin = False) + return self.redirect(dest) + + def GET_validuser(self): + """checks login cookie to verify that a user is logged in and + returns their user name""" + c.response_content_type = 'text/plain' + if c.user_is_loggedin: + perm = str(c.user.can_wiki()) + c.response.content = c.user.name + "," + perm + else: + c.response.content = '' + return c.response + + @validate(msg_hash = nop('x')) + def GET_optout(self, msg_hash): + """handles /mail/optout to add an email to the optout mailing + list. The actual email addition comes from the user posting + the subsequently rendered form and is handled in + ApiController.POST_optout.""" + return self._render_opt_in_out(msg_hash, True) + + @validate(msg_hash = nop('x')) + def GET_optin(self, msg_hash): + """handles /mail/optin to remove an email address from the + optout list. 
The actual email removal comes from the user + posting the subsequently rendered form and is handled in + ApiController.POST_optin.""" + return self._render_opt_in_out(msg_hash, False) + diff --git a/r2/r2/controllers/health.py b/r2/r2/controllers/health.py index 0668245c05..ac9e2f0070 100644 --- a/r2/r2/controllers/health.py +++ b/r2/r2/controllers/health.py @@ -5,12 +5,12 @@ from pylons.controllers.util import abort from pylons import c, g -from reddit_base import RedditController +from reddit_base import MinimalController from r2.lib.amqp import worker from validator import * -class HealthController(RedditController): +class HealthController(MinimalController): def shutdown(self): thread_pool = c.thread_pool def _shutdown(): diff --git a/r2/r2/controllers/listingcontroller.py b/r2/r2/controllers/listingcontroller.py index 5c466cbc4e..8c31c6eccb 100644 --- a/r2/r2/controllers/listingcontroller.py +++ b/r2/r2/controllers/listingcontroller.py @@ -19,7 +19,7 @@ # All portions of the code written by CondeNet are Copyright (c) 2006-2010 # CondeNet, Inc. All Rights Reserved. ################################################################################ -from reddit_base import RedditController, base_listing +from reddit_base import RedditController, base_listing, organic_pos from validator import * from r2.models import * @@ -30,7 +30,6 @@ from r2.lib.rising import get_rising from r2.lib.wrapped import Wrapped from r2.lib.normalized_hot import normalized_hot, get_hot -from r2.lib.recommendation import get_recommended from r2.lib.db.thing import Query, Merge, Relations from r2.lib.db import queries from r2.lib.strings import Score @@ -40,7 +39,7 @@ from r2.lib.utils import iters, check_cheating, timeago from r2.lib.utils.trial_utils import populate_spotlight from r2.lib import sup -from r2.lib.promote import PromoteSR +from r2.lib.promote import randomized_promotion_list, get_promote_srid from r2.lib.contrib.pysolr import SolrError from admin import admin_profile_query @@ -56,6 +55,9 @@ class ListingController(RedditController): # toggle skipping of links based on the users' save/hide/vote preferences skip = True + # allow stylesheets on listings + allow_stylesheets = True + # toggles showing numbers show_nums = True @@ -149,7 +151,8 @@ def keep(item): def listing(self): """Listing to generate from the builder""" - if c.site.path == PromoteSR.path and not c.user_is_sponsor: + if (getattr(c.site, "_id", -1) == get_promote_srid() and + not c.user_is_sponsor): abort(403, 'forbidden') listing = LinkListing(self.builder_obj, show_nums = self.show_nums) try: @@ -187,6 +190,7 @@ def rightbox(self): builder_wrapper = staticmethod(default_thing_wrapper()) def GET_listing(self, **env): + check_cheating('site') return self.build_listing(**env) class FixListing(object): @@ -219,55 +223,73 @@ class HotController(FixListing, ListingController): where = 'hot' def spotlight(self): - spotlight_links, pos = organic.organic_links(c.user) - - trial = populate_spotlight() - - if trial: - spotlight_links.insert(pos, trial._fullname) + if (c.site == Default + and (not c.user_is_loggedin + or (c.user_is_loggedin and c.user.pref_organic))): - if not spotlight_links: - return None + spotlight_links = organic.organic_links(c.user) + pos = organic_pos() - # get links in proximity to pos - num_tl = len(spotlight_links) - if num_tl <= 3: - disp_links = spotlight_links - else: - left_side = max(-1, min(num_tl - 3, 8)) - disp_links = [spotlight_links[(i + pos) % num_tl] - for i in xrange(-2, left_side)] + if not 
spotlight_links: + pos = 0 + elif pos != 0: + pos = pos % len(spotlight_links) - def keep_fn(item): - if trial and trial._fullname == item._fullname: - return True - elif item.likes is not None: - return False - else: - return item.keep_item(item) + spotlight_links, pos = promote.insert_promoted(spotlight_links, pos) + trial = populate_spotlight() - def wrap(item): - if item is trial: - w = Wrapped(item) - w.trial_mode = True - w.render_class = LinkOnTrial - return w - return self.builder_wrapper(item) + if trial: + spotlight_links.insert(pos, trial._fullname) - b = IDBuilder(disp_links, wrap = wrap, - skip = True, keep_fn = keep_fn) + if not spotlight_links: + return None - s = SpotlightListing(b, - spotlight_links = spotlight_links, - visible_link = spotlight_links[pos], - max_num = self.listing_obj.max_num, - max_score = self.listing_obj.max_score).listing() + # get links in proximity to pos + num_tl = len(spotlight_links) + if num_tl <= 3: + disp_links = spotlight_links + else: + left_side = max(-1, min(num_tl - 3, 8)) + disp_links = [spotlight_links[(i + pos) % num_tl] + for i in xrange(-2, left_side)] + def keep_fn(item): + if trial and trial._fullname == item._fullname: + return True + return organic.keep_fresh_links(item) + + def wrap(item): + if item is trial: + w = Wrapped(item) + w.trial_mode = True + w.render_class = LinkOnTrial + return w + return self.builder_wrapper(item) + b = IDBuilder(disp_links, wrap = wrap, + num = organic.organic_length, + skip = True, keep_fn = keep_fn) + + s = SpotlightListing(b, + spotlight_links = spotlight_links, + visible_link = spotlight_links[pos], + max_num = self.listing_obj.max_num, + max_score = self.listing_obj.max_score).listing() + + if len(s.things) > 0: + # only pass through a listing if the links made it + # through our builder + organic.update_pos(pos+1) + return s + + # no organic box on a hot page, then show a random promoted link + elif c.site != Default: + link_ids = randomized_promotion_list(c.user, c.site) + if link_ids: + res = wrap_links(link_ids, wrapper = self.builder_wrapper, + num = 1, keep_fn = lambda x: x.fresh, + skip = True) + if res.things: + return res - if len(s.things) > 0: - # only pass through a listing if the links made it - # through our builder - organic.update_pos(pos+1) - return s def query(self): @@ -289,9 +311,7 @@ def query(self): def content(self): # only send a spotlight listing for HTML rendering - if (c.site == Default and c.render_style == "html" - and (not c.user_is_loggedin - or (c.user_is_loggedin and c.user.pref_organic))): + if c.render_style == "html": spotlight = self.spotlight() if spotlight: return PaneStack([spotlight, self.listing_obj], css_class='spacer') @@ -414,22 +434,22 @@ def GET_listing(self, links, **env): return ListingController.GET_listing(self, **env) -class RecommendedController(ListingController): - where = 'recommended' - title_text = _('recommended for you') - - @property - def menus(self): - return [RecSortMenu(default = self.sort)] - - def query(self): - return get_recommended(c.user._id, sort = self.sort) - - @validate(VUser(), - sort = VMenu("controller", RecSortMenu)) - def GET_listing(self, sort, **env): - self.sort = sort - return ListingController.GET_listing(self, **env) +#class RecommendedController(ListingController): +# where = 'recommended' +# title_text = _('recommended for you') +# +# @property +# def menus(self): +# return [RecSortMenu(default = self.sort)] +# +# def query(self): +# return get_recommended(c.user._id, sort = self.sort) +# +# 
@validate(VUser(), +# sort = VMenu("controller", RecSortMenu)) +# def GET_listing(self, sort, **env): +# self.sort = sort +# return ListingController.GET_listing(self, **env) class UserController(ListingController): render_cls = ProfilePage @@ -527,6 +547,7 @@ class MessageController(ListingController): show_sidebar = False show_nums = False render_cls = MessagePage + allow_stylesheets = False @property def menus(self): @@ -671,7 +692,7 @@ def GET_listing(self, where, mark, message, subwhere = None, **env): def GET_compose(self, to, subject, message, success): captcha = Captcha() if c.user.needs_captcha() else None content = MessageCompose(to = to, subject = subject, - captcha = captcha, + captcha = captcha, message = message, success = success) return MessagePage(content = content).render() @@ -742,7 +763,7 @@ def content(self): stack.append(InfoBar(message=message)) stack.append(self.listing_obj) - + return stack @validate(VUser()) @@ -754,5 +775,9 @@ class CommentsController(ListingController): title_text = _('comments') def query(self): - return queries.get_all_comments() + return c.site.get_all_comments() + + def GET_listing(self, **env): + c.profilepage = True + return ListingController.GET_listing(self, **env) diff --git a/r2/r2/controllers/mediaembed.py b/r2/r2/controllers/mediaembed.py index 9ab54ad320..cb147b8bbf 100644 --- a/r2/r2/controllers/mediaembed.py +++ b/r2/r2/controllers/mediaembed.py @@ -22,7 +22,7 @@ from validator import * from reddit_base import MinimalController -from r2.lib.scraper import scrapers +from r2.lib.scraper import get_media_embed from r2.lib.pages import MediaEmbedBody, ComScore, render_ad from pylons import request @@ -48,9 +48,7 @@ def GET_mediaembed(self, link): elif isinstance(link.media_object, dict): # otherwise it's the new style, which is a dict(type=type, **args) - media_object_type = link.media_object['type'] - scraper = scrapers[media_object_type] - media_embed = scraper.media_embed(**link.media_object) + media_embed = get_media_embed(link.media_object) content = media_embed.content return MediaEmbedBody(body = content).render() diff --git a/r2/r2/controllers/promotecontroller.py b/r2/r2/controllers/promotecontroller.py index 73539d5f62..62ba611709 100644 --- a/r2/r2/controllers/promotecontroller.py +++ b/r2/r2/controllers/promotecontroller.py @@ -25,13 +25,13 @@ from r2.lib.authorize import get_account_info, edit_profile from r2.lib.pages import * from r2.lib.pages.things import wrap_links +from r2.lib.strings import strings from r2.lib.menus import * from r2.controllers import ListingController from r2.controllers.reddit_base import RedditController -from r2.lib.promote import get_promoted, STATUS, PromoteSR -from r2.lib.utils import timetext +from r2.lib.utils import timetext, make_offset_date from r2.lib.media import force_thumbnail, thumbnail_url from r2.lib import cssfilter from datetime import datetime @@ -46,33 +46,20 @@ def title_text(self): return _('promoted by you') def query(self): - q = Link._query(Link.c.sr_id == PromoteSR._id) - if not c.user_is_sponsor: - # get user's own promotions - q._filter(Link.c.author_id == c.user._id) - q._filter(Link.c._spam == (True, False), - Link.c.promoted == (True, False)) - q._sort = desc('_date') - + author_id = None if c.user_is_sponsor else c.user._id if self.sort == "future_promos": - q._filter(Link.c.promote_status == STATUS.unseen) + return promote.get_unapproved_links(author_id) elif self.sort == "pending_promos": - if c.user_is_admin: - q._filter(Link.c.promote_status == STATUS.pending) 
- else: - q._filter(Link.c.promote_status == (STATUS.unpaid, - STATUS.unseen, - STATUS.accepted, - STATUS.rejected)) + return promote.get_accepted_links(author_id) elif self.sort == "unpaid_promos": - q._filter(Link.c.promote_status == STATUS.unpaid) + return promote.get_unpaid_links(author_id) elif self.sort == "rejected_promos": - q._filter(Link.c.promote_status == STATUS.rejected) + return promote.get_rejected_links(author_id) elif self.sort == "live_promos": - q._filter(Link.c.promote_status == STATUS.promoted) - - return q + return promote.get_live_links(author_id) + return promote.get_all_links(author_id) + @validate(VSponsor()) def GET_listing(self, sort = "", **env): if not c.user_is_loggedin or not c.user.email_verified: return self.redirect("/ad_inq") @@ -81,7 +68,7 @@ def GET_listing(self, sort = "", **env): GET_index = GET_listing - @validate(VVerifiedUser()) + @validate(VSponsor()) def GET_new_promo(self): return PromotePage('content', content = PromoteLinkForm()).render() @@ -90,113 +77,77 @@ def GET_new_promo(self): def GET_edit_promo(self, link): if link.promoted is None: return self.abort404() - rendered = wrap_links(link) - timedeltatext = '' - if link.promote_until: - timedeltatext = timetext(link.promote_until - datetime.now(g.tz), - resultion=2) + rendered = wrap_links(link, wrapper = promote.sponsor_wrapper, + skip = False) form = PromoteLinkForm(link = link, listing = rendered, - timedeltatext = timedeltatext) + timedeltatext = "") + page = PromotePage('new_promo', content = form) return page.render() - @validate(VVerifiedUser()) + @validate(VSponsor()) def GET_graph(self): content = Promote_Graph() if c.user_is_sponsor and c.render_style == 'csv': c.response.content = content.as_csv() return c.response - return PromotePage("grpaph", content = content).render() + return PromotePage("graph", content = content).render() ### POST controllers below - @validatedForm(VSponsor(), - link = VByName("link"), - bid = VBid('bid', "link")) - def POST_freebie(self, form, jquery, link, bid): - if link and link.promoted is not None and bid: - promote.auth_paid_promo(link, c.user, -1, bid) - jquery.refresh() - - @validatedForm(VSponsor(), + @validatedForm(VSponsorAdmin(), + link = VLink("link_id"), + indx = VInt("indx")) + def POST_freebie(self, form, jquery, link, indx): + if promote.is_promo(link) and indx is not None: + promote.free_campaign(link, indx, c.user) + form.redirect(promote.promo_edit_url(link)) + + @validatedForm(VSponsorAdmin(), link = VByName("link"), note = nop("note")) def POST_promote_note(self, form, jquery, link, note): - if link and link.promoted is not None: + if promote.is_promo(link): form.find(".notes").children(":last").after( "
<p>" + promote.promotion_log(link, note, True) + "</p>
") - @validatedForm(VSponsor(), - link = VByName("link"), - refund = VFloat("refund")) - def POST_refund(self, form, jquery, link, refund): - if link: - # make sure we don't refund more than we should - author = Account._byID(link.author_id) - promote.refund_promo(link, author, refund) - jquery.refresh() - - @noresponse(VSponsor(), + @noresponse(VSponsorAdmin(), thing = VByName('id')) def POST_promote(self, thing): - if thing: - now = datetime.now(g.tz) - # make accepted if unseen or already rejected - if thing.promote_status in (promote.STATUS.unseen, - promote.STATUS.rejected): - promote.accept_promo(thing) - # if not finished and the dates are current - elif (thing.promote_status < promote.STATUS.finished and - thing._date <= now and thing.promote_until > now): - # if already pending, cron job must have failed. Promote. - if thing.promote_status == promote.STATUS.accepted: - promote.pending_promo(thing) - promote.promote(thing) - - @noresponse(VSponsor(), + if promote.is_promo(thing): + promote.accept_promotion(thing) + + @noresponse(VSponsorAdmin(), thing = VByName('id'), reason = nop("reason")) def POST_unpromote(self, thing, reason): - if thing: - # reject anything that hasn't yet been promoted - if (c.user_is_sponsor and - thing.promote_status < promote.STATUS.promoted): - promote.reject_promo(thing, reason = reason) - # also reject anything that is live but has a reason given - elif (c.user_is_sponsor and reason and - thing.promote_status == promote.STATUS.promoted): - promote.reject_promo(thing, reason = reason) - # otherwise, mark it as "finished" - else: - promote.unpromote(thing) + if promote.is_promo(thing): + promote.reject_promotion(thing, reason = reason) @validatedForm(VSponsor('link_id'), VModhash(), VRatelimit(rate_user = True, rate_ip = True, prefix = 'create_promo_'), - ip = ValidIP(), l = VLink('link_id'), title = VTitle('title'), - url = VUrl('url', allow_self = False), - dates = VDateRange(['startdate', 'enddate'], - future = g.min_promote_future, - reference_date = promote.promo_datetime_now, - business_days = True, - admin_override = True), + url = VUrl('url', allow_self = False, lookup = False), + ip = ValidIP(), disable_comments = VBoolean("disable_comments"), set_clicks = VBoolean("set_maximum_clicks"), max_clicks = VInt("maximum_clicks", min = 0), set_views = VBoolean("set_maximum_views"), max_views = VInt("maximum_views", min = 0), - bid = VBid('bid', 'link_id')) - def POST_new_promo(self, form, jquery, l, ip, title, url, dates, - disable_comments, - set_clicks, max_clicks, set_views, max_views, bid): + ) + def POST_edit_promo(self, form, jquery, ip, l, title, url, + disable_comments, + set_clicks, max_clicks, + set_views, max_views): + should_ratelimit = False if not c.user_is_sponsor: set_clicks = False @@ -209,7 +160,7 @@ def POST_new_promo(self, form, jquery, l, ip, title, url, dates, if not should_ratelimit: c.errors.remove((errors.RATELIMIT, 'ratelimit')) - + # demangle URL in canonical way if url: if isinstance(url, (unicode, str)): @@ -220,102 +171,168 @@ def POST_new_promo(self, form, jquery, l, ip, title, url, dates, # want the URL url = url[0].url - if form.has_errors('bid', errors.BAD_BID): + # users can change the disable_comments on promoted links + if ((not l or not promote.is_promoted(l)) and + (form.has_errors('title', errors.NO_TEXT, + errors.TOO_LONG) or + form.has_errors('url', errors.NO_URL, errors.BAD_URL) or + jquery.has_errors('ratelimit', errors.RATELIMIT))): + return + + if not l: + l = promote.new_promotion(title, url, c.user, ip) + 
elif promote.is_promo(l): + changed = False + # live items can only be changed by a sponsor, and also + # pay the cost of de-approving the link + trusted = c.user_is_sponsor or \ + getattr(c.user, "trusted_sponsor", False) + if not promote.is_promoted(l) or trusted: + if title and title != l.title: + l.title = title + changed = not trusted + if url and url != l.url: + l.url = url + changed = not trusted + + # only trips if the title and url are changed by a non-sponsor + if changed and not promote.is_unpaid(l): + promote.unapprove_promotion(l) + + if c.user_is_sponsor: + l.maximum_clicks = max_clicks + l.maximum_views = max_views + + # comment disabling is free to be changed any time. + l.disable_comments = disable_comments + l._commit() + + form.redirect(promote.promo_edit_url(l)) + + @validate(VSponsorAdmin()) + def GET_roadblock(self): + return PromotePage('content', content = Roadblocks()).render() + + @validatedForm(VSponsorAdmin(), + VModhash(), + dates = VDateRange(['startdate', 'enddate'], + future = 1, + reference_date = promote.promo_datetime_now, + business_days = False, + admin_override = True), + sr = VSubmitSR('sr')) + def POST_add_roadblock(self, form, jquery, dates, sr): + if (form.has_errors('startdate', errors.BAD_DATE, + errors.BAD_FUTURE_DATE) or + form.has_errors('enddate', errors.BAD_DATE, + errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)): + return + if form.has_errors('sr', errors.SUBREDDIT_NOEXIST, + errors.SUBREDDIT_NOTALLOWED, + errors.SUBREDDIT_REQUIRED): + return + if dates and sr: + sd, ed = dates + promote.roadblock_reddit(sr.name, sd.date(), ed.date()) + jquery.refresh() + + @validatedForm(VSponsorAdmin(), + VModhash(), + dates = VDateRange(['startdate', 'enddate'], + future = 1, + reference_date = promote.promo_datetime_now, + business_days = False, + admin_override = True), + sr = VSubmitSR('sr')) + def POST_rm_roadblock(self, form, jquery, dates, sr): + if dates and sr: + sd, ed = dates + promote.unroadblock_reddit(sr.name, sd.date(), ed.date()) + jquery.refresh() + + @validatedForm(VSponsor('link_id'), + VModhash(), + dates = VDateRange(['startdate', 'enddate'], + future = 1, + reference_date = promote.promo_datetime_now, + business_days = False, + admin_override = True), + l = VLink('link_id'), + bid = VBid('bid', 'link_id', 'sr'), + sr = VSubmitSR('sr'), + indx = VInt("indx"), + targeting = VLength("targeting", 10)) + def POST_edit_campaign(self, form, jquery, l, indx, + dates, bid, sr, targeting): + if not l: return - # check dates and date range start, end = [x.date() for x in dates] if dates else (None, None) - if (not l or - (l.promote_status != promote.STATUS.promoted and - (l._date.date(), l.promote_until.date()) != (start,end))): - if (form.has_errors('startdate', errors.BAD_DATE, - errors.BAD_FUTURE_DATE) or - form.has_errors('enddate', errors.BAD_DATE, - errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)): - return - # if the dates have been updated, it is possible that the - # bid is no longer valid - duration = max((end - start).days, 1) - if float(bid) / duration < g.min_promote_bid: - c.errors.add(errors.BAD_BID, field = 'bid', - msg_params = {"min": g.min_promote_bid, - "max": g.max_promote_bid}) - - # dates have been validated at this point. Next validate title, etc. 
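# The new POST_edit_campaign (below) requires unapproved campaigns to
# start at least g.min_promote_future business days in the future, via
# r2.lib.utils.make_offset_date. A self-contained sketch of that
# weekday-skipping offset; the real helper's signature may differ.
from datetime import datetime, timedelta

def offset_by_business_days(now, days):
    day, remaining = now, days
    while remaining > 0:
        day += timedelta(days=1)
        if day.weekday() < 5:  # only Monday..Friday count toward the offset
            remaining -= 1
    return day

# e.g. a Friday start with days=2 lands on the following Tuesday:
# offset_by_business_days(datetime(2010, 5, 7), 2).date() -> 2010-05-11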
- if (form.has_errors('title', errors.NO_TEXT, - errors.TOO_LONG) or - form.has_errors('url', errors.NO_URL, errors.BAD_URL) or - (not l and jquery.has_errors('ratelimit', errors.RATELIMIT))): + + if start and end and not promote.is_accepted(l) and not c.user_is_sponsor: + # if the ad is not approved already, ensure the start date + # is at least 2 days in the future + now = promote.promo_datetime_now() + future = make_offset_date(now, g.min_promote_future, + business_days = True) + if start < future.date(): + c.errors.add(errors.BAD_FUTURE_DATE, + msg_params = dict(day=g.min_promote_future), + field = "startdate") + + + if (form.has_errors('startdate', errors.BAD_DATE, + errors.BAD_FUTURE_DATE) or + form.has_errors('enddate', errors.BAD_DATE, + errors.BAD_FUTURE_DATE, errors.BAD_DATE_RANGE)): return - elif l: - if l.promote_status == promote.STATUS.finished: - form.parent().set_html(".status", - _("that promoted link is already finished.")) - else: - # we won't penalize for changes of dates provided - # the submission isn't pending (or promoted, or - # finished) - changed = False - if dates and not promote.update_promo_dates(l, *dates): - form.parent().set_html(".status", - _("too late to change the date.")) - else: - changed = True - - # check for changes in the url and title - if promote.update_promo_data(l, title, url): - changed = True - # sponsors can change the bid value (at the expense of making - # the promotion a freebie) - if c.user_is_sponsor and bid != l.promote_bid: - promote.auth_paid_promo(l, c.user, -1, bid) - promote.accept_promo(l) - changed = True - - if c.user_is_sponsor: - l.maximum_clicks = max_clicks - l.maximum_views = max_views - changed = True - - l.disable_comments = disable_comments - l._commit() - - if changed: - jquery.refresh() - - # no link so we are creating a new promotion - elif dates: - promote_start, promote_end = dates - # check that the bid satisfies the minimum - duration = max((promote_end - promote_start).days, 1) - if bid / duration >= g.min_promote_bid: - l = promote.new_promotion(title, url, c.user, ip, - promote_start, promote_end, bid, - disable_comments = disable_comments, - max_clicks = max_clicks, - max_views = max_views) - # if the submitter is a sponsor (or implicitly an admin) we can - # fast-track the approval and auto-accept the bid - if c.user_is_sponsor: - promote.auth_paid_promo(l, c.user, -1, bid) - promote.accept_promo(l) - - # register a vote - v = Vote.vote(c.user, l, True, ip) - - # set the rate limiter - if should_ratelimit: - VRatelimit.ratelimit(rate_user=True, rate_ip = True, - prefix = "create_promo_", - seconds = 60) - - form.redirect(promote.promo_edit_url(l)) - else: - c.errors.add(errors.BAD_BID, - msg_params = dict(min=g.min_promote_bid, - max=g.max_promote_bid), - field = 'bid') - form.set_error(errors.BAD_BID, "bid") + + duration = max((end - start).days, 1) + + if form.has_errors('bid', errors.BAD_BID): + return + + if bid is None or float(bid) / duration < g.min_promote_bid: + c.errors.add(errors.BAD_BID, field = 'bid', + msg_params = {"min": g.min_promote_bid, + "max": g.max_promote_bid}) + form.has_errors('bid', errors.BAD_BID) + return + + if targeting == 'one': + if form.has_errors('sr', errors.SUBREDDIT_NOEXIST, + errors.SUBREDDIT_NOTALLOWED, + errors.SUBREDDIT_REQUIRED): + # checking to get the error set in the form, but we can't + # check for rate-limiting if there's no subreddit + return + oversold = promote.is_roadblocked(sr.name, start, end) + if oversold: + c.errors.add(errors.OVERSOLD, field = 'sr', + 
msg_params = {"start": oversold[0].strftime('%m/%d/%Y'), + "end": oversold[1].strftime('%m/%d/%Y')}) + form.has_errors('sr', errors.OVERSOLD) + return + if targeting == 'none': + sr = None + + if indx is not None: + promote.edit_campaign(l, indx, dates, bid, sr) + l = promote.editable_add_props(l) + jquery.update_campaign(*l.campaigns[indx]) + else: + indx = promote.new_campaign(l, dates, bid, sr) + l = promote.editable_add_props(l) + jquery.new_campaign(*l.campaigns[indx]) + + @validatedForm(VSponsor('link_id'), + VModhash(), + l = VLink('link_id'), + indx = VInt("indx")) + def POST_delete_campaign(self, form, jquery, l, indx): + if l and indx is not None: + promote.delete_campaign(l, indx) + @validatedForm(VSponsor('container'), VModhash(), @@ -361,8 +378,8 @@ def POST_rm_traffic_viewer(self, form, jquery, iuser, thing): @validatedForm(VSponsor('link'), link = VByName("link"), + indx = VInt("indx"), customer_id = VInt("customer_id", min = 0), - bid = VBid("bid", "link"), pay_id = VInt("account", min = 0), edit = VBoolean("edit"), address = ValidAddress(["firstName", "lastName", @@ -372,7 +389,7 @@ def POST_rm_traffic_viewer(self, form, jquery, iuser, thing): usa_only = True), creditcard = ValidCard(["cardNumber", "expirationDate", "cardCode"])) - def POST_update_pay(self, form, jquery, bid, link, customer_id, pay_id, + def POST_update_pay(self, form, jquery, link, indx, customer_id, pay_id, edit, address, creditcard): address_modified = not pay_id or edit if address_modified: @@ -385,35 +402,32 @@ def POST_update_pay(self, form, jquery, bid, link, customer_id, pay_id, pass else: pay_id = edit_profile(c.user, address, creditcard, pay_id) - if form.has_errors('bid', errors.BAD_BID) or not bid: - pass # if link is in use or finished, don't make a change - elif link.promote_status == promote.STATUS.promoted: - form.set_html(".status", - _("that link is currently promoted. " - "you can't update your bid now.")) - elif link.promote_status == promote.STATUS.finished: - form.set_html(".status", - _("that promotion is already over, so updating " - "your bid is kind of pointless, don't you think?")) - elif pay_id: + if pay_id: # valid bid and created or existing bid id. # check if already a transaction - if promote.auth_paid_promo(link, c.user, pay_id, bid): + success, reason = promote.auth_campaign(link, indx, c.user, pay_id) + if success: form.redirect(promote.promo_edit_url(link)) else: form.set_html(".status", + reason or _("failed to authenticate card. 
sorry.")) @validate(VSponsor("link"), - article = VLink("link")) - def GET_pay(self, article): - data = get_account_info(c.user) + article = VLink("link"), + indx = VInt("indx")) + def GET_pay(self, article, indx): # no need for admins to play in the credit card area if c.user_is_loggedin and c.user._id != article.author_id: return self.abort404() - content = PaymentForm(link = article, + # make sure this is a valid campaign index + if indx not in getattr(article, "campaigns", {}): + return self.abort404() + + data = get_account_info(c.user) + content = PaymentForm(article, indx, customer_id = data.customerProfileId, profiles = data.paymentProfiles) res = LinkInfoPage(link = article, @@ -441,8 +455,8 @@ def POST_link_thumb(self, link=None, file=None): if any(errors.values()): return UploadedImage("", "", "upload", errors = errors).render() else: - if not c.user_is_sponsor: - promote.unapproved_promo(link) + if not c.user_is_sponsor and not promote.is_unpaid(link): + promote.unapprove_promotion(link) return UploadedImage(_('saved'), thumbnail_url(link), "", errors = errors).render() diff --git a/r2/r2/controllers/reddit_base.py b/r2/r2/controllers/reddit_base.py index 1d97349c19..f9400bb8f1 100644 --- a/r2/r2/controllers/reddit_base.py +++ b/r2/r2/controllers/reddit_base.py @@ -183,7 +183,11 @@ def firsttime(): def get_redditfirst(key,default=None): try: - cookie = simplejson.loads(c.cookies['reddit_first'].value) + val = c.cookies['reddit_first'].value + # on cookie presence, return as much + if default is None: + default = True + cookie = simplejson.loads(val) return cookie[key] except (ValueError,TypeError,KeyError),e: # it's not a proper json dict, or the cookie isn't present, or @@ -259,7 +263,6 @@ def set_subreddit(): redirect_to("/reddits/create?name=%s" % sr_name) elif not c.error_page: abort(404, "not found") - #if we didn't find a subreddit, check for a domain listing if not sr_name and c.site == Default and domain: c.site = DomainSR(domain) @@ -437,6 +440,8 @@ def new_fn(self, before, **env): class MinimalController(BaseController): + allow_stylesheets = False + def request_key(self): # note that this references the cookie at request time, not # the current value of it @@ -446,7 +451,7 @@ def request_key(self): except CookieError: cookies_key = '' - return make_key('request_key', + return make_key('request_key_', c.lang, c.content_langs, request.host, @@ -470,6 +475,8 @@ def pre(self): ratelimit_agents() ratelimit_throttled() + c.allow_loggedin_cache = False + # the domain has to be set before Cookies get initialized set_subreddit() c.errors = ErrorSet() @@ -480,6 +487,7 @@ def try_pagecache(self): if not c.user_is_loggedin: r = g.rendercache.get(self.request_key()) if r and request.method == 'GET': + r, c.cookies = r response = c.response response.headers = r.headers response.content = r.content @@ -517,38 +525,35 @@ def post(self): if c.response_access_control: c.response.headers['Access-Control'] = c.response_access_control - if c.user_is_loggedin: + if c.user_is_loggedin and not c.allow_loggedin_cache: response.headers['Cache-Control'] = 'no-cache' response.headers['Pragma'] = 'no-cache' - # send cookies - if not c.used_cache and c.cookies: - # if we used the cache, these cookies should be set by the - # cached response object instead - for k,v in c.cookies.iteritems(): - if v.dirty: - response.set_cookie(key = k, - value = quote(v.value), - domain = v.domain, - expires = v.expires) - #return #set content cache if (g.page_cache_time and request.method == 'GET' - and not 
c.user_is_loggedin + and (not c.user_is_loggedin or c.allow_loggedin_cache) and not c.used_cache and not c.dontcache and response.status_code != 503 and response.content and response.content[0]): try: g.rendercache.set(self.request_key(), - response, + (response, c.cookies), g.page_cache_time) except MemcachedError: # the key was too big to set in the rendercache g.log.debug("Ignored too-big render cache") + # send cookies + for k,v in c.cookies.iteritems(): + if v.dirty: + response.set_cookie(key = k, + value = quote(v.value), + domain = v.domain, + expires = v.expires) + if g.usage_sampling <= 0.0: return @@ -649,6 +654,15 @@ def pre(self): if not isinstance(c.site, FakeSubreddit): request.environ['REDDIT_NAME'] = c.site.name + # random reddit trickery -- have to do this after the content lang is set + if c.site == Random: + c.site = Subreddit.random_reddit() + redirect_to("/" + c.site.path.strip('/') + request.path) + elif c.site == RandomNSFW: + c.site = Subreddit.random_reddit(over18 = True) + redirect_to("/" + c.site.path.strip('/') + request.path) + + # check that the site is available: if c.site._spam and not c.user_is_admin and not c.error_page: abort(404, "not found") @@ -664,7 +678,7 @@ def pre(self): return self.intermediate_redirect("/over18") #check whether to allow custom styles - c.allow_styles = True + c.allow_styles = self.allow_stylesheets if g.css_killswitch: c.allow_styles = False #if the preference is set and we're not at a cname @@ -674,14 +688,22 @@ def pre(self): elif c.site.domain and c.site.css_on_cname and not c.cname: c.allow_styles = False - def check_modified(self, thing, action): - if c.user_is_loggedin: + def check_modified(self, thing, action, + private=True, max_age=0, must_revalidate=True): + if c.user_is_loggedin and not c.allow_loggedin_cache: return last_modified = utils.last_modified_date(thing, action) date_str = http_utils.http_date_str(last_modified) c.response.headers['last-modified'] = date_str - c.response.headers['cache-control'] = "private, max-age=0, must-revalidate" + + cache_control = [] + if private: + cache_control.append('private') + cache_control.append('max-age=%d' % max_age) + if must_revalidate: + cache_control.append('must-revalidate') + c.response.headers['cache-control'] = ', '.join(cache_control) modified_since = request.if_modified_since if modified_since and modified_since >= last_modified: diff --git a/r2/r2/controllers/toolbar.py b/r2/r2/controllers/toolbar.py index e76c8c4bdf..e09984d8f8 100644 --- a/r2/r2/controllers/toolbar.py +++ b/r2/r2/controllers/toolbar.py @@ -75,6 +75,9 @@ def auto_expand_panel(link): return c.user.pref_frame_commentspanel class ToolbarController(RedditController): + + allow_stylesheets = True + @validate(link1 = VByName('id'), link2 = VLink('id', redirect = False)) def GET_goto(self, link1, link2): @@ -89,7 +92,7 @@ def GET_tb(self, link): "/tb/$id36, show a given link with the toolbar" if not link: return self.abort404() - elif link.is_self: + elif link.is_self or not link.subreddit_slow.can_view(c.user): return self.redirect(link.url) res = Frame(title = link.title, diff --git a/r2/r2/controllers/validator/validator.py b/r2/r2/controllers/validator/validator.py index 15abbfa781..f2303b4fde 100644 --- a/r2/r2/controllers/validator/validator.py +++ b/r2/r2/controllers/validator/validator.py @@ -560,6 +560,19 @@ def run(self): if not c.user.email_verified: raise VerifiedUserRequiredException +class VSponsorAdmin(VVerifiedUser): + """ + Validator which checks c.user_is_sponsor + """ + def 
user_test(self, thing): + return (thing.author_id == c.user._id) + + def run(self, link_id = None): + VVerifiedUser.run(self) + if c.user_is_sponsor: + return + abort(403, 'forbidden') + class VSponsor(VVerifiedUser): """ Not intended to be used as a check for c.user_is_sponsor, but @@ -616,15 +629,17 @@ def run(self, thing_name): class VSrCanBan(VByName): def run(self, thing_name): if c.user_is_admin: - return True + return 'admin' elif c.user_is_loggedin: item = VByName.run(self, thing_name) # will throw a legitimate 500 if this isn't a link or # comment, because this should only be used on links and # comments subreddit = item.subreddit_slow - if subreddit.can_ban(c.user): - return True + if subreddit.is_moderator(c.user): + return 'mod' + # elif subreddit.is_contributor(c.user): + # return 'contributor' abort(403,'forbidden') class VSrSpecial(VByName): @@ -747,8 +762,9 @@ def run(self, url): return utils.sanitize_url(url) class VUrl(VRequired): - def __init__(self, item, allow_self = True, *a, **kw): + def __init__(self, item, allow_self = True, lookup = True, *a, **kw): self.allow_self = allow_self + self.lookup = lookup VRequired.__init__(self, item, errors.NO_URL, *a, **kw) def run(self, url, sr = None): @@ -769,6 +785,8 @@ def run(self, url, sr = None): if url == 'self': if self.allow_self: return url + elif not self.lookup: + return url elif url: try: l = Link._by_url(url, sr) @@ -786,7 +804,7 @@ def run(self, name): if name and name.startswith('~') and c.user_is_admin: try: user_id = int(name[1:]) - return Account._byID(user_id) + return Account._byID(user_id, True) except (NotFound, ValueError): return self.error(errors.USER_DOESNT_EXIST) @@ -868,16 +886,18 @@ def cast(self, val): return float(val) class VBid(VNumber): - def __init__(self, bid, link_id): + def __init__(self, bid, link_id, sr): self.duration = 1 - VNumber.__init__(self, (bid, link_id), min = g.min_promote_bid, + VNumber.__init__(self, (bid, link_id, sr), + # targeting is a little more expensive + min = g.min_promote_bid, max = g.max_promote_bid, coerce = False, error = errors.BAD_BID) def cast(self, val): return float(val)/self.duration - def run(self, bid, link_id): + def run(self, bid, link_id, sr = None): if link_id: try: link = Thing._by_fullname(link_id, return_dict = False, @@ -886,7 +906,14 @@ def run(self, bid, link_id): except NotFound: pass if VNumber.run(self, bid): - return float(bid) + if sr: + if self.cast(bid) >= self.min * 1.5: + return float(bid) + else: + self.set_error(self.error, msg_params = dict(min=self.min * 1.5, + max=self.max)) + else: + return float(bid) @@ -953,7 +980,7 @@ def run (self): expire_time = max(r.values()) time = utils.timeuntil(expire_time) - print "rate-limiting %s from %s" % (self.prefix, r.keys()) + g.log.debug("rate-limiting %s from %s" % (self.prefix, r.keys())) # when errors have associated field parameters, we'll need # to add that here @@ -974,8 +1001,7 @@ def ratelimit(self, rate_user = False, rate_ip = False, prefix = "rate_", to_set['user' + str(c.user._id36)] = expire_time if rate_ip: to_set['ip' + str(request.ip)] = expire_time - - g.cache.set_multi(to_set, prefix, time = seconds) + g.cache.set_multi(to_set, prefix = prefix, time = seconds) class VCommentIDs(Validator): #id_str is a comma separated list of id36's diff --git a/r2/r2/lib/app_globals.py b/r2/r2/lib/app_globals.py index 34ea36f921..3e926ac740 100644 --- a/r2/r2/lib/app_globals.py +++ b/r2/r2/lib/app_globals.py @@ -24,7 +24,7 @@ import pytz, os, logging, sys, socket, re, subprocess, random from 
datetime import timedelta, datetime from r2.lib.cache import LocalCache, SelfEmptyingCache -from r2.lib.cache import PyMemcache, CMemcache +from r2.lib.cache import CMemcache from r2.lib.cache import HardCache, MemcacheChain, MemcacheChain, HardcacheChain from r2.lib.cache import CassandraCache, CassandraCacheChain from r2.lib.db.stats import QueryStats @@ -45,6 +45,7 @@ class Globals(object): 'HOT_PAGE_AGE', 'MODWINDOW', 'RATELIMIT', + 'QUOTA_THRESHOLD', 'num_comments', 'max_comments', 'num_default_reddits', @@ -77,6 +78,8 @@ class Globals(object): tuple_props = ['memcaches', 'rec_cache', 'rendercaches', + 'local_rendercache', + 'servicecaches', 'permacache_memcaches', 'cassandra_seeds', 'permacaches', @@ -135,40 +138,55 @@ def __init__(self, global_conf, app_conf, paths, **extra): localcache_cls = (SelfEmptyingCache if self.running_as_script else LocalCache) - num_mc_clients = 2 if self.running_as_script else 10 - - py_mc = PyMemcache(self.memcaches) - c_mc = CMemcache(self.memcaches, num_clients = num_mc_clients) - rmc = CMemcache(self.rendercaches, num_clients = num_mc_clients) + num_mc_clients = 2# if self.running_as_script else 10 + + c_mc = CMemcache(self.memcaches, num_clients = num_mc_clients, legacy=True) + rmc = CMemcache(self.rendercaches, num_clients = num_mc_clients, + noreply=True, no_block=True) + lrmc = None + if self.local_rendercache: + lrmc = CMemcache(self.local_rendercache, + num_clients = num_mc_clients, + noreply=True, no_block=True) + smc = CMemcache(self.servicecaches, num_clients = num_mc_clients) rec_cache = None # we're not using this for now pmc_chain = (localcache_cls(),) if self.permacache_memcaches: - pmc_chain += (PyMemcache(self.permacache_memcaches),) + pmc_chain += (CMemcache(self.permacache_memcaches, + num_clients=num_mc_clients, + legacy=True),) if self.cassandra_seeds: self.cassandra_seeds = list(self.cassandra_seeds) random.shuffle(self.cassandra_seeds) pmc_chain += (CassandraCache('permacache', 'permacache', self.cassandra_seeds),) if self.permacaches: - pmc_chain += (PyMemcache(self.permacaches),) + pmc_chain += (CMemcache(self.permacaches, + num_clients=num_mc_clients, + legacy=True),) if len(pmc_chain) == 1: print 'Warning: proceding without a permacache' - - self.permacache = CassandraCacheChain(pmc_chain) + + self.permacache = CassandraCacheChain(pmc_chain, cache_negative_results = True) # hardcache is done after the db info is loaded, and then the # chains are reset to use the appropriate initial entries - self.memcache = py_mc # we'll keep using this one for locks - # intermediately + self.memcache = c_mc # we'll keep using this one for locks + # intermediately - self.cache = MemcacheChain((localcache_cls(), py_mc)) - self.rendercache = MemcacheChain((localcache_cls(), rmc)) + self.cache = MemcacheChain((localcache_cls(), c_mc)) + if lrmc: + self.rendercache = MemcacheChain((localcache_cls(), lrmc, rmc)) + else: + self.rendercache = MemcacheChain((localcache_cls(), rmc)) + self.servicecache = MemcacheChain((localcache_cls(), smc)) self.rec_cache = rec_cache self.make_lock = make_lock_factory(self.memcache) - cache_chains = [self.cache, self.permacache, self.rendercache] + cache_chains = [self.cache, self.permacache, self.rendercache, + self.servicecache] # set default time zone if one is not set tz = global_conf.get('timezone') @@ -181,7 +199,7 @@ def __init__(self, global_conf, app_conf, paths, **extra): self.dbm = self.load_db_params(global_conf) # can't do this until load_db_params() has been called - self.hardcache = 
HardcacheChain((localcache_cls(), py_mc, + self.hardcache = HardcacheChain((localcache_cls(), c_mc, HardCache(self)), cache_negative_results = True) cache_chains.append(self.hardcache) diff --git a/r2/r2/lib/authorize/interaction.py b/r2/r2/lib/authorize/interaction.py index dd7eef1242..093e6aa5a4 100644 --- a/r2/r2/lib/authorize/interaction.py +++ b/r2/r2/lib/authorize/interaction.py @@ -99,7 +99,7 @@ def _make_transaction(trans_cls, amount, user, pay_id, return req.make_request() -def auth_transaction(amount, user, payid, thing, test = None): +def auth_transaction(amount, user, payid, thing, campaign, test = None): # use negative pay_ids to identify freebies, coupons, or anything # that doesn't require a CC. if payid < 0: @@ -107,12 +107,13 @@ def auth_transaction(amount, user, payid, thing, test = None): # update previous freebie transactions if we can try: bid = Bid.one(thing_id = thing._id, - pay_id = payid) + transaction = trans_id, + campaign = campaign) bid.bid = amount bid.auth() except NotFound: - bid = Bid._new(trans_id, user, payid, thing._id, amount) - return bid.transaction + bid = Bid._new(trans_id, user, payid, thing._id, amount, campaign) + return bid.transaction, "" elif int(payid) in PayID.get_ids(user): order = Order(invoiceNumber = "%dT%d" % (user._id, thing._id)) @@ -120,8 +121,12 @@ def auth_transaction(amount, user, payid, thing, test = None): amount, user, payid, order = order, test = test) if success: - Bid._new(res.trans_id, user, payid, thing._id, amount) - return res.trans_id + if test: + return auth_transaction(amount, user, -1, thing, campaign, + test = test) + else: + Bid._new(res.trans_id, user, payid, thing._id, amount, campaign) + return res.trans_id, "" elif res is None: # we are in test mode! return auth_transaction(amount, user, -1, thing, test = test) @@ -132,45 +137,62 @@ def auth_transaction(amount, user, payid, thing, test = None): Bid.one(res.trans_id) except NotFound: Bid._new(res.trans_id, user, payid, thing._id, amount) - return res.trans_id + return res.trans_id, res.response_reason_text -def void_transaction(user, trans_id, test = None): - bid = Bid.one(trans_id) +def void_transaction(user, trans_id, campaign, test = None): + bid = Bid.one(transaction = trans_id, campaign = campaign) bid.void() - # verify that the transaction has the correct ownership - if bid.account_id == user._id and trans_id > 0: + if trans_id > 0: res = _make_transaction(ProfileTransVoid, None, user, None, trans_id = trans_id, test = test) return res -def charge_transaction(user, trans_id, test = None): - bid = Bid.one(trans_id) - bid.charged() - if trans_id < 0: - # freebies are automatically approved - return True - elif bid.account_id == user._id: - res = _make_transaction(ProfileTransPriorAuthCapture, - bid.bid, user, - bid.pay_id, trans_id = trans_id, - test = test) - return bool(res) - - -def refund_transaction(amount, user, trans_id, test = None): - bid = Bid.one(trans_id) - if trans_id > 0: - # create a new bid to identify the refund - success, res = _make_transaction(ProfileTransRefund, - amount, user, bid.pay_id, - trans_id = trans_id, - test = test) - if success: - bid = Bid._new(res.trans_id, user, -1, bid.thing_id, amount) - bid.refund() - return bool(res.trans_id) - +def is_charged_transaction(trans_id, campaign): + bid = Bid.one(transaction = trans_id, campaign = campaign) + return bid.is_charged() + +def charge_transaction(user, trans_id, campaign, test = None): + bid = Bid.one(transaction = trans_id, campaign = campaign) + if not bid.is_charged(): + 
bid.charged() + if trans_id < 0: + # freebies are automatically authorized + return True + elif bid.account_id == user._id: + res = _make_transaction(ProfileTransPriorAuthCapture, + bid.bid, user, + bid.pay_id, trans_id = trans_id, + test = test) + return bool(res) + + # already charged + return True + + +#def refund_transaction(amount, user, trans_id, campaign, test = None): +# bid = Bid.one(transaction = trans_id, campaign = campaign) +# if trans_id > 0: +# # create a new bid to identify the refund +# success, res = _make_transaction(ProfileTransRefund, +# amount, user, bid.pay_id, +# trans_id = trans_id, +# test = test) +# if success: +# bid = Bid._new(res.trans_id, user, -1, bid.thing_id, amount) +# bid.refund() +# return bool(res.trans_id) + +def get_transactions(*trans_keys): + from sqlalchemy import and_, or_ + + if trans_keys: + f = or_(*[and_(Bid.transaction == trans_id, Bid.campaign == camp) + for trans_id, camp in trans_keys]) + q = Bid.query() + q = q.filter(f) + return dict(((p.transaction, p.campaign), p) for p in q) + return {} diff --git a/r2/r2/lib/cache.py b/r2/r2/lib/cache.py index b6e24f6251..13c7b30bb1 100644 --- a/r2/r2/lib/cache.py +++ b/r2/r2/lib/cache.py @@ -29,8 +29,8 @@ import pycassa import cassandra.ttypes -from contrib import memcache -from utils import lstrips, in_chunks, tup +from r2.lib.contrib import memcache +from r2.lib.utils import lstrips, in_chunks, tup from r2.lib.hardcachebackend import HardCacheBackend from r2.lib.utils import trace @@ -71,7 +71,6 @@ def __init__(self, servers): memcache.Client.__init__(self, servers, pickleProtocol = 1) def set_multi(self, keys, prefix='', time=0): - new_keys = {} for k,v in keys.iteritems(): new_keys[str(k)] = v @@ -97,24 +96,67 @@ class CMemcache(CacheUtils): def __init__(self, servers, debug = False, - binary = True, noreply = False, - num_clients = 10): + no_block = False, + num_clients = 10, + legacy = False): self.servers = servers + self.legacy = legacy self.clients = pylibmc.ClientPool(n_slots = num_clients) for x in xrange(num_clients): - client = pylibmc.Client(servers, binary=binary) + client = pylibmc.Client(servers, binary=not self.legacy) behaviors = { - 'no_block': True, # use async I/O + 'no_block': no_block, # use async I/O 'cache_lookups': True, # cache DNS lookups 'tcp_nodelay': True, # no nagle - 'ketama': True, # consistant hashing '_noreply': int(noreply), 'verify_key': int(debug), # spend the CPU to verify keys } + + # so that we can drop this in to share keys with + # python-memcached, we have a 'legacy' mode that MD5s the + # keys and uses the old CRC32-modula mapping + if self.legacy: + behaviors['hash'] = 'crc' + else: + behaviors['ketama'] = True # consistent hashing + client.behaviors.update(behaviors) self.clients.put(client) + self.min_compress_len = 512*1024 + + def hashkey(fn): + # decorator to MD5 the key for a single-action command when + # self.legacy is set + def _fn(self, key, *a, **kw): + if self.legacy: + key = md5(key).hexdigest() + return fn(self, key, *a, **kw) + return _fn + + def hashmap(fn): + # same as hashkey, but for _multi commands taking a dictionary + def _fn(self, keys, *a, **kw): + if self.legacy: + prefix = kw.pop('prefix', '') + keys = dict((md5('%s%s' % (prefix, key)).hexdigest(), value) + for (key, value) + in keys.iteritems()) + return fn(self, keys, *a, **kw) + return _fn + + def hashkeys(fn): + # same as hashkey, but for _multi commands taking a list + def _fn(self, keys, *a, **kw): + if self.legacy: + prefix = kw.pop('prefix', '') + keys = [md5('%s%s' % 
(prefix, key)).hexdigest() + for key in keys] + return fn(self, keys, *a, **kw) + return _fn + + @hashkey def get(self, key, default = None): with self.clients.reserve() as mc: ret = mc.get(key) @@ -123,8 +165,22 @@ def get(self, key, default = None): return ret def get_multi(self, keys, prefix = ''): + if self.legacy: + # get_multi can't use @hashkeys for md5ing the keys + # because we have to map the returns too + keymap = dict((md5('%s%s' % (prefix, key)).hexdigest(), key) + for key in keys) + prefix = '' + else: + keymap = dict((str(key), key) + for key in keys) + with self.clients.reserve() as mc: - return mc.get_multi(keys, key_prefix = prefix) + ret = mc.get_multi(keymap.keys(), key_prefix = prefix) + + return dict((keymap[retkey], retval) + for (retkey, retval) + in ret.iteritems()) # simple_get_multi exists so that a cache chain can # single-instance the handling of prefixes for performance, but @@ -134,27 +190,50 @@ def get_multi(self, keys, prefix = ''): # them, so here it is simple_get_multi = get_multi + @hashkey def set(self, key, val, time = 0): with self.clients.reserve() as mc: - return mc.set(key, val, time = time) + return mc.set(key, val, time = time, + min_compress_len = self.min_compress_len) + @hashmap def set_multi(self, keys, prefix='', time=0): new_keys = {} for k,v in keys.iteritems(): new_keys[str(k)] = v with self.clients.reserve() as mc: return mc.set_multi(new_keys, key_prefix = prefix, + time = time, + min_compress_len = self.min_compress_len) + + @hashmap + def add_multi(self, keys, prefix='', time=0): + new_keys = {} + for k,v in keys.iteritems(): + new_keys[str(k)] = v + with self.clients.reserve() as mc: + return mc.add_multi(new_keys, key_prefix = prefix, time = time) + @hashkeys + def incr_multi(self, keys, prefix='', delta=1): + with self.clients.reserve() as mc: + return mc.incr_multi(map(str, keys), + key_prefix = prefix, + delta=delta) + + @hashkey def append(self, key, val, time=0): with self.clients.reserve() as mc: return mc.append(key, val, time=time) + @hashkey def incr(self, key, delta=1, time=0): # ignore the time on these with self.clients.reserve() as mc: return mc.incr(key, delta) + @hashkey def add(self, key, val, time=0): try: with self.clients.reserve() as mc: @@ -162,10 +241,12 @@ def add(self, key, val, time=0): except pylibmc.DataExists: return None + @hashkey def delete(self, key, time=0): with self.clients.reserve() as mc: return mc.delete(key) + @hashkeys def delete_multi(self, keys, prefix='', time=0): with self.clients.reserve() as mc: return mc.delete_multi(keys, time = time, @@ -330,7 +411,9 @@ def fn(self, *a, **kw): replace = make_set_fn('replace') set_multi = make_set_fn('set_multi') add = make_set_fn('add') + add_multi = make_set_fn('add_multi') incr = make_set_fn('incr') + incr_multi = make_set_fn('incr_multi') decr = make_set_fn('decr') delete = make_set_fn('delete') delete_multi = make_set_fn('delete_multi') @@ -539,7 +622,7 @@ def sgm(cache, keys, miss_fn, prefix='', time=0): nr = miss_fn([s_keys[i] for i in need]) nr = dict((str(k), v) for k,v in nr.iteritems()) r.update(nr) - cache.set_multi(nr, prefix, time = time) + cache.set_multi(nr, prefix=prefix, time = time) return dict((s_keys[k], v) for k,v in r.iteritems()) @@ -563,6 +646,21 @@ def test_cache(cache, prefix=''): assert cache.get_multi(('%sp_3' % prefix, '%sp_4' % prefix)) == {'%sp_3'%prefix: 3, '%sp_4'%prefix: 4} + # delete + cache.set('%s1'%prefix, 1) + assert cache.get('%s1'%prefix) == 1 + cache.delete('%s1'%prefix) + assert cache.get('%s1'%prefix) is 
None + + cache.set('%s1'%prefix, 1) + cache.set('%s2'%prefix, 2) + cache.set('%s3'%prefix, 3) + assert cache.get('%s1'%prefix) == 1 and cache.get('%s2'%prefix) == 2 + cache.delete_multi(['%s1'%prefix, '%s2'%prefix]) + assert (cache.get('%s1'%prefix) is None + and cache.get('%s2'%prefix) is None + and cache.get('%s3'%prefix) == 3) + #incr cache.set('%s5'%prefix, 1) cache.set('%s6'%prefix, 1) @@ -574,6 +672,72 @@ def test_cache(cache, prefix=''): assert cache.get('%s5'%prefix) == 5 assert cache.get('%s6'%prefix) == 2 +def test_cache_compat(caches, prefix=''): + """Test that that the keyspaces of a list of cache objects are + compatible. Used to compare legacy-mode CMemcache to + PyMemcache""" + import random + + def _gen(): + keys = dict((str(random.random()), x) for x in xrange(1000)) + prefixed = dict(('%s%s' % (prefix, key), value) + for (key, value) + in keys.iteritems()) + return keys, prefixed + + for cache in caches: + print 'with source == %r' % (cache,) + + # single-ops + keys, prefixed = _gen() + for key, val in prefixed.iteritems(): + for ocache in caches: + cache.set(key, val) + assert ocache.get(key) == val + + # double-prefixed + ocache.set('%s%s' % (prefix, key), val) + assert cache.get('%s%s' % (prefix, key)) == val + + # pre-prefixed + keys, prefixed = _gen() + cache.set_multi(prefixed) + for ocache in caches: + assert ocache.get_multi(prefixed.keys()) == prefixed + for key, val in prefixed.iteritems(): + assert ocache.get(key) == val + + # prefixed in place + keys, prefixed = _gen() + cache.set_multi(keys, prefix=prefix) + for ocache in caches: + assert ocache.get_multi(keys.keys()) == keys + for key, val in prefixed.iteritems(): + assert ocache.get(key) == val + + # prefixed on set + keys, prefixed = _gen() + cache.set_multi(keys, prefix=prefix) + for ocache in caches: + assert ocache.get_multi(prefixed.keys()) == keys + for key, val in prefixed.iteritems(): + assert ocache.get(key) == val + + # prefixed on get + keys, prefixed = _gen() + cache.set_multi(prefixed) + for ocache in caches: + assert ocache.get_multi(keys.keys(), prefix=prefix) == keys + for key, val in prefixed.iteritems(): + assert ocache.get(key) == val + + #delete + keys, prefixed = _gen() + for ocache in caches: + cache.set_multi(prefixed) + ocache.delete_multi(prefixed.keys()) + assert cache.get_multi(prefixed.keys()) == {} + def test_multi(cache): from threading import Thread diff --git a/r2/r2/lib/contrib/memcache.py b/r2/r2/lib/contrib/memcache.py index 30bc52ad06..93b4114ca2 100755 --- a/r2/r2/lib/contrib/memcache.py +++ b/r2/r2/lib/contrib/memcache.py @@ -47,7 +47,7 @@ import socket import time import os -from md5 import md5 +from hashlib import md5 import re import types try: @@ -69,9 +69,6 @@ def decompress(val): except ImportError: from StringIO import StringIO -from binascii import crc32 # zlib version is not cross-platform -serverHashFunction = crc32 - __author__ = "Evan Martin " __version__ = "1.43" __copyright__ = "Copyright (C) 2003 Danga Interactive" @@ -94,6 +91,97 @@ class _Error(Exception): class local(object): pass +# stolen wholesale from cmemcache_hash +# so that we're using the +# same hashing algorithm as pylibmc's 'crc' +crc32tab = ( + 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, + 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, + 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, + 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, + 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, + 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, + 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, + 
0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, + 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, + 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, + 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, + 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, + 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, + 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, + 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, + 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, + 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, + 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, + 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, + 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, + 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, + 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, + 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, + 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, + 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, + 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, + 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, + 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, + 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, + 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, + 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, + 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, + 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, + 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, + 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, + 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, + 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, + 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, + 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, + 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, + 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, + 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, + 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, + 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, + 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, + 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, + 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, + 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, + 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, + 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, + 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, + 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, + 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, + 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, + 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, + 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, + 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, + 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, + 0xaed16a4a, 0xd9d65adc, 0x40df0b66, 0x37d83bf0, + 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, + 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, + 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, + 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, + 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d +) +def serverHashFunction(key): + r"""Calculate a cmemcache-style CRC32 hash of *key*. + + Note that unlike cmemcache's version, this does calculate the key even if + only one server exists, mostly because we don't layer violate enough to + know how many servers there are or aren't. 
+ + >>> cmemcache_hash("Hello world") + 3030 + >>> cmemcache_hash("Hello worle") + 31953 + >>> cmemcache_hash("") + 1 + """ + crc = ~0 + + for c in key: + crc = ((crc & 0xffffffff) >> 8) ^ crc32tab[(crc ^ ord(c)) & 0xff] + + crc = int((~crc >> 16) & 0x7fff) + + return crc or 1 class Client(local): """ @@ -230,10 +318,7 @@ def _init_buckets(self): self.buckets.append(server) def _get_server(self, key): - if type(key) == types.TupleType: - serverhash, key = key - else: - serverhash = serverHashFunction(key) + serverhash = serverHashFunction(key) # the original version suffered from a failure rate of # 1 in n^_SERVER_RETRIES, where n = number of bukets @@ -295,7 +380,8 @@ def delete_multi(self, keys, time=0, key_prefix=''): server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(keys, key_prefix) - # send out all requests on each server before reading anything + # send out all requests on each server before reading + # anything. can this deadlock if the return buffer fills up? dead_servers = [] rc = 1 @@ -338,8 +424,8 @@ def delete(self, key, time=0): @param time: number of seconds any subsequent set / update commands should fail. Defaults to 0 for no delay. @rtype: int ''' - server, key = self._get_server(key) key = check_key(key) + server, key = self._get_server(key) if not server: return 0 self._statlog('delete') @@ -397,8 +483,8 @@ def decr(self, key, delta=1, time=0): return self._incrdecr("decr", key, delta) def _incrdecr(self, cmd, key, delta): - server, key = self._get_server(key) key = check_key(key) + server, key = self._get_server(key) if not server: return 0 self._statlog(cmd) @@ -501,26 +587,14 @@ def _map_and_prefix_keys(self, key_iterable, key_prefix): prefixed_to_orig_key = {} # build up a list for each server of all the keys we want. for orig_key in key_iterable: - if type(orig_key) is types.TupleType: - # Tuple of hashvalue, key ala _get_server(). Caller is essentially telling us what server to stuff this on. - # Ensure call to _get_server gets a Tuple as well. - str_orig_key = str(orig_key[1]) - server, key = self._get_server((orig_key[0], key_prefix + str_orig_key)) # Gotta pre-mangle key before hashing to a server. Returns the mangled key. - else: - str_orig_key = str(orig_key) # set_multi supports int / long keys. - server, key = self._get_server(key_prefix + str_orig_key) - - # Now check to make sure key length is proper ... - #changed by steve - #check_key(str_orig_key, key_extra_len=key_extra_len) - key = check_key(key_prefix + str_orig_key) + key = '%s%s' % (key_prefix, orig_key) + key = check_key(key) + server, key = self._get_server(key) if not server: continue - if not server_keys.has_key(server): - server_keys[server] = [] - server_keys[server].append(key) + server_keys.setdefault(server, []).append(key) prefixed_to_orig_key[key] = orig_key return (server_keys, prefixed_to_orig_key) @@ -653,8 +727,8 @@ def _val_to_store_info(self, val, min_compress_len): return (flags, len(val), val) def _set(self, cmd, key, val, time, min_compress_len = 0): - server, key = self._get_server(key) key = check_key(key) + server, key = self._get_server(key) if not server: return 0 @@ -678,8 +752,8 @@ def get(self, key): @return: The value or None. ''' - server, key = self._get_server(key) key = check_key(key) + server, key = self._get_server(key) if not server: return None @@ -960,7 +1034,6 @@ def check_key(key, key_extra_len=0): Contains control characters (Raises MemcachedKeyCharacterError). 
Is not a string (Raises MemcachedStringEncodingError) """ - if type(key) == types.TupleType: key = key[1] if not isinstance(key, str): raise Client.MemcachedStringEncodingError, ("Keys must be str()'s, not" "unicode. Convert your unicode strings using " diff --git a/r2/r2/lib/cssfilter.py b/r2/r2/lib/cssfilter.py index 1df3647131..6f6c997a18 100644 --- a/r2/r2/lib/cssfilter.py +++ b/r2/r2/lib/cssfilter.py @@ -30,6 +30,7 @@ from pylons.i18n import _ from mako import filters +import os import tempfile from r2.lib import s3cp from md5 import md5 @@ -192,7 +193,7 @@ def valid_url(prop,value,report): # the label -> image number lookup is stored on the subreddit if c.site.images.has_key(name): num = c.site.images[name] - value._setCssText("url(http:/%s%s_%d.png?v=%s)" + value._setCssText("url(http://%s/%s_%d.png?v=%s)" % (g.s3_thumb_bucket, c.site._fullname, num, randstr(36))) else: @@ -390,29 +391,32 @@ def save_sr_image(sr, data, resource = None): """ uploades image data to s3 as a PNG and returns its new url. Urls will be of the form: - http:/${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash} + http://${g.s3_thumb_bucket}/${sr._fullname}[_${num}].png?v=${md5hash} [Note: g.s3_thumb_bucket begins with a "/" so the above url is valid.] """ hash = md5(data).hexdigest() + f = tempfile.NamedTemporaryFile(suffix = '.png',delete=False) try: - f = tempfile.NamedTemporaryFile(suffix = '.png') f.write(data) - f.flush() + f.close() + + optimize_png(f.name, g.png_optimizer) + contents = open(f.name).read() if resource is not None: resource = "_%s" % resource else: resource = "" - resource = g.s3_thumb_bucket + sr._fullname + resource + ".png" + fname = resource = sr._fullname + resource + ".png" + + s3cp.send_file(g.s3_thumb_bucket, fname, contents, 'image/png') - s3cp.send_file(optimize_png(f.name, g.png_optimizer), resource, - 'image/png', 'public-read', None, False) finally: - f.close() + os.unlink(f.name) - return 'http:/%s%s?v=%s' % (g.s3_thumb_bucket, - resource.split('/')[-1], hash) + return 'http://%s/%s?v=%s' % (g.s3_thumb_bucket, + resource.split('/')[-1], hash) diff --git a/r2/r2/lib/db/queries.py b/r2/r2/lib/db/queries.py index 12a677f732..b4d2bf0c9b 100644 --- a/r2/r2/lib/db/queries.py +++ b/r2/r2/lib/db/queries.py @@ -1,5 +1,5 @@ -from r2.models import Account, Link, Comment, Vote, SaveHide -from r2.models import Message, Inbox, Subreddit, ModeratorInbox +from r2.models import Account, Link, Comment, Trial, Vote, SaveHide +from r2.models import Message, Inbox, Subreddit, ModContribSR, ModeratorInbox from r2.lib.db.thing import Thing, Merge from r2.lib.db.operators import asc, desc, timeago from r2.lib.db import query_queue @@ -296,7 +296,9 @@ def make_results(query, filter = filter_identity): return query def merge_results(*results): - if g.use_query_cache: + if not results: + return QueryishList([]) + elif g.use_query_cache: return MergedCachedResults(results) else: m = Merge(results, sort = results[0]._sort) @@ -342,7 +344,12 @@ def get_spam_comments(sr): return make_results(q_c) def get_spam(sr): - return get_spam_links(sr) + if isinstance(sr, ModContribSR): + srs = Subreddit._byID(sr.sr_ids(), return_dict=False) + results = [ get_spam_links(sr) for sr in srs ] + return merge_results(*results) + else: + return get_spam_links(sr) #return merge_results(get_spam_links(sr), # get_spam_comments(sr)) @@ -361,10 +368,79 @@ def get_reported_comments(sr): return make_results(q_c) def get_reported(sr): - return get_reported_links(sr) + if isinstance(sr, ModContribSR): + srs = 
Subreddit._byID(sr.sr_ids(), return_dict=False) + results = [ get_reported_links(sr) for sr in srs ] + return merge_results(*results) + else: + return get_reported_links(sr) #return merge_results(get_reported_links(sr), # get_reported_comments(sr)) +# TODO: Wow, what a hack. I'm doing this in a hurry to make +# /r/blah/about/trials and /r/blah/about/modqueue work. At some point +# before the heat death of the universe, we should start precomputing +# these things instead. That would require an "on_trial" attribute to be +# maintained on Links, a precomputer that keeps track of such links, +# and changes to: +# trial_utils.py: trial_info(), end_trial(), indict() +# trial.py: all_defendants_cache() +class QueryishList(list): + prewrap_fn = None + _rules = None + _sort = None + + @property + def sort(self): + return self._sort + + def _cursor(self): + return self + + def _filter(self): + return True + + @property + def data(self): + return [ (t._fullname, 2145945600) for t in self ] + # Jan 1 2038 ^^^^^^^^^^ + # so that trials show up before spam and reports + + def fetchone(self): + if self: + return self.pop(0) + else: + raise StopIteration + +def get_trials_links(sr): + l = Trial.defendants_by_sr(sr) + s = QueryishList(l) + s._sort = [db_sort('new')] + return s + +def get_trials(sr): + if isinstance(sr, ModContribSR): + srs = Subreddit._byID(sr.sr_ids(), return_dict=False) + return get_trials_links(srs) + else: + return get_trials_links(sr) + +def get_modqueue(sr): + results = [] + if isinstance(sr, ModContribSR): + srs = Subreddit._byID(sr.sr_ids(), return_dict=False) + results.append(get_trials_links(srs)) + + for sr in srs: + results.append(get_reported_links(sr)) + results.append(get_spam_links(sr)) + else: + results.append(get_trials_links(sr)) + results.append(get_reported_links(sr)) + results.append(get_spam_links(sr)) + + return merge_results(*results) + def get_domain_links(domain, sort, time): return DomainSearchQuery(domain, sort=search_sort[sort], timerange=time) @@ -889,7 +965,7 @@ def _run_commentstree(msgs, chan): def queue_vote(user, thing, dir, ip, organic = False, cheater = False, store = True): # set the vote in memcached so the UI gets updated immediately - key = "registered_vote_%s_%s" % (user._id, thing._fullname) + key = prequeued_vote_key(user, thing) g.cache.set(key, '1' if dir is True else '0' if dir is None else '-1') # queue the vote to be stored unless told not to if store: @@ -900,19 +976,50 @@ def queue_vote(user, thing, dir, ip, organic = False, else: handle_vote(user, thing, dir, ip, organic) +def prequeued_vote_key(user, item): + return 'registered_vote_%s_%s' % (user._id, item._fullname) + def get_likes(user, items): if not user or not items: return {} keys = {} res = {} - for i in items: - keys['registered_vote_%s_%s' % (user._id, i._fullname)] = (user, i) + keys = dict((prequeued_vote_key(user, item), (user,item)) + for item in items) r = g.cache.get_multi(keys.keys()) # populate the result set based on what we fetched from the cache first for k, v in r.iteritems(): res[keys[k]] = v + # performance hack: if their last vote came in before this thing + # was created, they can't possibly have voted on it + cantexist = {} + for item in items: + if (user, item) in res: + continue + + last_vote_attr_name = 'last_vote_' + item.__class__.__name__ + last_vote = getattr(user, last_vote_attr_name, None) + if not last_vote: + continue + + try: + if last_vote < item._date: + res[(user, item)] = '0' + cantexist[prequeued_vote_key(user, item)] = '0' + except 
TypeError: + g.log.error("user %s has a broken %s? (%r)" + % (user._id, last_vote_attr_name, last_vote)) + # accounts for broken last_vote properties + pass + + # this is a bit dodgy, but should save us from having to reload + # all of the votes on pages they've already loaded as soon as they + # cast a new vote + if cantexist: + g.cache.set_multi(cantexist) + # now hit the vote db with the remainder likes = Vote.likes(user, [i for i in items if (user, i) not in res]) diff --git a/r2/r2/lib/db/tdb_sql.py b/r2/r2/lib/db/tdb_sql.py index 246b1ef13b..6e61490dce 100644 --- a/r2/r2/lib/db/tdb_sql.py +++ b/r2/r2/lib/db/tdb_sql.py @@ -330,7 +330,7 @@ def get_read_table(tables): ip_weights.extend((ip, .01) for ip in no_connections) #rebalance the weights - total_weight = sum(w[1] for w in ip_weights) + total_weight = sum(w[1] for w in ip_weights) or 1 ip_weights = [(ip, weight / total_weight) for ip, weight in ip_weights] diff --git a/r2/r2/lib/db/thing.py b/r2/r2/lib/db/thing.py index d662b3a476..02de55bd8c 100644 --- a/r2/r2/lib/db/thing.py +++ b/r2/r2/lib/db/thing.py @@ -264,7 +264,7 @@ def _load_multi(cls, need): prefix = thing_prefix(cls.__name__) #write the data to the cache - cache.set_multi(to_save, prefix) + cache.set_multi(to_save, prefix=prefix) def _load(self): self._load_multi(self) @@ -708,7 +708,7 @@ def _delete(self): self._name = 'un' + self._name @classmethod - def _fast_query(cls, thing1s, thing2s, name, data=True): + def _fast_query(cls, thing1s, thing2s, name, data=True, eager_load=True): """looks up all the relationships between thing1_ids and thing2_ids and caches them""" prefix = thing_prefix(cls.__name__) @@ -738,7 +738,7 @@ def items_db(pairs): q = cls._query(cls.c._thing1_id == t1_ids, cls.c._thing2_id == t2_ids, cls.c._name == names, - eager_load = True, + eager_load = eager_load, data = data) rel_ids = {} @@ -747,7 +747,7 @@ def items_db(pairs): #relations with the same keys #l = rel_ids.setdefault((rel._thing1_id, rel._thing2_id), []) #l.append(rel._id) - rel_ids[(rel._thing1._id, rel._thing2._id, rel._name)] = rel._id + rel_ids[(rel._thing1_id, rel._thing2_id, rel._name)] = rel._id for p in pairs: if p not in rel_ids: @@ -1056,6 +1056,7 @@ def undone(pairs): return [p for p in pairs if not p[2]] pairs = undone(safe_next(c) for c in cursors) + while pairs: #only one query left, just dump it if len(pairs) == 1: @@ -1193,7 +1194,7 @@ def _query(cls, *rules, **kw): return Merge(queries) @classmethod - def _fast_query(cls, sub, obj, name, data=True): + def _fast_query(cls, sub, obj, name, data=True, eager_load=True): #divide into types def type_dict(items): types = {} @@ -1210,7 +1211,7 @@ def type_dict(items): t1, t2 = types if sub_dict.has_key(t1) and obj_dict.has_key(t2): res.update(rel._fast_query(sub_dict[t1], obj_dict[t2], name, - data = True)) + data = data, eager_load=eager_load)) return res diff --git a/r2/r2/lib/db/userrel.py b/r2/r2/lib/db/userrel.py index 32ef0dce4e..d7431c5b25 100644 --- a/r2/r2/lib/db/userrel.py +++ b/r2/r2/lib/db/userrel.py @@ -67,7 +67,8 @@ def userrel_remove(self, user): @memoize(ids_fn_name) def userrel_ids(self): q = relation._query(relation.c._thing1_id == self._id, - relation.c._name == name) + relation.c._name == name, + sort = "_date") #removed set() here, shouldn't be required return [r._thing2_id for r in q] diff --git a/r2/r2/lib/emailer.py b/r2/r2/lib/emailer.py index 192f1908f2..f211f9baea 100644 --- a/r2/r2/lib/emailer.py +++ b/r2/r2/lib/emailer.py @@ -62,7 +62,7 @@ def verify_email(user, dest): 
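# The emailer changes below route the verification-link print through
# g.log.debug and move promo-mail rendering into _promo_email itself,
# which now forwards per-kind keyword context to the Promo_Email
# template. A hedged sketch of the new call shapes; the argument
# values here are purely illustrative.
from datetime import date
from r2.lib import emailer

def notify_sponsor_of_bid(link):
    # bid amount, start date, and transaction id now travel with the
    # email so the template can render them at send time
    emailer.promo_bid(link, bid=500.0, start_date=date(2010, 6, 1))
    emailer.queue_promo(link, bid=500.0, trans_id=1234)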
Award.take_away("verified_email", user) emaillink = ('http://' + g.domain + '/verification/' + key + query_string(dict(dest=dest))) - print "Generated email verification link: " + emaillink + g.log.debug("Generated email verification link: " + emaillink) g.cache.set("email_verify_%s" %key, user._id, time=1800) _system_email(user.email, @@ -118,7 +118,7 @@ def send_queued_mail(test = False): """sends mail from the mail queue to smtplib for delivery. Also, on successes, empties the mail queue and adds all emails to the sent_mail list.""" - from r2.lib.pages import PasswordReset, Share, Mail_Opt, VerifyEmail, Promo_Email + from r2.lib.pages import PasswordReset, Share, Mail_Opt, VerifyEmail now = datetime.datetime.now(g.tz) if not c.site: c.site = Default @@ -164,17 +164,6 @@ def sendmail(email): elif email.kind == Email.Kind.OPTIN: email.body = Mail_Opt(msg_hash = email.msg_hash, leave = False).render(style = "email") - elif email.kind in (Email.Kind.ACCEPT_PROMO, - Email.Kind.REJECT_PROMO, - Email.Kind.QUEUED_PROMO, - Email.Kind.LIVE_PROMO, - Email.Kind.BID_PROMO, - Email.Kind.FINISHED_PROMO, - Email.Kind.NEW_PROMO): - email.body = Promo_Email(link = email.thing, - kind = email.kind, - body = email.body).render(style="email") - # handle unknown types here elif not email.body: email.set_sent(rejected = True) @@ -208,8 +197,11 @@ def opt_in(msg_hash): return email, removed -def _promo_email(thing, kind, body = ""): +def _promo_email(thing, kind, body = "", **kw): + from r2.lib.pages import Promo_Email a = Account._byID(thing.author_id) + body = Promo_Email(link = thing, kind = kind, + body = body, **kw).render(style = "email") return _system_email(a.email, body, kind, thing = thing, reply_to = "selfservicesupport@reddit.com") @@ -217,8 +209,9 @@ def _promo_email(thing, kind, body = ""): def new_promo(thing): return _promo_email(thing, Email.Kind.NEW_PROMO) -def promo_bid(thing): - return _promo_email(thing, Email.Kind.BID_PROMO) +def promo_bid(thing, bid, start_date): + return _promo_email(thing, Email.Kind.BID_PROMO, bid = bid, + start_date = start_date) def accept_promo(thing): return _promo_email(thing, Email.Kind.ACCEPT_PROMO) @@ -226,8 +219,9 @@ def accept_promo(thing): def reject_promo(thing, reason = ""): return _promo_email(thing, Email.Kind.REJECT_PROMO, reason) -def queue_promo(thing): - return _promo_email(thing, Email.Kind.QUEUED_PROMO) +def queue_promo(thing, bid, trans_id): + return _promo_email(thing, Email.Kind.QUEUED_PROMO, bid = bid, + trans_id = trans_id) def live_promo(thing): return _promo_email(thing, Email.Kind.LIVE_PROMO) diff --git a/r2/r2/lib/jsontemplates.py b/r2/r2/lib/jsontemplates.py index fb67083295..37a5ea3b96 100644 --- a/r2/r2/lib/jsontemplates.py +++ b/r2/r2/lib/jsontemplates.py @@ -231,20 +231,21 @@ class LinkJsonTemplate(ThingJsonTemplate): num_comments = "num_comments", subreddit = "subreddit", subreddit_id = "subreddit_id", + is_self = "is_self", permalink = "permalink" ) def thing_attr(self, thing, attr): - from r2.lib.scraper import scrapers + from r2.lib.scraper import get_media_embed if attr == "media_embed": if (thing.media_object and not isinstance(thing.media_object, basestring)): - scraper = scrapers[thing.media_object['type']] - media_embed = scraper.media_embed(**thing.media_object) - return dict(scrolling = media_embed.scrolling, - width = media_embed.width, - height = media_embed.height, - content = media_embed.content) + media_embed = get_media_embed(thing.media_object) + if media_embed: + return dict(scrolling = media_embed.scrolling, + 
width = media_embed.width, + height = media_embed.height, + content = media_embed.content) return dict() elif attr == 'subreddit': return thing.subreddit.name @@ -275,7 +276,8 @@ class CommentJsonTemplate(ThingJsonTemplate): likes = "likes", author = "author", link_id = "link_id", - sr_id = "sr_id", + subreddit = "subreddit", + subreddit_id = "subreddit_id", parent_id = "parent_id", ) @@ -283,10 +285,10 @@ def thing_attr(self, thing, attr): from r2.models import Comment, Link, Subreddit if attr == 'link_id': return make_fullname(Link, thing.link_id) - elif attr == 'sr_id': - if hasattr(thing, attr): - return make_fullname(Subreddit, thing.sr_id) - return None + elif attr == 'subreddit': + return thing.subreddit.name + elif attr == 'subreddit_id': + return thing.subreddit._fullname elif attr == "parent_id": if getattr(thing, "parent_id", None): return make_fullname(Comment, thing.parent_id) diff --git a/r2/r2/lib/media.py b/r2/r2/lib/media.py index 68a2c0ec1a..6911e0e71d 100644 --- a/r2/r2/lib/media.py +++ b/r2/r2/lib/media.py @@ -31,6 +31,7 @@ from r2.lib import amqp from r2.lib.contrib.nymph import optimize_png +import os import tempfile import traceback @@ -40,19 +41,26 @@ def thumbnail_url(link): """Given a link, returns the url for its thumbnail based on its fullname""" - return 'http:/%s%s.png' % (s3_thumbnail_bucket, link._fullname) + return 'http://%s/%s.png' % (s3_thumbnail_bucket, link._fullname) def upload_thumb(link, image): """Given a link and an image, uploads the image to s3 into an image based on the link's fullname""" - f = tempfile.NamedTemporaryFile(suffix = '.png') - image.save(f) - - resource = s3_thumbnail_bucket + link._fullname + '.png' - log.debug('uploading to s3: %s' % link._fullname) - s3cp.send_file(optimize_png(f.name, g.png_optimizer), - resource, 'image/png', 'public-read', None, False) - log.debug('thumbnail %s: %s' % (link._fullname, thumbnail_url(link))) + f = tempfile.NamedTemporaryFile(suffix = '.png', delete=False) + try: + image.save(f) + f.close() + g.log.debug("optimizing %s in %s" % (link._fullname,f.name)) + optimize_png(f.name, g.png_optimizer) + contents = open(f.name).read() + + s3fname = link._fullname + '.png' + + log.debug('uploading to s3: %s' % link._fullname) + s3cp.send_file(g.s3_thumb_bucket, s3fname, contents, 'image/png', never_expire=True) + log.debug('thumbnail %s: %s' % (link._fullname, thumbnail_url(link))) + finally: + os.unlink(f.name) def update_link(link, thumbnail, media_object): diff --git a/r2/r2/lib/menus.py b/r2/r2/lib/menus.py index 483a9a8f3b..ce9e156e7e 100644 --- a/r2/r2/lib/menus.py +++ b/r2/r2/lib/menus.py @@ -159,7 +159,8 @@ def __getattr__(self, attr): new_promo = _('create promotion'), my_current_promos = _('my promoted links'), current_promos = _('all promoted links'), - future_promos = _('unapproved'), + future_promos = _('unseen'), + roadblock = _('roadblock'), graph = _('analytics'), live_promos = _('live'), unpaid_promos = _('unpaid'), @@ -310,8 +311,10 @@ def cachable_attrs(self): class SubredditButton(NavButton): def __init__(self, sr): + from r2.models.subreddit import Mod self.path = sr.path - NavButton.__init__(self, sr.name, sr.path, False, + name = 'mod' if sr == Mod else sr.name + NavButton.__init__(self, name, sr.path, False, isselected = (c.site == sr)) def build(self, base_path = ''): diff --git a/r2/r2/lib/migrate.py b/r2/r2/lib/migrate.py index 25a8d1fc24..a47b4322b2 100644 --- a/r2/r2/lib/migrate.py +++ b/r2/r2/lib/migrate.py @@ -35,83 +35,6 @@ def add_allow_top_to_srs(): for sr in 
fetch_things2(q): sr.allow_top = True; sr._commit() -def convert_promoted(): - """ - should only need to be run once to update old style promoted links - to the new style. - """ - from r2.lib.utils import fetch_things2 - from r2.lib import authorize - - q = Link._query(Link.c.promoted == (True, False), - sort = desc("_date")) - sr_id = PromoteSR._id - bid = 100 - with g.make_lock(promoted_lock_key): - promoted = {} - set_promoted({}) - for l in fetch_things2(q): - print "updating:", l - try: - if not l._loaded: l._load() - # move the promotion into the promo subreddit - l.sr_id = sr_id - # set it to accepted (since some of the update functions - # check that it is not already promoted) - l.promote_status = STATUS.accepted - author = Account._byID(l.author_id) - l.promote_trans_id = authorize.auth_transaction(bid, author, -1, l) - l.promote_bid = bid - l.maximum_clicks = None - l.maximum_views = None - # set the dates - start = getattr(l, "promoted_on", l._date) - until = getattr(l, "promote_until", None) or \ - (l._date + timedelta(1)) - l.promote_until = None - update_promo_dates(l, start, until) - # mark it as promoted if it was promoted when we got there - if l.promoted and l.promote_until > datetime.now(g.tz): - l.promote_status = STATUS.pending - else: - l.promote_status = STATUS.finished - - if not hasattr(l, "disable_comments"): - l.disable_comments = False - # add it to the auction list - if l.promote_status == STATUS.pending and l._fullname not in promoted: - promoted[l._fullname] = auction_weight(l) - l._commit() - except AttributeError: - print "BAD THING:", l - print promoted - set_promoted(promoted) - # run what is normally in a cron job to clear out finished promos - #promote_promoted() - -def store_market(): - - """ - create index ix_promote_date_actual_end on promote_date(actual_end); - create index ix_promote_date_actual_start on promote_date(actual_start); - create index ix_promote_date_start_date on promote_date(start_date); - create index ix_promote_date_end_date on promote_date(end_date); - - alter table promote_date add column account_id bigint; - create index ix_promote_date_account_id on promote_date(account_id); - alter table promote_date add column bid real; - alter table promote_date add column refund real; - - """ - - for p in PromoteDates.query().all(): - l = Link._by_fullname(p.thing_name, True) - if hasattr(l, "promote_bid") and hasattr(l, "author_id"): - p.account_id = l.author_id - p._commit() - PromoteDates.update(l, l._date, l.promote_until) - PromoteDates.update_bid(l) - def subscribe_to_blog_and_annoucements(filename): import re from time import sleep @@ -207,6 +130,8 @@ def load_accounts(inbox_rel): queries.get_unread_comments(a).update() queries.get_unread_selfreply(a).update() + + def pushup_permacache(verbosity=1000): """When putting cassandra into the permacache chain, we need to push everything up into the rest of the chain, so this is @@ -345,57 +270,54 @@ def by_url_key(url, prefix=''): in old.iteritems()) g.permacache.set_multi(new) -def _progress(it, verbosity=100, key=repr, estimate=None, persec=False): - """An iterator that yields everything from `it', but prints progress - information along the way, including time-estimates if - possible""" +# alter table bids DROP constraint bids_pkey; +# alter table bids add column campaign integer; +# update bids set campaign = 0; +# alter table bids ADD primary key (transaction, campaign); +def promote_v2(): + # alter table bids add column campaign integer; + # update bids set campaign = 0; + from 
r2.models import Link, NotFound, PromoteDates, Bid from datetime import datetime - import sys - - now = start = datetime.now() - elapsed = start - start - - print 'Starting at %s' % (start,) - - seen = 0 - for item in it: - seen += 1 - if seen % verbosity == 0: - now = datetime.now() - elapsed = now - start - elapsed_seconds = elapsed.days * 86400 + elapsed.seconds - - if estimate: - remaining = ((elapsed/seen)*estimate)-elapsed - completion = now + remaining - count_str = ('%d/%d %.2f%%' - % (seen, estimate, float(seen)/estimate*100)) - estimate_str = (' (%s remaining; completion %s)' - % (remaining, completion)) - else: - count_str = '%d' % seen - estimate_str = '' - - if key: - key_str = ': %s' % key(item) - else: - key_str = '' + from pylons import g + for p in PromoteDates.query(): + try: + l = Link._by_fullname(p.thing_name, + data = True, return_dict = False) + if not l: + raise NotFound, p.thing_name + + # update the promote status + l.promoted = True + l.promote_status = getattr(l, "promote_status", STATUS.unseen) + l._date = datetime(*(list(p.start_date.timetuple()[:7]) + [g.tz])) + set_status(l, l.promote_status) + + # add new campaign + print (l, (p.start_date, p.end_date), p.bid, None) + if not p.bid: + print "no bid? ", l + p.bid = 20 + new_campaign(l, (p.start_date, p.end_date), p.bid, None) + print "updated: %s (%s)" % (l, l._date) + + except NotFound: + print "NotFound: %s" % p.thing_name + + print "updating campaigns" + for b in Bid.query(): + l = Link._byID(int(b.thing_id)) + print "updating: ", l + campaigns = getattr(l, "campaigns", {}).copy() + indx = b.campaign + if indx in campaigns: + sd, ed, bid, sr, trans_id = campaigns[indx] + campaigns[indx] = sd, ed, bid, sr, b.transaction + l.campaigns = campaigns + l._commit() + else: + print "no campaign information: ", l - if persec and elapsed_seconds > 0: - persec_str = ' (%.2f/s)' % (seen/elapsed_seconds,) - else: - persec_str = '' - - sys.stdout.write('%s%s, %s%s%s\n' - % (count_str, persec_str, - elapsed, estimate_str, key_str)) - sys.stdout.flush() - this_chunk = 0 - yield item - - now = datetime.now() - elapsed = now - start - print 'Processed %d items in %s..%s (%s)' % (seen, start, now, elapsed) def shorten_byurl_keys(): """We changed by_url keys from a format like @@ -412,7 +334,7 @@ def shorten_byurl_keys(): from pylons import g from r2.lib.utils import fetch_things2, in_chunks from r2.lib.db.operators import desc - from r2.lib.utils import base_url + from r2.lib.utils import base_url, progress # from link.py def old_by_url_key(url): @@ -435,7 +357,7 @@ def new_by_url_key(url): sort=desc('_date')) for links in ( in_chunks( - _progress( + progress( fetch_things2(l_q, verbosity), key = lambda link: link._date, verbosity=verbosity, diff --git a/r2/r2/lib/organic.py b/r2/r2/lib/organic.py index 970b7d545a..cd5c63b2c4 100644 --- a/r2/r2/lib/organic.py +++ b/r2/r2/lib/organic.py @@ -24,7 +24,6 @@ from r2.lib.normalized_hot import get_hot from r2.lib import count from r2.lib.utils import UniqueIterator, timeago -from r2.lib.promote import random_promoted from pylons import c @@ -33,65 +32,13 @@ organic_lifetime = 5*60 organic_length = 30 +organic_max_length= 50 -# how many regular organic links should show between promoted ones -promoted_every_n = 5 - -def keep_link(link): - return link.fresh - -def insert_promoted(link_names, sr_ids, logged_in): - """ - Inserts promoted links into an existing organic list. 
Destructive - on `link_names' - """ - promoted_items = random_promoted() - - if not promoted_items: - return - - # no point in running the builder over more promoted links than - # we'll even use - max_promoted = max(1,len(link_names)/promoted_every_n) - - # remove any that the user has acted on - def keep(item): - if c.user_is_loggedin and c.user._id == item.author_id: - return True - else: - return item.keep_item(item) - - builder = IDBuilder(promoted_items, keep_fn = keep, - skip = True, num = max_promoted) - promoted_items = builder.get_items()[0] - - if not promoted_items: - return - # don't insert one at the head of the list 50% of the time for - # logged in users, and 50% of the time for logged-off users when - # the pool of promoted links is less than 3 (to avoid showing the - # same promoted link to the same person too often) - if (logged_in or len(promoted_items) < 3) and random.choice((True,False)): - promoted_items.insert(0, None) - - # insert one promoted item for every N items - for i, item in enumerate(promoted_items): - pos = i * promoted_every_n + i - if pos > len(link_names): - break - elif item is None: - continue - else: - link_names.insert(pos, item._fullname) - -@memoize('cached_organic_links2', time = organic_lifetime) -def cached_organic_links(user_id, langs): - if user_id is None: - sr_ids = Subreddit.default_subreddits() - else: - user = Account._byID(user_id, data=True) - sr_ids = Subreddit.user_subreddits(user) +def keep_fresh_links(item): + return (c.user_is_loggedin and c.user._id == item.author_id) or item.fresh +@memoize('cached_organic_links', time = organic_lifetime) +def cached_organic_links(*sr_ids): sr_count = count.get_link_counts() #only use links from reddits that you're subscribed to link_names = filter(lambda n: sr_count[n][1] in sr_ids, sr_count.keys()) @@ -100,10 +47,10 @@ def cached_organic_links(user_id, langs): if not link_names and g.debug: q = All.get_links('new', 'all') q._limit = 100 # this decomposes to a _query - link_names = [x._fullname for x in q] + link_names = [x._fullname for x in q if x.promoted is None] g.log.debug('Used inorganic links') - #potentially add a up and coming link + #potentially add an up and coming link if random.choice((True, False)) and sr_ids: sr = Subreddit._byID(random.choice(sr_ids)) fnames = get_hot([sr], True)[0] @@ -114,23 +61,6 @@ def cached_organic_links(user_id, langs): new_item = random.choice(fnames[1:4]) link_names.insert(0, new_item) - insert_promoted(link_names, sr_ids, user_id is not None) - - # remove any that the user has acted on - builder = IDBuilder(link_names, - skip = True, keep_fn = keep_link, - num = organic_length) - link_names = [ x._fullname for x in builder.get_items()[0] ] - - #if not logged in, don't reset the count. 
if we did that we might get in a - #cycle where the cache will return the same link over and over - if user_id: - update_pos(0) - - # remove any duplicates caused by insert_promoted if the user is logged in - if user_id: - link_names = list(UniqueIterator(link_names)) - return link_names def organic_links(user): @@ -140,20 +70,14 @@ def organic_links(user): # make sure that these are sorted so the cache keys are constant sr_ids.sort() - if c.user_is_loggedin: - links = cached_organic_links(user._id, None) - else: - links = cached_organic_links(None, c.content_langs) - - pos = organic_pos() + # get the default subreddits if the user is not logged in + user_id = None if isinstance(user, FakeAccount) else user + sr_ids = Subreddit.user_subreddits(user, True) - # Make sure that links[pos] exists. Or, if links is [], at least set pos=0 - if not links: - pos = 0 - elif pos != 0: - pos = pos % len(links) - - return links, pos + # pass the cached function a sorted list so that we can guarantee + # cachability + sr_ids.sort() + return cached_organic_links(*sr_ids)[:organic_max_length] def update_pos(pos): "Update the user's current position within the cached organic list." diff --git a/r2/r2/lib/pages/pages.py b/r2/r2/lib/pages/pages.py index 7835cf8320..f010ad83a8 100644 --- a/r2/r2/lib/pages/pages.py +++ b/r2/r2/lib/pages/pages.py @@ -22,8 +22,8 @@ from r2.lib.wrapped import Wrapped, Templated, CachedTemplate from r2.models import Account, Default, make_feedurl from r2.models import FakeSubreddit, Subreddit, Ad, AdSR -from r2.models import Friends, All, Sub, NotFound, DomainSR -from r2.models import Link, Printable, Trophy, bidding, PromoteDates +from r2.models import Friends, All, Sub, NotFound, DomainSR, Random, Mod, RandomNSFW +from r2.models import Link, Printable, Trophy, bidding, PromotionWeights from r2.config import cache from r2.lib.tracking import AdframeInfo from r2.lib.jsonresponse import json_respond @@ -47,10 +47,11 @@ from r2.lib.utils import trunc_time from r2.lib.template_helpers import add_sr, get_domain from r2.lib.subreddit_search import popular_searches -from r2.lib.scraper import scrapers +from r2.lib.scraper import get_media_embed from r2.lib.log import log_text +from r2.lib.memoize import memoize -import sys, random, datetime, locale, calendar, simplejson, re +import sys, random, datetime, locale, calendar, simplejson, re, time import graph, pycountry from itertools import chain from urllib import quote @@ -277,12 +278,16 @@ def build_toolbars(self): """Sets the layout of the navigation topbar on a Reddit. 
The result is a list of menus which will be rendered in order and displayed at the top of the Reddit.""" - main_buttons = [NamedButton('hot', dest='', aliases=['/hot']), - NamedButton('new'), - NamedButton('controversial'), - NamedButton('top'), - NamedButton('saved', False) - ] + if c.site == Friends: + main_buttons = [NamedButton('new', dest='', aliases=['/hot']), + NamedButton('comments')] + else: + main_buttons = [NamedButton('hot', dest='', aliases=['/hot']), + NamedButton('new'), + NamedButton('controversial'), + NamedButton('top'), + NamedButton('saved', False) + ] more_buttons = [] @@ -705,17 +710,18 @@ def __init__(self, link = None, comment = None, self.link = self.link_listing.things[0] link_title = ((self.link.title) if hasattr(self.link, 'title') else '') - if comment: - if comment._deleted and not c.user_is_admin: - author = _("[deleted]") - else: - author = Account._byID(comment.author_id, data=True).name - params = {'author' : author, 'title' : _force_unicode(link_title)} - title = strings.permalink_title % params - else: - params = {'title':_force_unicode(link_title), 'site' : c.site.name} - title = strings.link_info_title % params + # defaults whether or not there is a comment + params = {'title':_force_unicode(link_title), 'site' : c.site.name} + title = strings.link_info_title % params + + # only modify the title if the comment/author are neither deleted nor spam + if comment and not comment._deleted and not comment._spam: + author = Account._byID(comment.author_id, data=True) + + if not author._deleted and not author._spam: + params = {'author' : author.name, 'title' : _force_unicode(link_title)} + title = strings.permalink_title % params self.subtitle = subtitle @@ -903,17 +909,18 @@ def build_toolbars(self): def rightbox(self): rb = Reddit.rightbox(self) - rb.push(ProfileBar(self.user)) - if c.user_is_admin: - from admin_pages import AdminSidebar - rb.append(AdminSidebar(self.user)) tc = TrophyCase(self.user) helplink = ( "/help/awards", _("what's this?") ) scb = SideContentBox(title=_("trophy case"), helplink=helplink, content=[tc], extra_class="trophy-area") - rb.append(scb) + + rb.push(scb) + if c.user_is_admin: + from admin_pages import AdminSidebar + rb.push(AdminSidebar(self.user)) + rb.push(ProfileBar(self.user)) return rb @@ -1026,7 +1033,7 @@ def my_reddits_dropdown(self): return SubredditMenu(drop_down_buttons, title = _('my reddits'), type = 'srdrop') - + def subscribed_reddits(self): srs = [SubredditButton(sr) for sr in sorted(self.my_reddits, @@ -1036,28 +1043,44 @@ def subscribed_reddits(self): ] return NavMenu(srs, type='flatlist', separator = '-', - _id = 'sr-bar') + css_class = 'sr-bar') def popular_reddits(self, exclude=[]): exclusions = set(exclude) buttons = [SubredditButton(sr) for sr in self.pop_reddits if sr not in exclusions] - + return NavMenu(buttons, type='flatlist', separator = '-', - _id = 'sr-bar') + css_class = 'sr-bar', _id = 'sr-bar') + def special_reddits(self): + reddits = [All, Random] + if getattr(c.site, "over_18", False): + reddits.append(RandomNSFW) + if c.user_is_loggedin: + if c.user.friends: + reddits.append(Friends) + if c.show_mod_mail: + reddits.append(Mod) + return NavMenu([SubredditButton(sr) for sr in reddits], + type = 'flatlist', separator = '-', + css_class = 'sr-bar') + def sr_bar (self): + sep = ' | ' menus = [] + menus.append(self.special_reddits()) + menus.append(RawString(sep)) + if not c.user_is_loggedin: menus.append(self.popular_reddits()) else: if len(self.my_reddits) > g.sr_dropdown_threshold: - 
menus.append(self.my_reddits_dropdown()) + menus = [self.my_reddits_dropdown()] + menus menus.append(self.subscribed_reddits()) - sep = ' – ' menus.append(RawString(sep)) @@ -1263,7 +1286,6 @@ def add_props(cls, user, wrapped): query_string(submit_url_options)) else: w.tblink = add_sr("/tb/"+w._id36) - w.upstyle = "mod" if w.likes else "" w.downstyle = "mod" if w.likes is False else "" if not c.user_is_loggedin: @@ -1280,7 +1302,6 @@ def __init__(self, captcha = None, url = '', title= '', subreddits = (), tabs = (('link', ('link-desc', 'url-field')), ('text', ('text-desc', 'text-field'))) all_fields = set(chain(*(parts for (tab, parts) in tabs))) - buttons = [] self.default_tabs = tabs[0][1] self.default_tab = tabs[0][0] @@ -1289,7 +1310,6 @@ def __init__(self, captcha = None, url = '', title= '', subreddits = (), to_hide = ','.join('#' + p for p in all_fields if p not in parts) onclick = "return select_form_tab(this, '%s', '%s');" onclick = onclick % (to_show, to_hide) - if tab_name == self.default_tab: self.default_show = to_show self.default_hide = to_hide @@ -1385,6 +1405,10 @@ class ButtonDemoPanel(Templated): """The page for showing the different styles of embeddable voting buttons""" pass +class UpgradeButtons(Templated): + """The page for showing the different styles of embeddable voting buttons""" + pass + class SelfServeBlurb(Templated): pass @@ -2010,6 +2034,9 @@ def __init__(self, original_path, subreddit, sub_domain): class FrameBuster(Templated): pass +class SelfServiceOatmeal(Templated): + pass + class PromotePage(Reddit): create_reddit_box = False submit_box = False @@ -2018,7 +2045,8 @@ class PromotePage(Reddit): def __init__(self, title, nav_menus = None, *a, **kw): buttons = [NamedButton('new_promo')] - if c.user_is_admin: + if c.user_is_sponsor: + buttons.append(NamedButton('roadblock')) buttons.append(NamedButton('current_promos', dest = '')) else: buttons.append(NamedButton('my_current_promos', dest = '')) @@ -2045,12 +2073,13 @@ class PromoteLinkForm(Templated): def __init__(self, sr = None, link = None, listing = '', timedeltatext = '', *a, **kw): bids = [] - if c.user_is_admin and link: + if c.user_is_sponsor and link: + self.author = Account._byID(link.author_id) try: bids = bidding.Bid.lookup(thing_id = link._id) bids.sort(key = lambda x: x.date, reverse = True) except NotFound: - bids = [] + pass # reference "now" to what we use for promotions now = promote.promo_datetime_now() @@ -2060,22 +2089,70 @@ def __init__(self, sr = None, link = None, listing = '', business_days = True) - datetime.timedelta(1)) - if link: - startdate = link._date - enddate = link.promote_until - else: - startdate = mindate + datetime.timedelta(1) - enddate = startdate + datetime.timedelta(1) + startdate = mindate + datetime.timedelta(1) + enddate = startdate + datetime.timedelta(3) self.startdate = startdate.strftime("%m/%d/%Y") self.enddate = enddate .strftime("%m/%d/%Y") + self.mindate = mindate .strftime("%m/%d/%Y") - Templated.__init__(self, sr = sr, link = link, - datefmt = datefmt, bids = bids, - timedeltatext = timedeltatext, - listing = listing, - *a, **kw) + self.link = None + if link: + self.sr_searches = simplejson.dumps(popular_searches()) + self.subreddits = (Subreddit.submit_sr_names(c.user) or + Subreddit.submit_sr_names(None)) + self.default_sr = self.subreddits[0] if self.subreddits \ + else g.default_sr + # have the promo code wrap the campaigns for rendering + self.link = promote.editable_add_props(link) + + if not c.user_is_sponsor: + self.now = promote.promo_datetime_now().date() + start_date = promote.promo_datetime_now(offset = -14).date() + end_date = promote.promo_datetime_now(offset = 14).date() + self.promo_traffic = dict(load_traffic('day', 'promos')) + self.market, self.promo_counter = \ + Promote_Graph.get_market(None, start_date, end_date) + + Templated.__init__(self, sr = sr, + datefmt = datefmt, + timedeltatext = timedeltatext, + listing = listing, bids = bids, + *a, **kw) + +class PromoteLinkFormOld(PromoteLinkForm): + def __init__(self, **kw): + PromoteLinkForm.__init__(self, **kw) + self.bid = g.min_promote_bid + campaign = {} + if self.link: + campaign = self.link.campaigns[0] + self.startdate = campaign.start_date + self.enddate = campaign.end_date + + self.bid = campaign.get("bid", g.min_promote_bid) + self.freebie = campaign.get("status",{}).get("free", False) + self.complete = campaign.get("status",{}).get("complete", False) + self.paid = campaign.get("status",{}).get("paid", False) + +class Roadblocks(Templated): + def __init__(self): + self.roadblocks = promote.get_roadblocks() + Templated.__init__(self) + # reference "now" to what we use for promotions + now = promote.promo_datetime_now() + + startdate = now + datetime.timedelta(1) + enddate = startdate + datetime.timedelta(1) + + self.startdate = startdate.strftime("%m/%d/%Y") + self.enddate = enddate .strftime("%m/%d/%Y") + self.sr_searches = simplejson.dumps(popular_searches()) + self.subreddits = (Subreddit.submit_sr_names(c.user) or + Subreddit.submit_sr_names(None)) + self.default_sr = self.subreddits[0] if self.subreddits \ + else g.default_sr class TabbedPane(Templated): def __init__(self, tabs): @@ -2100,22 +2177,54 @@ def __init__(self, link, load = False, expand = False, nofollow = False): def content(self): return '' +def make_link_child(item): + link_child = None + editable = False + + # if the item has a media_object, try to make a MediaEmbed for rendering + if item.media_object: + media_embed = None + if isinstance(item.media_object, basestring): + media_embed = item.media_object + else: + media_embed = get_media_embed(item.media_object) + if media_embed: + media_embed = MediaEmbed(media_domain = g.media_domain, + height = media_embed.height + 10, + width = media_embed.width + 10, + scrolling = media_embed.scrolling, + id36 = item._id36) + else: + g.log.debug("media_object without media_embed %s" % item) + + if media_embed: + link_child = MediaChild(item, media_embed, load = True) + + # if the item has selftext, add a selftext child + elif item.selftext: + expand = getattr(item, 'expand_children', False) + link_child = SelfTextChild(item, expand = expand, + nofollow = item.nofollow) + #draw the edit button if the contents are pre-expanded + editable = (expand and + item.author == c.user and + not item._deleted) + + return link_child, editable + + class MediaChild(LinkChild): """renders when the user hits the expando button to expand media objects, like embedded videos""" css_style = "video" + def __init__(self, link, content, **kw): + self._content = content + LinkChild.__init__(self, link, **kw) def content(self): - if isinstance(self.link.media_object, basestring): - return self.link.media_object - - scraper = scrapers[self.link.media_object['type']] - media_embed = scraper.media_embed(**self.link.media_object) - return MediaEmbed(media_domain = g.media_domain, - height = media_embed.height+10, - width = media_embed.width+10, - scrolling = media_embed.scrolling, - id36 = self.link._id36).render() + if isinstance(self._content, basestring): + 
return self._content + return self._content.render() class MediaEmbed(Templated): """The actual rendered iframe for a media child""" @@ -2150,6 +2259,9 @@ def __init__(self, if extra_css: css_class += " " + extra_css + if text is None: + text = '' + CachedTemplate.__init__(self, fullname = item._fullname if item else "", text = text, @@ -2183,26 +2295,32 @@ class PromotedTraffic(Traffic): multiy format) and a table of the data. """ def __init__(self, thing): + # TODO: needs a fix for multiple campaigns self.thing = thing - d = thing._date.astimezone(g.tz) - promote.timezone_offset - d = d.replace(minute = 0, second = 0, microsecond = 0) - until = thing.promote_until - promote.timezone_offset - now = datetime.datetime.now(g.tz) - - # the results are preliminary until 1 day after the promotion ends - self.preliminary = (until + datetime.timedelta(1) > now) - - self.traffic = load_traffic('hour', "thing", thing._fullname, - start_time = d, stop_time = until) - - # load monthly totals if we have them, otherwise use the daily totals - self.totals = load_traffic('month', "thing", thing._fullname) - if not self.totals: - self.totals = load_traffic('day', "thing", thing._fullname) - # generate a list of - # (uniq impressions, # impressions, uniq clicks, # clicks) - if self.totals: - self.totals = map(sum, zip(*zip(*self.totals)[1])) + d = until = None + self.traffic = [] + if thing.campaigns: + d = min(sd.date() if isinstance(sd, datetime.datetime) else sd + for sd, ed, bid, sr, trans_id in thing.campaigns.values() + if trans_id) + until = max(ed.date() if isinstance(ed, datetime.datetime) else ed + for sd, ed, bid, sr, trans_id in thing.campaigns.values() + if trans_id) + now = datetime.datetime.now(g.tz).date() + + # the results are preliminary until 1 day after the promotion ends + self.preliminary = (until + datetime.timedelta(1) > now) + self.traffic = load_traffic('hour', "thing", thing._fullname, + start_time = d, stop_time = until) + + # load monthly totals if we have them, otherwise use the daily totals + self.totals = load_traffic('month', "thing", thing._fullname) + if not self.totals: + self.totals = load_traffic('day', "thing", thing._fullname) + # generate a list of + # (uniq impressions, # impressions, uniq clicks, # clicks) + if self.totals: + self.totals = map(sum, zip(*zip(*self.totals)[1])) imp = self.slice_traffic(self.traffic, 0, 1) @@ -2213,21 +2331,20 @@ def __init__(self, thing): self.totals[1] = imp_total imp_total = locale.format('%d', imp_total, True) - chart = graph.LineGraph(imp) - self.imp_graph = chart.google_chart(ylabels = ['uniques', 'total'], - title = ("impressions (%s)" % - imp_total)) - + + self.imp_graph = TrafficGraph(imp[-72:], ylabels = ['uniques', 'total'], + title = ("recent impressions (%s total)" % + imp_total)) cli = self.slice_traffic(self.traffic, 2, 3) cli_total = sum(x[2] for x in cli) # ensure total consistency if self.totals: self.totals[3] = cli_total cli_total = locale.format('%d', cli_total, True) - chart = graph.LineGraph(cli) - self.cli_graph = chart.google_chart(ylabels = ['uniques', 'total'], - title = ("clicks (%s)" % - cli_total)) + self.cli_graph = TrafficGraph(cli[-72:], ylabels = ['uniques', 'total'], + title = ("recent clicks (%s total)" % + cli_total)) + else: self.imp_graph = self.cli_graph = None @@ -2286,21 +2403,17 @@ def __init__(self): setattr(self, ival + "_data", data) for name, indx, color in slices: data2 = self.slice_traffic(data, *indx) - chart = graph.LineGraph(data2, colors = [color, "B0B0B0"]) - setattr(self, name + 
"_" + ival + "_chart", chart) + setattr(self, name + "_" + ival + "_chart", data2) title = "%s by %s" % (name, ival) - res = chart.google_chart(ylabels = [name], - multiy = False, - title = title) + res = TrafficGraph(data2, colors = [color], title = title) setattr(self, name + "_" + ival, res) else: self.has_data = True if self.has_data: imp_by_day = [[] for i in range(7)] uni_by_day = [[] for i in range(7)] - dates = self.uniques_day_chart.xdata - uniques = self.uniques_day_chart.ydata[0] - imps = self.impressions_day_chart.ydata[0] + dates, imps = zip(*self.impressions_day_chart) + dates, uniques = zip(*self.uniques_day_chart) self.uniques_mean = sum(map(float, uniques))/len(uniques) self.impressions_mean = sum(map(float, imps))/len(imps) for i, d in enumerate(dates): @@ -2400,6 +2513,42 @@ def monthly_summary(self): "%5.2f%%" % f)) return res +class TrafficGraph(Templated): + def __init__(self, data, width = 300, height = 175, + bar_fmt = True, colors = ("FF4500", "336699"), title = '', + ylabels = [], multiy = True): + # fallback on google charts + chart = graph.LineGraph(data[:72], colors = colors) + self.gc = chart.google_chart(ylabels = ylabels, multiy = multiy, title = title) + + xdata = [] + ydata = [] + for d in data: + xdata.append(time.mktime(d[0].timetuple())*1000) + ydata.append(d[1:]) + ydata = zip(*ydata) + self.colors = colors + self.title = title + + if bar_fmt: + xdata = graph.DataSeries(xdata).toBarX() + + if ydata and not isinstance(ydata[0], (list, tuple)): + if bar_fmt: + ydata = graph.DataSeries(ydata).toBarY() + self.data = [zip(xdata, ydata)] + else: + self.data = [] + for ys in ydata: + if bar_fmt: + ys = graph.DataSeries(ys).toBarY() + self.data.append(zip(xdata, ys)) + + self.width = width + self.height = height + Templated.__init__(self) + + class RedditAds(Templated): def __init__(self, **kw): self.sr_name = c.site.name @@ -2423,48 +2572,87 @@ def __init__(self, **kw): Templated.__init__(self, **kw) class PaymentForm(Templated): - def __init__(self, **kw): + def __init__(self, link, indx, **kw): self.countries = pycountry.countries + self.link = promote.editable_add_props(link) + self.campaign = self.link.campaigns[indx] + self.indx = indx Templated.__init__(self, **kw) class Promote_Graph(Templated): - def __init__(self): - self.now = promote.promo_datetime_now() - start_date = (self.now - datetime.timedelta(7)).date() - end_date = (self.now + datetime.timedelta(7)).date() + + @classmethod + @memoize('get_market', time = 60) + def get_market(cls, user_id, start_date, end_date): + market = {} + promo_counter = {} + def callback(link, bid, bid_day, starti, endi, indx): + for i in xrange(starti, endi): + if user_id is None or link.author_id == user_id: + if (not promote.is_unpaid(link) and + not promote.is_rejected(link)): + market[i] = market.get(i, 0) + bid_day + promo_counter[i] = promo_counter.get(i, 0) + 1 + cls.promo_iter(start_date, end_date, callback) + return market, promo_counter + @classmethod + def promo_iter(cls, start_date, end_date, callback): size = (end_date - start_date).days + for link, indx, s, e in cls.get_current_promos(start_date, end_date): + if indx in link.campaigns: + sdate, edate, bid, sr, trans_id = link.campaigns[indx] + if isinstance(sdate, datetime.datetime): + sdate = sdate.date() + if isinstance(edate, datetime.datetime): + edate = edate.date() + starti = max((sdate - start_date).days, 0) + endi = min((edate - start_date).days, size) + bid_day = bid / max((edate - sdate).days, 1) + callback(link, bid, bid_day, starti, endi, 
indx) + @classmethod + def get_current_promos(cls, start_date, end_date): # grab promoted links - promos = PromoteDates.for_date_range(start_date, end_date) - promos.sort(key = lambda x: x.start_date) + # returns a list of (thing_id, campaign_idx, start, end) + promos = PromotionWeights.get_schedule(start_date, end_date) + # sort based on the start date + promos.sort(key = lambda x: x[2]) # wrap the links - links = wrap_links([p.thing_name for p in promos]) + links = wrap_links([p[0] for p in promos]) # remove rejected/unpaid promos links = dict((l._fullname, l) for l in links.things - if (l.promoted is not None and - l.promote_status not in ( promote.STATUS.rejected, - promote.STATUS.unpaid)) ) + if promote.is_accepted(l) or promote.is_unapproved(l)) # filter promos accordingly - promos = filter(lambda p: links.has_key(p.thing_name), promos) + promos = [(links[thing_name], indx, s, e) + for thing_name, indx, s, e in promos + if links.has_key(thing_name)] + return promos + + def __init__(self): + self.now = promote.promo_datetime_now() + + start_date = promote.promo_datetime_now(offset = -7).date() + end_date = promote.promo_datetime_now(offset = 7).date() + + size = (end_date - start_date).days + + # these will be cached queries + market, promo_counter = self.get_market(None, start_date, end_date) + my_market = market + if not c.user_is_sponsor: + my_market = self.get_market(c.user._id, start_date, end_date)[0] + + # determine the range of each link promote_blocks = [] - market = {} - my_market = {} - promo_counter = {} - for p in promos: - starti = max((p.start_date - start_date).days, 0) - endi = min((p.end_date - start_date).days, size) - link = links[p.thing_name] - bid_day = link.promote_bid/max((p.end_date - p.start_date).days, 1) - for i in xrange(starti, endi): - market[i] = market.get(i, 0) + bid_day - if c.user_is_sponsor or link.author_id == c.user._id: - my_market[i] = my_market.get(i, 0) + bid_day - promo_counter[i] = promo_counter.get(i, 0) + 1 - if c.user_is_sponsor or link.author_id == c.user._id: - promote_blocks.append( (link, starti, endi) ) + def block_maker(link, bid, bid_day, starti, endi, indx): + if ((c.user_is_sponsor or link.author_id == c.user._id) + and not promote.is_rejected(link) + and not promote.is_unpaid(link)): + promote_blocks.append( (link, bid, starti, endi, indx) ) + self.promo_iter(start_date, end_date, block_maker) # now sort the promoted_blocks into the most contiguous chunks we can sorted_blocks = [] @@ -2473,17 +2661,18 @@ def __init__(self): while True: sorted_blocks.append(cur) # get the future items (sort will be preserved) - future = filter(lambda x: x[1] >= cur[2], promote_blocks) + future = filter(lambda x: x[2] >= cur[3], promote_blocks) if future: # resort by date and give precedence to longest promo: - cur = min(future, key = lambda x: (x[1], x[1]-x[2])) + cur = min(future, key = lambda x: (x[2], x[2]-x[3])) promote_blocks.remove(cur) else: break # load recent traffic as well: self.recent = {} - for k, v in load_summary("thing"): + #TODO + for k, v in []:#load_summary("thing"): if k.startswith('t%d_' % Link._type_id): self.recent[k] = v @@ -2497,36 +2686,38 @@ def __init__(self): # graphs of money history = self.now - datetime.timedelta(60) - pool = bidding.PromoteDates.bid_history(history) + + pool = PromotionWeights.bid_history(promote.promo_datetime_now(offset=-60), + promote.promo_datetime_now(offset=2)) if pool: # we want to generate a stacked line graph, so store the # bids and the total including refunded amounts - chart = 
graph.LineGraph([(d, b, r) for (d, b, r) in pool], - colors = ("008800", "FF0000")) total_sale = sum(b for (d, b, r) in pool) total_refund = sum(r for (d, b, r) in pool) - self.money_graph = chart.google_chart( - ylabels = ['total ($)'], - title = ("monthly sales ($%.2f total, $%.2f credits)" % - (total_sale, total_refund)), - multiy = False) + + self.money_graph = TrafficGraph([(d, b, r) for (d, b, r) in pool], + colors = ("008800", "FF0000"), + ylabels = ['total ($)'], + title = ("monthly sales ($%.2f total, $%.2f credits)" % + (total_sale, total_refund)), + multiy = False) history = self.now - datetime.timedelta(30) - self.top_promoters = bidding.PromoteDates.top_promoters(history) + #TODO + self.top_promoters = []#bidding.PromoteDates.top_promoters(history) else: self.money_graph = None self.top_promoters = [] # graphs of impressions and clicks self.promo_traffic = load_traffic('day', 'promos') - impressions = [(d, i) for (d, (i, k)) in self.promo_traffic] + impressions = [(d, i) for (d, (i, k)) in self.promo_traffic] pool = dict((d, b+r) for (d, b, r) in pool) if impressions: - chart = graph.LineGraph(impressions) - self.imp_graph = chart.google_chart(ylabels = ['total'], - title = "impressions") + self.imp_graph = TrafficGraph(impressions, ylabels = ['total'], + title = "impressions") clicks = [(d, k) for (d, (i, k)) in self.promo_traffic] @@ -2539,27 +2730,22 @@ def __init__(self): CTR = [(d, (100 * float(k) / i if i else 0)) for (d, (i, k)) in self.promo_traffic] - chart = graph.LineGraph(clicks) - self.cli_graph = chart.google_chart(ylabels = ['total'], - title = "clicks") - + self.cli_graph = TrafficGraph(clicks, ylabels = ['total'], + title = "clicks") mean_CPM = sum(x[1] for x in CPM) * 1. / max(len(CPM), 1) - chart = graph.LineGraph([(d, min(x, mean_CPM*2)) for d, x in CPM], - colors = ["336699"]) - self.cpm_graph = chart.google_chart(ylabels = ['CPM ($)'], - title = "cost per 1k impressions " + - "($%.2f average)" % mean_CPM) + self.cpm_graph = TrafficGraph([(d, min(x, mean_CPM*2)) for d, x in CPM], + colors = ["336699"], ylabels = ['CPM ($)'], + title = "cost per 1k impressions " + + "($%.2f average)" % mean_CPM) mean_CPC = sum(x[1] for x in CPC) * 1. 
/ max(len(CPC), 1) - chart = graph.LineGraph([(d, min(x, mean_CPC*2)) for d, x in CPC], - colors = ["336699"]) - self.cpc_graph = chart.google_chart(ylabels = ['CPC ($0.01)'], - title = "cost per click " + - "($%.2f average)" % (mean_CPC/100.)) - - chart = graph.LineGraph(CTR, colors = ["336699"]) - self.ctr_graph = chart.google_chart(ylabels = ['CTR (%)'], - title = "click through rate") + self.cpc_graph = TrafficGraph([(d, min(x, mean_CPC*2)) for d, x in CPC], + colors = ["336699"], ylabels = ['CPC ($0.01)'], + title = "cost per click " + + "($%.2f average)" % (mean_CPC/100.)) + + self.ctr_graph = TrafficGraph(CTR, colors = ["336699"], ylabels = ['CTR (%)'], + title = "click through rate") else: self.imp_graph = self.cli_graph = None diff --git a/r2/r2/lib/pages/things.py b/r2/r2/lib/pages/things.py index dbb1596909..ac372a91cf 100644 --- a/r2/r2/lib/pages/things.py +++ b/r2/r2/lib/pages/things.py @@ -25,9 +25,10 @@ from r2.models import make_wrapper, IDBuilder, Thing from r2.lib.utils import tup from r2.lib.strings import Score -from r2.lib.promote import promo_edit_url, promo_traffic_url +from r2.lib.promote import * from datetime import datetime from pylons import c, g +from pylons.i18n import _, ungettext class PrintableButtons(Styled): def __init__(self, style, thing, @@ -35,11 +36,17 @@ def __init__(self, style, thing, show_distinguish = False, show_indict = False, **kw): show_report = show_report and c.user_is_loggedin + Styled.__init__(self, style = style, + thing = thing, fullname = thing._fullname, can_ban = thing.can_ban, show_spam = thing.show_spam, show_reports = thing.show_reports, + show_ignore = thing.show_reports or + (thing.reveal_trial_info and not thing.show_spam), + approval_checkmark = getattr(thing, + "approval_checkmark", None), show_delete = show_delete, show_report = show_report, show_indict = show_indict, @@ -59,7 +66,7 @@ def __init__(self, thing, comments = True, delete = True, report = True): # do we show the report button? show_report = not is_author and report - if c.user_is_admin: + if c.user_is_admin and thing.promoted is None: show_report = False show_indict = True else: @@ -67,6 +74,10 @@ def __init__(self, thing, comments = True, delete = True, report = True): # do we show the delete button? show_delete = is_author and delete and not thing._deleted + # disable the delete button for live sponsored links + if (is_promoted(thing) and not c.user_is_sponsor): + show_delete = False + # do we show the distinguish button? 
among other things, # we never want it to appear on link listings -- only # comments pages @@ -76,15 +87,12 @@ def __init__(self, thing, comments = True, delete = True, report = True): kw = {} if thing.promoted is not None: now = datetime.now(g.tz) - promotable = (thing._date <= now and thing.promote_until > now) kw = dict(promo_url = promo_edit_url(thing), - promote_bid = thing.promote_bid, promote_status = getattr(thing, "promote_status", 0), user_is_sponsor = c.user_is_sponsor, - promotable = promotable, traffic_url = promo_traffic_url(thing), is_author = thing.is_author) - + PrintableButtons.__init__(self, 'linkbuttons', thing, # user existence and preferences is_loggedin = c.user_is_loggedin, @@ -163,16 +171,16 @@ def _default_thing_wrapper(thing): def wrap_links(links, wrapper = default_thing_wrapper(), listing_cls = LinkListing, num = None, show_nums = False, nextprev = False, - num_margin = None, mid_margin = None): + num_margin = None, mid_margin = None, **kw): links = tup(links) if not all(isinstance(x, str) for x in links): links = [x._fullname for x in links] - b = IDBuilder(links, num = num, wrap = wrapper) + b = IDBuilder(links, num = num, wrap = wrapper, **kw) l = listing_cls(b, nextprev = nextprev, show_nums = show_nums) if num_margin is not None: l.num_margin = num_margin if mid_margin is not None: l.mid_margin = mid_margin return l.listing() - + diff --git a/r2/r2/lib/promote.py b/r2/r2/lib/promote.py index e81eb944b7..abe5b91227 100644 --- a/r2/r2/lib/promote.py +++ b/r2/r2/lib/promote.py @@ -22,13 +22,18 @@ from __future__ import with_statement from r2.models import * +from r2.lib.wrapped import Wrapped from r2.lib import authorize from r2.lib import emailer, filters from r2.lib.memoize import memoize from r2.lib.template_helpers import get_domain -from r2.lib.utils import Enum +from r2.lib.utils import Enum, UniqueIterator +from organic import keep_fresh_links from pylons import g, c from datetime import datetime, timedelta +from r2.lib.db.queries import make_results, db_sort, add_queries, merge_results +import itertools + import random promoted_memo_lifetime = 30 @@ -38,15 +43,22 @@ STATUS = Enum("unpaid", "unseen", "accepted", "rejected", "pending", "promoted", "finished") -PromoteSR = 'promos' -try: - PromoteSR = Subreddit._new(name = PromoteSR, - title = "promoted links", - author_id = -1, - type = "public", - ip = '0.0.0.0') -except SubredditExists: - PromoteSR = Subreddit._by_name(PromoteSR) +CAMPAIGN = Enum("start", "end", "bid", "sr", "trans_id") + +@memoize("get_promote_srid") +def get_promote_srid(name = 'promos'): + try: + sr = Subreddit._by_name(name) + except NotFound: + sr = Subreddit._new(name = name, + title = "promoted links", + # negative author_ids make this unlistable + author_id = -1, + type = "public", + ip = '0.0.0.0') + return sr._id + +# attrs def promo_traffic_url(l): domain = get_domain(cname = False, subreddit = False) @@ -56,6 +68,191 @@ def promo_edit_url(l): domain = get_domain(cname = False, subreddit = False) return "http://%s/promoted/edit_promo/%s" % (domain, l._id36) +def pay_url(l, indx): + return "%spromoted/pay/%s/%d" % (g.payment_domain, l._id36, indx) + +# booleans + +def is_promo(link): + return (link and not link._deleted and link.promoted is not None + and hasattr(link, "promote_status")) + +def is_accepted(link): + return is_promo(link) and (link.promote_status != STATUS.rejected and + link.promote_status >= STATUS.accepted) + +def is_unpaid(link): + return is_promo(link) and link.promote_status == STATUS.unpaid + +def is_unapproved(link): + return is_promo(link) and link.promote_status <= STATUS.unseen + +def is_rejected(link): + return is_promo(link) and link.promote_status == STATUS.rejected + +def is_promoted(link): + return is_promo(link) and link.promote_status == STATUS.promoted + +# no references to promote_status below this function, pls +def set_status(l, status, onchange = None): + # keep this out here. Useful for updating the queue if there is a bug + # and for initial migration + add_queries([_sponsored_link_query(None, l.author_id), + _sponsored_link_query(None), + _sponsored_link_query(status, l.author_id), + _sponsored_link_query(status)], insert_items = [l]) + + # no need to delete or commit if the status is unchanged + if status != getattr(l, "promote_status", None): + # new links won't even have a promote_status yet + if hasattr(l, "promote_status"): + add_queries([_sponsored_link_query(l.promote_status, l.author_id), + _sponsored_link_query(l.promote_status)], + delete_items = [l]) + l.promote_status = status + l._commit() + if onchange: + onchange() + +# query queue updates below + +def _sponsored_link_query(status, author_id = None): + q = Link._query(Link.c.sr_id == get_promote_srid(), + Link.c._spam == (True, False), + Link.c._deleted == (True,False), + sort = db_sort('new')) + if status is not None: + q._filter(Link.c.promote_status == status) + if author_id is not None: + q._filter(Link.c.author_id == author_id) + return make_results(q) + +def get_unpaid_links(author_id = None): + return _sponsored_link_query(STATUS.unpaid, author_id = author_id) + +def get_unapproved_links(author_id = None): + return _sponsored_link_query(STATUS.unseen, author_id = author_id) + +def get_rejected_links(author_id = None): + return _sponsored_link_query(STATUS.rejected, author_id = author_id) + +def get_live_links(author_id = None): + return _sponsored_link_query(STATUS.promoted, author_id = author_id) + +def get_accepted_links(author_id = None): + return merge_results(_sponsored_link_query(STATUS.accepted, + author_id = author_id), + _sponsored_link_query(STATUS.pending, + author_id = author_id), + _sponsored_link_query(STATUS.finished, + author_id = author_id)) + +def get_all_links(author_id = None): + return _sponsored_link_query(None, author_id = author_id) + + +# subreddit roadblocking functions + +roadblock_prefix = "promotion_roadblock" +def roadblock_key(sr_name, d): + return "%s-%s_%s" % (roadblock_prefix, + sr_name, d.strftime("%Y_%m_%d")) + +def roadblock_reddit(sr_name, start_date, end_date): + d = start_date + now = promo_datetime_now().date() + # set the expire to be 1 week after the roadblock end date + expire = ((end_date - now).days + 7) * 86400 + while d < end_date: + g.hardcache.add(roadblock_key(sr_name, d), + "%s-%s" % (start_date.strftime("%Y_%m_%d"), + end_date.strftime("%Y_%m_%d")), + time = expire) + d += timedelta(1) + +def unroadblock_reddit(sr_name, start_date, end_date): + d = start_date + while d < end_date: + g.hardcache.delete(roadblock_key(sr_name, d)) + d += timedelta(1) + +def is_roadblocked(sr_name, start_date, end_date): + d = start_date + while d < end_date: + res = g.hardcache.get(roadblock_key(sr_name, d)) + if res: + start_date, end_date = res.split('-') + start_date = datetime.strptime(start_date, "%Y_%m_%d").date() + end_date = datetime.strptime(end_date, "%Y_%m_%d").date() + return (start_date, end_date) + d += timedelta(1) + +def get_roadblocks(): + rbs = g.hardcache.backend.ids_by_category(roadblock_prefix) + by_sr = {} + for rb in rbs: + rb = rb.split('_') + date = datetime.strptime('_'.join(rb[1:]), "%Y_%m_%d").date() + by_sr.setdefault(rb[0], []).append((date, date + timedelta(1))) + + blobs = [] + for k, v in by_sr.iteritems(): + for sd, ed in sorted(v): + if blobs and blobs[-1][0] == k and blobs[-1][-1] == sd: + blobs[-1] = (k, blobs[-1][1], ed) + else: + blobs.append((k, sd, ed)) + blobs.sort(key = lambda x: x[1]) + return blobs + +# control functions + +class RenderableCampaign(): + __slots__ = ["indx", "start_date", "end_date", "duration", + "bid", "sr", "status"] + + def __init__(self, link, indx, raw_campaign, transaction): + sd, ed, bid, sr, trans_id = raw_campaign + self.indx = indx + self.start_date = sd.strftime("%m/%d/%Y") + self.end_date = ed.strftime("%m/%d/%Y") + ndays = (ed - sd).days + self.duration = strings.time_label % dict(num = ndays, + time = ungettext("day", "days", ndays)) + self.bid = "%.2f" % bid + self.sr = sr + + self.status = dict(paid = bool(transaction), + complete = False, + free = (trans_id < 0), + pay_url = pay_url(link, indx), + sponsor = c.user_is_sponsor) + if transaction: + if transaction.is_void(): + self.status['paid'] = False + self.status['free'] = False + elif transaction.is_charged(): + self.status['complete'] = True + + def get(self, key, default): + return getattr(self, key, default) + + def __iter__(self): + for s in self.__slots__: + yield getattr(self, s) + +def editable_add_props(l): + if not isinstance(l, Wrapped): + l = Wrapped(l) + + l.bids = get_transactions(l) + l.campaigns = dict((indx, RenderableCampaign(l, indx, + campaign, l.bids.get(indx))) + for indx, campaign in + getattr(l, "campaigns", {}).iteritems()) + + return l + # These could be done with relationships, but that seems overkill as # we never query based on user and only check per-thing def is_traffic_viewer(thing, user): @@ -98,420 +295,492 @@ def promotion_log(thing, text, commit = False): if commit: thing._commit() return text - -def new_promotion(title, url, user, ip, promote_start, promote_until, bid, - disable_comments = False, - max_clicks = None, max_views = None): + +def new_promotion(title, url, user, ip): """ Creates a new promotion with the provided title, etc, and sets its status to be 'unpaid'. 
""" - l = Link._submit(title, url, user, PromoteSR, ip) + sr = Subreddit._byID(get_promote_srid()) + l = Link._submit(title, url, user, sr, ip) l.promoted = True - l.promote_until = None - l.promote_status = STATUS.unpaid - l.promote_trans_id = 0 - l.promote_bid = bid - l.maximum_clicks = max_clicks - l.maximum_views = max_views - l.disable_comments = disable_comments - update_promo_dates(l, promote_start, promote_until) + l.disable_comments = False + l.campaigns = {} promotion_log(l, "promotion created") l._commit() + + # set the status of the link, populating the query queue + if c.user_is_sponsor or getattr(user, "trusted_sponsor", False): + set_status(l, STATUS.accepted) + else: + set_status(l, STATUS.unpaid) + # the user has posted a promotion, so enable the promote menu unless # they have already opted out if user.pref_show_promote is not False: user.pref_show_promote = True user._commit() + + # notify of new promo emailer.new_promo(l) return l -def update_promo_dates(thing, start_date, end_date, commit = True): - if thing and thing.promote_status < STATUS.pending or c.user_is_admin: - if (thing._date != start_date or - thing.promote_until != end_date): - promotion_log(thing, "duration updated (was %s -> %s)" % - (thing._date, thing.promote_until)) - thing._date = start_date - thing.promote_until = end_date - PromoteDates.update(thing, start_date, end_date) - if commit: - thing._commit() - return True - return False - -def update_promo_data(thing, title, url, commit = True): - if thing and (thing.url != url or thing.title != title): - if thing.title != title: - promotion_log(thing, "title updated (was '%s')" % - thing.title) - if thing.url != url: - promotion_log(thing, "url updated (was '%s')" % - thing.url) - old_url = thing.url - thing.url = url - thing.title = title - if not c.user_is_sponsor: - unapproved_promo(thing) - thing.update_url_cache(old_url) - if commit: - thing._commit() - return True - return False - -def refund_promo(thing, user, refund): - cur_refund = getattr(thing, "promo_refund", 0) - refund = min(refund, thing.promote_bid - cur_refund) - if refund > 0: - thing.promo_refund = cur_refund + refund - if authorize.refund_transaction(refund, user, thing.promote_trans_id): - promotion_log(thing, "payment update: refunded '%.2f'" % refund) - else: - promotion_log(thing, "payment update: refund failed") - if thing.promote_status in (STATUS.promoted, STATUS.finished): - PromoteDates.update_bid(thing) - thing._commit() - -def auth_paid_promo(thing, user, pay_id, bid): - """ - promotes a promotion from 'unpaid' to 'unseen'. - - In the case that bid already exists on the current promotion, the - previous transaction is voided and repalced with the new bid. - """ - if thing.promote_status == STATUS.finished: - return - elif (thing.promote_status > STATUS.unpaid and - thing.promote_trans_id): - # void the existing transaction - authorize.void_transaction(user, thing.promote_trans_id) - - # create a new transaction and update the bid - trans_id = authorize.auth_transaction(bid, user, pay_id, thing) - thing.promote_bid = bid - - if trans_id is not None and int(trans_id) != 0: - # we won't reset to unseen if already approved and the payment went ok - promotion_log(thing, "updated payment and/or bid: SUCCESS (id: %s)" - % trans_id) - if trans_id < 0: - promotion_log(thing, "FREEBIE") - thing.promote_status = max(thing.promote_status, STATUS.unseen) - thing.promote_trans_id = trans_id - else: - # something bad happend. 
-        promotion_log(thing, "updated payment and/or bid: FAILED")
-        thing.promore_status = STATUS.unpaid
-        thing.promote_trans_id = 0
-    thing._commit()
-
-    emailer.promo_bid(thing)
-    PromoteDates.update_bid(thing)
-    return bool(trans_id)
-
-
-def unapproved_promo(thing):
+def sponsor_wrapper(link):
+    w = Wrapped(link)
+    w.render_class = PromotedLink
+    w.rowstyle = "promoted link"
+    return w
+
+def campaign_lock(link):
+    return "edit_promo_campaign_lock_" + str(link._id)
+
+def get_transactions(link):
+    # tuple of (transaction_id, key)
+    trans_tuples = [(v[CAMPAIGN.trans_id], k)
+                    for k, v in getattr(link, "campaigns", {}).iteritems()
+                    if v[CAMPAIGN.trans_id] != 0]
+    bids = authorize.get_transactions(*trans_tuples)
+    return dict((indx, bids.get((t, indx))) for t, indx in trans_tuples)
+
+
+def new_campaign(link, dates, bid, sr):
+    with g.make_lock(campaign_lock(link)):
+        # get a copy of the attr so that it'll be
+        # marked as dirty on the next write.
+        campaigns = getattr(link, "campaigns", {}).copy()
+        # create a new index
+        indx = max(campaigns.keys() or [-1]) + 1
+        # add the campaign
+        # store the name not the reddit
+        sr = sr.name if sr else ""
+        campaigns[indx] = list(dates) + [bid, sr, 0]
+        PromotionWeights.add(link, indx, sr, dates[0], dates[1], bid)
+        link.campaigns = {}
+        link.campaigns = campaigns
+        link._commit()
+    return indx
+
+def free_campaign(link, index, user):
+    auth_campaign(link, index, user, -1)
+
+def edit_campaign(link, index, dates, bid, sr):
+    with g.make_lock(campaign_lock(link)):
+        campaigns = getattr(link, "campaigns", {}).copy()
+        if index in campaigns:
+            trans_id = campaigns[index][CAMPAIGN.trans_id]
+            prev_bid = campaigns[index][CAMPAIGN.bid]
+            # store the name not the reddit
+            sr = sr.name if sr else ""
+            campaigns[index] = list(dates) + [bid, sr, trans_id]
+            PromotionWeights.reschedule(link, index,
+                                        sr, dates[0], dates[1], bid)
+            link.campaigns = {}
+            link.campaigns = campaigns
+            link._commit()
+
+            #TODO cancel any existing charges if the bid has changed
+            if prev_bid != bid:
+                void_campaign(link, index, c.user)
+
+
+def delete_campaign(link, index):
+    with g.make_lock(campaign_lock(link)):
+        campaigns = getattr(link, "campaigns", {}).copy()
+        if index in campaigns:
+            PromotionWeights.delete_unfinished(link, index)
+            del campaigns[index]
+            link.campaigns = {}
+            link.campaigns = campaigns
+            link._commit()
+            #TODO cancel any existing charges
+            void_campaign(link, index, c.user)
+
+def void_campaign(link, index, user):
+    campaigns = getattr(link, "campaigns", {}).copy()
+    if index in campaigns:
+        sd, ed, bid, sr, trans_id = campaigns[index]
+        transactions = get_transactions(link)
+        if transactions.get(index):
+            # void the existing transaction
+            a = Account._byID(link.author_id)
+            authorize.void_transaction(a, trans_id, index)
+
+def auth_campaign(link, index, user, pay_id):
     """
-    revert status of a promoted link to unseen.
-
-    NOTE: if the promotion is live, this has the side effect of
-    bumping it from the live queue pending an admin's intervention to
-    put it back in place.
+    for setting up a campaign as a real bid with authorize.net
     """
-    # only reinforce pending if it hasn't been seen yet.
-    if STATUS.unseen < thing.promote_status < STATUS.finished:
-        promotion_log(thing, "status update: unapproved")
-        unpromote(thing, status = STATUS.unseen)
+    with g.make_lock(campaign_lock(link)):
+        campaigns = getattr(link, "campaigns", {}).copy()
+        if index in campaigns:
+            # void any existing campaign
+            void_campaign(link, index, user)
+
+            sd, ed, bid, sr, trans_id = campaigns[index]
+            # create a new transaction and update the bid
+            test = 1 if g.debug else None
+            trans_id, reason = authorize.auth_transaction(bid, user,
+                                                          pay_id, link,
+                                                          index,
+                                                          test = test)
+            if not reason and trans_id is not None and int(trans_id) != 0:
+                promotion_log(link, "updated payment and/or bid: "
+                              "SUCCESS (id: %s)"
+                              % trans_id)
+                if trans_id < 0:
+                    promotion_log(link, "FREEBIE")
+
+                set_status(link,
+                           max(STATUS.unseen if trans_id else STATUS.unpaid,
+                               link.promote_status))
+                # notify of campaign creation
+                # update the query queue
+                if user._id == link.author_id and trans_id > 0:
+                    emailer.promo_bid(link, bid, sd)

-def accept_promo(thing):
-    """
-    Accept promotion and set its status as accepted if not already
-    charged, else pending.
-    """
-    if thing.promote_status < STATUS.pending:
-        bid = Bid.one(thing.promote_trans_id)
-        if bid.status == Bid.STATUS.CHARGE:
-            thing.promote_status = STATUS.pending
-            # repromote if already promoted before
-            if hasattr(thing, "promoted_on"):
-                promote(thing)
             else:
-            emailer.queue_promo(thing)
-        else:
-            thing.promote_status = STATUS.accepted
-            promotion_log(thing, "status update: accepted")
-    emailer.accept_promo(thing)
-    thing._commit()
-
-def reject_promo(thing, reason = ""):
-    """
-    Reject promotion and set its status as rejected
-
-    Here, we use unpromote so that we can also remove a promotion from
-    the queue if it has become promoted.
-    """
-    unpromote(thing, status = STATUS.rejected)
-    promotion_log(thing, "status update: rejected. Reason: '%s'" % reason)
-    emailer.reject_promo(thing, reason)
-
-def delete_promo(thing):
-    """
-    deleted promotions have to be specially dealt with.  Reject the
-    promo and void any associated transactions.
-    """
-    thing.promoted = False
-    thing._deleted = True
-    reject_promo(thing, reason = "The promotion was deleted by the user")
-    if thing.promote_trans_id > 0:
-        user = Account._byID(thing.author_id)
-        authorize.void_transaction(user, thing.promote_trans_id)
+                # something bad happened.
+                promotion_log(link, "updated payment and/or bid: FAILED ('%s')"
+                              % reason)
+                trans_id = 0
+            campaigns[index] = sd, ed, bid, sr, trans_id
+            link.campaigns = {}
+            link.campaigns = campaigns
+            link._commit()

-
-def pending_promo(thing):
-    """
-    For an accepted promotion within the proper time interval, charge
-    the account of the user and set the new status as pending.
-    """
-    if thing.promote_status == STATUS.accepted and thing.promote_trans_id:
-        user = Account._byID(thing.author_id)
-        # TODO: check for charge failures/recharges, etc
-        if authorize.charge_transaction(user, thing.promote_trans_id):
-            promotion_log(thing, "status update: pending")
-            thing.promote_status = STATUS.pending
-            thing.promote_paid = thing.promote_bid
-            thing._commit()
-            emailer.queue_promo(thing)
-        else:
-            promotion_log(thing, "status update: charge failure")
-            thing._commit()
-            #TODO: email rejection?
-
-
-
-def promote(thing, batch = False):
-    """
-    Given a promotion with pending status, set the status to promoted
-    and move it into the promoted queue.
-    """
-    if thing.promote_status == STATUS.pending:
-        promotion_log(thing, "status update: live")
-        PromoteDates.log_start(thing)
-        thing.promoted_on = datetime.now(g.tz)
-        thing.promote_status = STATUS.promoted
-        thing._commit()
-        emailer.live_promo(thing)
-        if not batch:
-            with g.make_lock(promoted_lock_key):
-                promoted = get_promoted_direct()
-                if thing._fullname not in promoted:
-                    promoted[thing._fullname] = auction_weight(thing)
-                    set_promoted(promoted)
-
-def unpromote(thing, batch = False, status = STATUS.finished):
-    """
-    unpromote a link with provided status, removing it from the
-    current promotional queue.
-    """
-    if status == STATUS.finished:
-        PromoteDates.log_end(thing)
-        emailer.finished_promo(thing)
-    thing.unpromoted_on = datetime.now(g.tz)
-    promotion_log(thing, "status update: finished")
-    thing.promote_status = status
-    thing._commit()
-    if not batch:
-        with g.make_lock(promoted_lock_key):
-            promoted = get_promoted_direct()
-            if thing._fullname in promoted:
-                del promoted[thing._fullname]
-                set_promoted(promoted)
-
-# batch methods for moving promotions into the pending queue, and
-# setting status as pending.
+        return bool(trans_id), reason
+    return False, ""

 # dates are referenced to UTC, while we want promos to change at (roughly)
 # midnight eastern-US.
 # TODO: make this a config parameter
 timezone_offset = -5 # hours
 timezone_offset = timedelta(0, timezone_offset * 3600)
+def promo_datetime_now(offset = None):
+    now = datetime.now(g.tz) + timezone_offset
+    if offset is not None:
+        now += timedelta(offset)
+    return now

-def promo_datetime_now():
-    return datetime.now(g.tz) + timezone_offset

-def generate_pending(date = None, test = False):
+
+def get_scheduled_campaign(link, offset = None):
     """
-    Look-up links that are to be promoted on the provided date (the
-    default is now plus one day) and set their status as pending if
-    they have been accepted.  This results in credit cards being charged.
+    returns the indices of the campaigns that (datewise) could be active.
     """
-    date = date or (promo_datetime_now() + timedelta(1))
-    links = Link._by_fullname([p.thing_name for p in
-                               PromoteDates.for_date(date)],
-                              data = True,
-                              return_dict = False)
-    for l in links:
-        if l._deleted and l.promote_status != STATUS.rejected:
-            print "DELETING PROMO", l
-            # deleted promos should never be made pending
-            delete_promo(l)
-        elif l.promote_status == STATUS.accepted:
-            if test:
-                print "Would have made pending: (%s, %s)" % \
-                      (l, l.make_permalink(None))
-            else:
-                pending_promo(l)
+    now = promo_datetime_now(offset = offset)
+    active = []
+    campaigns = getattr(link, "campaigns", {})
+    for indx in campaigns:
+        sd, ed, bid, sr, trans_id = campaigns[indx]
+        if sd <= now and ed >= now:
+            active.append(indx)
+    return active
""" - from r2.lib.traffic import load_traffic - with g.make_lock(promoted_lock_key): - now = promo_datetime_now() - - promoted = Link._by_fullname(get_promoted_direct().keys(), - data = True, return_dict = False) - promos = {} - for l in promoted: - keep = True - if l.promote_until < now: - keep = False - maximum_clicks = getattr(l, "maximum_clicks", None) - maximum_views = getattr(l, "maximum_views", None) - if maximum_clicks or maximum_views: - # grab the traffic - traffic = load_traffic("day", "thing", l._fullname) - if traffic: - # (unique impressions, number impressions, - # unique clicks, number of clicks) - traffic = [y for x, y in traffic] - traffic = map(sum, zip(*traffic)) - uimp, nimp, ucli, ncli = traffic - if maximum_clicks and maximum_clicks < ncli: - keep = False - if maximum_views and maximum_views < nimp: - keep = False - - if not keep: - if test: - print "Would have unpromoted: (%s, %s)" % \ - (l, l.make_permalink(None)) - else: - unpromote(l, batch = True) - - new_promos = Link._query(Link.c.promote_status == (STATUS.pending, - STATUS.promoted), - Link.c.promoted == True, - data = True) - for l in new_promos: - if l.promote_until > now and l._date <= now: - if test: - print "Would have promoted: %s" % l + promotion_log(link, "status update: accepted") + # update the query queue + + set_status(link, STATUS.accepted) + now = promo_datetime_now(0) + if link._fullname in set(l.thing_name for l in + PromotionWeights.get_campaigns(now)): + promotion_log(link, "requeued") + #TODO: smarter would be nice, but this will have to do for now + make_daily_promotions() + emailer.accept_promo(link) + +def reject_promotion(link, reason = None): + promotion_log(link, "status update: rejected") + # update the query queue + set_status(link, STATUS.rejected) + # check to see if this link is a member of the current live list + links, weighted = get_live_promotions() + if link._fullname in links: + links.remove(link._fullname) + for k in list(weighted.keys()): + weighted[k] = [(lid, w) for lid, w in weighted[k] + if lid != link._fullname] + if not weighted[k]: + del weighted[k] + set_live_promotions((links, weighted)) + promotion_log(link, "dequeued") + emailer.reject_promo(link, reason = reason) + + +def unapprove_promotion(link): + promotion_log(link, "status update: unapproved") + # update the query queue + set_status(link, STATUS.unseen) + links, weghts = get_live_promotions() + +def accepted_iter(func, offset = 0): + now = promo_datetime_now(offset = offset) + campaigns = PromotionWeights.get_campaigns(now) + # load the links that have campaigns coming up + links = Link._by_fullname(set(x.thing_name for x in campaigns), + data = True,return_dict = True) + for x in campaigns: + l = links[x.thing_name] + if is_accepted(l): + # get the campaign of interest from the link + camp = getattr(l, "campaigns", {}).get(x.promo_idx) + # the transaction id is the last of the campaign tuple + if camp and camp[CAMPAIGN.trans_id]: + func(l, camp, x.promo_idx, x.weight) + + +def charge_pending(offset = 1): + def _charge(l, camp, indx, weight): + user = Account._byID(l.author_id) + sd, ed, bid, sr, trans_id = camp + try: + if (not authorize.is_charged_transaction(trans_id, indx) and + authorize.charge_transaction(user, trans_id, indx)): + # TODO: probably not absolutely necessary + promotion_log(l, "status update: pending") + # update the query queue + if is_promoted(l): + emailer.queue_promo(l, bid, trans_id) else: - promote(l, batch = True) - promos[l._fullname] = auction_weight(l) - elif l.promote_until 
<= now: - if test: - print "Would have unpromoted: (%s, %s)" % \ - (l, l.make_permalink(None)) - else: - unpromote(l, batch = True) - - # remove unpaid promos that are scheduled to run on today or before - unpaid_promos = Link._query(Link.c.promoted == True, - Link.c.promote_status == STATUS.unpaid, - Link.c._date < now, - Link.c._deleted == False, - data = True) - for l in unpaid_promos: - if test: - print "Would have rejected: %s" % promo_edit_url(l) - else: - reject_promo(l, reason = "We're sorry, but this sponsored link was not set up for payment before the appointed date. Please add payment info and move the date into the future if you would like to resubmit. Also please feel free to email us at selfservicesupport@reddit.com if you believe this email is in error.") + set_status(l, STATUS.pending, + onchange = lambda: emailer.queue_promo(l, bid, trans_id) ) + except: + print "Error on %s, campaign %s" % (l, indx) + accepted_iter(_charge, offset = offset) - if test: - print promos +def get_scheduled(offset = 0): + """ + gets a dictionary of sr -> list of (link, weight) for promotions + that should be live as of the day which is offset days from today. + """ + by_sr = {} + def _promote(l, camp, indx, weight): + sd, ed, bid, sr, trans_id = camp + if authorize.is_charged_transaction(trans_id, indx): + by_sr.setdefault(sr, []).append((l, weight)) + accepted_iter(_promote, offset = offset) + return by_sr + + +def get_traffic_weights(srnames): + from r2.lib import traffic + + # the weight is just the last 7 days of impressions (averaged) + def weigh(t, npoints = 7): + if t: + t = [y[1] for x, y in t[-npoints-1:-1]] + return max(float(sum(t)) / len(t), 1) + return 1 + + default_traffic = [weigh(traffic.load_traffic("day", "reddit", sr.name)) + for sr in Subreddit.top_lang_srs('all', 10)] + default_traffic = (float(max(sum(default_traffic),1)) / + max(len(default_traffic), 1)) + + res = {} + for srname in srnames: + if srname: + res[srname] = (default_traffic / + weigh(traffic.load_traffic("day", "reddit", srname)) ) else: - set_promoted(promos) - return promos - -def auction_weight(link): - duration = (link.promote_until - link._date).days - return duration and link.promote_bid / duration - -def set_promoted(link_names): - # caller is assumed to execute me inside a lock if necessary - g.permacache.set(promoted_memo_key, link_names) - - #update cache - get_promoted(_update = True) - -@memoize(promoted_memo_key, time = promoted_memo_lifetime) -def get_promoted(): - # does not lock the list to return it, so (slightly) stale data - # will be returned if called during an update rather than blocking - return get_promoted_direct() - -def get_promoted_direct(): - return g.permacache.get(promoted_memo_key, {}) - - -def get_promoted_slow(): - # to be used only by a human at a terminal - with g.make_lock(promoted_lock_key): - links = Link._query(Link.c.promote_status == STATUS.promoted, - Link.c.promoted == True, - data = True) - link_names = dict((x._fullname, auction_weight(x)) for x in links) + res[srname] = 1 + return res + +def get_weighted_schedule(offset = 0): + by_sr = get_scheduled(offset = offset) + weight_dict = get_traffic_weights(by_sr.keys()) + weighted = {} + links = set() + for sr_name, t_tuples in by_sr.iteritems(): + weighted[sr_name] = [] + for l, weight in t_tuples: + links.add(l._fullname) + weighted[sr_name].append((l._fullname, + weight * weight_dict[sr_name])) + return links, weighted + +def promotion_key(): + return "current_promotions" + +def get_live_promotions(): + return 
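The weighting above is inversely proportional to a subreddit's recent traffic: default_traffic is the average daily impressions of the top ten reddits, and each name gets default_traffic divided by its own trailing average, so a fixed bid buys proportionally more rotation weight on a quiet reddit. A toy calculation under made-up impression counts (the real weigh() pulls the impression column out of (date, values) rows; this sketch takes the counts directly):

    def weigh(points, npoints = 7):
        # trailing average of daily impressions, floored at 1 to avoid
        # dividing by zero for reddits with no recorded traffic
        if points:
            points = points[-npoints - 1:-1]
            return max(float(sum(points)) / len(points), 1)
        return 1

    default_traffic = weigh([120000] * 8)    # stand-in for the top-ten average
    quiet = weigh([3000] * 8)                # hypothetical small reddit
    print default_traffic / quiet            # -> 40.0: same bid, 40x the weight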
+
+def promotion_key():
+    return "current_promotions"
+
+def get_live_promotions():
+    return g.permacache.get(promotion_key()) or (set(), {})
+
+def set_live_promotions(x):
+    return g.permacache.set(promotion_key(), x)
+
+def make_daily_promotions(offset = 0, test = False):
+    old_links = set([])
+    all_links, weighted = get_weighted_schedule(offset)
+    x = get_live_promotions()
+    if x:
+        old_links, old_weights = x
+        # links that need to be promoted
+        new_links = all_links - old_links
+        # links that have already been promoted
+        old_links = old_links - all_links
+    else:
+        new_links = all_links

-        set_promoted(link_names)
+    links = Link._by_fullname(new_links.union(old_links), data = True,
+                              return_dict = True)
+    for l in old_links:
+        if is_promoted(links[l]):
+            if test:
+                print "unpromote", l
+            else:
+                # update the query queue
+                set_status(links[l], STATUS.finished,
+                           onchange = lambda: emailer.finished_promo(links[l]))

-    return link_names
+    for l in new_links:
+        if is_accepted(links[l]):
+            if test:
+                print "promote2", l
+            else:
+                # update the query queue
+                set_status(links[l], STATUS.promoted,
+                           onchange = lambda: emailer.live_promo(links[l]))
+
+    # convert the weighted dict to use sr_ids which are more useful
+    srs = {"":""}
+    for srname in weighted.keys():
+        if srname:
+            srs[srname] = Subreddit._by_name(srname)._id
+    weighted = dict((srs[k], v) for k, v in weighted.iteritems())
+
+    if not test:
+        set_live_promotions((all_links, weighted))
+    else:
+        print (all_links, weighted)

-def random_promoted():
+def get_promotion_list(user, site):
+    # site is specified, pick an ad from that site
+    if not isinstance(site, FakeSubreddit):
+        srids = set([site._id])
+    # site is Fake, user is not. Pick based on their subscriptions.
+    elif user and not isinstance(user, FakeAccount):
+        srids = set(Subreddit.reverse_subscriber_ids(user) + [""])
+    # both site and user are "fake" -- get the default subscription list
+    else:
+        srids = set(Subreddit.user_subreddits(None, True) + [""])
+
+    return get_promotions_cached(srids)
+
+
+#@memoize('get_promotions_cached', time = 10 * 60)
+def get_promotions_cached(sites):
+    p = get_live_promotions()
+    if p:
+        links, promo_dict = p
+        available = {}
+        for k, links in promo_dict.iteritems():
+            if k in sites:
+                for l, w in links:
+                    available[l] = available.get(l, 0) + w
+        # sort the available list by weight
+        links = available.keys()
+        links.sort(key = lambda x: -available[x])
+        norm = sum(available.values())
+        # return a sorted list of (link, norm_weight)
+        return [(l, available[l] / norm) for l in links]
+
+    return []
+
+def randomized_promotion_list(user, site):
+    promos = get_promotion_list(user, site)
+    # no promos, no problem
+    if not promos:
+        return []
+    # more than one: randomize
+    elif len(promos) > 1:
+        n = random.uniform(0, 1)
+        for i, (l, w) in enumerate(promos):
+            n -= w
+            if n < 0:
+                promos = promos[i:] + promos[:i]
+                break
+    # fall thru for the length 1 case here as well
+    return [l for l, w in promos]
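randomized_promotion_list does weighted selection by rotation: pick a pivot with probability proportional to the normalized weights, then rotate the whole list so the pivot lands in front. Every ad stays in the returned list; only the head position is weight-biased. The trick in isolation, as a small sketch:

    import random

    def rotate_by_weight(promos):
        # promos: list of (fullname, weight) with weights summing to ~1
        if len(promos) > 1:
            n = random.uniform(0, 1)
            for i, (name, w) in enumerate(promos):
                n -= w
                if n < 0:
                    promos = promos[i:] + promos[:i]
                    break
        return [name for name, w in promos]

    # e.g. rotate_by_weight([("a", 0.7), ("b", 0.2), ("c", 0.1)])
    # leads with "a" about 70% of the time, but "b" and "c" still follow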
+
+
+def insert_promoted(link_names, pos, promoted_every_n = 5):
     """
-    return a list of the currently promoted items, randomly choosing
-    the order of the list based on the bid-weighing.
+    Inserts promoted links into an existing organic list.  Destructive
+    on `link_names'
     """
-    bids = get_promoted()
-    market = sum(bids.values())
-    if market:
-        # get a list of current promotions, sorted by their bid amount
-        promo_list = bids.keys()
-        # sort by bids and use the thing_id as the tie breaker (for
-        # consistent sorting)
-        promo_list.sort(key = lambda x: (bids[x], x), reverse = True)
-        if len(bids) > 1:
-            # pick a number, any number
-            n = random.uniform(0, 1)
-            for i, p in enumerate(promo_list):
-                n -= bids[p] / market
-                if n < 0:
-                    return promo_list[i:] + promo_list[:i]
-        return promo_list
-
-
-def test_random_promoted(n = 1000):
-    promos = get_promoted()
-    market = sum(promos.values())
-    if market:
-        res = {}
-        for i in xrange(n):
-            key = random_promoted()[0]
-            res[key] = res.get(key, 0) + 1
-
-        print "%10s expected actual E/A" % "thing"
-        print "------------------------------------"
-        for k, v in promos.iteritems():
-            expected = float(v) / market * 100
-            actual = float(res.get(k, 0)) / n * 100
-
-            print "%10s %6.2f%% %6.2f%% %6.2f" % \
-                  (k, expected, actual, expected / actual if actual else 0)
+    promoted_items = randomized_promotion_list(c.user, c.site)
+
+    if not promoted_items:
+        return link_names, pos
+
+    # no point in running the builder over more promoted links than
+    # we'll even use
+    max_promoted = max(1, len(link_names) / promoted_every_n)
+
+    builder = IDBuilder(promoted_items, keep_fn = keep_fresh_links,
+                        skip = True)
+    promoted_items = builder.get_items()[0]
+
+    focus = None
+    if promoted_items:
+        focus = promoted_items[0]._fullname
+        # insert one promoted item for every N items
+        for i, item in enumerate(promoted_items):
+            p = i * (promoted_every_n + 1)
+            if p > len(link_names):
+                break
+            p += pos
+            if p > len(link_names):
+                p = p % len(link_names)
+
+            link_names.insert(p, item._fullname)
+
+    link_names = filter(None, link_names)
+    if focus:
+        try:
+            pos = link_names.index(focus)
+        except ValueError:
+            pass
+    # don't insert one at the head of the list 50% of the time for
+    # logged in users, and 50% of the time for logged-off users when
+    # the pool of promoted links is less than 3 (to avoid showing the
+    # same promoted link to the same person too often)
+    if ((c.user_is_loggedin or len(promoted_items) < 3) and
+        random.choice((True, False))):
+        pos = (pos + 1) % len(link_names)
+
+    return list(UniqueIterator(link_names)), pos
+
+def benchmark_promoted(user, site, pos = 0, link_sample = 50, attempts = 100):
+    c.user = user
+    c.site = site
+    link_names = ["blah%s" % i for i in xrange(link_sample)]
+    res = {}
+    for i in xrange(attempts):
+        names, p = insert_promoted(link_names[::], pos)
+        name = names[p]
+        res[name] = res.get(name, 0) + 1
+    res = list(res.iteritems())
+    res.sort(key = lambda x: x[1], reverse = True)
+    expected = dict(get_promotion_list(user, site))
+    for l, v in res:
+        print "%s: %5.3f %3.5f" % (l, float(v) / attempts, expected.get(l, 0))
+
+
+
+def Run(offset = 0):
+    charge_pending(offset = offset + 1)
+    charge_pending(offset = offset)
+    make_daily_promotions(offset = offset)
+
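Run() is the daily entry point: charge tomorrow's accepted transactions (so payment problems surface a day early), retry today's, then recompute the live set. The slotting insert_promoted performs on listing pages reduces to "one promoted fullname every N organic ones"; a stripped-down sketch, without the IDBuilder filtering, position wrapping, and dedup the real function layers on top:

    def interleave(names, promoted, every_n = 5, pos = 0):
        out = list(names)
        for i, name in enumerate(promoted):
            p = pos + i * (every_n + 1)   # one ad, then every_n organic links
            if p > len(out):
                break
            out.insert(p, name)
        return out

    print interleave(["l1", "l2", "l3", "l4", "l5", "l6"], ["ad1", "ad2"])
    # -> ['ad1', 'l1', 'l2', 'l3', 'l4', 'l5', 'ad2', 'l6']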
diff --git a/r2/r2/lib/s3cp.py b/r2/r2/lib/s3cp.py
index 5ed85b4365..b54ce2dc6f 100644
--- a/r2/r2/lib/s3cp.py
+++ b/r2/r2/lib/s3cp.py
@@ -22,79 +22,32 @@
 # CondeNet, Inc. All Rights Reserved.
 ################################################################################

-import base64, hmac, sha, os, sys, getopt
-from datetime import datetime
-from pylons import g,config
+import boto
+from boto.s3.connection import S3Connection
+from boto.s3.key import Key
+from pylons import g

 KEY_ID = g.S3KEY_ID
 SECRET_KEY = g.S3SECRET_KEY

-class S3Exception(Exception): pass
-
-def make_header(verb, date, amz_headers, resource, content_type):
-    content_md5 = ''
-
-    #amazon headers
-    lower_head = dict((key.lower(), val)
-                      for key, val in amz_headers.iteritems())
-    keys = lower_head.keys()
-    keys.sort()
-    amz_lst = ['%s:%s' % (key, lower_head[key]) for key in keys]
-    amz_str = '\n'.join(amz_lst)
-
-    s = '\n'.join((verb,
-                   content_md5,
-                   content_type,
-                   date,
-                   amz_str,
-                   resource))
-
-    h = hmac.new(SECRET_KEY, s, sha)
-    return base64.encodestring(h.digest()).strip()
-
-def send_file(filename, resource, content_type, acl, rate, meter):
-    date = datetime.utcnow().strftime("%a, %d %b %Y %H:%M:%S GMT")
-    amz_headers = {'x-amz-acl': acl}
+NEVER = 'Thu, 31 Dec 2037 23:59:59 GMT'

-    auth_header = make_header('PUT', date, amz_headers, resource, content_type)
-
-    params = ['-T', filename,
-              '-H', 'x-amz-acl: %s' % amz_headers['x-amz-acl'],
-              '-H', 'Authorization: AWS %s:%s' % (KEY_ID, auth_header),
-              '-H', 'Date: %s' % date]
-
-    if content_type:
-        params.append('-H')
-        params.append('Content-Type: %s' % content_type)
-
-    if rate:
-        params.append('--limit-rate')
-        params.append(rate)
+class S3Exception(Exception): pass

-    if meter:
-        params.append('-o')
-        params.append('s3cp.output')
-    else:
-        params.append('-s')
+def send_file(bucketname, filename, content, content_type = 'text/plain',
+              never_expire = False):
+    # this function is pretty low-traffic, but if we start using it a
+    # lot more we'll want to maintain a connection pool across the app
+    # rather than connecting on every invocation

-    params.append('https://s3.amazonaws.com%s' % resource)
+    # TODO: add ACL support instead of always using public-read

-    exit_code = os.spawnlp(os.P_WAIT, 'curl', 'curl', *params)
-    if exit_code:
-        raise S3Exception(exit_code)
+    connection = S3Connection(KEY_ID, SECRET_KEY)
+    bucket = connection.get_bucket(bucketname)
+    k = bucket.new_key(filename)

-
-if __name__ == '__main__':
-    options = "a:c:l:m"
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], options)
-    except:
-        sys.exit(2)
-
-    opts = dict(opts)
+    headers = {'Content-Type': content_type}
+    if never_expire:
+        headers['Expires'] = NEVER

-    send_file(args[0], args[1],
-              opts.get('-c', ''),
-              opts.get('-a', 'private'),
-              opts.get('-l'),
-              opts.has_key('-m'))
+    k.set_contents_from_string(content, policy='public-read',
+                               headers=headers)
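A sketch of how a caller might use the new s3cp interface (the bucket and key names here are invented for illustration): instead of hand-signing headers and forking curl, callers now pass the body itself and boto handles authentication.

    from r2.lib import s3cp

    css = "body { background: #ffffff }"
    s3cp.send_file('reddit-static',             # hypothetical bucket
                   'stylesheets/example.css',   # hypothetical key
                   css,
                   content_type = 'text/css',
                   never_expire = True)         # adds the far-future Expires header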
diff --git a/r2/r2/lib/scraper.py b/r2/r2/lib/scraper.py
index 78bc2740cd..fec58a9191 100644
--- a/r2/r2/lib/scraper.py
+++ b/r2/r2/lib/scraper.py
@@ -23,6 +23,7 @@
 from pylons import g
 from r2.lib import utils
 from r2.lib.memoize import memoize
+import simplejson as json

 from urllib2 import Request, HTTPError, URLError, urlopen
 from httplib import InvalidURL
@@ -158,8 +159,8 @@ class MediaEmbed(object):
     scrolling = False

     def __init__(self, height, width, content, scrolling = False):
-        self.height = height
-        self.width = width
+        self.height = int(height)
+        self.width = int(width)
         self.content = content
         self.scrolling = scrolling

@@ -317,10 +318,11 @@ def youtube_in_google(google_url):
 def make_scraper(url):
     domain = utils.domain(url)
     scraper = Scraper
-    for suffix, cls in scrapers.iteritems():
-        if domain.endswith(suffix):
-            scraper = cls
-            break
+    for suffix, clses in scrapers.iteritems():
+        for cls in clses:
+            if domain.endswith(suffix):
+                scraper = cls
+                break

     #sometimes youtube scrapers masquerade as google scrapers
     if scraper == GootubeScraper:
@@ -625,6 +627,216 @@ def media_embed(cls, content, **kw):
                       content = content,
                       scrolling = True)

+
+########## oembed rich-media scrapers ##########
+
+class OEmbed(Scraper):
+    """
+    Oembed Scraper
+    ==============
+    Tries to use the oembed standard to create a media object.
+
+    url_re: Regular Expression to match the incoming url against.
+    api_endpoint: Url of the api end point you are using.
+    api_params: Default Params to be sent with the outgoing request.
+    """
+    url_re = ''
+    api_endpoint = ''
+    api_params = {}
+
+    def __init__(self, url):
+        Scraper.__init__(self, url)
+        self.oembed = None
+
+        #Fallback to the scraper if the url doesn't match
+        if not self.url_re.match(self.url):
+            self.__class__ = Scraper
+
+    def __repr__(self):
+        return "%s(%r)" % (self.__class__.__name__, self.url)
+
+    def download(self):
+        self.api_params.update({'url': self.url})
+        query = urllib.urlencode(self.api_params)
+        api_url = "%s?%s" % (self.api_endpoint, query)
+
+        self.content_type, self.content = fetch_url(api_url)
+
+        #Either a 404 or 500.
+        if not self.content:
+            #raise ValueError('ISSUE CALLING %s' %api_url)
+            log.warn('oEmbed call (%s) failed to return content for %s'
+                     % (api_url, self.url))
+            return None
+
+        try:
+            self.oembed = json.loads(self.content)
+        except ValueError, e:
+            log.error('oEmbed call (%s) returned invalid json for %s'
+                      % (api_url, self.url))
+            return None
+
+    def image_urls(self):
+        #if the original url was an image, use that
+        if self.oembed and self.oembed.get('type') == 'photo':
+            yield self.oembed.get('url')
+        elif self.oembed and self.oembed.get('thumbnail_url'):
+            yield self.oembed.get('thumbnail_url')
+
+    def largest_image_url(self):
+        #Seems to be the default place to check if the download has happened.
+        if not self.oembed:
+            self.download()
+
+        #if the original url was of the photo type
+        if self.oembed and self.oembed.get('type') == 'photo':
+            return self.oembed.get('url')
+        elif self.oembed and self.oembed.get('thumbnail_url'):
+            return self.oembed.get('thumbnail_url')
+
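For reference, the general shape of the oEmbed payload these accessors pick over (abbreviated and hypothetical, following the field names in the public oEmbed spec): the `type` field decides everything.

    oembed = {
        "version": "1.0",
        "type": "photo",                        # or "video", "rich", "link"
        "url": "http://example.com/full.jpg",   # photo responses only
        "thumbnail_url": "http://example.com/t.jpg",
        "width": 600,
        "height": 400,
    }
    # image_urls()/largest_image_url() prefer "url" for photos and fall
    # back to "thumbnail_url"; media_object() (below) only fires for
    # "video" and "rich" responses, which carry an embeddable "html" blob.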
+    def media_object(self):
+        #Seems to be the default place to check if the download has happened.
+        if not self.oembed:
+            self.download()
+
+        if self.oembed and self.oembed.get('type') in ['video', 'rich']:
+            for domain in self.domains:
+                if self.url.find(domain) > -1:
+                    return dict(type=domain, oembed=self.oembed)
+        return None
+
+    @classmethod
+    def media_embed(cls, video_id = None, height = None, width = None, **kw):
+        content = None
+        oembed = kw.get('oembed')
+
+        # check if oembed is there and has html
+        if oembed and oembed.get('html'):
+            content = oembed.get('html')
+        if content and oembed.get('height') and oembed.get('width'):
+            return MediaEmbed(height = oembed['height'],
+                              width = oembed['width'],
+                              content = content)
+
+class EmbedlyOEmbed(OEmbed):
+    """
+    Embedly oEmbed Provider
+    =======================
+    documentation: http://api.embed.ly
+    """
+    domains = ['youtube.com', 'veoh.com', 'justin.tv', 'ustream.com',
+        'qik.com', 'revision3.com', 'dailymotion.com', 'collegehumor.com',
+        'twitvid.com', 'break.com', 'vids.myspace.com', 'metacafe.com',
+        'blip.tv', 'video.google.com', 'revver.com', 'video.yahoo.com',
+        'viddler.com', 'liveleak.com', 'animoto.com', 'yfrog.com',
+        'tweetphoto.com', 'flickr.com', 'twitpic.com', 'imgur.com',
+        'posterous.com', 'twitgoo.com', 'photobucket.com', 'phodroid.com',
+        'xkcd.com', 'asofterworld.com', 'qwantz.com', '23hq.com', 'hulu.com',
+        'movieclips.com', 'crackle.com', 'fancast.com', 'funnyordie.com',
+        'vimeo.com', 'ted.com', 'omnisio.com', 'nfb.ca', 'thedailyshow.com',
+        'movies.yahoo.com', 'colbertnation.com', 'comedycentral.com',
+        'theonion.com', 'wordpress.tv', 'traileraddict.com', 'soundcloud.com',
+        'slideshare.net', 'scribd.com', 'screenr.com', '5min.com',
+        'howcast.com', 'my.opera.com', 'escapistmagazine.com', ]
+
+    url_re = re.compile('^http://.+\.youtube\.com/watch.+|'+\
+        '^http://.+\.youtube\.com/v/.+|'+\
+        '^http://youtube\.com/watch.+|'+\
+        '^http://youtube\.com/v/.+|'+\
+        '^http://youtu\.be/.+|'+\
+        '^http://www\.veoh\.com/.*/watch/.+|'+\
+        '^http://www\.justin\.tv/clip/.+|'+\
+        '^http://www\.justin\.tv/.+|'+\
+        '^http://justin\.tv/clip/.+|'+\
+        '^http://justin\.tv/.+|'+\
+        '^http://www\.ustream\.tv/recorded/.+|'+\
+        '^http://www\.ustream\.tv/channel/.+|'+\
+        '^http://qik\.com/video/.+|'+\
+        '^http://qik\.com/.+|'+\
+        '^http://.*revision3\.com/.+|'+\
+        '^http://www.dailymotion\.com/video/.+|'+\
+        '^http://www.dailymotion\.com/.+/video/.+|'+\
+        '^http://dailymotion\.com/video/.+|'+\
+        '^http://dailymotion\.com/.+/video/.+|'+\
+        '^http://www\.collegehumor\.com/video:.+|'+\
+        '^http://www\.twitvid\.com/.+|'+\
+        '^http://www\.break\.com/.*/.+|'+\
+        '^http://vids\.myspace\.com/index\.cfm\?fuseaction=vids\.individual&videoid.+|'+\
+        '^http://www\.myspace\.com/index\.cfm\?fuseaction=.*&videoid.+|'+\
+        '^http://www\.metacafe\.com/watch/.+|'+\
+        '^http://blip\.tv/file/.+|'+\
+        '^http://.+\.blip\.tv/file/.+|'+\
+        '^http://video\.google\.com/videoplay\?.+|'+\
+        '^http://revver\.com/video/.+|'+\
+        '^http://www\.revver\.com/video/.+|'+\
+        '^http://video\.yahoo\.com/watch/.*/.+|'+\
+        '^http://video\.yahoo\.com/network/.+|'+\
+        '^http://.*viddler\.com/explore/.*/videos/.+|'+\
+        '^http://liveleak\.com/view\?.+|'+\
+        '^http://www\.liveleak\.com/view\?.+|'+\
+        '^http://animoto\.com/play/.+|'+\
+        '^http://yfrog\..*/.+|'+\
+        '^http://.+\.yfrog\..*/.+|'+\
+        '^http://tweetphoto\.com/.+|'+\
+        '^http://www\.flickr\.com/photos/.+|'+\
+        '^http://twitpic\.com/.+|'+\
+        '^http://.*imgur\.com/.+|'+\
+        '^http://.*\.posterous\.com/.+|'+\
+        '^http://twitgoo\.com/.+|'+\
+        '^http://i.*\.photobucket\.com/albums/.+|'+\
+
'^http://gi.*\.photobucket\.com/groups/.+|'+\ + '^http://phodroid\.com/.*/.*/.+|'+\ + '^http://xkcd\.com/.+|'+\ + '^http://www\.asofterworld\.com/index\.php\?id=.+|'+\ + '^http://www\.qwantz\.com/index\.php\?comic=.+|'+\ + '^http://23hq\.com/.*/photo/.+|'+\ + '^http://www\.23hq\.com/.*/photo/.+|'+\ + '^http://www\.hulu\.com/watch/.+|'+\ + '^http://movieclips\.com/watch/.*/.*/|'+\ + '^http://movieclips\.com/watch/.*/.*/.*/.+|'+\ + '^http://.*crackle\.com/c/.+|'+\ + '^http://www\.fancast\.com/.*/videos|'+\ + '^http://www\.funnyordie\.com/videos/.+|'+\ + '^http://www\.vimeo\.com/groups/.*/videos/.+|'+\ + '^http://www\.vimeo\.com/.+|'+\ + '^http://vimeo\.com/groups/.*/videos/.+|'+\ + '^http://vimeo\.com/.+|'+\ + '^http://www\.ted\.com/.+|'+\ + '^http://www\.omnisio\.com/.+|'+\ + '^http://.*nfb\.ca/film/.+|'+\ + '^http://www\.thedailyshow\.com/watch/.+|'+\ + '^http://www\.thedailyshow\.com/full-episodes/.+|'+\ + '^http://www\.thedailyshow\.com/collection/.*/.*/.+|'+\ + '^http://movies\.yahoo\.com/.*movie/.*/video/.+|'+\ + '^http://movies\.yahoo\.com/movie/.*/info|'+\ + '^http://movies\.yahoo\.com/movie/.*/trailer|'+\ + '^http://www\.colbertnation\.com/the-colbert-report-collections/.+|'+\ + '^http://www\.colbertnation\.com/full-episodes/.+|'+\ + '^http://www\.colbertnation\.com/the-colbert-report-videos/.+|'+\ + '^http://www\.comedycentral\.com/videos/index\.jhtml\?.+|'+\ + '^http://www\.theonion\.com/video/.+|'+\ + '^http://theonion\.com/video/.+|'+\ + '^http://wordpress\.tv/.*/.*/.*/.*/|'+\ + '^http://www\.traileraddict\.com/trailer/.+|'+\ + '^http://www\.traileraddict\.com/clip/.+|'+\ + '^http://www\.traileraddict\.com/poster/.+|'+\ + '^http://soundcloud\.com/.+|'+\ + '^http://soundcloud\.com/.*/.+|'+\ + '^http://soundcloud\.com/.*/sets/.+|'+\ + '^http://soundcloud\.com/groups/.+|'+\ + '^http://www\.slideshare\.net/.*/.+|'+\ + '^http://.*\.scribd\.com/doc/.+|'+\ + '^http://screenr\.com/.+|'+\ + '^http://www\.5min\.com/Video/.+|'+\ + '^http://www\.howcast\.com/videos/.+|'+\ + '^http://my\.opera\.com/.*/albums/show\.dml\?id=.+|'+\ + '^http://my\.opera\.com/.*/albums/showpic\.dml\?album=.+&picture=.+|'+\ + '^http://escapistmagazine\.com/videos/.+|'+\ + '^http://www\.escapistmagazine\.com/videos/.+', re.I + ) + api_endpoint = 'http://api.embed.ly/v1/api/oembed' + api_params = {'format':'json', 'maxwidth':600 } + class GenericScraper(MediaScraper): """a special scrapper not associated with any domains, used to write media objects to links by hand""" @@ -662,13 +874,14 @@ def find_media_object(self, scraper): youtube_id = self.youtube_url_re.match(movie_embed['src']).group(2) youtube_url = 'http://www.youtube.com/watch?v=%s"' % youtube_id log.debug('found youtube embed %s' % youtube_url) - mo = YoutubeScraper(youtube_url).media_object() + mo = make_scraper(youtube_url).media_object() mo['deep'] = scraper.url return mo #scrapers =:= dict(domain -> ScraperClass) scrapers = {} -for scraper in [ YoutubeScraper, +for scraper in [ EmbedlyOEmbed, + YoutubeScraper, MetacafeScraper, GootubeScraper, VimeoScraper, @@ -686,14 +899,20 @@ def find_media_object(self, scraper): EscapistScraper, JustintvScraper, SoundcloudScraper, - #CraigslistScraper, + CraigslistScraper, GenericScraper, ]: for domain in scraper.domains: - scrapers[domain] = scraper + scrapers.setdefault(domain, []).append(scraper) deepscrapers = [YoutubeEmbedDeepScraper] +def get_media_embed(media_object): + for scraper in scrapers.get(media_object['type']): + res = scraper.media_embed(**media_object) + if res: + return res + def 
convert_old_media_objects(): q = Link._query(Link.c.media_object is not None, Link.c._date > whenever, @@ -728,10 +947,10 @@ def convert_old_media_objects(): 'http://www.facebook.com/pages/Rick-Astley/5807213510?sid=c99aaf3888171e73668a38e0749ae12d', # regular thumbnail finder 'http://www.flickr.com/photos/septuagesima/317819584/', # thumbnail with image_src - 'http://www.youtube.com/watch?v=Yu_moia-oVI', + #'http://www.youtube.com/watch?v=Yu_moia-oVI', 'http://www.metacafe.com/watch/sy-1473689248/rick_astley_never_gonna_give_you_up_official_music_video/', 'http://video.google.com/videoplay?docid=5908758151704698048', - 'http://vimeo.com/4495451', + #'http://vimeo.com/4495451', 'http://www.break.com/usercontent/2008/11/Macy-s-Thankgiving-Day-Parade-Rick-Roll-611965.html', 'http://www.theonion.com/content/video/sony_releases_new_stupid_piece_of', 'http://www.collegehumor.com/video:1823712', @@ -761,8 +980,291 @@ def convert_old_media_objects(): 'http://listen.grooveshark.com/#/song/Never_Gonna_Give_You_Up/12616328', 'http://tinysong.com/2WOJ', # also Grooveshark - - 'http://www.rickrolled.com/videos/video/rickrolld' # test the DeepScraper + 'http://www.slideshare.net/doina/happy-easter-from-holland-slideshare', + 'http://www.slideshare.net/stinson/easter-1284190', + 'http://www.slideshare.net/angelspascual/easter-events', + 'http://www.slideshare.net/sirrods/happy-easter-3626014', + 'http://www.slideshare.net/sirrods/happy-easter-wide-screen', + 'http://www.slideshare.net/carmen_serbanescu/easter-holiday', + 'http://www.slideshare.net/Lithuaniabook/easter-1255880', + 'http://www.slideshare.net/hues/easter-plants', + 'http://www.slideshare.net/Gospelman/passover-week', + 'http://www.slideshare.net/angelspascual/easter-around-the-world-1327542', + 'http://www.scribd.com/doc/13994900/Easter', + 'http://www.scribd.com/doc/27425714/Celebrating-Easter-ideas-for-adults-and-children', + 'http://www.scribd.com/doc/28010101/Easter-Foods-No-Name', + 'http://www.scribd.com/doc/28452730/Easter-Cards', + 'http://www.scribd.com/doc/19026714/The-Easter-Season', + 'http://www.scribd.com/doc/29183659/History-of-Easter', + 'http://www.scribd.com/doc/15632842/The-Last-Easter', + 'http://www.scribd.com/doc/28741860/The-Plain-Truth-About-Easter', + 'http://www.scribd.com/doc/23616250/4-27-08-ITS-EASTER-AGAIN-ORTHODOX-EASTER-by-vanderKOK', + 'http://screenr.com/t9d', + 'http://screenr.com/yLS', + 'http://screenr.com/gzS', + 'http://screenr.com/IwU', + 'http://screenr.com/FM7', + 'http://screenr.com/Ejg', + 'http://screenr.com/u4h', + 'http://screenr.com/QiN', + 'http://screenr.com/zts', + 'http://www.5min.com/Video/How-to-Decorate-Easter-Eggs-with-Decoupage-142076462', + 'http://www.5min.com/Video/How-to-Color-Easter-Eggs-Dye-142076281', + 'http://www.5min.com/Video/How-to-Make-an-Easter-Egg-Diorama-142076482', + 'http://www.5min.com/Video/How-to-Make-Sequined-Easter-Eggs-142076512', + 'http://www.5min.com/Video/How-to-Decorate-Wooden-Easter-Eggs-142076558', + 'http://www.5min.com/Video/How-to-Blow-out-an-Easter-Egg-142076367', + 'http://www.5min.com/Video/Learn-About-Easter-38363995', + 'http://www.howcast.com/videos/368909-Easter-Egg-Dying-How-To-Make-Ukrainian-Easter-Eggs', + 'http://www.howcast.com/videos/368911-Easter-Egg-Dying-How-To-Color-Easter-Eggs-With-Food-Dyes', + 'http://www.howcast.com/videos/368913-Easter-Egg-Dying-How-To-Make-Homemade-Easter-Egg-Dye', + 'http://www.howcast.com/videos/220110-The-Meaning-Of-Easter', + 'http://my.opera.com/nirvanka/albums/show.dml?id=519866', + 
'http://img402.yfrog.com/i/mfe.jpg/', + 'http://img20.yfrog.com/i/dy6.jpg/', + 'http://img145.yfrog.com/i/4mu.mp4/', + 'http://img15.yfrog.com/i/mygreatmovie.mp4/', + 'http://img159.yfrog.com/i/500x5000401.jpg/', + 'http://tweetphoto.com/14784358', + 'http://tweetphoto.com/16044847', + 'http://tweetphoto.com/16718883', + 'http://tweetphoto.com/16451148', + 'http://tweetphoto.com/16133984', + 'http://tweetphoto.com/8069529', + 'http://tweetphoto.com/16207556', + 'http://tweetphoto.com/7448361', + 'http://tweetphoto.com/16069325', + 'http://tweetphoto.com/4791033', + 'http://www.flickr.com/photos/10349896@N08/4490293418/', + 'http://www.flickr.com/photos/mneylon/4483279051/', + 'http://www.flickr.com/photos/xstartxtodayx/4488996521/', + 'http://www.flickr.com/photos/mommyknows/4485313917/', + 'http://www.flickr.com/photos/29988430@N06/4487127638/', + 'http://www.flickr.com/photos/excomedia/4484159563/', + 'http://www.flickr.com/photos/sunnybrook100/4471526636/', + 'http://www.flickr.com/photos/jaimewalsh/4489497178/', + 'http://www.flickr.com/photos/29988430@N06/4486475549/', + 'http://www.flickr.com/photos/22695183@N08/4488681694/', + 'http://twitpic.com/1cnsf6', + 'http://twitpic.com/1cgtti', + 'http://twitpic.com/1coc0n', + 'http://twitpic.com/1cm8us', + 'http://twitpic.com/1cgks4', + 'http://imgur.com/6pLoN', + 'http://onegoodpenguin.posterous.com/golden-tee-live-2010-easter-egg', + 'http://adland.posterous.com/?tag=royaleastershowauckland', + 'http://apartmentliving.posterous.com/biggest-easter-egg-hunts-in-the-dc-area', + 'http://twitgoo.com/1as', + 'http://twitgoo.com/1p94', + 'http://twitgoo.com/4kg2', + 'http://twitgoo.com/6c9', + 'http://twitgoo.com/1w5', + 'http://twitgoo.com/6mu', + 'http://twitgoo.com/1w3', + 'http://twitgoo.com/1om', + 'http://twitgoo.com/1mh', + 'http://www.qwantz.com/index.php?comic=1686', + 'http://www.qwantz.com/index.php?comic=773', + 'http://www.qwantz.com/index.php?comic=1018', + 'http://www.qwantz.com/index.php?comic=1019', + 'http://www.23hq.com/mhg/photo/5498347', + 'http://www.23hq.com/Greetingdesignstudio/photo/5464607', + 'http://www.23hq.com/Greetingdesignstudio/photo/5464590', + 'http://www.23hq.com/Greetingdesignstudio/photo/5464605', + 'http://www.23hq.com/Greetingdesignstudio/photo/5464604', + 'http://www.23hq.com/dvilles2/photo/5443192', + 'http://www.23hq.com/Greetingdesignstudio/photo/5464606', + 'http://www.youtube.com/watch?v=gghKdx558Qg', + 'http://www.youtube.com/watch?v=yPid9BLQQcg', + 'http://www.youtube.com/watch?v=uEo2vboUYUk', + 'http://www.youtube.com/watch?v=geUhtoHbLu4', + 'http://www.youtube.com/watch?v=Zk7dDekYej0', + 'http://www.youtube.com/watch?v=Q3tgMosx_tI', + 'http://www.youtube.com/watch?v=s9P8_vgmLfs', + 'http://www.youtube.com/watch?v=1cmtN1meMmk', + 'http://www.youtube.com/watch?v=AVzj-U5Ihm0', + 'http://www.veoh.com/collection/easycookvideos/watch/v366931kcdgj7Hd', + 'http://www.veoh.com/collection/easycookvideos/watch/v366991zjpANrqc', + 'http://www.veoh.com/browse/videos/category/educational/watch/v7054535EZGFJqyX', + 'http://www.veoh.com/browse/videos/category/lifestyle/watch/v18155013XBBtnYwq', + 'http://www.justin.tv/easter7presents', + 'http://www.justin.tv/easterfraud', + 'http://www.justin.tv/cccog27909', + 'http://www.justin.tv/clip/6e8c18f7050', + 'http://www.justin.tv/venom24', + 'http://qik.com/video/1622287', + 'http://qik.com/video/1503735', + 'http://qik.com/video/40504', + 'http://qik.com/video/1445763', + 'http://qik.com/video/743285', + 'http://qik.com/video/1445299', + 
'http://qik.com/video/1443200', + 'http://qik.com/video/1445889', + 'http://qik.com/video/174242', + 'http://qik.com/video/1444897', + 'http://revision3.com/hak5/DualCore', + 'http://revision3.com/popsiren/charm', + 'http://revision3.com/tekzilla/eyefinity', + 'http://revision3.com/diggnation/2005-10-06', + 'http://revision3.com/hak5/netcat-virtualization-wordpress/', + 'http://revision3.com/infected/forsaken', + 'http://revision3.com/hak5/purepwnage', + 'http://revision3.com/tekzilla/wowheadset', + 'http://www.dailymotion.com/video/xcstzd_greek-wallets-tighten-during-easter_news', + 'http://www.dailymotion.com/video/xcso4y_exclusive-easter-eggs-easter-basket_lifestyle', + 'http://www.dailymotion.com/video/x2sgkt_evil-easter-bunny', + 'http://www.dailymotion.com/video/xco7oc_invitation-to-2010-easter-services_news', + 'http://www.dailymotion.com/video/xcss6b_big-cat-easter_animals', + 'http://www.dailymotion.com/video/xcszw1_easter-bunny-visits-buenos-aires-zo_news', + 'http://www.dailymotion.com/video/xcsfvs_forecasters-warn-of-easter-misery_news', + 'http://www.collegehumor.com/video:1682246', + 'http://www.twitvid.com/D9997', + 'http://www.twitvid.com/902B9', + 'http://www.twitvid.com/C33F8', + 'http://www.twitvid.com/63F73', + 'http://www.twitvid.com/BC0BA', + 'http://www.twitvid.com/1C33C', + 'http://www.twitvid.com/8A8E2', + 'http://www.twitvid.com/51035', + 'http://www.twitvid.com/5C733', + 'http://www.break.com/game-trailers/game/just-cause-2/just-cause-2-lost-easter-egg?res=1', + 'http://www.break.com/usercontent/2010/3/10/easter-holiday-2009-slideshow-1775624', + 'http://www.break.com/index/a-very-sexy-easter-video.html', + 'http://www.break.com/usercontent/2010/3/11/this-video-features-gizzi-erskine-making-easter-cookies-1776089', + 'http://www.break.com/usercontent/2007/4/4/happy-easter-265717', + 'http://www.break.com/usercontent/2007/4/17/extreme-easter-egg-hunting-276064', + 'http://www.break.com/usercontent/2006/11/18/the-evil-easter-bunny-184789', + 'http://www.break.com/usercontent/2006/4/16/hoppy-easter-kitty-91040', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104063637', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104004674', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103928002', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103999188', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103920940', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=103981831', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104004673', + 'http://vids.myspace.com/index.cfm?fuseaction=vids.individual&videoid=104046456', + 'http://www.metacafe.com/watch/105023/the_easter_bunny/', + 'http://www.metacafe.com/watch/4376131/easter_lay/', + 'http://www.metacafe.com/watch/2245996/how_to_make_ukraine_easter_eggs/', + 'http://www.metacafe.com/watch/4374339/easter_eggs/', + 'http://www.metacafe.com/watch/2605860/filled_easter_baskets/', + 'http://www.metacafe.com/watch/2372088/easter_eggs/', + 'http://www.metacafe.com/watch/3043671/www_goodnews_ws_easter_island/', + 'http://www.metacafe.com/watch/1652057/easter_eggs/', + 'http://www.metacafe.com/watch/1173632/ultra_kawaii_easter_bunny_party/', + 'http://celluloidremix.blip.tv/file/3378272/', + 'http://blip.tv/file/449469', + 'http://blip.tv/file/199776', + 'http://blip.tv/file/766967', + 'http://blip.tv/file/770127', + 'http://blip.tv/file/854925', + 
'http://www.blip.tv/file/22695?filename=Uncle_dale-THEEASTERBUNNYHATESYOU395.flv', + 'http://iofa.blip.tv/file/3412333/', + 'http://blip.tv/file/190393', + 'http://blip.tv/file/83152', + 'http://video.google.com/videoplay?docid=-5427138374898988918&q=easter+bunny&pl=true', + 'http://video.google.com/videoplay?docid=7785441737970480237', + 'http://video.google.com/videoplay?docid=2320995867449957036', + 'http://video.google.com/videoplay?docid=-2586684490991458032&q=peeps&pl=true', + 'http://video.google.com/videoplay?docid=5621139047118918034', + 'http://video.google.com/videoplay?docid=4232304376070958848', + 'http://video.google.com/videoplay?docid=-6612726032157145299', + 'http://video.google.com/videoplay?docid=4478549130377875994&hl=en', + 'http://video.google.com/videoplay?docid=9169278170240080877', + 'http://video.google.com/videoplay?docid=2551240967354893096', + 'http://video.yahoo.com/watch/7268801/18963438', + 'http://video.yahoo.com/watch/2224892/7014048', + 'http://video.yahoo.com/watch/7244748/18886014', + 'http://video.yahoo.com/watch/4656845/12448951', + 'http://video.yahoo.com/watch/363942/2249254', + 'http://video.yahoo.com/watch/2232968/7046348', + 'http://video.yahoo.com/watch/4530253/12135472', + 'http://video.yahoo.com/watch/2237137/7062908', + 'http://video.yahoo.com/watch/952841/3706424', + 'http://www.viddler.com/explore/BigAppleChannel/videos/113/', + 'http://www.viddler.com/explore/cheezburger/videos/379/', + 'http://www.viddler.com/explore/warnerbros/videos/350/', + 'http://www.viddler.com/explore/tvcgroup/videos/169/', + 'http://www.viddler.com/explore/thebrickshow/videos/12/', + 'http://www.liveleak.com/view?i=e0b_1239827917', + 'http://www.liveleak.com/view?i=715_1239490211', + 'http://www.liveleak.com/view?i=d30_1206233786&p=1', + 'http://www.liveleak.com/view?i=d91_1239548947', + 'http://www.liveleak.com/view?i=f58_1190741182', + 'http://www.liveleak.com/view?i=44e_1179885621&c=1', + 'http://www.liveleak.com/view?i=451_1188059885', + 'http://www.liveleak.com/view?i=3f5_1267456341&c=1', + 'http://www.hulu.com/watch/67313/howcast-how-to-make-braided-easter-bread', + 'http://www.hulu.com/watch/133583/access-hollywood-glees-matthew-morrison-on-touring-and-performing-for-president-obama', + 'http://www.hulu.com/watch/66319/saturday-night-live-easter-album', + 'http://www.hulu.com/watch/80229/explorer-end-of-easter-island', + 'http://www.hulu.com/watch/139020/nbc-today-show-lamb-and-ham-create-easter-feast', + 'http://www.hulu.com/watch/84272/rex-the-runt-easter-island', + 'http://www.hulu.com/watch/132203/everyday-italian-easter-pie', + 'http://www.hulu.com/watch/23349/nova-secrets-of-lost-empires-ii-easter-island', + 'http://movieclips.com/watch/dirty_harry_1971/do_you_feel_lucky_punk/', + 'http://movieclips.com/watch/napoleon_dynamite_2004/chatting_online_with_babes/', + 'http://movieclips.com/watch/dumb__dumber_1994/the_toilet_doesnt_flush/', + 'http://movieclips.com/watch/jaws_1975/youre_gonna_need_a_bigger_boat/', + 'http://movieclips.com/watch/napoleon_dynamite_2004/chatting_online_with_babes/61.495/75.413', + 'http://movieclips.com/watch/super_troopers_2001/the_cat_game/12.838/93.018', + 'http://movieclips.com/watch/this_is_spinal_tap_1984/these_go_to_eleven/79.703/129.713', + 'http://crackle.com/c/Originals/What_s_the_deal_with_Easter_candy_/2303243', + 'http://crackle.com/c/How_To/Dryer_Lint_Easter_Bunny_Trailer_Park_Craft/2223902', + 'http://crackle.com/c/How_To/Pagan_Origin_of_Easter_Easter_Egg_Rabbit_Playb_/2225124', + 
'http://crackle.com/c/Funny/Happy_Easter/2225363', + 'http://crackle.com/c/Funny/Crazy_and_Hilarious_Easter_Egg_Hunt/2225737', + 'http://crackle.com/c/How_To/Learn_About_Greek_Orthodox_Easter/2262294', + 'http://crackle.com/c/How_To/How_to_Make_Ukraine_Easter_Eggs/2262274', + 'http://crackle.com/c/How_To/Symbolism_Of_Ukrainian_Easter_Eggs/2262267', + 'http://crackle.com/c/Funny/Easter_Retard/931976', + 'http://www.fancast.com/tv/It-s-the-Easter-Beagle,-Charlie-Brown/74789/1078053475/Peanuts:-Specials:-It-s-the-Easter-Beagle,-Charlie-Brown/videos', + 'http://www.fancast.com/movies/Easter-Parade/97802/687440525/Easter-Parade/videos', + 'http://www.fancast.com/tv/Saturday-Night-Live/10009/1083396482/Easter-Album/videos', + 'http://www.fancast.com/movies/The-Proposal/147176/1140660489/The-Proposal:-Easter-Egg-Hunt/videos', + 'http://www.funnyordie.com/videos/f6883f54ae/the-unsettling-ritualistic-origin-of-the-easter-bunny', + 'http://www.funnyordie.com/videos/3ccb03863e/easter-tail-keaster-bunny', + 'http://www.funnyordie.com/videos/17b1d36ad0/easter-bunny-from-leatherfink', + 'http://www.funnyordie.com/videos/0c55aa116d/easter-exposed-from-bryan-erwin', + 'http://www.funnyordie.com/videos/040dac4eff/easter-eggs', + 'http://vimeo.com/10446922', + 'http://vimeo.com/10642542', + 'http://www.vimeo.com/10664068', + 'http://vimeo.com/819176', + 'http://www.vimeo.com/10525353', + 'http://vimeo.com/10429123', + 'http://www.vimeo.com/10652053', + 'http://vimeo.com/10572216', + 'http://www.ted.com/talks/jared_diamond_on_why_societies_collapse.html', + 'http://www.ted.com/talks/nathan_myhrvold_on_archeology_animal_photography_bbq.html', + 'http://www.ted.com/talks/johnny_lee_demos_wii_remote_hacks.html', + 'http://www.ted.com/talks/robert_ballard_on_exploring_the_oceans.html', + 'http://www.omnisio.com/v/Z3QxbTUdjhG/wall-e-collection-of-videos', + 'http://www.omnisio.com/v/3ND6LTvdjhG/php-tutorial-4-login-form-updated', + 'http://www.thedailyshow.com/watch/thu-december-14-2000/intro---easter', + 'http://www.thedailyshow.com/watch/tue-april-18-2006/headlines---easter-charade', + 'http://www.thedailyshow.com/watch/tue-april-18-2006/egg-beaters', + 'http://www.thedailyshow.com/watch/tue-april-18-2006/moment-of-zen---scuba-diver-hiding-easter-eggs', + 'http://www.thedailyshow.com/watch/tue-april-7-2009/easter---passover-highlights', + 'http://www.thedailyshow.com/watch/tue-february-29-2000/headlines---leap-impact', + 'http://www.thedailyshow.com/watch/thu-march-1-2007/tomb-with-a-jew', + 'http://www.thedailyshow.com/watch/mon-april-24-2000/the-meaning-of-passover', + 'http://www.colbertnation.com/the-colbert-report-videos/268800/march-31-2010/easter-under-attack---peeps-display-update', + 'http://www.colbertnation.com/the-colbert-report-videos/268797/march-31-2010/intro---03-31-10', + 'http://www.colbertnation.com/full-episodes/wed-march-31-2010-craig-mullaney', + 'http://www.colbertnation.com/the-colbert-report-videos/60902/march-28-2006/the-word---easter-under-attack---marketing', + 'http://www.colbertnation.com/the-colbert-report-videos/83362/march-07-2007/easter-under-attack---bunny', + 'http://www.colbertnation.com/the-colbert-report-videos/61404/april-06-2006/easter-under-attack---recalled-eggs?videoId=61404', + 'http://www.colbertnation.com/the-colbert-report-videos/223957/april-06-2009/colbert-s-easter-parade', + 'http://www.colbertnation.com/the-colbert-report-videos/181772/march-28-2006/intro---3-28-06', + 'http://www.traileraddict.com/trailer/despicable-me/easter-greeting', + 
    'http://www.traileraddict.com/trailer/easter-parade/trailer',
+    'http://www.traileraddict.com/clip/the-proposal/easter-egg-hunt',
+    'http://www.traileraddict.com/trailer/despicable-me/international-teaser-trailer',
+    'http://www.traileraddict.com/trailer/despicable-me/today-show-minions',
+    'http://revver.com/video/263817/happy-easter/',
+    'http://www.revver.com/video/1574939/easter-bunny-house/',
+    'http://revver.com/video/771140/easter-08/',
     ]

 def submit_all():
@@ -796,36 +1298,55 @@ def submit_all():

     return links

-def test():
-    """Take some example URLs and print out a nice pretty HTML table
-    of their extracted thubmnails and media objects"""
+def test_real(nlinks):
+    from r2.models import Link, desc
+    from r2.lib.utils import fetch_things2
+
+    counter = 0
+    q = Link._query(sort = desc("_date"))
+
+    print "<html><body><table border=\"1\">"
+    for l in fetch_things2(q):
+        if counter > nlinks:
+            break
+        if not l.is_self:
+            h = make_scraper(l.url)
+            mo = h.media_object()
+            print "scraper: %s" % mo
+            if mo:
+                print get_media_embed(mo).content
+            counter += 1
+    print "</table></body></html>"
+
+def test_url(url):
     import sys
     from r2.lib.filters import websafe

+    sys.stderr.write("%s\n" % url)
+    print "<tr>"
+    h = make_scraper(url)
+    print "<td>"
+    print "<a href=", url, ">", websafe(url), "</a>"
+    print "<br />"
+    print websafe(repr(h))
+    img = h.largest_image_url()
+    if img:
+        print "<td><img src=\"%s\" /></td>" % img
+    else:
+        print "<td>(no image)</td>"
+    mo = h.media_object()
+    print "<td>"
+    if mo:
+        print get_media_embed(mo).content
+    else:
+        print "None"
+    print "</td>"
+    print "</tr>"
+
+def test():
+    """Take some example URLs and print out a nice pretty HTML table
+    of their extracted thumbnails and media objects"""
+    print "<html><body><table border=\"1\">"
     for url in test_urls:
-        sys.stderr.write("%s\n" % url)
-        print "<tr>"
-        h = make_scraper(url)
-        print "<td>"
-        print "<a href=", url, ">", websafe(url), "</a>"
-        print "<br />"
-        print websafe(repr(h))
-        img = h.largest_image_url()
-        if img:
-            print "<td><img src=\"%s\" /></td>" % img
-        else:
-            print "<td>(no image)</td>"
-        mo = h.media_object()
-        print "<td>"
-        if mo:
-            s = scrapers[mo['type']]
-            print websafe(repr(mo))
-            print "<br />"
-            print s.media_embed(**mo).content
-        else:
-            print "None"
-        print "</td>"
+        test_url(url)
     print "</table></body></html>"
diff --git a/r2/r2/lib/services.py b/r2/r2/lib/services.py
index 203fd5624f..93c2a7fffc 100644
--- a/r2/r2/lib/services.py
+++ b/r2/r2/lib/services.py
@@ -109,20 +109,20 @@ def __init__(self, hosts = None, queue_length_max = {}):

     @classmethod
     def set_cache_lifetime(cls, data, key = "memcaches"):
-        g.rendercache.set(key + "_lifetime", data)
+        g.servicecache.set(key + "_lifetime", data)

     @classmethod
     def get_cache_lifetime(cls, average = None, key = "memcaches"):
-        d = g.rendercache.get(key + "_lifetime", DataLogger())
+        d = g.servicecache.get(key + "_lifetime", DataLogger())
         return d(average)

     @classmethod
     def from_cache(cls, host):
         key = cls.cache_key + str(host)
-        return g.rendercache.get(key)
+        return g.servicecache.get(key)

     def set_cache(self, h):
-        cache = g.rendercache
+        cache = g.servicecache
         # cache the whole object
         res = {}
         res[self.cache_key + str(h.host)] = h
@@ -137,7 +137,7 @@ def set_cache(self, h):

     @classmethod
     def get_db_load(cls, names):
-        return g.rendercache.get_multi(names, prefix = cls.cache_key_small)
+        return g.servicecache.get_multi(names, prefix = cls.cache_key_small)

     def server_load(self, mach_name):
         h = self.from_cache(host)
@@ -145,8 +145,10 @@ def server_load(self, mach_name):

     def __iter__(self):
         if not self.hostlogs:
-            self.hostlogs = [self.from_cache(host) for host in self._hosts]
-            self.hostlogs = filter(None, self.hostlogs)
+            hosts = g.servicecache.get_multi(self._hosts,
+                                             prefix = self.cache_key)
+            self.hostlogs = [hosts[str(x)] for x in self._hosts
+                             if str(x) in hosts]
         return iter(self.hostlogs)

     def render(self, *a, **kw):
diff --git a/r2/r2/lib/strings.py b/r2/r2/lib/strings.py
index b43ba01984..fde1488e89 100644
--- a/r2/r2/lib/strings.py
+++ b/r2/r2/lib/strings.py
@@ -41,8 +41,8 @@
 # StringHandler instance strings
 string_dict = dict(
-    banned_by = "banned by %s",
-    banned = "banned",
+    banned_by = "removed by %s",
+    banned = "removed",
     reports = "reports: %d",

     # this accommodates asian languages which don't use spaces
@@ -77,8 +77,8 @@
     sr_created = _('your reddit has been created'),

-    active_trials = _("these things are still on trial, so you can feel free to reclassify them:"),
-    finished_trials = _("these trials have concluded; it's too late to change your mind on them:"),
+    active_trials = _("we haven't yet decided whether these things are spam, so you have a chance to change your vote:"),
+    finished_trials = _("it's too late to change your vote on these things (the verdict has been issued):"),

     more_info_link = _("visit [%(link)s](%(link)s) for more information"),

     msg_add_friend = dict(
@@ -129,9 +129,12 @@
     submit_text = _("""You are submitting a text-based post. Speak your mind. A title is required, but expanding further in the text field is not. Beginning your title with "vote up if" is violation of intergalactic law."""),
     iphone_first = _("You should consider using [reddit's free iphone app](http://itunes.com/apps/iredditfree)."),
     verify_email = _("we're going to need to verify your email address for you to proceed."),
+    verify_email_submit = _("you'll be able to submit more frequently once you verify your email address"),
     email_verified = _("your email address has been verfied"),
     email_verify_failed = _("Verification failed.  Please try that again"),
-    search_failed = _("Our search machines are under too much load to handle your request right now. :( Sorry for the inconvenience.
[Try again](%(link)s) in a little bit -- but please don't mash reload; that only makes the problem worse.") + search_failed = _("Our search machines are under too much load to handle your request right now. :( Sorry for the inconvenience. [Try again](%(link)s) in a little bit -- but please don't mash reload; that only makes the problem worse."), + verified_quota_msg = _("You've submitted several links recently that haven't been doing very well. You'll have to wait a while before you can submit again, or [write to the moderators of this reddit](%(link)s) and ask for an exemption."), + unverified_quota_msg = _("You haven't [verified your email address](%(link1)s); until you do, your submitting privileges will be severely limited. Please try again in an hour or verify your email address. If you'd like an exemption from this rule, please [write to the moderators of this reddit](%(link2)s)."), ) class StringHandler(object): diff --git a/r2/r2/lib/tracking.py b/r2/r2/lib/tracking.py index 71bd8220ca..d06163618b 100644 --- a/r2/r2/lib/tracking.py +++ b/r2/r2/lib/tracking.py @@ -21,6 +21,7 @@ ################################################################################ from base64 import standard_b64decode as b64dec, \ standard_b64encode as b64enc +from pylons import request from Crypto.Cipher import AES from random import choice from pylons import g, c @@ -138,6 +139,11 @@ class UserInfo(Info): def init_defaults(self): self.name = safe_str(c.user.name if c.user_is_loggedin else '') self.site = safe_str(c.site.name if c.site else '') + try: + action = request.environ['pylons.routes_dict'].get('action') + self.site += "-%s" % action + except Exception,e: + g.log.error(e) self.lang = safe_str(c.lang if c.lang else '') self.cname = safe_str(c.cname) diff --git a/r2/r2/lib/traffic.py b/r2/r2/lib/traffic.py index 6c65d78bfe..7dc4afda1c 100644 --- a/r2/r2/lib/traffic.py +++ b/r2/r2/lib/traffic.py @@ -35,10 +35,11 @@ def load_traffic_uncached(interval, what, iden, On connection failure (or no data) returns an empy list. 
""" def format_date(d): - if d.tzinfo is None: - d = d.replace(tzinfo = g.tz) - else: - d = d.astimezone(g.tz) + if hasattr(d, "tzinfo"): + if d.tzinfo is None: + d = d.replace(tzinfo = g.tz) + else: + d = d.astimezone(g.tz) return ":".join(map(str, d.timetuple()[:6])) traffic_url = os.path.join(g.traffic_url, interval, what, iden) diff --git a/r2/r2/lib/utils/trial_utils.py b/r2/r2/lib/utils/trial_utils.py index 28f70855c8..dc50393426 100644 --- a/r2/r2/lib/utils/trial_utils.py +++ b/r2/r2/lib/utils/trial_utils.py @@ -21,36 +21,21 @@ ################################################################################ from pylons import c, g, request -from r2.lib.utils import ip_and_slash16, jury_cache_dict, voir_dire_priv +from r2.lib.utils import ip_and_slash16, jury_cache_dict, voir_dire_priv, tup from r2.lib.memoize import memoize from r2.lib.log import log_text -@memoize('trial_utils.all_defendants') -def all_defendants_cache(): - fnames = g.hardcache.backend.ids_by_category("trial") - return fnames - -def all_defendants(quench=False, _update=False): - from r2.models import Thing - all = all_defendants_cache(_update=_update) - - defs = Thing._by_fullname(all, data=True).values() - - if quench: - # Used for the spotlight, to filter out trials with over 30 votes; - # otherwise, hung juries would hog the spotlight for an hour as - # their vote counts continued to skyrocket - - return filter (lambda d: - not g.cache.get("quench_jurors-" + d._fullname), - defs) - else: - return defs +# Hardcache lifetime for a trial. +# The regular hardcache reaper should never run on one of these, +# since a mistrial should be declared if the trial is still open +# after 24 hours. So the "3 days" expiration isn't really real. +TRIAL_TIME = 3 * 86400 def trial_key(thing): return "trial-" + thing._fullname -def on_trial(things): +def trial_info(things): + things = tup(things) keys = dict((trial_key(thing), thing._fullname) for thing in things) vals = g.hardcache.get_multi(keys) @@ -58,11 +43,18 @@ def on_trial(things): for (key, val) in vals.iteritems()) -def end_trial(thing): - g.hardcache.delete(trial_key(thing)) - all_defendants(_update=True) +def end_trial(thing, verdict=None): + from r2.models import Trial + if trial_info(thing): + g.hardcache.delete(trial_key(thing)) + Trial.all_defendants(_update=True) + + if verdict is not None: + thing.verdict = verdict + thing._commit() def indict(defendant): + from r2.models import Trial tk = trial_key(defendant) rv = False @@ -75,18 +67,30 @@ def indict(defendant): elif g.hardcache.get(tk): result = "it's already on trial" else: - # The regular hardcache reaper should never run on one of these, - # since a mistrial should be declared if the trial is still open - # after 24 hours. So the "3 days" expiration isn't really real. - g.hardcache.set(tk, True, 3 * 86400) - all_defendants(_update=True) + # The spams/koshers dict is just a infrequently-updated cache; the + # official source of the data is the Jury relation. 
+ g.hardcache.set(tk, dict(spams=0, koshers=0), TRIAL_TIME) + Trial.all_defendants(_update=True) result = "it's now indicted: %s" % tk rv = True - log_text("indict_result", "%r: %s" % (defendant, result), level="info") + log_text("indict_result", "%s: %s" % (defendant._id36, result), level="info") return rv +# These are spam/kosher votes, not up/down votes +def update_voting(defendant, koshers, spams): + tk = trial_key(defendant) + d = g.hardcache.get(tk) + if d is None: + log_text("update_voting() fail", + "%s not on trial" % defendant._id36, + level="error") + else: + d["koshers"] = koshers + d["spams"] = spams + g.hardcache.set(tk, d, TRIAL_TIME) + # Check to see if a juror is eligible to serve on a jury for a given link. def voir_dire(account, ip, slash16, defendants_voted_upon, defendant, sr): from r2.models import Link @@ -113,7 +117,7 @@ def voir_dire(account, ip, slash16, defendants_voted_upon, defendant, sr): return True def assign_trial(account, ip, slash16): - from r2.models import Jury, Subreddit + from r2.models import Jury, Subreddit, Trial from r2.lib.db import queries defendants_voted_upon = [] @@ -126,7 +130,7 @@ def assign_trial(account, ip, slash16): subscribed_sr_ids = Subreddit.user_subreddits(account, ids=True, limit=None) # Pull defendants, except ones which already have lots of juryvotes - defs = all_defendants(quench=True) + defs = Trial.all_defendants(quench=True) # Filter out defendants outside this user's subscribed SRs defs = filter (lambda d: d.sr_id in subscribed_sr_ids, defs) @@ -144,8 +148,9 @@ def assign_trial(account, ip, slash16): likes = queries.get_likes(account, defs) - # Filter out things that the user has upvoted or downvoted - defs = filter (lambda d: likes.get((account, d)) is None, defs) + if not g.debug: + # Filter out things that the user has upvoted or downvoted + defs = filter (lambda d: likes.get((account, d)) is None, defs) # Prefer oldest trials defs.sort(key=lambda x: x._date) @@ -166,33 +171,44 @@ def populate_spotlight(): g.log.debug("not eligible") return None + if not g.cache.add("global-jury-key", True, 5): + g.log.debug("not yet time to add another juror") + return None + ip, slash16 = ip_and_slash16(request) jcd = jury_cache_dict(c.user, ip, slash16) if jcd is None: + g.cache.delete("global-jury-key") return None if g.cache.get_multi(jcd.keys()) and not g.debug: g.log.debug("recent juror") + g.cache.delete("global-jury-key") return None trial = assign_trial(c.user, ip, slash16) if trial is None: g.log.debug("nothing available") + g.cache.delete("global-jury-key") return None for k, v in jcd.iteritems(): g.cache.set(k, True, v) + log_text("juryassignment", + "%s was just assigned to the jury for %s" % (c.user.name, trial._id36), + level="info") + return trial def look_for_verdicts(): from r2.models import Trial print "checking all trials for verdicts..." 
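# Editor's aside (not part of the patch): populate_spotlight() above uses
# cache add() as a cheap five-second mutex -- add() succeeds only when the
# key is absent, so at most one jury assignment runs per window, and every
# early-return path deletes the key to release it sooner. A sketch of that
# pattern (try_assign and do_assign are hypothetical names):
def try_assign(cache, do_assign):
    if not cache.add("global-jury-key", True, 5):
        return None            # someone else is assigning right now
    trial = do_assign()
    if trial is None:
        cache.delete("global-jury-key")  # nothing assigned; release early
    return trial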
- for defendant in all_defendants(): - print "Looking at %r" % defendant + for defendant in Trial.all_defendants(): + print "Looking at reddit.com/comments/%s/x" % defendant._id36 v = Trial(defendant).check_verdict() print "Verdict: %r" % v diff --git a/r2/r2/lib/utils/utils.py b/r2/r2/lib/utils/utils.py index 2522c29551..2025984c32 100644 --- a/r2/r2/lib/utils/utils.py +++ b/r2/r2/lib/utils/utils.py @@ -30,6 +30,7 @@ from BeautifulSoup import BeautifulSoup +from time import sleep from datetime import datetime, timedelta from pylons.i18n import ungettext, _ from r2.lib.filters import _force_unicode @@ -1207,6 +1208,113 @@ def ip_and_slash16(req): return (ip, slash16) +def spaceout(items, targetseconds, + minsleep = 0, die = False, + estimate = None): + """Given a list of items and a function to apply to them, space + the execution out over the target number of seconds and + optionally stop when we're out of time""" + targetseconds = float(targetseconds) + state = [1.0] + + if estimate is None: + try: + estimate = len(items) + except TypeError: + # if we can't come up with an estimate, the best we can do + # is just enforce the minimum sleep time (and the max + # targetseconds if die==True) + pass + + mean = lambda lst: sum(float(x) for x in lst)/float(len(lst)) + beginning = datetime.now() + + for item in items: + start = datetime.now() + yield item + end = datetime.now() + + took_delta = end - start + took = (took_delta.days * 60 * 24 + + took_delta.seconds + + took_delta.microseconds/1000000.0) + state.append(took) + if len(state) > 10: + del state[0] + + if die and end > beginning + timedelta(seconds=targetseconds): + # we ran out of time, ignore the rest of the iterator + break + + if estimate is None: + if minsleep: + # we have no idea how many items we're going to get + sleep(minsleep) + else: + sleeptime = max((targetseconds / estimate) - mean(state), + minsleep) + if sleeptime > 0: + sleep(sleeptime) + +def progress(it, verbosity=100, key=repr, estimate=None, persec=False): + """An iterator that yields everything from `it', but prints progress + information along the way, including time-estimates if + possible""" + from datetime import datetime + import sys + + now = start = datetime.now() + elapsed = start - start + + # try to guess at the estimate if we can + if estimate is None: + try: + estimate = len(it) + except: + pass + + print 'Starting at %s' % (start,) + + seen = 0 + for item in it: + seen += 1 + if seen % verbosity == 0: + now = datetime.now() + elapsed = now - start + elapsed_seconds = elapsed.days * 86400 + elapsed.seconds + + if estimate: + remaining = ((elapsed/seen)*estimate)-elapsed + completion = now + remaining + count_str = ('%d/%d %.2f%%' + % (seen, estimate, float(seen)/estimate*100)) + estimate_str = (' (%s remaining; completion %s)' + % (remaining, completion)) + else: + count_str = '%d' % seen + estimate_str = '' + + if key: + key_str = ': %s' % key(item) + else: + key_str = '' + + if persec and elapsed_seconds > 0: + persec_str = ' (%.2f/s)' % (seen/elapsed_seconds,) + else: + persec_str = '' + + sys.stdout.write('%s%s, %s%s%s\n' + % (count_str, persec_str, + elapsed, estimate_str, key_str)) + sys.stdout.flush() + this_chunk = 0 + yield item + + now = datetime.now() + elapsed = now - start + print 'Processed %d items in %s..%s (%s)' % (seen, start, now, elapsed) + class Hell(object): def __str__(self): return "boom!" 
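The two iterator helpers added to utils.py above are generic enough to deserve a usage note: progress() passes items through unchanged while printing counts, rates, and (when the input has a length) time estimates, and spaceout() sleeps between items so a batch job takes roughly a target wall-clock time. A minimal usage sketch, assuming the r2 environment is importable (recount_links, drain_slowly, and process are hypothetical names):

    from r2.lib.utils import progress, spaceout

    def recount_links(links):
        # print a status line every 500 items, with an items/sec rate
        for link in progress(links, verbosity=500, persec=True):
            link._commit()

    def drain_slowly(items):
        # spread the work over ~10 minutes, sleeping at least 0.5s between
        # items; die=True would instead abort when the time budget runs out
        for item in spaceout(items, targetseconds=600, minsleep=0.5):
            process(item)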
diff --git a/r2/r2/lib/wrapped.py b/r2/r2/lib/wrapped.py index a5560302b2..b2bf24d916 100644 --- a/r2/r2/lib/wrapped.py +++ b/r2/r2/lib/wrapped.py @@ -343,6 +343,9 @@ def _render(self, attr, style, **kwargs): def _write_cache(self, keys): from pylons import g + if not keys: + return + toset = dict((md5(key).hexdigest(), val) for (key, val) in keys.iteritems()) diff --git a/r2/r2/models/account.py b/r2/r2/models/account.py index e81444cad1..c817ed22ba 100644 --- a/r2/r2/models/account.py +++ b/r2/r2/models/account.py @@ -73,14 +73,14 @@ class Account(Thing): share = {}, wiki_override = None, email = "", - email_verified = None, + email_verified = False, ignorereports = False, pref_show_promote = None, ) def karma(self, kind, sr = None): suffix = '_' + kind + '_karma' - + #if no sr, return the sum if sr is None: total = 0 @@ -128,14 +128,10 @@ def can_wiki(self): self.comment_karma >= g.WIKI_KARMA) def jury_betatester(self): - k = "juror-" + self.name - if not g.hardcache.get(k): - return False - if g.cache.get("jury-killswitch"): return False - - return True + else: + return True def all_karmas(self): """returns a list of tuples in the form (name, link_karma, @@ -318,6 +314,61 @@ def remove_cup(self): def cup_info(self): return g.hardcache.get("cup_info-%d" % self._id) + def quota_key(self, kind): + return "user_%s_quotas-%s" % (kind, self.name) + + def clog_quota(self, kind, item): + key = self.quota_key(kind) + fnames = g.hardcache.get(key, []) + fnames.append(item._fullname) + g.hardcache.set(key, fnames, 86400 * 30) + + def quota_baskets(self, kind): + from r2.models.admintools import filter_quotas + key = self.quota_key(kind) + fnames = g.hardcache.get(key) + + if not fnames: + return None + + unfiltered = Thing._by_fullname(fnames, data=True, return_dict=False) + + baskets, new_quotas = filter_quotas(unfiltered) + + if new_quotas is None: + pass + elif new_quotas == []: + g.hardcache.delete(key) + else: + g.hardcache.set(key, new_quotas, 86400 * 30) + + return baskets + + def quota_limits(self, kind): + if kind != 'link': + raise NotImplementedError + + if self.email_verified: + return dict(hour=3, day=10, week=50, month=150) + else: + return dict(hour=1, day=3, week=5, month=5) + + def quota_full(self, kind): + limits = self.quota_limits(kind) + baskets = self.quota_baskets(kind) + + if baskets is None: + return None + + total = 0 + filled_quota = None + for key in ('hour', 'day', 'week', 'month'): + total += len(baskets[key]) + if total >= limits[key]: + filled_quota = key + + return filled_quota + @classmethod def cup_info_multi(cls, ids): ids = [ int(i) for i in ids ] @@ -325,6 +376,14 @@ def cup_info_multi(cls, ids): # calling g.hardcache.get_multi()? 
return sgm(g.hardcache, ids, miss_fn=None, prefix="cup_info-") + @classmethod + def system_user(cls): + if not hasattr(g, "system_user"): + return None + try: + return cls._by_name(g.system_user) + except NotFound: + return None class FakeAccount(Account): _nodb = True diff --git a/r2/r2/models/ad.py b/r2/r2/models/ad.py index 38dc510572..bbeec984e7 100644 --- a/r2/r2/models/ad.py +++ b/r2/r2/models/ad.py @@ -98,30 +98,30 @@ def _new(cls, ad, sr, weight=100): @classmethod @memoize('adsr.by_ad') - def by_ad_cache(cls, ad): - q = AdSR._query(AdSR.c._thing1_id == ad._id, + def by_ad_cache(cls, ad_id): + q = AdSR._query(AdSR.c._thing1_id == ad_id, sort = desc('_date')) q._limit = 500 return [ t._id for t in q ] @classmethod def by_ad(cls, ad, _update=False): - rel_ids = cls.by_ad_cache(ad, _update=_update) + rel_ids = cls.by_ad_cache(ad._id, _update=_update) adsrs = AdSR._byID_rel(rel_ids, data=True, eager_load=True, thing_data=True, return_dict = False) return adsrs @classmethod @memoize('adsr.by_sr') - def by_sr_cache(cls, sr): - q = AdSR._query(AdSR.c._thing2_id == sr._id, + def by_sr_cache(cls, sr_id): + q = AdSR._query(AdSR.c._thing2_id == sr_id, sort = desc('_date')) q._limit = 500 return [ t._id for t in q ] @classmethod def by_sr(cls, sr, _update=False): - rel_ids = cls.by_sr_cache(sr, _update=_update) + rel_ids = cls.by_sr_cache(sr._id, _update=_update) adsrs = AdSR._byID_rel(rel_ids, data=True, eager_load=True, thing_data=True, return_dict = False) return adsrs diff --git a/r2/r2/models/admintools.py b/r2/r2/models/admintools.py index a5880c09ad..d026db5944 100644 --- a/r2/r2/models/admintools.py +++ b/r2/r2/models/admintools.py @@ -35,9 +35,14 @@ def spam(self, things, auto=True, moderator_banned=False, banner=None, date = None, **kw): from r2.lib.db import queries - things = [x for x in tup(things) if not x._spam] - Report.accept(things, True) - for t in things: + all_things = tup(things) + new_things = [x for x in all_things if not x._spam] + + # No need to accept reports on things with _spam=True, + # since nobody can report them in the first place. + Report.accept(new_things, True) + + for t in all_things: t._spam = True ban_info = copy(getattr(t, 'ban_info', {})) ban_info.update(auto = auto, @@ -54,15 +59,27 @@ t._commit() if not auto: - self.author_spammer(things, True) - self.set_last_sr_ban(things) + self.author_spammer(new_things, True) + self.set_last_sr_ban(new_things) - queries.ban(things) + queries.ban(new_things) def unspam(self, things, unbanner = None): from r2.lib.db import queries - things = [x for x in tup(things) if x._spam] + things = tup(things) + + # We want to make unban-all moderately efficient, so when + # mass-unbanning, we're going to skip the code below on links that + # are already not banned. However, when someone manually clicks + "approve" on an unbanned link, and there's just one, we do + want to run the code below. That way, the little green checkmark + will have the right mouseover details, the reports will be + cleared, etc.
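# Editor's aside (not part of the patch): how Account.quota_full(), added
# above in account.py, picks the limit a submitter has hit. Counts are
# cumulative from the newest basket outward; e.g. with the verified limits
# dict(hour=3, day=10, week=50, month=150), a user with 1 link from the last
# hour and 9 more from the last day has running totals 1 and 10, so 'day' is
# the quota returned. A standalone copy of that loop (quota_full_sketch is
# an illustrative name):
def quota_full_sketch(baskets, limits):
    total = 0
    filled_quota = None   # the widest window whose limit is reached
    for key in ('hour', 'day', 'week', 'month'):
        total += len(baskets[key])
        if total >= limits[key]:
            filled_quota = key
    return filled_quota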
+ + if len(things) > 1: + things = [x for x in things if x._spam] + Report.accept(things, False) for t in things: ban_info = copy(getattr(t, 'ban_info', {})) @@ -146,6 +163,70 @@ def ip_span(ip): ip = websafe(ip) return '' % ip +def filter_quotas(unfiltered): + from r2.lib.utils.trial_utils import trial_info + + trials = trial_info(unfiltered) + + now = datetime.now(g.tz) + + baskets = { + 'hour': [], + 'day': [], + 'week': [], + 'month': [], + } + + new_quotas = [] + quotas_changed = False + + for item in unfiltered: + delta = now - item._date + + age = delta.days * 86400 + delta.seconds + + # First, select a basket or abort if item is too old + if age < 3600: + basket = 'hour' + elif age < 86400: + basket = 'day' + elif age < 7 * 86400: + basket = 'week' + elif age < 30 * 86400: + basket = 'month' + else: + quotas_changed = True + continue + + score = item._downs - item._ups + + verdict = getattr(item, "verdict", None) + approved = verdict and verdict in ( + 'admin-approved', 'mod-approved') + + # Then, make sure it's worthy of quota-clogging + if trials.get(item._fullname): + pass + elif item._spam: + pass + elif item._deleted: + pass + elif score <= 0: + pass + elif age < 86400 and score <= g.QUOTA_THRESHOLD and not approved: + pass + else: + quotas_changed = True + continue + + baskets[basket].append(item) + new_quotas.append(item._fullname) + + if quotas_changed: + return baskets, new_quotas + else: + return baskets, None + try: from r2admin.models.admintools import * except ImportError: diff --git a/r2/r2/models/award.py b/r2/r2/models/award.py index 8b61938ed8..82efcd5cb8 100644 --- a/r2/r2/models/award.py +++ b/r2/r2/models/award.py @@ -145,30 +145,30 @@ def _new(cls, recipient, award, description = None, @classmethod @memoize('trophy.by_account2') - def by_account_cache(cls, account): - q = Trophy._query(Trophy.c._thing1_id == account._id, + def by_account_cache(cls, account_id): + q = Trophy._query(Trophy.c._thing1_id == account_id, sort = desc('_date')) q._limit = 500 return [ t._id for t in q ] @classmethod def by_account(cls, account, _update=False): - rel_ids = cls.by_account_cache(account, _update=_update) + rel_ids = cls.by_account_cache(account._id, _update=_update) trophies = Trophy._byID_rel(rel_ids, data=True, eager_load=True, thing_data=True, return_dict = False) return trophies @classmethod @memoize('trophy.by_award2') - def by_award_cache(cls, award): - q = Trophy._query(Trophy.c._thing2_id == award._id, + def by_award_cache(cls, award_id): + q = Trophy._query(Trophy.c._thing2_id == award_id, sort = desc('_date')) q._limit = 500 return [ t._id for t in q ] @classmethod def by_award(cls, award, _update=False): - rel_ids = cls.by_award_cache(award, _update=_update) + rel_ids = cls.by_award_cache(award._id, _update=_update) trophies = Trophy._byID_rel(rel_ids, data=True, eager_load=True, thing_data=True, return_dict = False) return trophies diff --git a/r2/r2/models/bidding.py b/r2/r2/models/bidding.py index e5d974a71c..325b7a3e83 100644 --- a/r2/r2/models/bidding.py +++ b/r2/r2/models/bidding.py @@ -19,7 +19,7 @@ # All portions of the code written by CondeNet are Copyright (c) 2006-2010 # CondeNet, Inc. All Rights Reserved. 
################################################################################ -from sqlalchemy import Column, String, DateTime, Date, Float, Integer, \ +from sqlalchemy import Column, String, DateTime, Date, Float, Integer, Boolean,\ func as safunc, and_, or_ from sqlalchemy.exceptions import IntegrityError from sqlalchemy.schema import PrimaryKeyConstraint @@ -139,7 +139,7 @@ def _make_storable(self, val): return val._fullname else: return val - + @classmethod def _lookup(cls, multiple, *a, **kw): """ @@ -281,31 +281,54 @@ class Bid(Sessionized, Base): status = Column(Integer, nullable = False, default = STATUS.AUTH) + # make this a primary key as well so that we can have more than + # one freebie per campaign + campaign = Column(Integer, default = 0, primary_key = True) @classmethod - def _new(cls, trans_id, user, pay_id, thing_id, bid): + def _new(cls, trans_id, user, pay_id, thing_id, bid, campaign = 0): bid = Bid(trans_id, user, pay_id, - thing_id, getattr(request, 'ip', '0.0.0.0'), bid = bid) + thing_id, getattr(request, 'ip', '0.0.0.0'), bid = bid, + campaign = campaign) bid._commit() return bid +# @classmethod +# def for_transactions(cls, transids): +# transids = filter(lambda x: x != 0, transids) +# if transids: +# q = cls.query() +# q = q.filter(or_(*[cls.transaction == i for i in transids])) +# return dict((p.transaction, p) for p in q) +# return {} + def set_status(self, status): if self.status != status: self.status = status self._commit() - + def auth(self): self.set_status(self.STATUS.AUTH) + def is_auth(self): + return (self.status == self.STATUS.AUTH) + def void(self): self.set_status(self.STATUS.VOID) - + + def is_void(self): + return (self.status == self.STATUS.VOID) + def charged(self): self.set_status(self.STATUS.CHARGE) + def is_charged(self): + return (self.status == self.STATUS.CHARGE) + def refund(self): self.set_status(self.STATUS.REFUND) +#TODO: decommission and drop tables once the patch is working class PromoteDates(Sessionized, Base): __tablename__ = "promote_date" @@ -436,6 +459,114 @@ def top_promoters(cls, start_date, end_date = None): return res +# eventual replacement for PromoteDates +class PromotionWeights(Sessionized, Base): + __tablename__ = "promotion_weight" + + thing_name = Column(String, primary_key = True, + nullable = False, index = True) + + promo_idx = Column(BigInteger, index = True, autoincrement = False, + primary_key = True) + + sr_name = Column(String, primary_key = True, + nullable = True, index = True) + date = Column(Date(), primary_key = True, + nullable = False, index = True) + + # because we might want to search by account + account_id = Column(BigInteger, index = True, autoincrement = False) + + # bid and weight should always be the same, but they don't have to be + bid = Column(Float, nullable = False) + weight = Column(Float, nullable = False) + + finished = Column(Boolean) + + @classmethod + def reschedule(cls, thing, idx, sr, start_date, end_date, total_weight, + finished = False): + cls.delete_unfinished(thing, idx) + cls.add(thing, idx, sr, start_date, end_date, total_weight, + finished = finished) + + @classmethod + def add(cls, thing, idx, sr, start_date, end_date, total_weight, + finished = False): + start_date = to_date(start_date) + end_date = to_date(end_date) + + # anything set by the user will be uniform weighting + duration = max((end_date - start_date).days, 1) + weight = total_weight / duration + + d = start_date + while d < end_date: + cls._new(thing, idx, sr, d, + thing.author_id, weight, weight, finished = 
finished) + d += datetime.timedelta(1) + + @classmethod + def delete_unfinished(cls, thing, idx): + #TODO: do this the right (fast) way before release. I don't + #have the inclination to figure out the proper delete method + #now + for item in cls.query(thing_name = thing._fullname, + promo_idx = idx, + finished = False): + item._delete() + + @classmethod + def get_campaigns(cls, d): + d = to_date(d) + return list(cls.query(date = d)) + + @classmethod + def get_schedule(cls, start_date, end_date, author_id = None): + start_date = to_date(start_date) + end_date = to_date(end_date) + q = cls.query() + q = q.filter(and_(cls.date >= start_date, cls.date < end_date)) + + if author_id is not None: + q.filter(author_id = author_id) + + res = {} + for x in q.all(): + res.setdefault((x.thing_name, x.promo_idx), []).append(x.date) + + return [(k[0], k[1], min(v), max(v)) for k, v in res.iteritems()] + + @classmethod + @memoize('promodates.bid_history', time = 10 * 60) + def bid_history(cls, start_date, end_date = None, account_id = None): + from r2.models import Link + start_date = to_date(start_date) + end_date = to_date(end_date) + q = cls.query() + q = q.filter(and_(cls.date >= start_date, cls.date < end_date)) + q = list(q) + + links = Link._by_fullname([x.thing_name for x in q], data=True) + + d = start_date + res = [] + while d < end_date: + bid = 0 + refund = 0 + for i in q: + if d == i.date: + camp = links[i.thing_name].campaigns[i.promo_idx] + bid += i.bid + refund += i.bid if camp[-1] <= 0 else 0 + res.append([d, bid, refund]) + d += datetime.timedelta(1) + return res + +def to_date(d): + if isinstance(d, datetime.datetime): + return d.date() + return d # do all the leg work of creating/connecting to tables Base.metadata.create_all() diff --git a/r2/r2/models/builder.py b/r2/r2/models/builder.py index ad84989a15..1a8314fa9c 100644 --- a/r2/r2/models/builder.py +++ b/r2/r2/models/builder.py @@ -69,10 +69,7 @@ def wrap_items(self, items): if aids: authors = Account._byID(aids, True) if aids else {} - if c.user_is_admin: - cup_infos = Account.cup_info_multi(aids) - else: - cup_infos = {} + cup_infos = Account.cup_info_multi(aids) else: authors = {} cup_infos = {} @@ -175,6 +172,15 @@ def wrap_items(self, items): w.deleted = item._deleted + w.link_notes = [] + + if c.user_is_admin: + if item._deleted: + w.link_notes.append("deleted link") + if getattr(item, "verdict", None): + if not item.verdict.endswith("-approved"): + w.link_notes.append(w.verdict) + w.rowstyle = getattr(w, 'rowstyle', "") w.rowstyle += ' ' + ('even' if (count % 2) else 'odd') @@ -191,22 +197,31 @@ def wrap_items(self, items): w.show_reports = False w.show_spam = False w.can_ban = False + w.reveal_trial_info = False + w.use_big_modbuttons = False + if (c.user_is_admin or (user and hasattr(item,'sr_id') and item.sr_id in can_ban_set)): w.can_ban = True + + ban_info = getattr(item, 'ban_info', {}) + w.unbanner = ban_info.get('unbanner') + if item._spam: w.show_spam = True - ban_info = getattr(item, 'ban_info', {}) w.moderator_banned = ban_info.get('moderator_banned', False) w.autobanned = ban_info.get('auto', False) w.banner = ban_info.get('banner') + w.use_big_modbuttons = True if getattr(w, "author", None) and w.author._spam: w.show_spam = "author" elif getattr(item, 'reported', 0) > 0: w.show_reports = True + w.use_big_modbuttons = True + # recache the user object: it may be None if user is not logged in, # whereas now we are happy to have the UnloggedUser object @@ -227,6 +242,8 @@ def must_skip(self, item): """whether or 
not to skip any item regardless of whether the builder was contructed with skip=true""" user = c.user if c.user_is_loggedin else None + if hasattr(item, "promoted") and item.promoted is not None: + return False if hasattr(item, 'subreddit') and not item.subreddit.can_view(user): return True @@ -341,6 +358,7 @@ def get_items(self): #skip and count while new_items and (not self.num or num_have < self.num): i = new_items.pop(0) + if not (self.must_skip(i) or self.skip and not self.keep_item(i)): items.append(i) num_have += 1 @@ -414,7 +432,6 @@ def fetch_more(self, last_item, num_have): self.names, new_names = names[slice_size:], names[:slice_size] new_items = Thing._by_fullname(new_names, data = True, return_dict=False) - return done, new_items class SearchBuilder(IDBuilder): diff --git a/r2/r2/models/jury.py b/r2/r2/models/jury.py index df6bea7f7a..a120d20f8a 100644 --- a/r2/r2/models/jury.py +++ b/r2/r2/models/jury.py @@ -43,14 +43,14 @@ def _new(cls, account, defendant): @classmethod @memoize('jury.by_account') - def by_account_cache(cls, account): - q = cls._query(cls.c._thing1_id == account._id) + def by_account_cache(cls, account_id): + q = cls._query(cls.c._thing1_id == account_id) q._limit = 100 return [ j._fullname for j in q ] @classmethod def by_account(cls, account, _update=False): - rel_ids = cls.by_account_cache(account, _update=_update) + rel_ids = cls.by_account_cache(account._id, _update=_update) juries = DataThing._by_fullname(rel_ids, data=True, return_dict = False) if juries: @@ -59,14 +59,14 @@ def by_account(cls, account, _update=False): @classmethod @memoize('jury.by_defendant') - def by_defendant_cache(cls, defendant): - q = cls._query(cls.c._thing2_id == defendant._id) + def by_defendant_cache(cls, defendant_id): + q = cls._query(cls.c._thing2_id == defendant_id) q._limit = 1000 return [ j._fullname for j in q ] @classmethod def by_defendant(cls, defendant, _update=False): - rel_ids = cls.by_defendant_cache(defendant, _update=_update) + rel_ids = cls.by_defendant_cache(defendant._id, _update=_update) juries = DataThing._by_fullname(rel_ids, data=True, return_dict = False) if juries: diff --git a/r2/r2/models/link.py b/r2/r2/models/link.py index df59b13f38..c20c015f92 100644 --- a/r2/r2/models/link.py +++ b/r2/r2/models/link.py @@ -23,7 +23,7 @@ CreationError from r2.lib.db.operators import desc from r2.lib.utils import base_url, tup, domain, title_to_url -from r2.lib.utils.trial_utils import on_trial +from r2.lib.utils.trial_utils import trial_info from account import Account, DeletedUser from subreddit import Subreddit from printable import Printable @@ -185,6 +185,12 @@ def _hide(self, user): def _unhide(self, user): return self._unsomething(user, self._hidden, 'hide') + def link_domain(self): + if self.is_self: + return 'self' + else: + return domain(self.url) + def keep_item(self, wrapped): user = c.user if c.user_is_loggedin else None @@ -229,9 +235,8 @@ def wrapped_cache_key(wrapped, style): s = Printable.wrapped_cache_key(wrapped, style) if wrapped.promoted is not None: s.extend([getattr(wrapped, "promote_status", -1), - wrapped.disable_comments, + getattr(wrapped, "disable_comments", False), wrapped._date, - wrapped.promote_until, c.user_is_sponsor, wrapped.url, repr(wrapped.title)]) if style == "htmllite": @@ -274,6 +279,7 @@ def make_permalink_slow(self, force_domain = False): @classmethod def add_props(cls, user, wrapped): + from r2.lib.pages import make_link_child from r2.lib.count import incr_counts from r2.lib.media import thumbnail_url from 
r2.lib.utils import timeago @@ -293,7 +299,7 @@ def add_props(cls, user, wrapped): saved = Link._saved(user, wrapped) if user_is_loggedin else {} hidden = Link._hidden(user, wrapped) if user_is_loggedin else {} - trials = on_trial(wrapped) + trials = trial_info(wrapped) #clicked = Link._clicked(user, wrapped) if user else {} clicked = {} @@ -326,6 +332,8 @@ def add_props(cls, user, wrapped): item.thumbnail = "" elif item.has_thumbnail: item.thumbnail = thumbnail_url(item) + elif item.is_self: + item.thumbnail = g.self_thumb else: item.thumbnail = g.default_thumb @@ -345,7 +353,7 @@ def add_props(cls, user, wrapped): # do we hide the score? if user_is_admin: item.hide_score = False - elif item.promoted: + elif item.promoted and item.score <= 0: item.hide_score = True elif user == item.author: item.hide_score = False @@ -367,7 +375,7 @@ def add_props(cls, user, wrapped): item.nofollow = True else: item.nofollow = False - + if c.user.pref_no_profanity: item.title = profanity_filter(item.title) @@ -382,21 +390,8 @@ def add_props(cls, user, wrapped): if item.is_self: item.domain_path = item.subreddit_path - #this is wrong, but won't be so wrong when we move this - #whole chunk of code into pages.py - from r2.lib.pages import MediaChild, SelfTextChild - item.link_child = None - item.editable = False - if item.media_object: - item.link_child = MediaChild(item, load = True) - elif item.selftext: - expand = getattr(item, 'expand_children', False) - item.link_child = SelfTextChild(item, expand = expand, - nofollow = item.nofollow) - #draw the edit button if the contents are pre-expanded - item.editable = (expand and - item.author == c.user and - not item._deleted) + # attach video or selftext as needed + item.link_child, item.editable = make_link_child(item) item.tblink = "http://%s/tb/%s" % ( get_domain(cname = cname, subreddit=False), @@ -414,8 +409,6 @@ def add_props(cls, user, wrapped): else: item.mousedown_url = None - item.on_trial = trials.get(item._fullname, False) - item.fresh = not any((item.likes != None, item.saved, item.clicked, @@ -438,6 +431,26 @@ def add_props(cls, user, wrapped): item.author = DeletedUser() item.as_deleted = True + item.trial_info = trials.get(item._fullname, None) + + item.approval_checkmark = None + + if item.can_ban: + verdict = getattr(item, "verdict", None) + if verdict in ('admin-approved', 'mod-approved'): + approver = None + if getattr(item, "ban_info", None): + approver = item.ban_info.get("unbanner", None) + + if approver: + item.approval_checkmark = _("approved by %s") % approver + else: + item.approval_checkmark = _("approved by a moderator") + + if item.trial_info is not None: + item.reveal_trial_info = True + item.use_big_modbuttons = True + if user_is_loggedin: incr_counts(wrapped) @@ -593,7 +606,7 @@ def add_props(cls, user, wrapped): can_reply_srs = set(s._id for s in subreddits if s.can_comment(user)) \ if c.user_is_loggedin else set() - can_reply_srs.add(promote.PromoteSR._id) + can_reply_srs.add(promote.get_promote_srid()) min_score = user.pref_min_comment_score @@ -789,7 +802,10 @@ def _new(cls, author, to, subject, body, ip, parent = None, sr = None): sr_id = None # check to see if the recipient is a subreddit and swap args accordingly if to and isinstance(to, Subreddit): + to_subreddit = True to, sr = None, to + else: + to_subreddit = False if sr: sr_id = sr._id @@ -816,9 +832,12 @@ def _new(cls, author, to, subject, body, ip, parent = None, sr = None): sr = Subreddit._byID(sr_id) inbox_rel = [] - # if there is a subreddit id, we have to add 
it to the moderator inbox if sr_id: - inbox_rel.append(ModeratorInbox._add(sr, m, 'inbox')) + # if there is a subreddit id, and it's either a reply or + # an initial message to an SR, add to the moderator inbox + # (i.e., don't do it for automated messages from the SR) + if parent or to_subreddit: + inbox_rel.append(ModeratorInbox._add(sr, m, 'inbox')) if author.name in g.admins: m.distinguished = 'admin' m._commit() diff --git a/r2/r2/models/mail_queue.py b/r2/r2/models/mail_queue.py index 8f25654f62..9ebfed9063 100644 --- a/r2/r2/models/mail_queue.py +++ b/r2/r2/models/mail_queue.py @@ -314,7 +314,7 @@ class Email(object): Kind.BID_PROMO : _("[reddit] your bid has been accepted"), Kind.ACCEPT_PROMO : _("[reddit] your promotion has been accepted"), Kind.REJECT_PROMO : _("[reddit] your promotion has been rejected"), - Kind.QUEUED_PROMO : _("[reddit] your promotion has been queued"), + Kind.QUEUED_PROMO : _("[reddit] your promotion has been charged"), Kind.LIVE_PROMO : _("[reddit] your promotion is now live"), Kind.FINISHED_PROMO : _("[reddit] your promotion has finished"), Kind.NEW_PROMO : _("[reddit] your promotion has been created"), @@ -373,18 +373,22 @@ def set_sent(self, date = None, rejected = False): from pylons import g self.date = date or datetime.datetime.now(g.tz) t = self.handler.reject_table if rejected else self.handler.track_table - t.insert().values({t.c.account_id: - self.user._id if self.user else 0, - t.c.to_addr : self.to_addr, - t.c.fr_addr : self.fr_addr, - t.c.reply_to : self.reply_to, - t.c.ip : self.ip, - t.c.fullname: - self.thing._fullname if self.thing else "", - t.c.date: self.date, - t.c.kind : self.kind, - t.c.msg_hash : self.msg_hash, - }).execute() + try: + t.insert().values({t.c.account_id: + self.user._id if self.user else 0, + t.c.to_addr : self.to_addr, + t.c.fr_addr : self.fr_addr, + t.c.reply_to : self.reply_to, + t.c.ip : self.ip, + t.c.fullname: + self.thing._fullname if self.thing else "", + t.c.date: self.date, + t.c.kind : self.kind, + t.c.msg_hash : self.msg_hash, + }).execute() + except: + print "failed to send message" + self.sent = True def to_MIMEText(self): diff --git a/r2/r2/models/subreddit.py b/r2/r2/models/subreddit.py index 12f47b620c..314bd3677e 100644 --- a/r2/r2/models/subreddit.py +++ b/r2/r2/models/subreddit.py @@ -40,6 +40,9 @@ class SubredditExists(Exception): pass class Subreddit(Thing, Printable): + # Note: As of 2010/03/18, nothing actually overrides the static_path + # attribute, even on a cname. So c.site.static_path should always be + # the same as g.static_path. 
_defaults = dict(static_path = g.static_path, stylesheet = None, stylesheet_rtl = None, @@ -68,6 +71,7 @@ class Subreddit(Thing, Printable): sr_limit = 50 + # note: for purposely unrenderable reddits (like promos) set author_id = -1 @classmethod def _new(cls, name, title, author_id, ip, lang = g.lang, type = 'public', over_18 = False, **kw): @@ -113,6 +117,14 @@ def _by_name(cls, name, _update = False): if name == 'friends': return Friends + elif name == 'randnsfw': + return RandomNSFW + elif name == 'random': + return Random + elif name == 'mod': + return Mod + elif name == 'contrib': + return Contrib elif name == 'all': return All else: @@ -211,7 +223,7 @@ def can_give_karma(self, user): return self.is_special(user) def should_ratelimit(self, user, kind): - if c.user_is_admin: + if c.user_is_admin or self.is_special(user): return False if kind == 'comment': @@ -219,8 +231,7 @@ def should_ratelimit(self, user, kind): else: rl_karma = g.MIN_RATE_LIMIT_KARMA - return not (self.is_special(user) or - user.karma(kind, self) >= rl_karma) + return user.karma(kind, self) < rl_karma def can_view(self, user): if c.user_is_admin: @@ -271,6 +282,19 @@ def get_reported(self): from r2.lib.db import queries return queries.get_reported(self) + def get_trials(self): + from r2.lib.db import queries + return queries.get_trials(self) + + def get_modqueue(self): + from r2.lib.db import queries + return queries.get_modqueue(self) + + def get_all_comments(self): + from r2.lib.db import queries + return queries.get_all_comments() + + @classmethod def add_props(cls, user, wrapped): names = ('subscriber', 'moderator', 'contributor') @@ -308,7 +332,8 @@ def wrapped_cache_key(wrapped, style): return s @classmethod - def top_lang_srs(cls, lang, limit, filter_allow_top = False): + def top_lang_srs(cls, lang, limit, filter_allow_top = False, over18 = True, + over18_only = False): """Returns the default list of subreddits for a given language, sorted by popularity""" pop_reddits = Subreddit._query(Subreddit.c.type == ('public', @@ -318,19 +343,24 @@ def top_lang_srs(cls, lang, limit, filter_allow_top = False): data = True, read_cache = True, write_cache = True, - cache_time = 3600) + cache_time = 5 * 60) if lang != 'all': pop_reddits._filter(Subreddit.c.lang == lang) - if not c.over18: + if not over18: pop_reddits._filter(Subreddit.c.over_18 == False) + elif over18_only: + pop_reddits._filter(Subreddit.c.over_18 == True) if filter_allow_top: pop_reddits._limit = 2 * limit - return filter(lambda sr: sr.allow_top == True, - pop_reddits)[:limit] + pop_reddits = filter(lambda sr: sr.allow_top == True, + pop_reddits)[:limit] + + # reddits with negative author_id are system reddits and shouldn't be displayed + return [x for x in pop_reddits + if getattr(x, "author_id", 0) is None or getattr(x, "author_id", 0) >= 0] - return list(pop_reddits) @classmethod def default_subreddits(cls, ids = True, limit = g.num_default_reddits): @@ -346,7 +376,8 @@ def default_subreddits(cls, ids = True, limit = g.num_default_reddits): auto_srs = [ Subreddit._by_name(n) for n in g.automatic_reddits ] srs = cls.top_lang_srs(c.content_langs, limit + len(auto_srs), - filter_allow_top = True) + filter_allow_top = True, + over18 = c.over18) rv = [] for i, s in enumerate(srs): if len(rv) >= limit: @@ -370,6 +401,13 @@ def random_reddits(cls, user_name, sr_ids, limit): a while so their front page doesn't jump around.""" return random.sample(sr_ids, limit) + @classmethod + def random_reddit(cls, limit = 1000, over18 = False): + return 
random.choice(cls.top_lang_srs(c.content_langs, limit, + filter_allow_top = False, + over18 = over18, + over18_only = over18)) + @classmethod def user_subreddits(cls, user, ids = True, limit = sr_limit): """ @@ -395,6 +433,26 @@ srs = srs[:limit] return srs + @classmethod + @memoize('subreddit.special_reddits') + def special_reddits_cache(cls, user_id, query_param): + reddits = SRMember._query(SRMember.c._name == query_param, + SRMember.c._thing2_id == user_id, + #hack to prevent the query from + #adding its own date + sort = (desc('_t1_ups'), desc('_t1_date')), + eager_load = True, + thing_data = True, + limit = 100) + + return [ sr._thing1_id for sr in reddits ] + + # Used to pull all of the SRs a given user moderates or is a contributor + # to (which one is controlled by query_param) + @classmethod + def special_reddits(cls, user, query_param, _update=False): + return cls.special_reddits_cache(user._id, query_param, _update=_update) + def is_subscriber_defaults(self, user): if user.has_subscribed: return self.is_subscriber(user) @@ -409,7 +467,7 @@ def subscribe_defaults(cls, user): #this will call reverse_subscriber_ids after every #addition. if it becomes a problem we should make an #add_multiple_subscriber fn - if sr.add_subscriber(c.user): + if sr.add_subscriber(user): sr._incr('_ups', 1) user.has_subscribed = True user._commit() @@ -538,6 +596,22 @@ class FriendsSR(FakeSubreddit): name = 'friends' title = 'friends' + @classmethod + @memoize("get_important_friends", 5*60) + def get_important_friends(cls, user_id, max_lookup = 500, limit = 100): + a = Account._byID(user_id, data = True) + # friends are returned chronologically by date, so pick the end of the list + # for the most recent additions + friends = Account._byID(a.friends[-max_lookup:], return_dict = False, + data = True) + + # if we don't have a last visit for your friends, we don't care about them + friends = [x for x in friends if hasattr(x, "last_visit")] + + # sort friends by most recent interactions + friends.sort(key = lambda x: getattr(x, "last_visit"), reverse = True) + return [x._id for x in friends[:limit]] + def get_links(self, sort, time): from r2.lib.db import queries from r2.models import Link @@ -546,7 +620,9 @@ if not c.user_is_loggedin: raise UserRequiredException - if not c.user.friends: + friends = self.get_important_friends(c.user._id) + + if not friends: return [] if g.use_query_cache: @@ -557,20 +633,54 @@ sort = 'new' time = 'all' - friends = Account._byID(c.user.friends, - return_dict=False) + friends = Account._byID(friends, return_dict=False) crs = [queries.get_submitted(friend, sort, time) for friend in friends] return queries.MergedCachedResults(crs) else: - q = Link._query(Link.c.author_id == c.user.friends, - sort = queries.db_sort(sort)) + q = Link._query(Link.c.author_id == friends, + sort = queries.db_sort(sort), + data = True) if time != 'all': q._filter(queries.db_times[time]) return q - + + def get_all_comments(self): + from r2.lib.db import queries + from r2.models import Comment + from r2.controllers.errors import UserRequiredException + + if not c.user_is_loggedin: + raise UserRequiredException + + friends = self.get_important_friends(c.user._id) + + if not friends: + return [] + + if g.use_query_cache: + # with the precomputer enabled, this Subreddit only supports + being sorted by 'new'.
it would be nice to have a + # cleaner UI than just blatantly ignoring their sort, + # though + sort = 'new' + time = 'all' + + friends = Account._byID(friends, + return_dict=False) + + crs = [queries.get_comments(friend, sort, time) + for friend in friends] + return queries.MergedCachedResults(crs) + + else: + q = Comment._query(Comment.c.author_id == friends, + sort = desc('_date'), + data = True) + return q + class AllSR(FakeSubreddit): name = 'all' title = 'all' @@ -579,7 +689,10 @@ def get_links(self, sort, time): from r2.lib import promote from r2.models import Link from r2.lib.db import queries - q = Link._query(sort = queries.db_sort(sort)) + q = Link._query(sort = queries.db_sort(sort), + read_cache = True, + write_cache = True, + cache_time = 60) if time != 'all': q._filter(queries.db_times[time]) return q @@ -641,6 +754,42 @@ def get_links(self, sort, time): def rising_srs(self): return self.sr_ids +class RandomReddit(FakeSubreddit): + name = 'random' + +class RandomNSFWReddit(FakeSubreddit): + name = 'randnsfw' + +class ModContribSR(DefaultSR): + name = None + title = None + query_param = None + real_path = None + + @property + def path(self): + return '/r/' + self.real_path + + def sr_ids(self): + if c.user_is_loggedin: + return Subreddit.special_reddits(c.user, self.query_param) + else: + return [] + + def get_links(self, sort, time): + return self.get_links_sr_ids(self.sr_ids(), sort, time) + +class ModSR(ModContribSR): + name = "communities you moderate" + title = "communities you moderate" + query_param = "moderator" + real_path = "mod" + +class ContribSR(ModContribSR): + name = "contrib" + title = "communities you're a contributor on" + query_param = "contributor" + real_path = "contrib" class SubSR(FakeSubreddit): stylesheet = 'subreddit.css' @@ -677,8 +826,12 @@ def get_links(self, sort, time): Sub = SubSR() Friends = FriendsSR() +Mod = ModSR() +Contrib = ContribSR() All = AllSR() Default = DefaultSR() +Random = RandomReddit() +RandomNSFW = RandomNSFWReddit() class SRMember(Relation(Subreddit, Account)): pass Subreddit.__bases__ += (UserRel('moderator', SRMember), diff --git a/r2/r2/models/trial.py b/r2/r2/models/trial.py index 418c68f0f6..76d1945f43 100644 --- a/r2/r2/models/trial.py +++ b/r2/r2/models/trial.py @@ -20,35 +20,37 @@ # CondeNet, Inc. All Rights Reserved. ################################################################################ -from r2.models import Link -from r2.lib.utils import Storage +from r2.models import Thing, Link, Subreddit, AllSR, admintools +from r2.lib.utils import Storage, tup +from r2.lib.memoize import memoize from datetime import datetime from pylons import g class Trial(Storage): def __init__(self, defendant): - from r2.lib.utils.trial_utils import on_trial + from r2.lib.utils.trial_utils import trial_info if not defendant._loaded: defendant._load() - if not on_trial(defendant): + if not trial_info(defendant): raise ValueError ("Defendant %s is not on trial" % defendant._id) self.defendant = defendant - def convict(self): -# train_spam_filter(self.defendant, "spam") - if self.defendant._spam: - pass #TODO: PM submitter - else: - pass #TODO: ban it + def convict(self, details = ''): +# if self.defendant._spam: +# TODO: PM submitter, maybe? +# else: +# TODO: PM submitter, maybe? 
+ admintools.spam(self.defendant, auto=False, moderator_banned=True, + banner="deputy moderation" + details) - def acquit(self): -# train_spam_filter(self.defendant, "ham") - if self.defendant._spam: - pass -# self.defendant._date = datetime.now(g.tz) -# self.defendant._spam = False - #TODO: PM submitter + def acquit(self, details = ''): + admintools.unspam(self.defendant, unbanner="deputy moderation" + details) + +# if self.defendant._spam: +# TODO: PM submitter +# TODO: reset submission time: +# self.defendant._date = datetime.now(g.tz) def mistrial(self): #TODO: PM mods @@ -57,15 +59,16 @@ def mistrial(self): def verdict(self): from r2.models import Jury + from r2.lib.utils.trial_utils import update_voting - ups = 0 - downs = 0 + koshers = 0 + spams = 0 nones = 0 now = datetime.now(g.tz) defendant_age = now - self.defendant._date if defendant_age.days > 0: - return "timeout" + return ("jury timeout", None, None) latest_juryvote = None for j in Jury.by_defendant(self.defendant): @@ -80,64 +83,115 @@ def verdict(self): latest_juryvote = max(latest_juryvote, j._date) if j._name == "1": - ups += 1 + koshers += 1 elif j._name == "-1": - downs += 1 + spams += 1 else: raise ValueError("weird jury vote: [%s]" % j._name) # The following trace is temporary; it'll be removed once this # is done via cron job as opposed to manually - print "%d ups, %d downs, %d haven't voted yet" % (ups, downs, nones) + print "%d koshers, %d spams, %d haven't voted yet" % (koshers, spams, nones) - total_votes = ups + downs + update_voting(self.defendant, koshers, spams) + + total_votes = koshers + spams if total_votes < 7: g.log.debug("not enough votes yet") - return None + return (None, koshers, spams) # Stop showing this in the spotlight box once it has 30 votes if total_votes >= 30: g.cache.set("quench_jurors-" + self.defendant._fullname, True) - # If a trial is less than an hour old, and votes are still trickling in - # (i.e., there was one in the past five minutes), it's not yet time to - # declare a verdict. + # If a trial is less than an hour old, and votes are still trickling + # in (i.e., there was one in the past five minutes), we're going to + # require a nearly unanimous opinion to end the trial without + # waiting for more votes. if defendant_age.seconds < 3600 and (now - latest_juryvote).seconds < 300: - g.log.debug("votes still trickling in") - return None + trickling = True + else: + trickling = False - up_pct = float(ups) / float(total_votes) + kosher_pct = float(koshers) / float(total_votes) - if up_pct < 0.34: - return "guilty" - elif up_pct > 0.66: - return "innocent" - elif total_votes >= 30: - return "hung jury" + if kosher_pct < 0.13: + return ("guilty", koshers, spams) + elif kosher_pct > 0.86: + return ("innocent", koshers, spams) + elif trickling: + g.log.debug("votes still trickling in") + return (None, koshers, spams) + elif kosher_pct < 0.34: + return ("guilty", koshers, spams) + elif kosher_pct > 0.66: + return ("innocent", koshers, spams) + elif total_votes >= 100: + # This should never really happen; quenching should kick in + # after 30 votes, so new jurors won't be assigned to the + # trial. Just in case something goes wrong, close any trials + # with more than 100 votes. 
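# Editor's aside (not part of the patch): the verdict ladder implemented
# above, restated as a pure function. "trickling" means the trial is under
# an hour old and a jury vote arrived within the last five minutes; in that
# case only a near-unanimous tally ends the trial early (verdict_sketch is
# an illustrative name):
def verdict_sketch(koshers, spams, trickling):
    total = koshers + spams
    if total < 7:
        return None              # not enough votes yet
    kosher_pct = float(koshers) / total
    if kosher_pct < 0.13:
        return "guilty"
    elif kosher_pct > 0.86:
        return "innocent"
    elif trickling:
        return None              # wait for more votes
    elif kosher_pct < 0.34:
        return "guilty"
    elif kosher_pct > 0.66:
        return "innocent"
    elif total >= 100:
        return "hung jury"       # safety valve; quenching normally caps trials near 30 votes
    else:
        return None              # hung jury so far; wait for more voters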
+ return ("hung jury", koshers, spams) else: g.log.debug("hung jury, so far") - return None # no decision yet; wait for more voters + return (None, koshers, spams) # no decision yet; wait for more voters def check_verdict(self): from r2.lib.utils.trial_utils import end_trial - verdict = self.verdict() + verdict, koshers, spams = self.verdict() if verdict is None: return # no verdict yet - if verdict == "guilty": - self.convict() - elif verdict == "innocent": - self.acquit() - elif verdict in ("timeout", "hung jury"): + if verdict in ("jury timeout", "hung jury"): self.mistrial() else: - raise ValueError("Invalid verdict [%s]" % verdict) + details=", %d-%d" % (spams, koshers) - self.defendant.verdict = verdict - self.defendant._commit() + if verdict == "guilty": + self.convict(details) + elif verdict == "innocent": + self.acquit(details) + else: + raise ValueError("Invalid verdict [%s]" % verdict) - end_trial(self.defendant) + end_trial(self.defendant, verdict) return verdict + + @classmethod + @memoize('trial.all_defendants') + def all_defendants_cache(cls): + fnames = g.hardcache.backend.ids_by_category("trial") + return fnames + + @classmethod + def all_defendants(cls, quench=False, _update=False): + all = cls.all_defendants_cache(_update=_update) + + defs = Thing._by_fullname(all, data=True).values() + + if quench: + # Used for the spotlight, to filter out trials with over 30 votes; + # otherwise, hung juries would hog the spotlight for an hour as + # their vote counts continued to skyrocket + + return filter (lambda d: + not g.cache.get("quench_jurors-" + d._fullname), + defs) + else: + return defs + + # sr can be plural + @classmethod + def defendants_by_sr(cls, sr): + all = cls.all_defendants() + + if isinstance(sr, AllSR): + return all + + sr = tup(sr) + sr_ids = [ s._id for s in sr ] + + return filter (lambda x: x.sr_id in sr_ids, all) diff --git a/r2/r2/models/vote.py b/r2/r2/models/vote.py index 4f9189a444..8f272e5ae1 100644 --- a/r2/r2/models/vote.py +++ b/r2/r2/models/vote.py @@ -53,6 +53,7 @@ class Vote(MultiRelation('vote', def vote(cls, sub, obj, dir, ip, organic = False, cheater = False): from admintools import valid_user, valid_thing, update_score from r2.lib.count import incr_counts + from r2.lib.db import queries sr = obj.subreddit_slow kind = obj.__class__.__name__.lower() @@ -99,6 +100,18 @@ def vote(cls, sub, obj, dir, ip, organic = False, cheater = False): v.organic = organic v._commit() + g.cache.delete(queries.prequeued_vote_key(sub, obj)) + + lastvote_attr_name = 'last_vote_' + obj.__class__.__name__ + try: + setattr(sub, lastvote_attr_name, datetime.now(g.tz)) + except TypeError: + # this temporarily works around an issue with timezones in + # a really hacky way. Remove me later + setattr(sub, lastvote_attr_name, None) + sub._commit() + setattr(sub, lastvote_attr_name, datetime.now(g.tz)) + sub._commit() up_change, down_change = score_changes(amount, oldamount) @@ -122,7 +135,8 @@ def vote(cls, sub, obj, dir, ip, organic = False, cheater = False): #TODO make this generic and put on multirelation? 
@classmethod def likes(cls, sub, obj): - votes = cls._fast_query(sub, obj, ('1', '-1'), data=False) + votes = cls._fast_query(sub, obj, ('1', '-1'), + data=False, eager_load=False) votes = dict((tuple(k[:2]), v) for k, v in votes.iteritems() if v) return votes diff --git a/r2/r2/public/static/cake.png b/r2/r2/public/static/cake.png index 2e358898f5532df5967b64c6cea8dcb2d17e4314..18a7b58337cebecfd79c5e7dd87505e0262c6a5d 100644 GIT binary patch delta 125 zcmV-@0D}ME0o(zQF()2SOjJbx830Bu48<1{?QCk>nE;=k|M$%$&(Htm=JodY;;M&7 zl95|EO9TZI11BwLIGO+e04hmDK~xCWV_;wuU;qI@qW~buD6TXWNDGE`mWBo}h@=5Q f0)rq32%z!+)`SSJ*H$UC00000NkvXXu0mjfAgeA# delta 129 zcmV-{0Dk}60pJ0UF)ATYOjJbx002fV48<1{m3UA8W&rJMYM-C~|CLYA&;RA-_4fGU zkg)LTkytrQ0}&7p0X)r6>Hq)$D@jB_R0!8&U|Y=_OG{u7 j1OWjCk#Zm?K;;1d;qnR{f_uaC00000NkvXXu0mjfi1so) diff --git a/r2/r2/public/static/css/reddit.css b/r2/r2/public/static/css/reddit.css index 80112a9472..a3425018c6 100644 --- a/r2/r2/public/static/css/reddit.css +++ b/r2/r2/public/static/css/reddit.css @@ -390,7 +390,6 @@ ul.flat-vert {text-align: left;} .infotable .small { font-size: smaller; } .infotable td { padding-right: 1em; } .infotable a:hover { text-decoration: underline } -.infotable a.pretty-button:hover { text-decoration: none } .infotable .state-button a { background-color: #F0F0F0; color: gray; } .infotable .bold { font-weight: bold; } .infotable .invalid-user { background-color: pink} @@ -476,6 +475,7 @@ ul.flat-vert {text-align: left;} .domain a:hover { text-decoration: underline } .link-note { + background-color: white; color: #ff4444; font-size:x-small; } @@ -485,7 +485,7 @@ ul.flat-vert {text-align: left;} .tagline a {color: #369; text-decoration: none; } .tagline .friend { color: orangered } .tagline .submitter { color: #0055df } -.tagline .moderator { color: #228822 } +.tagline .moderator, .green { color: #228822 } .tagline .admin { color: #ff0011; } .tagline a.author.admin { font-weight: bold } .tagline a:hover { text-decoration: underline } @@ -514,7 +514,7 @@ ul.flat-vert {text-align: left;} .nextprev { color: gray; font-size: larger; margin-top: 10px;} /* corner help */ -.help a { +.help a.help { color: #808080; text-decoration: underline; } @@ -539,6 +539,16 @@ ul.flat-vert {text-align: left;} .help p, .help form { margin: 5px; } .help form { display: inline; } +.infotext { + border: 1px solid #369; + background-color: #EFF7FF; +} + +.infotext p { + font-size: small; + margin: 5px; +} + .wikipage { margin: 15px; } @@ -732,6 +742,9 @@ a.star { text-decoration: none; color: #ff8b60 } padding-left: 4px; } +.entry .buttons li.stamp + li.stamp { + margin-left: 4px; +} .entry .buttons li a { color: #888; @@ -854,6 +867,7 @@ a.star { text-decoration: none; color: #ff8b60 } .comment .child { margin-top: 10px; margin-left: 15px; + border-left: 1px dotted #DDF; } textarea.gray { color: gray; } @@ -1807,6 +1821,7 @@ textarea.gray { color: gray; } } .button .blog .r { color: gray; } +.button .blog .score { white-space: nowrap; } .button {color: #369;} .button a:hover { text-decoration: underline } @@ -2261,9 +2276,35 @@ form input[type=radio] {margin: 2px .5em 0 0; } text-decoration: none; } -.reported { background-color: #f6e69f } +.entry .buttons li.trial-stamp, .entry .buttons li.reported-stamp { + border: 1px solid black !important; + padding: 0 4px; + background-color: #f6e69f; +} + +.entry .buttons li.trial-stamp .spam { + color: #e00; +} + +.entry .buttons li.trial-stamp .kosher { + color: #090; +} + .suspicious { background-color: #f6e69f } -.spam { background-color: #FA8072 } 
diff --git a/r2/r2/public/static/css/reddit.css b/r2/r2/public/static/css/reddit.css
index 80112a9472..a3425018c6 100644
--- a/r2/r2/public/static/css/reddit.css
+++ b/r2/r2/public/static/css/reddit.css
@@ -390,7 +390,6 @@ ul.flat-vert {text-align: left;}
 .infotable .small { font-size: smaller; }
 .infotable td { padding-right: 1em; }
 .infotable a:hover { text-decoration: underline }
-.infotable a.pretty-button:hover { text-decoration: none }
 .infotable .state-button a { background-color: #F0F0F0; color: gray; }
 .infotable .bold { font-weight: bold; }
 .infotable .invalid-user { background-color: pink}
@@ -476,6 +475,7 @@ ul.flat-vert {text-align: left;}
 .domain a:hover { text-decoration: underline }
 
 .link-note {
+  background-color: white;
   color: #ff4444;
   font-size:x-small;
 }
@@ -485,7 +485,7 @@ ul.flat-vert {text-align: left;}
 .tagline a {color: #369; text-decoration: none; }
 .tagline .friend { color: orangered }
 .tagline .submitter { color: #0055df }
-.tagline .moderator { color: #228822 }
+.tagline .moderator, .green { color: #228822 }
 .tagline .admin { color: #ff0011; }
 .tagline a.author.admin { font-weight: bold }
 .tagline a:hover { text-decoration: underline }
@@ -514,7 +514,7 @@ ul.flat-vert {text-align: left;}
 .nextprev { color: gray; font-size: larger; margin-top: 10px;}
 
 /* corner help */
-.help a {
+.help a.help {
   color: #808080;
   text-decoration: underline;
 }
@@ -539,6 +539,16 @@ ul.flat-vert {text-align: left;}
 .help p, .help form { margin: 5px; }
 .help form { display: inline; }
 
+.infotext {
+  border: 1px solid #369;
+  background-color: #EFF7FF;
+}
+
+.infotext p {
+  font-size: small;
+  margin: 5px;
+}
+
 .wikipage {
   margin: 15px;
 }
@@ -732,6 +742,9 @@ a.star { text-decoration: none; color: #ff8b60 }
   padding-left: 4px;
 }
 
+.entry .buttons li.stamp + li.stamp {
+  margin-left: 4px;
+}
 
 .entry .buttons li a {
   color: #888;
@@ -854,6 +867,7 @@ a.star { text-decoration: none; color: #ff8b60 }
 .comment .child {
   margin-top: 10px;
   margin-left: 15px;
+  border-left: 1px dotted #DDF;
 }
 
 textarea.gray { color: gray; }
@@ -1807,6 +1821,7 @@ textarea.gray { color: gray; }
 }
 
 .button .blog .r { color: gray; }
+.button .blog .score { white-space: nowrap; }
 
 .button {color: #369;}
 .button a:hover { text-decoration: underline }
@@ -2261,9 +2276,35 @@ form input[type=radio] {margin: 2px .5em 0 0; }
   text-decoration: none;
 }
 
-.reported { background-color: #f6e69f }
+.entry .buttons li.trial-stamp, .entry .buttons li.reported-stamp {
+  border: 1px solid black !important;
+  padding: 0 4px;
+  background-color: #f6e69f;
+}
+
+.entry .buttons li.trial-stamp .spam {
+  color: #e00;
+}
+
+.entry .buttons li.trial-stamp .kosher {
+  color: #090;
+}
+
 .suspicious { background-color: #f6e69f }
-.spam { background-color: #FA8072 }
+.thing.spam { background-color: #FA8072 }
+
+.comment.spam > .child, .message.spam > .child {
+  background-color: white;
+}
+.comment.spam > .child {
+  margin-left: 0;
+  padding-left: 15px;
+}
+.message.spam > .child {
+  /* There's a thin pink "border" due to the parent's padding:7px,
+     which we could try to fix here some day. */
+}
+
 .banned-user {
   overflow: hidden;
   opacity: .7;
@@ -2274,6 +2315,13 @@ form input[type=radio] {margin: 2px .5em 0 0; }
   text-decoration: line-through;
 }
 
+.approval-checkmark {
+  cursor: pointer;
+  height: 0.8em;
+  vertical-align: baseline;
+  margin-left: 3px;
+}
+
 .little { font-size: smaller }
 
 .gray { color: gray }
@@ -2293,6 +2341,7 @@ form input[type=radio] {margin: 2px .5em 0 0; }
   float: left;
   margin: 0px 5px;
   overflow: hidden;
+  width: 70px;
 }
 
@@ -2538,8 +2587,8 @@ ul#image-preview-list .description pre {
   border-top: 1px dotted #369;
 }
 
-#sr-bar .separator {color: gray; }
-#sr-bar a {color: black;}
+.sr-bar .separator {color: gray; }
+.sr-bar a {color: black;}
 
 #sr-more-link {
   color: black;
@@ -2982,6 +3031,82 @@ ul.tabmenu.formtab {
   width: 100%;
 }
 
+.campaign {
+  border: 1px solid #336699;
+  background-color: #EFF7FF;
+  padding: 5px;
+}
+
+.campaign .status {
+  font-size: x-small;
+}
+
+.existing-campaigns > table {
+  font-size: x-small;
+  border: 1px solid #888;
+  background-color: #F8F8F8;
+  width: 100%;
+  margin: 10px 0;
+}
+
+.existing-campaigns td.bid {
+  background: transparent none no-repeat scroll center right;
+  padding-right: 15px;
+}
+
+.existing-campaigns td.bid.paid {
+  background-image: url(/static/green-check.png);
+}
+
+.existing-campaigns td.bid .info{
+  margin-right: 3px;
+}
+
+.existing-campaigns > table > tbody > tr > td {
+  text-align: right;
+  border: 1px solid #888;
+  padding: 1px 5px;
+}
+.existing-campaigns > table > tbody > tr#edit-campaign-tr > td {
+  text-align: left;
+}
+
+.existing-campaigns > table > tbody > tr > th {
+  text-align: center;
+  font-weight: bold;
+  padding: 1px 5px;
+  border: 1px solid #888;
+}
+
+.campaign ul {
+  font-size: x-small;
+  list-style-type: disc;
+  margin: 0 20px;
+}
+.existing-campaigns td > button { margin: 0px 5px 0px 0px; }
+
+.campaign { width: 95%; }
+.campaign .bid-info { font-size: x-small; }
+.campaign .buttons { float:right; }
+.campaign td.prefright {
+  padding: 4px;
+}
+.campaign .targeting {
+  margin-left: 25px;
+}
+.campaign .targeting input{
+  width: 95%;
+}
+
+.campaign th {
+  font-size: small;
+  padding: 4px;
+  padding-top: 8px;
+}
+.linefield .campaign input[type=text] {
+  font-size: x-small;
+}
+
 /***traffic stuff***/
 .traffic-table {margin: 10px 20px; }
 .traffic-table a:hover { text-decoration: underline; }
@@ -3007,6 +3132,10 @@ ul.tabmenu.formtab {
   border: 1px solid #B0B0B0;
   margin-left: 10px;
   margin-bottom: 10px;
+  display: inline-block;
+}
+.traffic-graph .title {
+  text-align: center;
 }
 
 .promoted-traffic h1 {
@@ -3180,9 +3309,12 @@ table.lined-table {
 /* Datepicker
 ----------------------------------*/
 .datepicker {
+  z-index: 1000;
   display: none;
   -moz-border-radius: 6px;
   -webkit-border-radius: 6px;
+  -moz-box-shadow: 3px 3px 3px #888;
+  -webkit-box-shadow: 3px 3px 3px #888;
 }
 .datepicker.inuse { display: block; }
@@ -3325,6 +3457,22 @@ table.lined-table {
   font-weight: bold;
 }
 
+.create-promotion .help {
+  font-size: x-small;
+}
+
+.create-promotion .help p {
+  margin: 5px;
+}
+
+.create-promotion .help a.help {
+  font-weight: bold;
+  text-decoration: none;
+  float: right;
+  color: orangered;
+}
+
+
 .create-promo { float: left; width: 520px; margin-right: 20px;}
 .create-promo .infobar {
   margin-right: 0;
@@ -3336,7 +3484,7 @@ table.lined-table {
 .create-promo h2 { margin-top: 10px; color: black; }
 .create-promo ol { margin: 0px 30px 10px 30px; }
 .create-promo ol > li {
-  list-style-type: disc; margin:
+  list-style-type: disc;
 }
 .create-promo .rules { text-align: right; }
@@ -3364,8 +3512,24 @@ table.lined-table {
   font-family: courier;
 }
 
+.bidding-history table.bidding-history tr:hover {
+  background-color: #AAA;
+}
+.bidding-history table.bidding-history th,
+.bidding-history table.bidding-history td {
+  padding: 2px 5px;
+  text-align: right;
+  font-size: x-small;
+}
+.bidding-history table.bidding-history th {
+  font-weight: bold;
+}
+.bidding-history div.graph {
+  height: 8px;
+  background-color: #336699;
+}
 
 .pay-form tr.input-error th {
   color: red;
@@ -3624,6 +3788,11 @@ a.adminbox:hover {
   border: solid 1px orangered;
 }
 
+.email {
+  font-family: monospace;
+  font-size: larger;
+}
+
 .lined-table, .lined-table th, .lined-table td {
   border: solid #cdcdcd 1px;
   border-collapse: collapse;
@@ -3716,6 +3885,8 @@ a.adminbox:hover {
   padding-right: 5px;
 }
 
+a.pretty-button:hover { text-decoration: none !important }
+
 .pretty-button {
   margin-left: 5px;
   margin-bottom: 5px;
@@ -3808,3 +3979,8 @@ a.pretty-button.positive.pressed {
 .sitetable .we-need-help {
   display: none;
 }
+
+.oatmeal img {
+  display: block;
+  margin: 5px auto;
+}
\ No newline at end of file
diff --git a/r2/r2/public/static/green-check.png b/r2/r2/public/static/green-check.png
index 5745a64ee5f8bf2b8cfda33f154e498cee3aa96a..41c4d99acb9282045eace7273f93bf26716a1702 100644
GIT binary patch
delta 515
zcmV+e0{s2L29X4i7=H)`0001hYg94-0004VQb$4nuFf3k00006VoOIv0RI600RN!9
zr;`8x010qNS#tmY3ljhU3ljkVnw%H_000McNliru+5`|53kWZ4>Wu&Z0g*{WK~yNu
zt&=}UlyMx!Ki~I#Z|X+j(j`QxrN#3Sg$A)iaB;Gw;+EjTrhhXB4Z>6Wcj2qUaf1&1
z8{gZqa_~r(mIJq-NJ$bwD%>Re13nHO;-&Xe&=-EokKe=d@O(c%lf=J{^LNk867v?Z
z!W|a?Nuei`!9jA)-h~Q+09O*F5|`Vim?VEMg=EzomhmJ}e9P^)&nAx$PjBqy+cXT$6B77zq59`KRWdQNopOpmVQY!XD
zAXgF(b1X50c!HR|wSg3qaFFazk)vJ%ps#_Ms~&I=f!e#i;D;3g6P$;$+`CCQ$vlsl
z?Hc2!qG6=i#gv;tb^I$RA6YTa=2MX{0Z`59zx=1lIR*>lQv~_??{feE002ovPDHLk
FV1jrB=z#zL

delta 823
zcmV-71IYZ51i}W87=I7~0001AnI9$q000SaNLh0L002k;002k;M#*bF00009c5p#w
0000D0000I0MWSK1ONa6-$_J4R5;7Mlxav*Q546Ikx6P%L?Ve6R4OSJnrNG7g{IMF
P|6T(Qf3B@Sy@p^3;Dq!C?#542u+L1LKAFp$uuz%N(74ti+^;SaZ4L#=DmCGzYp&^
MTz<$I-kz}o%4S8e>nF&2LJ%*AH}2zcYNhR#g(>Xmjp&l=mppYu2R(>f;?MSnt!U1-tV8)|I28=T($PXu
$Zl%6F|zx4MO{d6cEOanMEWB{zUKUaxYX+J#`3N*xl#a~2xqtWS2aa>#80Zrcqi6l
H`3w3L$szJHGS@lQOmbp>~#tvN^K@uySl76D}Cq~oPXr87GcE$l#aNwL4hjsc~~N1
cn)GTY4AOiRN(LTm`J~kC?cno;iSySh5>oo|Trr9|`VZ$|BP?u_NV0Duyf%vZt-v+RZ#Yy<%6-m`wC*UO@0e1n
W-stNGlOVyXx1tpuPEJ`Px>sSmn=1){return"rgb("+[F.r,F.g,F.b].join(",")+")"}else{return"rgba("+[F.r,F.g,F.b,F.a].join(",")+")"}};F.normalize=function(){function G(I,J,H){return J<I?I:(J>H?H:J)}F.r=G(0,parseInt(F.r),255);F.g=G(0,parseInt(F.g),255);F.b=G(0,parseInt(F.b),255);F.a=G(0,F.a,1);return F};F.clone=function(){return jQuery.color.make(F.r,F.b,F.g,F.a)};return F.normalize()};jQuery.color.extract=function(C,B){var D;do{D=C.css(B).toLowerCase();if(D!=""&&D!="transparent"){break}C=C.parent()}while(!jQuery.nodeName(C.get(0),"body"));if(D=="rgba(0, 0, 0, 0)"){D="transparent"}return jQuery.color.parse(D)};jQuery.color.parse=function(E){var D,B=jQuery.color.make;if(D=/rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(E)){return B(parseInt(D[1],10),parseInt(D[2],10),parseInt(D[3],10))}if(D=/rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(E)){return
B(parseInt(D[1],10),parseInt(D[2],10),parseInt(D[3],10),parseFloat(D[4]))}if(D=/rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(E)){return B(parseFloat(D[1])*2.55,parseFloat(D[2])*2.55,parseFloat(D[3])*2.55)}if(D=/rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(E)){return B(parseFloat(D[1])*2.55,parseFloat(D[2])*2.55,parseFloat(D[3])*2.55,parseFloat(D[4]))}if(D=/#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(E)){return B(parseInt(D[1],16),parseInt(D[2],16),parseInt(D[3],16))}if(D=/#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(E)){return B(parseInt(D[1]+D[1],16),parseInt(D[2]+D[2],16),parseInt(D[3]+D[3],16))}var C=jQuery.trim(E).toLowerCase();if(C=="transparent"){return B(255,255,255,0)}else{D=A[C];return B(D[0],D[1],D[2])}};var A={aqua:[0,255,255],azure:[240,255,255],beige:[245,245,220],black:[0,0,0],blue:[0,0,255],brown:[165,42,42],cyan:[0,255,255],darkblue:[0,0,139],darkcyan:[0,139,139],darkgrey:[169,169,169],darkgreen:[0,100,0],darkkhaki:[189,183,107],darkmagenta:[139,0,139],darkolivegreen:[85,107,47],darkorange:[255,140,0],darkorchid:[153,50,204],darkred:[139,0,0],darksalmon:[233,150,122],darkviolet:[148,0,211],fuchsia:[255,0,255],gold:[255,215,0],green:[0,128,0],indigo:[75,0,130],khaki:[240,230,140],lightblue:[173,216,230],lightcyan:[224,255,255],lightgreen:[144,238,144],lightgrey:[211,211,211],lightpink:[255,182,193],lightyellow:[255,255,224],lime:[0,255,0],magenta:[255,0,255],maroon:[128,0,0],navy:[0,0,128],olive:[128,128,0],orange:[255,165,0],pink:[255,192,203],purple:[128,0,128],violet:[128,0,128],red:[255,0,0],silver:[192,192,192],white:[255,255,255],yellow:[255,255,0]}})(); + +// the actual Flot code +(function($) { + function Plot(placeholder, data_, options_, plugins) { + // data is on the form: + // [ series1, series2 ... ] + // where series is either just the data as [ [x1, y1], [x2, y2], ... ] + // or { data: [ [x1, y1], [x2, y2], ... ], label: "some label", ... } + + var series = [], + options = { + // the color theme used for graphs + colors: ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"], + legend: { + show: true, + noColumns: 1, // number of colums in legend table + labelFormatter: null, // fn: string -> string + labelBoxBorderColor: "#ccc", // border color for the little label boxes + container: null, // container (as jQuery object) to put legend in, null means default on top of graph + position: "ne", // position of default legend container within plot + margin: 5, // distance from grid edge to default legend container within plot + backgroundColor: null, // null means auto-detect + backgroundOpacity: 0.85 // set to 0 to avoid background + }, + xaxis: { + mode: null, // null or "time" + transform: null, // null or f: number -> number to transform axis + inverseTransform: null, // if transform is set, this should be the inverse function + min: null, // min. value to show, null means set automatically + max: null, // max. value to show, null means set automatically + autoscaleMargin: null, // margin in % to add if auto-setting min/max + ticks: null, // either [1, 3] or [[1, "a"], 3] or (fn: axis info -> ticks) or app. number of ticks for auto-ticks + tickFormatter: null, // fn: number -> string + labelWidth: null, // size of tick labels in pixels + labelHeight: null, + + // mode specific options + tickDecimals: null, // no. 
of decimals, null means auto + tickSize: null, // number or [number, "unit"] + minTickSize: null, // number or [number, "unit"] + monthNames: null, // list of names of months + timeformat: null, // format string to use + twelveHourClock: false // 12 or 24 time in time mode + }, + yaxis: { + autoscaleMargin: 0.02 + }, + x2axis: { + autoscaleMargin: null + }, + y2axis: { + autoscaleMargin: 0.02 + }, + series: { + points: { + show: false, + radius: 3, + lineWidth: 2, // in pixels + fill: true, + fillColor: "#ffffff" + }, + lines: { + // we don't put in show: false so we can see + // whether lines were actively disabled + lineWidth: 2, // in pixels + fill: false, + fillColor: null, + steps: false + }, + bars: { + show: false, + lineWidth: 2, // in pixels + barWidth: 1, // in units of the x axis + fill: true, + fillColor: null, + align: "left", // or "center" + horizontal: false // when horizontal, left is now top + }, + shadowSize: 3 + }, + grid: { + show: true, + aboveData: false, + color: "#545454", // primary color used for outline and labels + backgroundColor: null, // null for transparent, else color + tickColor: "rgba(0,0,0,0.15)", // color used for the ticks + labelMargin: 5, // in pixels + borderWidth: 2, // in pixels + borderColor: null, // set if different from the grid color + markings: null, // array of ranges or fn: axes -> array of ranges + markingsColor: "#f4f4f4", + markingsLineWidth: 2, + // interactive stuff + clickable: false, + hoverable: false, + autoHighlight: true, // highlight in case mouse is near + mouseActiveRadius: 10 // how far the mouse can be away to activate an item + }, + hooks: {} + }, + canvas = null, // the canvas for the plot itself + overlay = null, // canvas for interactive stuff on top of plot + eventHolder = null, // jQuery object that events should be bound to + ctx = null, octx = null, + axes = { xaxis: {}, yaxis: {}, x2axis: {}, y2axis: {} }, + plotOffset = { left: 0, right: 0, top: 0, bottom: 0}, + canvasWidth = 0, canvasHeight = 0, + plotWidth = 0, plotHeight = 0, + hooks = { + processOptions: [], + processRawData: [], + processDatapoints: [], + draw: [], + bindEvents: [], + drawOverlay: [] + }, + plot = this; + + // public functions + plot.setData = setData; + plot.setupGrid = setupGrid; + plot.draw = draw; + plot.getPlaceholder = function() { return placeholder; }; + plot.getCanvas = function() { return canvas; }; + plot.getPlotOffset = function() { return plotOffset; }; + plot.width = function () { return plotWidth; }; + plot.height = function () { return plotHeight; }; + plot.offset = function () { + var o = eventHolder.offset(); + o.left += plotOffset.left; + o.top += plotOffset.top; + return o; + }; + plot.getData = function() { return series; }; + plot.getAxes = function() { return axes; }; + plot.getOptions = function() { return options; }; + plot.highlight = highlight; + plot.unhighlight = unhighlight; + plot.triggerRedrawOverlay = triggerRedrawOverlay; + plot.pointOffset = function(point) { + return { left: parseInt(axisSpecToRealAxis(point, "xaxis").p2c(+point.x) + plotOffset.left), + top: parseInt(axisSpecToRealAxis(point, "yaxis").p2c(+point.y) + plotOffset.top) }; + }; + + + // public attributes + plot.hooks = hooks; + + // initialize + initPlugins(plot); + parseOptions(options_); + constructCanvas(); + setData(data_); + setupGrid(); + draw(); + bindEvents(); + + + function executeHooks(hook, args) { + args = [plot].concat(args); + for (var i = 0; i < hook.length; ++i) + hook[i].apply(this, args); + } + + function initPlugins() { + 
for (var i = 0; i < plugins.length; ++i) { + var p = plugins[i]; + p.init(plot); + if (p.options) + $.extend(true, options, p.options); + } + } + + function parseOptions(opts) { + $.extend(true, options, opts); + if (options.grid.borderColor == null) + options.grid.borderColor = options.grid.color; + // backwards compatibility, to be removed in future + if (options.xaxis.noTicks && options.xaxis.ticks == null) + options.xaxis.ticks = options.xaxis.noTicks; + if (options.yaxis.noTicks && options.yaxis.ticks == null) + options.yaxis.ticks = options.yaxis.noTicks; + if (options.grid.coloredAreas) + options.grid.markings = options.grid.coloredAreas; + if (options.grid.coloredAreasColor) + options.grid.markingsColor = options.grid.coloredAreasColor; + if (options.lines) + $.extend(true, options.series.lines, options.lines); + if (options.points) + $.extend(true, options.series.points, options.points); + if (options.bars) + $.extend(true, options.series.bars, options.bars); + if (options.shadowSize) + options.series.shadowSize = options.shadowSize; + + for (var n in hooks) + if (options.hooks[n] && options.hooks[n].length) + hooks[n] = hooks[n].concat(options.hooks[n]); + + executeHooks(hooks.processOptions, [options]); + } + + function setData(d) { + series = parseData(d); + fillInSeriesOptions(); + processData(); + } + + function parseData(d) { + var res = []; + for (var i = 0; i < d.length; ++i) { + var s = $.extend(true, {}, options.series); + + if (d[i].data) { + s.data = d[i].data; // move the data instead of deep-copy + delete d[i].data; + + $.extend(true, s, d[i]); + + d[i].data = s.data; + } + else + s.data = d[i]; + res.push(s); + } + + return res; + } + + function axisSpecToRealAxis(obj, attr) { + var a = obj[attr]; + if (!a || a == 1) + return axes[attr]; + if (typeof a == "number") + return axes[attr.charAt(0) + a + attr.slice(1)]; + return a; // assume it's OK + } + + function fillInSeriesOptions() { + var i; + + // collect what we already got of colors + var neededColors = series.length, + usedColors = [], + assignedColors = []; + for (i = 0; i < series.length; ++i) { + var sc = series[i].color; + if (sc != null) { + --neededColors; + if (typeof sc == "number") + assignedColors.push(sc); + else + usedColors.push($.color.parse(series[i].color)); + } + } + + // we might need to generate more colors if higher indices + // are assigned + for (i = 0; i < assignedColors.length; ++i) { + neededColors = Math.max(neededColors, assignedColors[i] + 1); + } + + // produce colors as needed + var colors = [], variation = 0; + i = 0; + while (colors.length < neededColors) { + var c; + if (options.colors.length == i) // check degenerate case + c = $.color.make(100, 100, 100); + else + c = $.color.parse(options.colors[i]); + + // vary color if needed + var sign = variation % 2 == 1 ? 
-1 : 1; + c.scale('rgb', 1 + sign * Math.ceil(variation / 2) * 0.2) + + // FIXME: if we're getting to close to something else, + // we should probably skip this one + colors.push(c); + + ++i; + if (i >= options.colors.length) { + i = 0; + ++variation; + } + } + + // fill in the options + var colori = 0, s; + for (i = 0; i < series.length; ++i) { + s = series[i]; + + // assign colors + if (s.color == null) { + s.color = colors[colori].toString(); + ++colori; + } + else if (typeof s.color == "number") + s.color = colors[s.color].toString(); + + // turn on lines automatically in case nothing is set + if (s.lines.show == null) { + var v, show = true; + for (v in s) + if (s[v].show) { + show = false; + break; + } + if (show) + s.lines.show = true; + } + + // setup axes + s.xaxis = axisSpecToRealAxis(s, "xaxis"); + s.yaxis = axisSpecToRealAxis(s, "yaxis"); + } + } + + function processData() { + var topSentry = Number.POSITIVE_INFINITY, + bottomSentry = Number.NEGATIVE_INFINITY, + i, j, k, m, length, + s, points, ps, x, y, axis, val, f, p; + + for (axis in axes) { + axes[axis].datamin = topSentry; + axes[axis].datamax = bottomSentry; + axes[axis].used = false; + } + + function updateAxis(axis, min, max) { + if (min < axis.datamin) + axis.datamin = min; + if (max > axis.datamax) + axis.datamax = max; + } + + for (i = 0; i < series.length; ++i) { + s = series[i]; + s.datapoints = { points: [] }; + + executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]); + } + + // first pass: clean and copy data + for (i = 0; i < series.length; ++i) { + s = series[i]; + + var data = s.data, format = s.datapoints.format; + + if (!format) { + format = []; + // find out how to copy + format.push({ x: true, number: true, required: true }); + format.push({ y: true, number: true, required: true }); + + if (s.bars.show) + format.push({ y: true, number: true, required: false, defaultValue: 0 }); + + s.datapoints.format = format; + } + + if (s.datapoints.pointsize != null) + continue; // already filled in + + if (s.datapoints.pointsize == null) + s.datapoints.pointsize = format.length; + + ps = s.datapoints.pointsize; + points = s.datapoints.points; + + insertSteps = s.lines.show && s.lines.steps; + s.xaxis.used = s.yaxis.used = true; + + for (j = k = 0; j < data.length; ++j, k += ps) { + p = data[j]; + + var nullify = p == null; + if (!nullify) { + for (m = 0; m < ps; ++m) { + val = p[m]; + f = format[m]; + + if (f) { + if (f.number && val != null) { + val = +val; // convert to number + if (isNaN(val)) + val = null; + } + + if (val == null) { + if (f.required) + nullify = true; + + if (f.defaultValue != null) + val = f.defaultValue; + } + } + + points[k + m] = val; + } + } + + if (nullify) { + for (m = 0; m < ps; ++m) { + val = points[k + m]; + if (val != null) { + f = format[m]; + // extract min/max info + if (f.x) + updateAxis(s.xaxis, val, val); + if (f.y) + updateAxis(s.yaxis, val, val); + } + points[k + m] = null; + } + } + else { + // a little bit of line specific stuff that + // perhaps shouldn't be here, but lacking + // better means... 
+ if (insertSteps && k > 0 + && points[k - ps] != null + && points[k - ps] != points[k] + && points[k - ps + 1] != points[k + 1]) { + // copy the point to make room for a middle point + for (m = 0; m < ps; ++m) + points[k + ps + m] = points[k + m]; + + // middle point has same y + points[k + 1] = points[k - ps + 1]; + + // we've added a point, better reflect that + k += ps; + } + } + } + } + + // give the hooks a chance to run + for (i = 0; i < series.length; ++i) { + s = series[i]; + + executeHooks(hooks.processDatapoints, [ s, s.datapoints]); + } + + // second pass: find datamax/datamin for auto-scaling + for (i = 0; i < series.length; ++i) { + s = series[i]; + points = s.datapoints.points, + ps = s.datapoints.pointsize; + + var xmin = topSentry, ymin = topSentry, + xmax = bottomSentry, ymax = bottomSentry; + + for (j = 0; j < points.length; j += ps) { + if (points[j] == null) + continue; + + for (m = 0; m < ps; ++m) { + val = points[j + m]; + f = format[m]; + if (!f) + continue; + + if (f.x) { + if (val < xmin) + xmin = val; + if (val > xmax) + xmax = val; + } + if (f.y) { + if (val < ymin) + ymin = val; + if (val > ymax) + ymax = val; + } + } + } + + if (s.bars.show) { + // make sure we got room for the bar on the dancing floor + var delta = s.bars.align == "left" ? 0 : -s.bars.barWidth/2; + if (s.bars.horizontal) { + ymin += delta; + ymax += delta + s.bars.barWidth; + } + else { + xmin += delta; + xmax += delta + s.bars.barWidth; + } + } + + updateAxis(s.xaxis, xmin, xmax); + updateAxis(s.yaxis, ymin, ymax); + } + + for (axis in axes) { + if (axes[axis].datamin == topSentry) + axes[axis].datamin = null; + if (axes[axis].datamax == bottomSentry) + axes[axis].datamax = null; + } + } + + function constructCanvas() { + function makeCanvas(width, height) { + var c = document.createElement('canvas'); + c.width = width; + c.height = height; + if ($.browser.msie) // excanvas hack + c = window.G_vmlCanvasManager.initElement(c); + return c; + } + + canvasWidth = placeholder.width(); + canvasHeight = placeholder.height(); + placeholder.html(""); // clear placeholder + if (placeholder.css("position") == 'static') + placeholder.css("position", "relative"); // for positioning labels and overlay + + if (canvasWidth <= 0 || canvasHeight <= 0) + throw "Invalid dimensions for plot, width = " + canvasWidth + ", height = " + canvasHeight; + + if ($.browser.msie) // excanvas hack + window.G_vmlCanvasManager.init_(document); // make sure everything is setup + + // the canvas + canvas = $(makeCanvas(canvasWidth, canvasHeight)).appendTo(placeholder).get(0); + ctx = canvas.getContext("2d"); + + // overlay canvas for interactive features + overlay = $(makeCanvas(canvasWidth, canvasHeight)).css({ position: 'absolute', left: 0, top: 0 }).appendTo(placeholder).get(0); + octx = overlay.getContext("2d"); + octx.stroke(); + } + + function bindEvents() { + // we include the canvas in the event holder too, because IE 7 + // sometimes has trouble with the stacking order + eventHolder = $([overlay, canvas]); + + // bind events + if (options.grid.hoverable) + eventHolder.mousemove(onMouseMove); + + if (options.grid.clickable) + eventHolder.click(onClick); + + executeHooks(hooks.bindEvents, [eventHolder]); + } + + function setupGrid() { + function setTransformationHelpers(axis, o) { + function identity(x) { return x; } + + var s, m, t = o.transform || identity, + it = o.inverseTransform; + + // add transformation helpers + if (axis == axes.xaxis || axis == axes.x2axis) { + // precompute how much the axis is scaling a 
point + // in canvas space + s = axis.scale = plotWidth / (t(axis.max) - t(axis.min)); + m = t(axis.min); + + // data point to canvas coordinate + if (t == identity) // slight optimization + axis.p2c = function (p) { return (p - m) * s; }; + else + axis.p2c = function (p) { return (t(p) - m) * s; }; + // canvas coordinate to data point + if (!it) + axis.c2p = function (c) { return m + c / s; }; + else + axis.c2p = function (c) { return it(m + c / s); }; + } + else { + s = axis.scale = plotHeight / (t(axis.max) - t(axis.min)); + m = t(axis.max); + + if (t == identity) + axis.p2c = function (p) { return (m - p) * s; }; + else + axis.p2c = function (p) { return (m - t(p)) * s; }; + if (!it) + axis.c2p = function (c) { return m - c / s; }; + else + axis.c2p = function (c) { return it(m - c / s); }; + } + } + + function measureLabels(axis, axisOptions) { + var i, labels = [], l; + + axis.labelWidth = axisOptions.labelWidth; + axis.labelHeight = axisOptions.labelHeight; + + if (axis == axes.xaxis || axis == axes.x2axis) { + // to avoid measuring the widths of the labels, we + // construct fixed-size boxes and put the labels inside + // them, we don't need the exact figures and the + // fixed-size box content is easy to center + if (axis.labelWidth == null) + axis.labelWidth = canvasWidth / (axis.ticks.length > 0 ? axis.ticks.length : 1); + + // measure x label heights + if (axis.labelHeight == null) { + labels = []; + for (i = 0; i < axis.ticks.length; ++i) { + l = axis.ticks[i].label; + if (l) + labels.push('
' + l + '
'); + } + + if (labels.length > 0) { + var dummyDiv = $('
' + + labels.join("") + '
').appendTo(placeholder); + axis.labelHeight = dummyDiv.height(); + dummyDiv.remove(); + } + } + } + else if (axis.labelWidth == null || axis.labelHeight == null) { + // calculate y label dimensions + for (i = 0; i < axis.ticks.length; ++i) { + l = axis.ticks[i].label; + if (l) + labels.push('
' + l + '
'); + } + + if (labels.length > 0) { + var dummyDiv = $('
' + + labels.join("") + '
').appendTo(placeholder); + if (axis.labelWidth == null) + axis.labelWidth = dummyDiv.width(); + if (axis.labelHeight == null) + axis.labelHeight = dummyDiv.find("div").height(); + dummyDiv.remove(); + } + + } + + if (axis.labelWidth == null) + axis.labelWidth = 0; + if (axis.labelHeight == null) + axis.labelHeight = 0; + } + + function setGridSpacing() { + // get the most space needed around the grid for things + // that may stick out + var maxOutset = options.grid.borderWidth; + for (i = 0; i < series.length; ++i) + maxOutset = Math.max(maxOutset, 2 * (series[i].points.radius + series[i].points.lineWidth/2)); + + plotOffset.left = plotOffset.right = plotOffset.top = plotOffset.bottom = maxOutset; + + var margin = options.grid.labelMargin + options.grid.borderWidth; + + if (axes.xaxis.labelHeight > 0) + plotOffset.bottom = Math.max(maxOutset, axes.xaxis.labelHeight + margin); + if (axes.yaxis.labelWidth > 0) + plotOffset.left = Math.max(maxOutset, axes.yaxis.labelWidth + margin); + if (axes.x2axis.labelHeight > 0) + plotOffset.top = Math.max(maxOutset, axes.x2axis.labelHeight + margin); + if (axes.y2axis.labelWidth > 0) + plotOffset.right = Math.max(maxOutset, axes.y2axis.labelWidth + margin); + + plotWidth = canvasWidth - plotOffset.left - plotOffset.right; + plotHeight = canvasHeight - plotOffset.bottom - plotOffset.top; + } + + var axis; + for (axis in axes) + setRange(axes[axis], options[axis]); + + if (options.grid.show) { + for (axis in axes) { + prepareTickGeneration(axes[axis], options[axis]); + setTicks(axes[axis], options[axis]); + measureLabels(axes[axis], options[axis]); + } + + setGridSpacing(); + } + else { + plotOffset.left = plotOffset.right = plotOffset.top = plotOffset.bottom = 0; + plotWidth = canvasWidth; + plotHeight = canvasHeight; + } + + for (axis in axes) + setTransformationHelpers(axes[axis], options[axis]); + + if (options.grid.show) + insertLabels(); + + insertLegend(); + } + + function setRange(axis, axisOptions) { + var min = +(axisOptions.min != null ? axisOptions.min : axis.datamin), + max = +(axisOptions.max != null ? axisOptions.max : axis.datamax), + delta = max - min; + + if (delta == 0.0) { + // degenerate case + var widen = max == 0 ? 
1 : 0.01; + + if (axisOptions.min == null) + min -= widen; + // alway widen max if we couldn't widen min to ensure we + // don't fall into min == max which doesn't work + if (axisOptions.max == null || axisOptions.min != null) + max += widen; + } + else { + // consider autoscaling + var margin = axisOptions.autoscaleMargin; + if (margin != null) { + if (axisOptions.min == null) { + min -= delta * margin; + // make sure we don't go below zero if all values + // are positive + if (min < 0 && axis.datamin != null && axis.datamin >= 0) + min = 0; + } + if (axisOptions.max == null) { + max += delta * margin; + if (max > 0 && axis.datamax != null && axis.datamax <= 0) + max = 0; + } + } + } + axis.min = min; + axis.max = max; + } + + function prepareTickGeneration(axis, axisOptions) { + // estimate number of ticks + var noTicks; + if (typeof axisOptions.ticks == "number" && axisOptions.ticks > 0) + noTicks = axisOptions.ticks; + else if (axis == axes.xaxis || axis == axes.x2axis) + // heuristic based on the model a*sqrt(x) fitted to + // some reasonable data points + noTicks = 0.3 * Math.sqrt(canvasWidth); + else + noTicks = 0.3 * Math.sqrt(canvasHeight); + + var delta = (axis.max - axis.min) / noTicks, + size, generator, unit, formatter, i, magn, norm; + + if (axisOptions.mode == "time") { + // pretty handling of time + + // map of app. size of time units in milliseconds + var timeUnitSize = { + "second": 1000, + "minute": 60 * 1000, + "hour": 60 * 60 * 1000, + "day": 24 * 60 * 60 * 1000, + "month": 30 * 24 * 60 * 60 * 1000, + "year": 365.2425 * 24 * 60 * 60 * 1000 + }; + + + // the allowed tick sizes, after 1 year we use + // an integer algorithm + var spec = [ + [1, "second"], [2, "second"], [5, "second"], [10, "second"], + [30, "second"], + [1, "minute"], [2, "minute"], [5, "minute"], [10, "minute"], + [30, "minute"], + [1, "hour"], [2, "hour"], [4, "hour"], + [8, "hour"], [12, "hour"], + [1, "day"], [2, "day"], [3, "day"], + [0.25, "month"], [0.5, "month"], [1, "month"], + [2, "month"], [3, "month"], [6, "month"], + [1, "year"] + ]; + + var minSize = 0; + if (axisOptions.minTickSize != null) { + if (typeof axisOptions.tickSize == "number") + minSize = axisOptions.tickSize; + else + minSize = axisOptions.minTickSize[0] * timeUnitSize[axisOptions.minTickSize[1]]; + } + + for (i = 0; i < spec.length - 1; ++i) + if (delta < (spec[i][0] * timeUnitSize[spec[i][1]] + + spec[i + 1][0] * timeUnitSize[spec[i + 1][1]]) / 2 + && spec[i][0] * timeUnitSize[spec[i][1]] >= minSize) + break; + size = spec[i][0]; + unit = spec[i][1]; + + // special-case the possibility of several years + if (unit == "year") { + magn = Math.pow(10, Math.floor(Math.log(delta / timeUnitSize.year) / Math.LN10)); + norm = (delta / timeUnitSize.year) / magn; + if (norm < 1.5) + size = 1; + else if (norm < 3) + size = 2; + else if (norm < 7.5) + size = 5; + else + size = 10; + + size *= magn; + } + + if (axisOptions.tickSize) { + size = axisOptions.tickSize[0]; + unit = axisOptions.tickSize[1]; + } + + generator = function(axis) { + var ticks = [], + tickSize = axis.tickSize[0], unit = axis.tickSize[1], + d = new Date(axis.min); + + var step = tickSize * timeUnitSize[unit]; + + if (unit == "second") + d.setUTCSeconds(floorInBase(d.getUTCSeconds(), tickSize)); + if (unit == "minute") + d.setUTCMinutes(floorInBase(d.getUTCMinutes(), tickSize)); + if (unit == "hour") + d.setUTCHours(floorInBase(d.getUTCHours(), tickSize)); + if (unit == "month") + d.setUTCMonth(floorInBase(d.getUTCMonth(), tickSize)); + if (unit == "year") + 
d.setUTCFullYear(floorInBase(d.getUTCFullYear(), tickSize)); + + // reset smaller components + d.setUTCMilliseconds(0); + if (step >= timeUnitSize.minute) + d.setUTCSeconds(0); + if (step >= timeUnitSize.hour) + d.setUTCMinutes(0); + if (step >= timeUnitSize.day) + d.setUTCHours(0); + if (step >= timeUnitSize.day * 4) + d.setUTCDate(1); + if (step >= timeUnitSize.year) + d.setUTCMonth(0); + + + var carry = 0, v = Number.NaN, prev; + do { + prev = v; + v = d.getTime(); + ticks.push({ v: v, label: axis.tickFormatter(v, axis) }); + if (unit == "month") { + if (tickSize < 1) { + // a bit complicated - we'll divide the month + // up but we need to take care of fractions + // so we don't end up in the middle of a day + d.setUTCDate(1); + var start = d.getTime(); + d.setUTCMonth(d.getUTCMonth() + 1); + var end = d.getTime(); + d.setTime(v + carry * timeUnitSize.hour + (end - start) * tickSize); + carry = d.getUTCHours(); + d.setUTCHours(0); + } + else + d.setUTCMonth(d.getUTCMonth() + tickSize); + } + else if (unit == "year") { + d.setUTCFullYear(d.getUTCFullYear() + tickSize); + } + else + d.setTime(v + step); + } while (v < axis.max && v != prev); + + return ticks; + }; + + formatter = function (v, axis) { + var d = new Date(v); + + // first check global format + if (axisOptions.timeformat != null) + return $.plot.formatDate(d, axisOptions.timeformat, axisOptions.monthNames); + + var t = axis.tickSize[0] * timeUnitSize[axis.tickSize[1]]; + var span = axis.max - axis.min; + var suffix = (axisOptions.twelveHourClock) ? " %p" : ""; + + if (t < timeUnitSize.minute) + fmt = "%h:%M:%S" + suffix; + else if (t < timeUnitSize.day) { + if (span < 2 * timeUnitSize.day) + fmt = "%h:%M" + suffix; + else + fmt = "%b %d %h:%M" + suffix; + } + else if (t < timeUnitSize.month) + fmt = "%b %d"; + else if (t < timeUnitSize.year) { + if (span < timeUnitSize.year) + fmt = "%b"; + else + fmt = "%b %y"; + } + else + fmt = "%y"; + + return $.plot.formatDate(d, fmt, axisOptions.monthNames); + }; + } + else { + // pretty rounding of base-10 numbers + var maxDec = axisOptions.tickDecimals; + var dec = -Math.floor(Math.log(delta) / Math.LN10); + if (maxDec != null && dec > maxDec) + dec = maxDec; + + magn = Math.pow(10, -dec); + norm = delta / magn; // norm is between 1.0 and 10.0 + + if (norm < 1.5) + size = 1; + else if (norm < 3) { + size = 2; + // special case for 2.5, requires an extra decimal + if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) { + size = 2.5; + ++dec; + } + } + else if (norm < 7.5) + size = 5; + else + size = 10; + + size *= magn; + + if (axisOptions.minTickSize != null && size < axisOptions.minTickSize) + size = axisOptions.minTickSize; + + if (axisOptions.tickSize != null) + size = axisOptions.tickSize; + + axis.tickDecimals = Math.max(0, (maxDec != null) ? maxDec : dec); + + generator = function (axis) { + var ticks = []; + + // spew out all possible ticks + var start = floorInBase(axis.min, axis.tickSize), + i = 0, v = Number.NaN, prev; + do { + prev = v; + v = start + i * axis.tickSize; + ticks.push({ v: v, label: axis.tickFormatter(v, axis) }); + ++i; + } while (v < axis.max && v != prev); + return ticks; + }; + + formatter = function (v, axis) { + return v.toFixed(axis.tickDecimals); + }; + } + + axis.tickSize = unit ? 
[size, unit] : size; + axis.tickGenerator = generator; + if ($.isFunction(axisOptions.tickFormatter)) + axis.tickFormatter = function (v, axis) { return "" + axisOptions.tickFormatter(v, axis); }; + else + axis.tickFormatter = formatter; + } + + function setTicks(axis, axisOptions) { + axis.ticks = []; + + if (!axis.used) + return; + + if (axisOptions.ticks == null) + axis.ticks = axis.tickGenerator(axis); + else if (typeof axisOptions.ticks == "number") { + if (axisOptions.ticks > 0) + axis.ticks = axis.tickGenerator(axis); + } + else if (axisOptions.ticks) { + var ticks = axisOptions.ticks; + + if ($.isFunction(ticks)) + // generate the ticks + ticks = ticks({ min: axis.min, max: axis.max }); + + // clean up the user-supplied ticks, copy them over + var i, v; + for (i = 0; i < ticks.length; ++i) { + var label = null; + var t = ticks[i]; + if (typeof t == "object") { + v = t[0]; + if (t.length > 1) + label = t[1]; + } + else + v = t; + if (label == null) + label = axis.tickFormatter(v, axis); + axis.ticks[i] = { v: v, label: label }; + } + } + + if (axisOptions.autoscaleMargin != null && axis.ticks.length > 0) { + // snap to ticks + if (axisOptions.min == null) + axis.min = Math.min(axis.min, axis.ticks[0].v); + if (axisOptions.max == null && axis.ticks.length > 1) + axis.max = Math.max(axis.max, axis.ticks[axis.ticks.length - 1].v); + } + } + + function draw() { + ctx.clearRect(0, 0, canvasWidth, canvasHeight); + + var grid = options.grid; + + if (grid.show && !grid.aboveData) + drawGrid(); + + for (var i = 0; i < series.length; ++i) + drawSeries(series[i]); + + executeHooks(hooks.draw, [ctx]); + + if (grid.show && grid.aboveData) + drawGrid(); + } + + function extractRange(ranges, coord) { + var firstAxis = coord + "axis", + secondaryAxis = coord + "2axis", + axis, from, to, reverse; + + if (ranges[firstAxis]) { + axis = axes[firstAxis]; + from = ranges[firstAxis].from; + to = ranges[firstAxis].to; + } + else if (ranges[secondaryAxis]) { + axis = axes[secondaryAxis]; + from = ranges[secondaryAxis].from; + to = ranges[secondaryAxis].to; + } + else { + // backwards-compat stuff - to be removed in future + axis = axes[firstAxis]; + from = ranges[coord + "1"]; + to = ranges[coord + "2"]; + } + + // auto-reverse as an added bonus + if (from != null && to != null && from > to) + return { from: to, to: from, axis: axis }; + + return { from: from, to: to, axis: axis }; + } + + function drawGrid() { + var i; + + ctx.save(); + ctx.translate(plotOffset.left, plotOffset.top); + + // draw background, if any + if (options.grid.backgroundColor) { + ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, "rgba(255, 255, 255, 0)"); + ctx.fillRect(0, 0, plotWidth, plotHeight); + } + + // draw markings + var markings = options.grid.markings; + if (markings) { + if ($.isFunction(markings)) + // xmin etc. 
are backwards-compatible, to be removed in future + markings = markings({ xmin: axes.xaxis.min, xmax: axes.xaxis.max, ymin: axes.yaxis.min, ymax: axes.yaxis.max, xaxis: axes.xaxis, yaxis: axes.yaxis, x2axis: axes.x2axis, y2axis: axes.y2axis }); + + for (i = 0; i < markings.length; ++i) { + var m = markings[i], + xrange = extractRange(m, "x"), + yrange = extractRange(m, "y"); + + // fill in missing + if (xrange.from == null) + xrange.from = xrange.axis.min; + if (xrange.to == null) + xrange.to = xrange.axis.max; + if (yrange.from == null) + yrange.from = yrange.axis.min; + if (yrange.to == null) + yrange.to = yrange.axis.max; + + // clip + if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max || + yrange.to < yrange.axis.min || yrange.from > yrange.axis.max) + continue; + + xrange.from = Math.max(xrange.from, xrange.axis.min); + xrange.to = Math.min(xrange.to, xrange.axis.max); + yrange.from = Math.max(yrange.from, yrange.axis.min); + yrange.to = Math.min(yrange.to, yrange.axis.max); + + if (xrange.from == xrange.to && yrange.from == yrange.to) + continue; + + // then draw + xrange.from = xrange.axis.p2c(xrange.from); + xrange.to = xrange.axis.p2c(xrange.to); + yrange.from = yrange.axis.p2c(yrange.from); + yrange.to = yrange.axis.p2c(yrange.to); + + if (xrange.from == xrange.to || yrange.from == yrange.to) { + // draw line + ctx.beginPath(); + ctx.strokeStyle = m.color || options.grid.markingsColor; + ctx.lineWidth = m.lineWidth || options.grid.markingsLineWidth; + //ctx.moveTo(Math.floor(xrange.from), yrange.from); + //ctx.lineTo(Math.floor(xrange.to), yrange.to); + ctx.moveTo(xrange.from, yrange.from); + ctx.lineTo(xrange.to, yrange.to); + ctx.stroke(); + } + else { + // fill area + ctx.fillStyle = m.color || options.grid.markingsColor; + ctx.fillRect(xrange.from, yrange.to, + xrange.to - xrange.from, + yrange.from - yrange.to); + } + } + } + + // draw the inner grid + ctx.lineWidth = 1; + ctx.strokeStyle = options.grid.tickColor; + ctx.beginPath(); + var v, axis = axes.xaxis; + for (i = 0; i < axis.ticks.length; ++i) { + v = axis.ticks[i].v; + if (v <= axis.min || v >= axes.xaxis.max) + continue; // skip those lying on the axes + + ctx.moveTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, 0); + ctx.lineTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, plotHeight); + } + + axis = axes.yaxis; + for (i = 0; i < axis.ticks.length; ++i) { + v = axis.ticks[i].v; + if (v <= axis.min || v >= axis.max) + continue; + + ctx.moveTo(0, Math.floor(axis.p2c(v)) + ctx.lineWidth/2); + ctx.lineTo(plotWidth, Math.floor(axis.p2c(v)) + ctx.lineWidth/2); + } + + axis = axes.x2axis; + for (i = 0; i < axis.ticks.length; ++i) { + v = axis.ticks[i].v; + if (v <= axis.min || v >= axis.max) + continue; + + ctx.moveTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, -5); + ctx.lineTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, 5); + } + + axis = axes.y2axis; + for (i = 0; i < axis.ticks.length; ++i) { + v = axis.ticks[i].v; + if (v <= axis.min || v >= axis.max) + continue; + + ctx.moveTo(plotWidth-5, Math.floor(axis.p2c(v)) + ctx.lineWidth/2); + ctx.lineTo(plotWidth+5, Math.floor(axis.p2c(v)) + ctx.lineWidth/2); + } + + ctx.stroke(); + + if (options.grid.borderWidth) { + // draw border + var bw = options.grid.borderWidth; + ctx.lineWidth = bw; + ctx.strokeStyle = options.grid.borderColor; + ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw); + } + + ctx.restore(); + } + + function insertLabels() { + placeholder.find(".tickLabels").remove(); + + var html = ['
']; + + function addLabels(axis, labelGenerator) { + for (var i = 0; i < axis.ticks.length; ++i) { + var tick = axis.ticks[i]; + if (!tick.label || tick.v < axis.min || tick.v > axis.max) + continue; + html.push(labelGenerator(tick, axis)); + } + } + + var margin = options.grid.labelMargin + options.grid.borderWidth; + + addLabels(axes.xaxis, function (tick, axis) { + return '
' + tick.label + "
"; + }); + + + addLabels(axes.yaxis, function (tick, axis) { + return '
' + tick.label + "
"; + }); + + addLabels(axes.x2axis, function (tick, axis) { + return '
' + tick.label + "
"; + }); + + addLabels(axes.y2axis, function (tick, axis) { + return '
' + tick.label + "
"; + }); + + html.push('
'); + + placeholder.append(html.join("")); + } + + function drawSeries(series) { + if (series.lines.show) + drawSeriesLines(series); + if (series.bars.show) + drawSeriesBars(series); + if (series.points.show) + drawSeriesPoints(series); + } + + function drawSeriesLines(series) { + function plotLine(datapoints, xoffset, yoffset, axisx, axisy) { + var points = datapoints.points, + ps = datapoints.pointsize, + prevx = null, prevy = null; + + ctx.beginPath(); + for (var i = ps; i < points.length; i += ps) { + var x1 = points[i - ps], y1 = points[i - ps + 1], + x2 = points[i], y2 = points[i + 1]; + + if (x1 == null || x2 == null) + continue; + + // clip with ymin + if (y1 <= y2 && y1 < axisy.min) { + if (y2 < axisy.min) + continue; // line segment is outside + // compute new intersection point + x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; + y1 = axisy.min; + } + else if (y2 <= y1 && y2 < axisy.min) { + if (y1 < axisy.min) + continue; + x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; + y2 = axisy.min; + } + + // clip with ymax + if (y1 >= y2 && y1 > axisy.max) { + if (y2 > axisy.max) + continue; + x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; + y1 = axisy.max; + } + else if (y2 >= y1 && y2 > axisy.max) { + if (y1 > axisy.max) + continue; + x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; + y2 = axisy.max; + } + + // clip with xmin + if (x1 <= x2 && x1 < axisx.min) { + if (x2 < axisx.min) + continue; + y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; + x1 = axisx.min; + } + else if (x2 <= x1 && x2 < axisx.min) { + if (x1 < axisx.min) + continue; + y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; + x2 = axisx.min; + } + + // clip with xmax + if (x1 >= x2 && x1 > axisx.max) { + if (x2 > axisx.max) + continue; + y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; + x1 = axisx.max; + } + else if (x2 >= x1 && x2 > axisx.max) { + if (x1 > axisx.max) + continue; + y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; + x2 = axisx.max; + } + + if (x1 != prevx || y1 != prevy) + ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset); + + prevx = x2; + prevy = y2; + ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset); + } + ctx.stroke(); + } + + function plotLineArea(datapoints, axisx, axisy) { + var points = datapoints.points, + ps = datapoints.pointsize, + bottom = Math.min(Math.max(0, axisy.min), axisy.max), + top, lastX = 0, areaOpen = false; + + for (var i = ps; i < points.length; i += ps) { + var x1 = points[i - ps], y1 = points[i - ps + 1], + x2 = points[i], y2 = points[i + 1]; + + if (areaOpen && x1 != null && x2 == null) { + // close area + ctx.lineTo(axisx.p2c(lastX), axisy.p2c(bottom)); + ctx.fill(); + areaOpen = false; + continue; + } + + if (x1 == null || x2 == null) + continue; + + // clip x values + + // clip with xmin + if (x1 <= x2 && x1 < axisx.min) { + if (x2 < axisx.min) + continue; + y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; + x1 = axisx.min; + } + else if (x2 <= x1 && x2 < axisx.min) { + if (x1 < axisx.min) + continue; + y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1; + x2 = axisx.min; + } + + // clip with xmax + if (x1 >= x2 && x1 > axisx.max) { + if (x2 > axisx.max) + continue; + y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; + x1 = axisx.max; + } + else if (x2 >= x1 && x2 > axisx.max) { + if (x1 > axisx.max) + continue; + y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1; + x2 = axisx.max; + } + + if (!areaOpen) { + // open area + ctx.beginPath(); + ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom)); + areaOpen = true; + } + + // 
now first check the case where both is outside + if (y1 >= axisy.max && y2 >= axisy.max) { + ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max)); + ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max)); + lastX = x2; + continue; + } + else if (y1 <= axisy.min && y2 <= axisy.min) { + ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min)); + ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min)); + lastX = x2; + continue; + } + + // else it's a bit more complicated, there might + // be two rectangles and two triangles we need to fill + // in; to find these keep track of the current x values + var x1old = x1, x2old = x2; + + // and clip the y values, without shortcutting + + // clip with ymin + if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) { + x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; + y1 = axisy.min; + } + else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) { + x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1; + y2 = axisy.min; + } + + // clip with ymax + if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) { + x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; + y1 = axisy.max; + } + else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) { + x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1; + y2 = axisy.max; + } + + + // if the x value was changed we got a rectangle + // to fill + if (x1 != x1old) { + if (y1 <= axisy.min) + top = axisy.min; + else + top = axisy.max; + + ctx.lineTo(axisx.p2c(x1old), axisy.p2c(top)); + ctx.lineTo(axisx.p2c(x1), axisy.p2c(top)); + } + + // fill the triangles + ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1)); + ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2)); + + // fill the other rectangle if it's there + if (x2 != x2old) { + if (y2 <= axisy.min) + top = axisy.min; + else + top = axisy.max; + + ctx.lineTo(axisx.p2c(x2), axisy.p2c(top)); + ctx.lineTo(axisx.p2c(x2old), axisy.p2c(top)); + } + + lastX = Math.max(x2, x2old); + } + + if (areaOpen) { + ctx.lineTo(axisx.p2c(lastX), axisy.p2c(bottom)); + ctx.fill(); + } + } + + ctx.save(); + ctx.translate(plotOffset.left, plotOffset.top); + ctx.lineJoin = "round"; + + var lw = series.lines.lineWidth, + sw = series.shadowSize; + // FIXME: consider another form of shadow when filling is turned on + if (lw > 0 && sw > 0) { + // draw shadow as a thick and thin line with transparency + ctx.lineWidth = sw; + ctx.strokeStyle = "rgba(0,0,0,0.1)"; + // position shadow at angle from the mid of line + var angle = Math.PI/18; + plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/2), Math.cos(angle) * (lw/2 + sw/2), series.xaxis, series.yaxis); + ctx.lineWidth = sw/2; + plotLine(series.datapoints, Math.sin(angle) * (lw/2 + sw/4), Math.cos(angle) * (lw/2 + sw/4), series.xaxis, series.yaxis); + } + + ctx.lineWidth = lw; + ctx.strokeStyle = series.color; + var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight); + if (fillStyle) { + ctx.fillStyle = fillStyle; + plotLineArea(series.datapoints, series.xaxis, series.yaxis); + } + + if (lw > 0) + plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis); + ctx.restore(); + } + + function drawSeriesPoints(series) { + function plotPoints(datapoints, radius, fillStyle, offset, circumference, axisx, axisy) { + var points = datapoints.points, ps = datapoints.pointsize; + + for (var i = 0; i < points.length; i += ps) { + var x = points[i], y = points[i + 1]; + if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) + continue; + + ctx.beginPath(); + ctx.arc(axisx.p2c(x), axisy.p2c(y) + offset, radius, 0, circumference, false); + if (fillStyle) { + 
ctx.fillStyle = fillStyle; + ctx.fill(); + } + ctx.stroke(); + } + } + + ctx.save(); + ctx.translate(plotOffset.left, plotOffset.top); + + var lw = series.lines.lineWidth, + sw = series.shadowSize, + radius = series.points.radius; + if (lw > 0 && sw > 0) { + // draw shadow in two steps + var w = sw / 2; + ctx.lineWidth = w; + ctx.strokeStyle = "rgba(0,0,0,0.1)"; + plotPoints(series.datapoints, radius, null, w + w/2, Math.PI, + series.xaxis, series.yaxis); + + ctx.strokeStyle = "rgba(0,0,0,0.2)"; + plotPoints(series.datapoints, radius, null, w/2, Math.PI, + series.xaxis, series.yaxis); + } + + ctx.lineWidth = lw; + ctx.strokeStyle = series.color; + plotPoints(series.datapoints, radius, + getFillStyle(series.points, series.color), 0, 2 * Math.PI, + series.xaxis, series.yaxis); + ctx.restore(); + } + + function drawBar(x, y, b, barLeft, barRight, offset, fillStyleCallback, axisx, axisy, c, horizontal) { + var left, right, bottom, top, + drawLeft, drawRight, drawTop, drawBottom, + tmp; + + if (horizontal) { + drawBottom = drawRight = drawTop = true; + drawLeft = false; + left = b; + right = x; + top = y + barLeft; + bottom = y + barRight; + + // account for negative bars + if (right < left) { + tmp = right; + right = left; + left = tmp; + drawLeft = true; + drawRight = false; + } + } + else { + drawLeft = drawRight = drawTop = true; + drawBottom = false; + left = x + barLeft; + right = x + barRight; + bottom = b; + top = y; + + // account for negative bars + if (top < bottom) { + tmp = top; + top = bottom; + bottom = tmp; + drawBottom = true; + drawTop = false; + } + } + + // clip + if (right < axisx.min || left > axisx.max || + top < axisy.min || bottom > axisy.max) + return; + + if (left < axisx.min) { + left = axisx.min; + drawLeft = false; + } + + if (right > axisx.max) { + right = axisx.max; + drawRight = false; + } + + if (bottom < axisy.min) { + bottom = axisy.min; + drawBottom = false; + } + + if (top > axisy.max) { + top = axisy.max; + drawTop = false; + } + + left = axisx.p2c(left); + bottom = axisy.p2c(bottom); + right = axisx.p2c(right); + top = axisy.p2c(top); + + // fill the bar + if (fillStyleCallback) { + c.beginPath(); + c.moveTo(left, bottom); + c.lineTo(left, top); + c.lineTo(right, top); + c.lineTo(right, bottom); + c.fillStyle = fillStyleCallback(bottom, top); + c.fill(); + } + + // draw outline + if (drawLeft || drawRight || drawTop || drawBottom) { + c.beginPath(); + + // FIXME: inline moveTo is buggy with excanvas + c.moveTo(left, bottom + offset); + if (drawLeft) + c.lineTo(left, top + offset); + else + c.moveTo(left, top + offset); + if (drawTop) + c.lineTo(right, top + offset); + else + c.moveTo(right, top + offset); + if (drawRight) + c.lineTo(right, bottom + offset); + else + c.moveTo(right, bottom + offset); + if (drawBottom) + c.lineTo(left, bottom + offset); + else + c.moveTo(left, bottom + offset); + c.stroke(); + } + } + + function drawSeriesBars(series) { + function plotBars(datapoints, barLeft, barRight, offset, fillStyleCallback, axisx, axisy) { + var points = datapoints.points, ps = datapoints.pointsize; + + for (var i = 0; i < points.length; i += ps) { + if (points[i] == null) + continue; + drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, offset, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal); + } + } + + ctx.save(); + ctx.translate(plotOffset.left, plotOffset.top); + + // FIXME: figure out a way to add shadows (for instance along the right edge) + ctx.lineWidth = series.bars.lineWidth; + ctx.strokeStyle = series.color; + 
var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2; + var fillStyleCallback = series.bars.fill ? function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null; + plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, 0, fillStyleCallback, series.xaxis, series.yaxis); + ctx.restore(); + } + + function getFillStyle(filloptions, seriesColor, bottom, top) { + var fill = filloptions.fill; + if (!fill) + return null; + + if (filloptions.fillColor) + return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor); + + var c = $.color.parse(seriesColor); + c.a = typeof fill == "number" ? fill : 0.4; + c.normalize(); + return c.toString(); + } + + function insertLegend() { + placeholder.find(".legend").remove(); + + if (!options.legend.show) + return; + + var fragments = [], rowStarted = false, + lf = options.legend.labelFormatter, s, label; + for (i = 0; i < series.length; ++i) { + s = series[i]; + label = s.label; + if (!label) + continue; + + if (i % options.legend.noColumns == 0) { + if (rowStarted) + fragments.push(''); + fragments.push(''); + rowStarted = true; + } + + if (lf) + label = lf(label, s); + + fragments.push( + '
' + + '' + label + ''); + } + if (rowStarted) + fragments.push(''); + + if (fragments.length == 0) + return; + + var table = '' + fragments.join("") + '
'; + if (options.legend.container != null) + $(options.legend.container).html(table); + else { + var pos = "", + p = options.legend.position, + m = options.legend.margin; + if (m[0] == null) + m = [m, m]; + if (p.charAt(0) == "n") + pos += 'top:' + (m[1] + plotOffset.top) + 'px;'; + else if (p.charAt(0) == "s") + pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;'; + if (p.charAt(1) == "e") + pos += 'right:' + (m[0] + plotOffset.right) + 'px;'; + else if (p.charAt(1) == "w") + pos += 'left:' + (m[0] + plotOffset.left) + 'px;'; + var legend = $('
' + table.replace('style="', 'style="position:absolute;' + pos +';') + '
').appendTo(placeholder); + if (options.legend.backgroundOpacity != 0.0) { + // put in the transparent background + // separately to avoid blended labels and + // label boxes + var c = options.legend.backgroundColor; + if (c == null) { + c = options.grid.backgroundColor; + if (c && typeof c == "string") + c = $.color.parse(c); + else + c = $.color.extract(legend, 'background-color'); + c.a = 1; + c = c.toString(); + } + var div = legend.children(); + $('
').prependTo(legend).css('opacity', options.legend.backgroundOpacity); + } + } + } + + + // interactive features + + var highlights = [], + redrawTimeout = null; + + // returns the data item the mouse is over, or null if none is found + function findNearbyItem(mouseX, mouseY, seriesFilter) { + var maxDistance = options.grid.mouseActiveRadius, + smallestDistance = maxDistance * maxDistance + 1, + item = null, foundPoint = false, i, j; + + for (i = 0; i < series.length; ++i) { + if (!seriesFilter(series[i])) + continue; + + var s = series[i], + axisx = s.xaxis, + axisy = s.yaxis, + points = s.datapoints.points, + ps = s.datapoints.pointsize, + mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster + my = axisy.c2p(mouseY), + maxx = maxDistance / axisx.scale, + maxy = maxDistance / axisy.scale; + + if (s.lines.show || s.points.show) { + for (j = 0; j < points.length; j += ps) { + var x = points[j], y = points[j + 1]; + if (x == null) + continue; + + // For points and lines, the cursor must be within a + // certain distance to the data point + if (x - mx > maxx || x - mx < -maxx || + y - my > maxy || y - my < -maxy) + continue; + + // We have to calculate distances in pixels, not in + // data units, because the scales of the axes may be different + var dx = Math.abs(axisx.p2c(x) - mouseX), + dy = Math.abs(axisy.p2c(y) - mouseY), + dist = dx * dx + dy * dy; // we save the sqrt + + // use <= to ensure last point takes precedence + // (last generally means on top of) + if (dist <= smallestDistance) { + smallestDistance = dist; + item = [i, j / ps]; + } + } + } + + if (s.bars.show && !item) { // no other point can be nearby + var barLeft = s.bars.align == "left" ? 0 : -s.bars.barWidth/2, + barRight = barLeft + s.bars.barWidth; + + for (j = 0; j < points.length; j += ps) { + var x = points[j], y = points[j + 1], b = points[j + 2]; + if (x == null) + continue; + + // for a bar graph, the cursor must be inside the bar + if (series[i].bars.horizontal ? 
+ (mx <= Math.max(b, x) && mx >= Math.min(b, x) && + my >= y + barLeft && my <= y + barRight) : + (mx >= x + barLeft && mx <= x + barRight && + my >= Math.min(b, y) && my <= Math.max(b, y))) + item = [i, j / ps]; + } + } + } + + if (item) { + i = item[0]; + j = item[1]; + ps = series[i].datapoints.pointsize; + + return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps), + dataIndex: j, + series: series[i], + seriesIndex: i }; + } + + return null; + } + + function onMouseMove(e) { + if (options.grid.hoverable) + triggerClickHoverEvent("plothover", e, + function (s) { return s["hoverable"] != false; }); + } + + function onClick(e) { + triggerClickHoverEvent("plotclick", e, + function (s) { return s["clickable"] != false; }); + } + + // trigger click or hover event (they send the same parameters + // so we share their code) + function triggerClickHoverEvent(eventname, event, seriesFilter) { + var offset = eventHolder.offset(), + pos = { pageX: event.pageX, pageY: event.pageY }, + canvasX = event.pageX - offset.left - plotOffset.left, + canvasY = event.pageY - offset.top - plotOffset.top; + + if (axes.xaxis.used) + pos.x = axes.xaxis.c2p(canvasX); + if (axes.yaxis.used) + pos.y = axes.yaxis.c2p(canvasY); + if (axes.x2axis.used) + pos.x2 = axes.x2axis.c2p(canvasX); + if (axes.y2axis.used) + pos.y2 = axes.y2axis.c2p(canvasY); + + var item = findNearbyItem(canvasX, canvasY, seriesFilter); + + if (item) { + // fill in mouse pos for any listeners out there + item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left); + item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top); + } + + if (options.grid.autoHighlight) { + // clear auto-highlights + for (var i = 0; i < highlights.length; ++i) { + var h = highlights[i]; + if (h.auto == eventname && + !(item && h.series == item.series && h.point == item.datapoint)) + unhighlight(h.series, h.point); + } + + if (item) + highlight(item.series, item.datapoint, eventname); + } + + placeholder.trigger(eventname, [ pos, item ]); + } + + function triggerRedrawOverlay() { + if (!redrawTimeout) + redrawTimeout = setTimeout(drawOverlay, 30); + } + + function drawOverlay() { + redrawTimeout = null; + + // draw highlights + octx.save(); + octx.clearRect(0, 0, canvasWidth, canvasHeight); + octx.translate(plotOffset.left, plotOffset.top); + + var i, hi; + for (i = 0; i < highlights.length; ++i) { + hi = highlights[i]; + + if (hi.series.bars.show) + drawBarHighlight(hi.series, hi.point); + else + drawPointHighlight(hi.series, hi.point); + } + octx.restore(); + + executeHooks(hooks.drawOverlay, [octx]); + } + + function highlight(s, point, auto) { + if (typeof s == "number") + s = series[s]; + + if (typeof point == "number") + point = s.data[point]; + + var i = indexOfHighlight(s, point); + if (i == -1) { + highlights.push({ series: s, point: point, auto: auto }); + + triggerRedrawOverlay(); + } + else if (!auto) + highlights[i].auto = false; + } + + function unhighlight(s, point) { + if (s == null && point == null) { + highlights = []; + triggerRedrawOverlay(); + } + + if (typeof s == "number") + s = series[s]; + + if (typeof point == "number") + point = s.data[point]; + + var i = indexOfHighlight(s, point); + if (i != -1) { + highlights.splice(i, 1); + + triggerRedrawOverlay(); + } + } + + function indexOfHighlight(s, p) { + for (var i = 0; i < highlights.length; ++i) { + var h = highlights[i]; + if (h.series == s && h.point[0] == p[0] + && h.point[1] == p[1]) + return i; + } + 
return -1; + } + + function drawPointHighlight(series, point) { + var x = point[0], y = point[1], + axisx = series.xaxis, axisy = series.yaxis; + + if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max) + return; + + var pointRadius = series.points.radius + series.points.lineWidth / 2; + octx.lineWidth = pointRadius; + octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString(); + var radius = 1.5 * pointRadius; + octx.beginPath(); + octx.arc(axisx.p2c(x), axisy.p2c(y), radius, 0, 2 * Math.PI, false); + octx.stroke(); + } + + function drawBarHighlight(series, point) { + octx.lineWidth = series.bars.lineWidth; + octx.strokeStyle = $.color.parse(series.color).scale('a', 0.5).toString(); + var fillStyle = $.color.parse(series.color).scale('a', 0.5).toString(); + var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2; + drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth, + 0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal); + } + + function getColorOrGradient(spec, bottom, top, defaultColor) { + if (typeof spec == "string") + return spec; + else { + // assume this is a gradient spec; IE currently only + // supports a simple vertical gradient properly, so that's + // what we support too + var gradient = ctx.createLinearGradient(0, top, 0, bottom); + + for (var i = 0, l = spec.colors.length; i < l; ++i) { + var c = spec.colors[i]; + if (typeof c != "string") { + c = $.color.parse(defaultColor).scale('rgb', c.brightness); + c.a *= c.opacity; + c = c.toString(); + } + gradient.addColorStop(i / (l - 1), c); + } + + return gradient; + } + } + } + + $.plot = function(placeholder, data, options) { + var plot = new Plot($(placeholder), data, options, $.plot.plugins); + /*var t0 = new Date(); + var t1 = new Date(); + var tstr = "time used (msecs): " + (t1.getTime() - t0.getTime()) + if (window.console) + console.log(tstr); + else + alert(tstr);*/ + return plot; + }; + + $.plot.plugins = []; + + // returns a string with the date d formatted according to fmt + $.plot.formatDate = function(d, fmt, monthNames) { + var leftPad = function(n) { + n = "" + n; + return n.length == 1 ? "0" + n : n; + }; + + var r = []; + var escape = false; + var hours = d.getUTCHours(); + var isAM = hours < 12; + if (monthNames == null) + monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]; + + if (fmt.search(/%p|%P/) != -1) { + if (hours > 12) { + hours = hours - 12; + } else if (hours == 0) { + hours = 12; + } + } + for (var i = 0; i < fmt.length; ++i) { + var c = fmt.charAt(i); + + if (escape) { + switch (c) { + case 'h': c = "" + hours; break; + case 'H': c = leftPad(hours); break; + case 'M': c = leftPad(d.getUTCMinutes()); break; + case 'S': c = leftPad(d.getUTCSeconds()); break; + case 'd': c = "" + d.getUTCDate(); break; + case 'm': c = "" + (d.getUTCMonth() + 1); break; + case 'y': c = "" + d.getUTCFullYear(); break; + case 'b': c = "" + monthNames[d.getUTCMonth()]; break; + case 'p': c = (isAM) ? ("" + "am") : ("" + "pm"); break; + case 'P': c = (isAM) ? 
("" + "AM") : ("" + "PM"); break; + } + r.push(c); + escape = false; + } + else { + if (c == "%") + escape = true; + else + r.push(c); + } + } + return r.join(""); + }; + + // round to nearby lower multiple of base + function floorInBase(n, base) { + return base * Math.floor(n / base); + } + +})(jQuery); diff --git a/r2/r2/public/static/js/jquery.lazyload.js b/r2/r2/public/static/js/jquery.lazyload.js new file mode 100644 index 0000000000..276e9882bb --- /dev/null +++ b/r2/r2/public/static/js/jquery.lazyload.js @@ -0,0 +1,164 @@ +/* + * Lazy Load - jQuery plugin for lazy loading images + * + * Copyright (c) 2007-2009 Mika Tuupola + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/mit-license.php + * + * Project home: + * http://www.appelsiini.net/projects/lazyload + * + * Version: 1.5.0 + * + */ +(function($) { + + $.fn.lazyload = function(options) { + var settings = { + threshold : 0, + failurelimit : 0, + event : "scroll", + effect : "show", + container : window + }; + + if(options) { + $.extend(settings, options); + } + + /* Fire one scroll event per scroll. Not one scroll event per image. */ + var elements = this; + if ("scroll" == settings.event) { + $(settings.container).bind("scroll", function(event) { + + var counter = 0; + elements.each(function() { + if ($.abovethetop(this, settings) || + $.leftofbegin(this, settings)) { + /* Nothing. */ + } else if (!$.belowthefold(this, settings) && + !$.rightoffold(this, settings)) { + $(this).trigger("appear"); + } else { + if (counter++ > settings.failurelimit) { + return false; + } + } + }); + /* Remove image from array so it is not looped next time. */ + var temp = $.grep(elements, function(element) { + return !element.loaded; + }); + elements = $(temp); + }); + } + + this.each(function() { + var self = this; + + /* Save original only if it is not defined in HTML. */ + if (undefined == $(self).attr("original")) { + $(self).attr("original", $(self).attr("src")); + } + + if ("scroll" != settings.event || + undefined == $(self).attr("src") || + settings.placeholder == $(self).attr("src") || + ($.abovethetop(self, settings) || + $.leftofbegin(self, settings) || + $.belowthefold(self, settings) || + $.rightoffold(self, settings) )) { + + if (settings.placeholder) { + $(self).attr("src", settings.placeholder); + } else { + $(self).removeAttr("src"); + } + self.loaded = false; + } else { + self.loaded = true; + } + + /* When appear is triggered load original image. */ + $(self).one("appear", function() { + if (!this.loaded) { + $("") + .bind("load", function() { + $(self) + .hide() + .attr("src", $(self).attr("original")) + [settings.effect](settings.effectspeed); + self.loaded = true; + }) + .attr("src", $(self).attr("original")); + }; + }); + + /* When wanted event is triggered load original image */ + /* by triggering appear. */ + if ("scroll" != settings.event) { + $(self).bind(settings.event, function(event) { + if (!self.loaded) { + $(self).trigger("appear"); + } + }); + } + }); + + /* Force initial check if images should appear. */ + $(settings.container).trigger(settings.event); + + return this; + + }; + + /* Convenience methods in jQuery namespace. 
*/ + /* Use as $.belowthefold(element, {threshold : 100, container : window}) */ + + $.belowthefold = function(element, settings) { + if (settings.container === undefined || settings.container === window) { + var fold = $(window).height() + $(window).scrollTop(); + } else { + var fold = $(settings.container).offset().top + $(settings.container).height(); + } + return fold <= $(element).offset().top - settings.threshold; + }; + + $.rightoffold = function(element, settings) { + if (settings.container === undefined || settings.container === window) { + var fold = $(window).width() + $(window).scrollLeft(); + } else { + var fold = $(settings.container).offset().left + $(settings.container).width(); + } + return fold <= $(element).offset().left - settings.threshold; + }; + + $.abovethetop = function(element, settings) { + if (settings.container === undefined || settings.container === window) { + var fold = $(window).scrollTop(); + } else { + var fold = $(settings.container).offset().top; + } + return fold >= $(element).offset().top + settings.threshold + $(element).height(); + }; + + $.leftofbegin = function(element, settings) { + if (settings.container === undefined || settings.container === window) { + var fold = $(window).scrollLeft(); + } else { + var fold = $(settings.container).offset().left; + } + return fold >= $(element).offset().left + settings.threshold + $(element).width(); + }; + /* Custom selectors for your convenience. */ + /* Use as $("img:below-the-fold").something() */ + + $.extend($.expr[':'], { + "below-the-fold" : "$.belowthefold(a, {threshold : 0, container: window})", + "above-the-fold" : "!$.belowthefold(a, {threshold : 0, container: window})", + "right-of-fold" : "$.rightoffold(a, {threshold : 0, container: window})", + "left-of-fold" : "!$.rightoffold(a, {threshold : 0, container: window})" + }); + +})(jQuery); diff --git a/r2/r2/public/static/js/jquery.reddit.js b/r2/r2/public/static/js/jquery.reddit.js index 798e2ceb69..e287d3a452 100644 --- a/r2/r2/public/static/js/jquery.reddit.js +++ b/r2/r2/public/static/js/jquery.reddit.js @@ -310,9 +310,13 @@ $.fn.all_things_by_id = function() { return this.thing().add( $.things(this.thing_id()) ); }; -$.fn.thing_id = function() { +$.fn.thing_id = function(class_filter) { + class_filter = $.with_default(class_filter, "thing"); /* Returns the (reddit) ID of the current element's thing */ var t = (this.hasClass("thing")) ? this : this.thing(); + if(class_filter != "thing") { + t = t.find("." 
+ class_filter + ":first"); + } if(t.length) { var id = $.grep(t.get(0).className.split(' '), function(i) { return i.match(/^id-/); }); @@ -332,6 +336,15 @@ $.things = function() { return $(sel); }; +$.fn.same_author = function() { + var aid = $(this).thing_id("author"); + var ids = []; + $(".author.id-" + aid).each(function() { + ids.push(".thing.id-" + $(this).thing_id()); + }); + return $(ids.join(", ")); +}; + $.fn.things = function() { /* * try to find all things that occur below a given selector, like: @@ -387,7 +400,7 @@ $.listing = function(name) { var thing_init_func = function() { }; $.fn.set_thing_init = function(func) { thing_init_func = func; - $(this).find(".thing").each(function() { func(this) }); + $(this).find(".thing:not(.stub)").each(function() { func(this) }); }; @@ -508,12 +521,19 @@ $.insert_things = function(things, append) { }); }; -$.fn.delete_table_row = function() { +$.fn.delete_table_row = function(callback) { var tr = this.parents("tr:first").get(0); var table = this.parents("table").get(0); - $(tr).fadeOut(function() { - table.deleteRow(tr.rowIndex); - }); + if(tr) { + $(tr).fadeOut(function() { + table.deleteRow(tr.rowIndex); + if(callback) { + callback(); + } + }); + } else if (callback) { + callback(); + } }; $.fn.insert_table_rows = function(rows, index) { diff --git a/r2/r2/public/static/js/reddit.js b/r2/r2/public/static/js/reddit.js index 48ff1c1d15..993e98a02b 100644 --- a/r2/r2/public/static/js/reddit.js +++ b/r2/r2/public/static/js/reddit.js @@ -1206,6 +1206,30 @@ function fetch_parent(elem, parent_permalink, parent_id) { return false; } +function big_mod_action(elem, dir) { + if ( ! elem.hasClass("pressed")) { + elem.addClass("pressed"); + + var thing_id = elem.thing_id(); + + d = { + id: thing_id + }; + + if (dir == -1) { + $.request("remove", d, null, true); + elem.siblings(".removed").show(); + elem.siblings(".approved").hide(); + } else if (dir == 1) { + $.request("approve", d, null, true); + elem.siblings(".removed").hide(); + elem.siblings(".approved").show(); + } + } + elem.siblings(".pretty-button").removeClass("pressed"); + return false; +} + function juryvote(elem, dir) { var thing_id = elem.thing_id(); @@ -1231,7 +1255,10 @@ $(function() { * and call it on all things currently rendered in the * page. */ $("body").set_thing_init(updateEventHandlers); - + $(".thumbnail img").lazyload({ + threshold: 200, + placeholder: "/static/nothing.png" + }); /* Set up gray inputs and textareas to clear on focus */ $("textarea.gray, input.gray") .focus( function() { diff --git a/r2/r2/public/static/js/sponsored.js b/r2/r2/public/static/js/sponsored.js index 8e9be4c004..c607067f00 100644 --- a/r2/r2/public/static/js/sponsored.js +++ b/r2/r2/public/static/js/sponsored.js @@ -3,13 +3,13 @@ function update_box(elem) { }; function update_bid(elem) { - var form = $(elem).parents(".pretty-form:first"); + var form = $(elem).parents(".campaign"); var bid = parseFloat(form.find("*[name=bid]").val()); var ndays = ((Date.parse(form.find("*[name=enddate]").val()) - Date.parse(form.find("*[name=startdate]").val())) / (86400*1000)); - $("#bid-field span.gray").html("[Current campaign totals " + - "$" + (bid/ndays).toFixed(2) + - " per day for " + ndays + " day(s)]"); + $(".bid-info").html("  &rarr" + + "$" + (bid/ndays).toFixed(2) + + " per day for " + ndays + " day(s)"); $("#duration span.gray") .html( ndays == 1 ? 
"(1 day)" : "(" + ndays + " days)"); } @@ -62,3 +62,266 @@ function attach_calendar(where, min_date_src, max_date_src, callback) { $(this).siblings(".datepicker.inuse").addClass("active"); }); } + +function targeting_on(elem) { + $(elem).parents(".campaign").find(".targeting") + .find("*[name=sr]").attr("disabled", "").end().show(); +} + +function targeting_off(elem) { + $(elem).parents(".campaign").find(".targeting") + .find("*[name=sr]").attr("disabled", "disabled").end().hide(); +} + +(function($) { + +function get_flag_class(flags) { + var css_class = ""; + if(flags.free) { + css_class += " free"; + } + if(flags.complete) { + css_class += " complete"; + } + else { + if(flags.sponsor) { + css_class += " sponsor"; + } + if(flags.paid) { + css_class += " paid"; + } + } + return css_class +} + +$.new_campaign = function(indx, start_date, end_date, duration, + bid, targeting, flags) { + cancel_edit(function() { + var data =('' + + '' + + '' + + '' + + ''); + if (flags && flags.pay_url) { + data += (""); + } + var row = [start_date, end_date, duration, "$" + bid, targeting, data]; + $(".existing-campaigns .error").hide(); + var css_class = get_flag_class(flags); + $(".existing-campaigns table").show() + .insert_table_rows([{"id": "", "css_class": css_class, + "cells": row}], -1); + $.set_up_campaigns() + }); + return $; +}; + +$.update_campaign = function(indx, start_date, end_date, + duration, bid, targeting, flags) { + cancel_edit(function() { + $(".existing-campaigns input[name=indx]") + .filter("*[value=" + (indx || '0') + "]") + .parents("tr").removeClass() + .addClass(get_flag_class(flags)) + .children(":first").html(start_date) + .next().html(end_date) + .next().html(duration) + .next().html("$" + bid).removeClass() + .next().html(targeting) + .next() + .find("*[name=startdate]").val(start_date).end() + .find("*[name=enddate]").val(end_date).end() + .find("*[name=targeting]").val(targeting).end() + .find("*[name=bid]").val(bid).end() + .find("button, span").remove(); + $.set_up_campaigns(); + }); +}; + +$.set_up_campaigns = function() { + var edit = ""; + var del = ""; + var pay = ""; + var free = ""; + var repay = ""; + $(".existing-campaigns tr").each(function() { + var tr = $(this); + var td = $(this).find("td:last"); + var bid_td = $(this).find("td:first").next().next().next() + .addClass("bid"); + if(td.length && ! 
td.children("button, span").length ) { + /* once paid, we shouldn't muck around with the campaign */ + if(!tr.hasClass("complete")) { + if (tr.hasClass("sponsor") && !tr.hasClass("free")) { + $(bid_td).append($(free).addClass("free") + .click(function() { free_campaign(tr) })) + } + else if (!tr.hasClass("paid")) { + $(bid_td).prepend($(pay).addClass("pay") + .click(function() { pay_campaign(tr) })); + } else if (tr.hasClass("free")) { + $(bid_td).addClass("free paid") + .prepend("freebie"); + } else { + (bid_td).addClass("paid") + .prepend($(repay).addClass("pay") + .click(function() { pay_campaign(tr) })); + } + var e = $(edit).addClass("edit") + .click(function() { edit_campaign(tr); }); + var d = $(del).addClass("d") + .click(function() { del_campaign(tr); }); + $(td).append(e).append(d); + } + else { + $(td).append("complete/live"); + $(bid_td).addClass("paid") + } + } + }); + return $; + +} + +}(jQuery)); + +function detach_campaign_form() { + /* remove datepicker from fields */ + $("#campaign").find(".datepicker").each(function() { + $(this).datepicker("destroy").siblings().unbind(); + }); + + /* clone and remove original */ + var orig = $("#campaign"); + var campaign = orig.clone(true); + orig.remove(); + return campaign; +} + +function cancel_edit(callback) { + if($("#campaign").parents('tr:first').length) { + var tr = $("#campaign").parents("tr:first").prev(); + /* copy the campaign element */ + /* delete the original */ + $("#campaign").fadeOut(function() { + $(this).parent('tr').prev().fadeIn(); + var td = $(this).parent(); + var campaign = detach_campaign_form(); + td.delete_table_row(function() { + tr.fadeIn(function() { + $(".existing-campaigns").before(campaign); + campaign.hide(); + if(callback) { callback(); } + }); + }); + }); + } else { + if ($("#campaign:visible").length) { + $("#campaign").fadeOut(function() { + if(callback) { + callback(); + }}); + } + else if (callback) { + callback(); + } + } +} + +function del_campaign(elem) { + var indx = $(elem).find("*[name=indx]").val(); + var link_id = $("#campaign").find("*[name=link_id]").val(); + $.request("delete_campaign", {"indx": indx, "link_id": link_id}, + null, true, "json", false); + $(elem).children(":first").delete_table_row(); +} + + +function edit_campaign(elem) { + /* find the table row in question */ + var tr = $(elem).get(0); + + if ($("#campaign").parents('tr:first').get(0) != tr) { + + cancel_edit(function() { + + /* copy the campaign element */ + var campaign = detach_campaign_form(); + + $(".existing-campaigns table") + .insert_table_rows([{"id": "edit-campaign-tr", + "css_class": "", "cells": [""]}], + tr.rowIndex + 1); + $("#edit-campaign-tr").children('td:first') + .attr("colspan", 6).append(campaign).end() + .prev().fadeOut(function() { + var data_tr = $(this); + var c = $("#campaign"); + $.map(['startdate', 'enddate', 'bid', 'indx'], + function(i) { + i = "*[name=" + i + "]"; + c.find(i).val(data_tr.find(i).val()); + }); + /* check if targeting is turned on */ + var targeting = data_tr + .find("*[name=targeting]").val(); + var radios=c.find("*[name=targeting]"); + if (targeting) { + radios.filter("*[value=one]") + .attr("checked", "checked"); + c.find("*[name=sr]").val(targeting).attr("disabled", "").end() + .find(".targeting").show(); + } + else { + radios.filter("*[value=none]") + .attr("checked", "checked"); + c.find("*[name=sr]").val("").attr("disabled", "disabled").end() + .find(".targeting").hide(); + } + /* attach the dates to the date widgets */ + init_startdate(); + init_enddate(); + 
        c.find("button[name=edit]").show().end()
        +                      .find("button[name=create]").hide().end();
        +                  update_bid("*[name=bid]");
        +                  c.fadeIn();
        +              } );
        +          }
        +      );
        +    }
        +}
        +
        +function create_campaign(elem) {
        +  cancel_edit(function() {
        +      init_startdate();
        +      init_enddate();
        +      $("#campaign")
        +         .find("button[name=edit]").hide().end()
        +         .find("button[name=create]").show().end()
        +         .find("input[name=indx]").val('').end()
        +         .find("input[name=sr]").val('').end()
        +         .find("input[name=targeting][value=none]")
        +               .attr("checked", "checked").end()
        +         .find(".targeting").hide().end()
        +         .find("*[name=sr]").val("").attr("disabled", "disabled").end()
        +         .fadeIn();
        +      update_bid("*[name=bid]");
        +    });
        +}
        +
        +function free_campaign(elem) {
        +  var indx = $(elem).find("*[name=indx]").val();
        +  var link_id = $("#campaign").find("*[name=link_id]").val();
        +  $.request("freebie", {"indx": indx, "link_id": link_id},
        +            null, true, "json", false);
        +  $(elem).find(".free").fadeOut();
        +  return false;
        +}
        +
        +function pay_campaign(elem) {
        +  $.redirect($(elem).find("input[name=pay_url]").val());
        +}
        \ No newline at end of file
        diff --git a/r2/r2/public/static/noimage.png b/r2/r2/public/static/noimage.png
        index c1556c7380b658c55a6067865ae57b45fb480b2f..9fc0124eb7c518f7d71d8823c652cb195cbb868a 100644
        GIT binary patch
        literal 2736
        
        [base85-encoded binary patch data for the PNG image assets in this patch omitted]
        diff --git a/r2/r2/public/static/vid-expanded.png b/r2/r2/public/static/vid-expanded.png
        index f517ee97496afee8fcd3ab6cf03e05cc387730d2..55847e6037910eb0f3368dbf1935e51039623b52 100644
        GIT binary patch
        [base85-encoded binary patch data omitted]
        diff --git a/r2/r2/templates/buttondemopanel.html b/r2/r2/templates/buttondemopanel.html
        -<% domain = get_domain(True) %>
        +<% domain = get_domain(False) %>
        

${_("put %(site)s buttons on your site") % dict(site=c.site.name)}

@@ -90,6 +90,10 @@

${_('interactive button advanced settings')}

${_("useful in places like blogs, where you want to link to the post's permalink")}

${drawoption('url','[URL]')} +
  • +

    ${_("specify a community to target")}

    + ${drawoption('target','[COMMUNITY]')} +
  • ${_("specify a title")}

    ${drawoption('title','[TITLE]')} @@ -174,9 +178,9 @@

    ${_("more badges and buttons")}

    <%def name="draw_interactive(type)"> %if type: + src="http://${domain}/static/button/button${type}.js"> %else: - + %endif @@ -190,6 +194,6 @@

    ${_("more badges and buttons")}

    reddit_bgcolor = "FF3"; reddit_bordercolor = "00F"; - + diff --git a/r2/r2/templates/link.html b/r2/r2/templates/link.html index 9db7baae57..5dd3b07b0b 100644 --- a/r2/r2/templates/link.html +++ b/r2/r2/templates/link.html @@ -24,6 +24,7 @@ from r2.lib.template_helpers import get_domain from r2.lib.pages.things import LinkButtons from r2.lib.pages import WrappedUser + from r2.lib.template_helpers import static %> <%inherit file="printable.html"/> @@ -61,7 +62,7 @@ <%def name="bottom_buttons()">
      %if thing.nsfw: -
    • +
    • ${_("NSFW")} @@ -77,16 +78,21 @@ <%call expr="make_link('title', 'title')"> ${thing.title} + %if getattr(thing, "approval_checkmark", None): + + %endif + ${self.domain()} %if c.user_is_admin: - %if thing._deleted: - [link deleted] - %endif - %if thing.on_trial: - [on trial] - %endif + %for link_note in thing.link_notes: + [${link_note}] + %endfor %endif

      @@ -108,8 +114,9 @@ expand = thing.link_child and thing.link_child.expand %> - ##if we're not on a permalink page we'll render the buttons on top - %if not expand: + ## if we're not on a permalink page we'll render the buttons on top + ## (unless it's also a jury duty listing) + %if not (expand or getattr(thing, "trial_mode", None)): ${bottom_buttons()} %endif @@ -206,13 +213,15 @@ <%def name="buttons(comments=True, delete=True, report=True, additional='')"> - ${LinkButtons(thing, comments = comments, delete = delete, - report = report)} + ${LinkButtons(thing, comments = comments, delete = delete, + report = report, + )} <%def name="thumbnail()"> %if thing.thumbnail and not getattr(thing, "trial_mode", None): <%call expr="make_link('thumbnail', 'thumbnail')"> + ​ %endif diff --git a/r2/r2/templates/link.xml b/r2/r2/templates/link.xml index 10b790c081..6a49079e7f 100644 --- a/r2/r2/templates/link.xml +++ b/r2/r2/templates/link.xml @@ -24,6 +24,7 @@ from pylons.i18n import _, ungettext from r2.lib.template_helpers import add_sr, get_domain from r2.models import FakeSubreddit + from r2.lib.filters import unsafe, safemarkdown %> <% permalink = add_sr(thing.permalink, force_hostname = True) @@ -50,6 +51,9 @@ <% domain = get_domain(cname = c.cname, subreddit = False) %> + %if getattr(thing, 'selftext', None): + ${unsafe(safemarkdown(thing.selftext))} + %endif submitted by ${thing.author.name} diff --git a/r2/r2/templates/linkontrial.html b/r2/r2/templates/linkontrial.html index 989ada02ce..f7000c0080 100644 --- a/r2/r2/templates/linkontrial.html +++ b/r2/r2/templates/linkontrial.html @@ -47,6 +47,10 @@ ${parent.entry()}
      + + ${_("how would you classify this link?")} + + <% pos_class = "positive" neg_class = "negative" @@ -59,17 +63,13 @@ neg_class += " pressed" %> - - ${_("how would you classify this link?")} - - ${pretty_button(_("off-topic / spam"), "juryvote", -1, neg_class)} ${pretty_button(_("kosher"), "juryvote", 1, pos_class)} ${_("thanks for voting!")} - + ${_("Click here for more info.").lower()} diff --git a/r2/r2/templates/linkpromoteinfobar.html b/r2/r2/templates/linkpromoteinfobar.html index b645a68d93..63fe080cf1 100644 --- a/r2/r2/templates/linkpromoteinfobar.html +++ b/r2/r2/templates/linkpromoteinfobar.html @@ -24,33 +24,3 @@ %> <%namespace file="printablebuttons.html" import="ynbutton" /> <%namespace file="utils.html" import="plain_link" /> - -%if thing.a.promoted: - %if hasattr(thing.a, "promoted_on"): - - ${_('promoted on')} - - ${thing.a.promoted_on.strftime(thing.datefmt)} - - - %endif - %if hasattr(thing.a, "unpromoted_on"): - - ${_('unpromoted on')} - - ${thing.a.unpromoted_on.strftime(thing.datefmt)} - - - %endif - %if thing.a.promote_until: - - ${_('promote until')} - - ${thing.a.promote_until.strftime(thing.datefmt)} - %if thing.a.promote_until < datetime.now(g.tz): - ${_('(this link has expired and is no longer being promoted)')} - %endif - - - %endif -%endif diff --git a/r2/r2/templates/messagecompose.html b/r2/r2/templates/messagecompose.html index d5458249a7..4b88f52eed 100644 --- a/r2/r2/templates/messagecompose.html +++ b/r2/r2/templates/messagecompose.html @@ -67,7 +67,7 @@

      ${_("send a message")}

      <%utils:round_field title="${_('message')}"> - ${UserText(None, have_form = False, creating = True)} + ${UserText(None, text=thing.message, have_form = False, creating = True)}
      diff --git a/r2/r2/templates/morechildren.html b/r2/r2/templates/morechildren.html index a1b78c3e5c..1c0b2e8159 100644 --- a/r2/r2/templates/morechildren.html +++ b/r2/r2/templates/morechildren.html @@ -31,7 +31,7 @@ %> ${_("load more comments")}  (${thing.count} ${ungettext("reply", "replies", thing.count)}) diff --git a/r2/r2/templates/moremessages.html b/r2/r2/templates/moremessages.html index d627ba41c0..0306d47fe6 100644 --- a/r2/r2/templates/moremessages.html +++ b/r2/r2/templates/moremessages.html @@ -32,7 +32,7 @@ <%def name="tagline(collapse=False)"> ${_("[+] load the full conversation.")} diff --git a/r2/r2/templates/newlink.html b/r2/r2/templates/newlink.html index 7ced7f68b0..05863dc763 100644 --- a/r2/r2/templates/newlink.html +++ b/r2/r2/templates/newlink.html @@ -26,7 +26,7 @@ from r2.lib.template_helpers import add_sr %> -<%namespace file="utils.html" import="error_field, submit_form, plain_link, text_with_links"/> +<%namespace file="utils.html" import="error_field, submit_form, plain_link, text_with_links, reddit_selector"/> <%namespace name="utils" file="utils.html"/>

      ${_("submit to reddit")}

      @@ -76,37 +76,7 @@

      ${_("submit to reddit")}

      diff --git a/r2/r2/templates/paymentform.html b/r2/r2/templates/paymentform.html index 91ee0efbe1..14a0e33a1d 100644 --- a/r2/r2/templates/paymentform.html +++ b/r2/r2/templates/paymentform.html @@ -33,28 +33,18 @@

      ${_("set up payment for this link")}

      onsubmit="return post_form(this, 'update_pay')"> - - -

      - <% - day = (thing.link.promote_until - thing.link._date).days - %> - The duration of this link is ${day} ${ungettext("day", "days", day)} - (${thing.link._date.strftime("%m/%d/%Y")} - - ${thing.link.promote_until.strftime("%m/%d/%Y")}). -

      -

      - <% - bid = unsafe("" % thing.link.promote_bid) - %> - ${unsafe(_("Your current bid is $%(bid)s") % dict(bid=bid))} - ${error_field("BAD_BID", "bid")} +

      + The duration of this link is ${thing.campaign.duration} + (from ${thing.campaign.start_date} to ${thing.campaign.end_date}). +

      +

      + + + ${unsafe(_("Your current bid is $%(bid)s") % dict(bid=thing.campaign.bid))} + ${error_field("BAD_BID", "bid")} - ${_('(total for the duration provided)')} + ${_('(total for the duration provided)')}

      %if thing.profiles: @@ -78,9 +68,6 @@

      ${_("set up payment for this link")}

      ${_("NOTE: your card will not be charged until the link has been queued for promotion.")}

      - %if thing.link: - - %endif ${profile_info(None, disabled=bool(thing.profiles))} diff --git a/r2/r2/templates/printable.html b/r2/r2/templates/printable.html index a89e134e71..7215ec978e 100644 --- a/r2/r2/templates/printable.html +++ b/r2/r2/templates/printable.html @@ -34,19 +34,15 @@ %if thing.show_spam:
    • [ %if c.user_is_admin: - ${"auto" if thing.autobanned else ""}banned + ${"auto" if thing.autobanned else ""}${strings.banned} ${("by %s" % thing.banner) if thing.banner else ""} %elif thing.moderator_banned and thing.banner: ${strings.banned_by % thing.banner} %else: - ${_("banned")} + ${strings.banned} %endif ]
    • - %elif thing.show_reports: -
    • [ - ${strings.reports % thing.reported} - ]
    • - %endif + %endif <%def name="thing_css_class(what)" buffered="True"> @@ -82,7 +78,7 @@ ${self.ParentDiv()}

      ${self.numcol()} - <% + <% like_cls = "unvoted" if getattr(thing, "likes", None): like_cls = "likes" diff --git a/r2/r2/templates/printablebuttons.html b/r2/r2/templates/printablebuttons.html index eb9d11249b..d9a29ff2b7 100644 --- a/r2/r2/templates/printablebuttons.html +++ b/r2/r2/templates/printablebuttons.html @@ -20,15 +20,17 @@ ## CondeNet, Inc. All Rights Reserved. ################################################################################ <%namespace file="utils.html" import="plain_link" /> +<%namespace file="utils.html" import="pretty_button" /> + <%! from r2.lib.strings import strings from r2.lib.promote import STATUS %> <%def name="banbuttons()"> - %if thing.show_report: + %if thing.show_delete:
    • - ${ynbutton(_("report"), _("reported"), "report", "hide_thing")} + ${ynbutton(_("delete"), _("deleted"), "del", "hide_thing")}
    • %endif %if thing.show_indict: @@ -36,29 +38,26 @@ ${ynbutton(_("indict"), _("indicted"), "indict")}
    • %endif - %if thing.show_delete: -
    • - ${ynbutton(_("delete"), _("deleted"), "del", "hide_thing")} -
    • - %endif %if thing.can_ban: - %if thing.show_spam: -
    • - ${self.state_button("unban", _("unban"), - "return change_state(this, 'unban');", _("unbanned"))} -
    • - %else: -
    • - ${self.state_button("ban", _("ban"), - "return change_state(this, 'ban');", _("banned"))} -
    • - %endif - %if thing.show_reports: -
    • - ${self.state_button("ignore", _("ignore"), \ - "change_state(this, 'ignore');", _("ignored"))} -
    • + %if not getattr(thing.thing, "use_big_modbuttons", False): + %if not thing.show_spam: +
    • + ${self.state_button("remove", _("remove"), + "return change_state(this, 'remove');", _("removed"))} +
    • + %endif + + %if thing.show_spam or thing.show_ignore: +
    • + ${self.state_button("approve", _("approve"), + "return change_state(this, 'approve');", _("approved"))} +
    • + %endif %endif + %elif thing.show_report: +
    • + ${ynbutton(_("report"), _("reported"), "report", "hide_thing")} +
    • %endif @@ -100,10 +99,41 @@ %endif +<%def name="big_modbuttons(thing, kind)"> + + %if getattr(thing, "moderator_banned", None): + + %elif thing._spam: + ${pretty_button(_("confirm %(obj)s removal") % dict(obj=kind), + "big_mod_action", -1, "negative")} + %else: + ${pretty_button(_("remove %(obj)s") % dict(obj=kind), + "big_mod_action", -1, "negative")} + %endif + + %if getattr(thing, "approval_checkmark", None): + ${pretty_button(_("reapprove %(obj)s") % dict(obj=kind), + "big_mod_action", 1, "positive")} + %else: + ${pretty_button(_("approve %(obj)s") % dict(obj=kind), + "big_mod_action", 1, "positive")} + %endif + + + + + + + <%def name="linkbuttons()"> %if thing.show_comments:
    • ${self.comment_button("comment", thing.comment_label, thing.permalink, + _sr_path = (thing.promoted is None), a_class = thing.commentcls, newwindow = thing.new_window)}
    • @@ -136,57 +166,75 @@ %endif %endif + + ${self.distinguish()} ${self.banbuttons()} %if thing.promoted is not None: - %if thing.promote_status != STATUS.finished or c.user_is_sponsor: - %if thing.user_is_sponsor or thing.is_author: + %if thing.user_is_sponsor or thing.is_author: +
    • + ${plain_link(_("edit"), thing.promo_url, _sr_path = False)} +
    • + %endif + %if c.user_is_sponsor: +
    • + +
      + +
      + +
      + + submit + / + + ${toggle_button("reject_promo", \ + _("reject"), _("cancel"), \ + "reject_promo", "cancel_reject_promo")} +
    • + %if thing.promote_status in (STATUS.unseen, STATUS.rejected):
    • - ${plain_link(_("edit"), thing.promo_url, _sr_path = False)} + ${ynbutton(_("accept"), _("accepted"), "promote")}
    • - %if thing.promote_status == STATUS.promoted: -
    • - ${ynbutton(_("unpromote"), _("unpromoted"), "unpromote")} -
    • - %elif c.user_is_sponsor and thing.promote_status != STATUS.rejected: -
    • - - ${toggle_button("reject_promo", \ - _("reject"), _("cancel"), \ - "reject_promo", "cancel_reject_promo")} -
    • - %endif - %endif - %if thing.user_is_sponsor: - %if thing.promote_status in (STATUS.unseen, STATUS.rejected): -
    • - ${ynbutton(_("accept"), _("accepted"), "promote")} -
    • - %elif thing.promotable and thing.promote_status in (STATUS.accepted, STATUS.pending): -
    • - ${ynbutton(_("promote"), _("promote"), "promote")} -
    • - %endif %endif - %endif - %if (thing.user_is_sponsor or thing.is_author) and thing.promote_status >= STATUS.promoted: -
    • - ${plain_link(_("traffic"), thing.traffic_url, _sr_path = False)} -
    • - %endif - %endif - ${self.distinguish()} + %endif + %if thing.user_is_sponsor or thing.is_author: +
    • + ${plain_link(_("traffic"), thing.traffic_url, _sr_path = False)} +
    • + %endif + %endif + + %if getattr(thing.thing, "reveal_trial_info", False): +
    • + ${_("deputy opinion:")} + + + ${_("%d spam") % thing.thing.trial_info.get("spams", "?")} + + + / + + + ${_("%d kosher") % thing.thing.trial_info.get("koshers", "?")} + +
    • + %endif + + %if thing.show_reports and not thing.show_spam: +
    • + ${strings.reports % thing.thing.reported} +
    • + %endif + + %if getattr(thing.thing, "use_big_modbuttons", False): + ${big_modbuttons(thing.thing, "link")} + %endif + <%def name="commentbuttons()"> @@ -224,6 +272,14 @@ ${self.simple_button(_("reply {verb}"), "reply")} %endif + %if thing.show_reports and not thing.show_spam: +
    • + ${strings.reports % thing.thing.reported} +
    • + %endif + %if getattr(thing.thing, "use_big_modbuttons", False): + ${big_modbuttons(thing.thing, "comment")} + %endif %endif @@ -257,7 +313,7 @@ <%def name="state_button(name, title, onclick, executed, clicked=False, a_class = '', fmt=None, fmt_param = '', hidden_data = {})"> <%def name="_link()" buffered="True"> - %if alt_title: ${alt_title} %else: -   +   %endif @@ -365,8 +421,9 @@ ### originally in commentbutton <%def name="comment_button(name, link_text, link,\ - a_class='', title='', newwindow = False)"> + _sr_path = True, a_class='', title='', newwindow = False)"> ${plain_link(link_text, link, + _sr_path = _sr_path, _class=a_class, title=title, target='_blank' if newwindow else '_parent')} diff --git a/r2/r2/templates/promo_email.email b/r2/r2/templates/promo_email.email index 1195afe7e3..e0fe9b8073 100644 --- a/r2/r2/templates/promo_email.email +++ b/r2/r2/templates/promo_email.email @@ -30,9 +30,9 @@ %> %if thing.kind == Email.Kind.NEW_PROMO: -This email is to confirm reddit.com's receipt of your submitted self-serve ad. To set up payment for your ad, please go here: +This email is to confirm reddit.com's receipt of your submitted self-serve ad. At the moment you have no campaigns associated with this link. To set up a campaign and payment if you haven't already, please visit the link's edit page: -${g.payment_domain}promoted/pay/${thing.link._id36} + ${edit_url} Please note that we can't approve your ad until you have set up payment, and that your ad must be approved before it goes live on your selected dates. @@ -43,13 +43,13 @@ If your ad is rejected your credit card will not be charged. Don't take it pers http://www.reddit.com/help/selfservicepromotion %elif thing.kind == Email.Kind.BID_PROMO: -This email is to confirm that your bid of $${"%.2f" % thing.link.promote_bid} for a self-serve ad on reddit.com has been accepted. The credit card number you provided will be charged 24 hours prior to the date your self-serve ad is set to run. +This email is to confirm that your bid of $${"%.2f" % thing.bid} for a self-serve ad on reddit.com has been accepted. The credit card number you provided will be charged 24 hours prior to the date your self-serve ad is set to run. -Having second thoughts about your bid? Want to be sure you're outbidding the competition? You'll have until ${(thing.link._date - timedelta(1)).strftime("%Y-%m-%d")} to change your bid here: +Having second thoughts about your bid? Want to be sure you're outbidding the competition? You'll have until ${(thing.start_date - timedelta(1)).strftime("%Y-%m-%d")} to change your bid here: ${edit_url} %elif thing.kind == Email.Kind.ACCEPT_PROMO: -This email is to confirm that your self-serve reddit.com ad has been approved by reddit! The credit card you provided will not be charged until 24 hours prior to the date you have set your ad to run. If you make any changes to your ad, they will have to be re-approved. Keep in mind that after we have charged your credit card you will not be able to change your ad. +This email is to confirm that your self-serve reddit.com ad has been approved by reddit! The credit card you provided will not be charged until 24 hours prior to the date you have set your ad to run. If you make any changes to your ad, they will have to be re-approved. It won't be long now until your ad is being displayed to hundreds of thousands of the Internet's finest surfers. @@ -67,21 +67,21 @@ and we'll reconsider it for sumbission. 
%elif thing.kind == Email.Kind.QUEUED_PROMO: This email is to inform you that your self-serve ad on reddit.com is about to go live. Feel free to reply to this email if you have any questions. -%if thing.link.promote_trans_id > 0: +%if thing.trans_id > 0: Your credit card has been successfully charged by reddit for the amount you bid. Please use this email as your receipt. ================================================================================ -TRANSACTION #${thing.link.promote_trans_id} +TRANSACTION #${thing.trans_id} DATE: ${datetime.now(g.tz).strftime("%Y-%m-%d")} ................................................................................ -AMOUNT CHARGED: $${"%.2f" % thing.link.promote_bid} +AMOUNT CHARGED: $${"%.2f" % thing.bid} SPONSORSHIP PERMALINK: ${thing.link.make_permalink_slow(force_domain = True)} ================================================================================ %else: -Your promotion was a freebie in the amount of $${"%.2f" % thing.link.promote_bid}. +Your promotion was a freebie in the amount of $${"%.2f" % thing.bid}. %endif %elif thing.kind == Email.Kind.LIVE_PROMO: This email is to inform you that your self-serve ad on reddit.com is now live and can be found at the following link: diff --git a/r2/r2/templates/promote_graph.html b/r2/r2/templates/promote_graph.html index 09b3c9c54e..86bb1eaab0 100644 --- a/r2/r2/templates/promote_graph.html +++ b/r2/r2/templates/promote_graph.html @@ -20,7 +20,9 @@ ## CondeNet, Inc. All Rights Reserved. ################################################################################ <%! + from r2.lib.template_helpers import static import datetime, locale + from r2.lib import promote def num(x): return locale.format('%d', x, True) def money(x): @@ -28,6 +30,8 @@ %>

      Sponsored link calendar

      + + %if not c.user_is_sponsor:

      Below is a calendar of your scheduled and completed promotions (if you have any, of course), along with some site-wide averages to use as a guide for setting up future promotions. These values are:

      @@ -77,7 +81,7 @@

      Sponsored link calendar

      %else: YOUR COMMIT %endif -
      +
  • %for i in xrange(thing.total_size): <% @@ -125,10 +129,15 @@

    Sponsored link calendar

    <% prev_end = 0 %> -%for link, start, end in thing.promote_blocks: +%for link, bid, start, end, indx in thing.promote_blocks: <% start += 1 end += 1 + sr = '' + if indx in getattr(link, "campaigns", {}): + sr = link.campaigns[indx][promote.CAMPAIGN.sr] + if sr: + sr += ':' %> %if start != end: %if prev_end > start: @@ -142,10 +151,9 @@

    Sponsored link calendar

    %> + + +%if thing.link and not c.user_is_sponsor: +
    + <%utils:line_field title="${_('promotion history')}"> + <% + import datetime + pc_max = max(thing.promo_counter.values() or [1]) + %> +

    + Here is a summary of the data presented on the graph page. + Remember: best to pick days with less competition.

    + + + + + + + + + %for i in xrange(0, 28): + <% + day = (thing.now + datetime.timedelta(i-14)) + CPC = CPM = imp_traffic = cli_traffic = "---" + if thing.promo_traffic.has_key(day): + imp_traffic, cli_traffic = thing.promo_traffic[day] + if thing.market.has_key(i): + CPM = "$%.2f" % (thing.market[i] * 1000./max(imp_traffic, 1)) + CPC = "$%.2f" % (thing.market[i] * 1./max(cli_traffic, 1)) + %> + + + + + +
        date CPM CPC count
        + ${unsafe("&raquo;" if i == 14 else '')} + ${day.strftime("%m/%d/%Y")} ${CPM} ${CPC} ${thing.promo_counter.get(i, 0)}
        
    + %endfor +
    + +
    +%endif %if thing.link and c.user_is_sponsor:
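        For reference, the CPM and CPC figures computed in the summary table above reduce to the following arithmetic. This is a standalone sketch of the template's formulas, not code from r2; the argument names are illustrative stand-ins for thing.market[i] and the (impressions, clicks) pair kept in thing.promo_traffic:

            def cpm_cpc(market_dollars, impressions, clicks):
                # CPM is dollars per thousand impressions, CPC is dollars per
                # click; max(..., 1) mirrors the template's guard against days
                # with no recorded traffic.
                cpm = market_dollars * 1000. / max(impressions, 1)
                cpc = market_dollars * 1. / max(clicks, 1)
                return "$%.2f" % cpm, "$%.2f" % cpc

            # e.g. $50 of bids against 40000 impressions and 200 clicks:
            # cpm_cpc(50, 40000, 200) -> ("$1.25", "$0.25")
        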
    @@ -382,6 +447,7 @@

    ${title}

    date user transaction id + campaign id pay id amount status @@ -394,6 +460,7 @@

    ${title}

    ${bid.date} ${accounts[bid.account_id].name} ${bid.transaction} + ${bid.campaign} ${bid.pay_id} $${"%.2f" % bid.bid} ${status} @@ -406,10 +473,14 @@

    ${title}

    <%utils:line_field title="${_('promotion history')}"> +
    + For correspondence, the email address of this author is + ${thing.author.email}. +
    To check with authorize.net, use CustomerID - t${Account._type_id}_${to36(thing.link.author_id)} when searching by batch. + ${thing.author._fullname} when searching by batch.
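        (The CustomerID here is just the author's fullname, i.e. the type id plus the base-36 thing id that the removed t${Account._type_id}_${to36(...)} form spelled out by hand. A rough sketch, with to36 reimplemented for illustration; the real helper lives in r2.lib.utils:)

            def to36(n):
                # minimal base-36 encoder, a stand-in for r2.lib.utils.to36
                digits = "0123456789abcdefghijklmnopqrstuvwxyz"
                out = ""
                while True:
                    n, r = divmod(n, 36)
                    out = digits[r] + out
                    if n == 0:
                        return out

            def fullname(type_id, thing_id):
                # e.g., assuming a type id of 2: fullname(2, 12345) -> "t2_9ix"
                return "t%d_%s" % (type_id, to36(thing_id))
        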
    @@ -425,4 +496,4 @@

    ${title}

    %endif - + diff --git a/r2/r2/templates/reddit.html b/r2/r2/templates/reddit.html index 7d8d38ce0a..a134af4881 100644 --- a/r2/r2/templates/reddit.html +++ b/r2/r2/templates/reddit.html @@ -22,6 +22,7 @@ <%! from r2.lib.template_helpers import add_sr, static, join_urls, class_dict, get_domain + from r2.lib.filters import unsafe from r2.lib.pages import SearchForm, ClickGadget, SideContentBox from r2.lib import tracking from pylons import request @@ -62,10 +63,20 @@ %endif %if c.allow_styles and c.site.stylesheet_contents: - + <% inline_stylesheet = ( + len(c.site.stylesheet_contents) < 1024 + and '<' not in c.site.stylesheet_contents) %> + %if inline_stylesheet: + ## for very simple stylesheets, we can just include them inline + + %else: + + %endif %endif %if getattr(thing, "additional_css", None): + diff --git a/r2/r2/templates/redditfooter.html b/r2/r2/templates/redditfooter.html index 64fa093783..79838da145 100644 --- a/r2/r2/templates/redditfooter.html +++ b/r2/r2/templates/redditfooter.html @@ -45,5 +45,7 @@ ${_("(c) %(year)d Conde Nast Digital. All rights reserved.") % \ dict(year=datetime.datetime.now().timetuple()[0])}

    +

    REDDIT and the ALIEN Logo are registered trademarks of Advance Magazine +Publishers Inc.

    diff --git a/r2/r2/templates/redditheader.html b/r2/r2/templates/redditheader.html index 51ab5cef96..bdeb33ecf5 100644 --- a/r2/r2/templates/redditheader.html +++ b/r2/r2/templates/redditheader.html @@ -123,12 +123,10 @@
    - - - - - + + + + ## Be careful when adding things here: If all users try to get the new images ## at once, they'll crush the static servers! - diff --git a/r2/r2/templates/reddittraffic.html b/r2/r2/templates/reddittraffic.html index 158f04ac7d..561571b5e7 100644 --- a/r2/r2/templates/reddittraffic.html +++ b/r2/r2/templates/reddittraffic.html @@ -20,12 +20,15 @@ ## CondeNet, Inc. All Rights Reserved. ################################################################################ <%! + from r2.lib.template_helpers import static import locale from r2.models.subreddit import DomainSR, FakeSubreddit def num(x): return locale.format('%d', x, True) %> + + <%def name="daily_summary()"> <% thing.day_data = filter(None, thing.day_data) @@ -105,48 +108,77 @@

    Traffic for ${c.site.name}

    +

        + Below are the traffic stats for your reddit. Each graph represents one of the following over the specified interval: +
        

    +
      +
    • + pageviews are all hits to ${c.site.name}, including both listing pages and comment pages. +
    • +
    • + uniques are the total number of unique visitors (IP and U/A combo) that generate the above pageviews. This is independent of whether or not they are logged in. +
    • +
    • + subscriptions is the number of new subscriptions in a given day that have been generated. This number is less accurate than the first two metrics, as, though we can track new subscriptions, we have no way to track unsubscriptions (which are when a subscription is actually deleted from the db). +
    • +
    +

    + Note: there are a couple of places outside of your reddit where someone can click "subscribe", so it is possible (though unlikely) that the subscription count can exceed the unique count on a given day. +
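        In other words, a unique is one distinct (IP, user-agent) pair per day; a minimal sketch of that counting (illustrative only, not the actual r2 traffic pipeline):

            def daily_uniques(hits):
                # hits: iterable of (ip, user_agent) pairs seen in one day.
                # Each distinct combination counts once, logged in or not.
                return len(set(hits))

            # e.g. two hits from one browser plus one hit from another:
            # daily_uniques([("1.2.3.4", "Mozilla"), ("1.2.3.4", "Mozilla"),
            #                ("1.2.3.4", "Opera")]) -> 2
        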

    + %if not thing.has_data:

    ${_("There doesn't seem to be any traffic data at the moment. Please check back later.")}

    %else: - -
    -
    - hourly uniques -
    -
    - daily uniques -
    + + + + + + + + + %if c.default_sr: -
    - monthly uniques -
    + + + + + %else: + + + + %endif +
    + ${thing.uniques_hour} + + ${thing.impressions_hour} +
    + ${thing.uniques_day} + + ${thing.impressions_day} +
    + ${thing.uniques_month} + + ${thing.impressions_month} +
    + ${thing.subscriptions_day} + +
    + +
    ${weekly_summary()} - %if c.default_sr: - ${daily_summary()} - %endif -
    -
    - hourly impressions + %if c.default_sr: + ${daily_summary()} + %endif
    -
    - daily impressions -
    - %if c.default_sr: -
    - monthly impressions -
    - <% data = thing.monthly_summary() %> - - + + %if c.default_sr: + <% data = thing.monthly_summary() %> +
    + @@ -207,8 +239,6 @@

    Traffic for ${c.site.name}

    %endfor
    Monthly data
    %else: - daily subscriptions ${daily_summary()} %endif %endif diff --git a/r2/r2/templates/selfserveblurb.html b/r2/r2/templates/selfserveblurb.html index 04599eae68..e376ad4395 100644 --- a/r2/r2/templates/selfserveblurb.html +++ b/r2/r2/templates/selfserveblurb.html @@ -29,7 +29,7 @@

    use our self-serve advertising tool

    reddit has created a system where you can buy sponsored links on the front page using our self-serve tool. Overall, we've been quite pleased to find a ridiculously successful (2% CTR to upwards of 10% CTR) means of advertising that doesn't diminish the user experience.
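        (CTR in the claim above is just clicks divided by impressions; a quick illustrative sketch:)

            def ctr(clicks, impressions):
                # click-through rate, as a percentage
                return 100.0 * clicks / impressions if impressions else 0.0

            # e.g. 500 clicks on 25000 impressions -> 2.0 (percent)
        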

    -

    Even if your budget is only $20 a day, you can get in on the action. Here's how: we'll allow you to submit a sponsored link (set the title, upload an image), and set the start date and duration. Then, you can bid on how much you want to spend on that link (we've set a minimum of $20 per link per day). This bid is exactly how much you'll pay for the link once it starts running.

    +

        Even if your budget is only $20 a day, you can get in on the action. Here's how: we'll allow you to submit a sponsored link (set the title, upload an image), and set the start date and duration. Then, you can bid on how much you want to spend on that link (we've set a minimum of $20 per link per day, and $30 per link per day for targeted ads). This bid is exactly how much you'll pay for the link once it starts running.<br/>
        

        On each day, we tally up all of the bids, and use that total to figure out how large a piece of the pie each sponsored link gets. For example, if you were to bid $20, and the total for the day is $200, you'll get 10% of sponsored link impressions for the day.<br/>
        
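        The share computation described above works out to bid / total, as in this small sketch (illustrative only, not the scheduling code in r2.lib.promote):

            def impression_share(my_bid, all_bids):
                # A link's slice of a day's sponsored impressions is its bid
                # divided by the sum of all bids running that day.
                total = sum(all_bids)
                return my_bid / total if total else 0.0

            # e.g. a $20 bid against $200 in total bids buys a 10% share:
            # impression_share(20.0, [20.0, 80.0, 100.0]) -> 0.1
        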

    diff --git a/r2/r2/templates/selfserviceoatmeal.html b/r2/r2/templates/selfserviceoatmeal.html new file mode 100644 index 0000000000..c9c281c164 --- /dev/null +++ b/r2/r2/templates/selfserviceoatmeal.html @@ -0,0 +1,40 @@ +## The contents of this file are subject to the Common Public Attribution +## License Version 1.0. (the "License"); you may not use this file except in +## compliance with the License. You may obtain a copy of the License at +## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public +## License Version 1.1, but Sections 14 and 15 have been added to cover use of +## software over a computer network and provide for limited attribution for the +## Original Developer. In addition, Exhibit A has been modified to be consistent +## with Exhibit B. +## +## Software distributed under the License is distributed on an "AS IS" basis, +## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for +## the specific language governing rights and limitations under the License. +## +## The Original Code is Reddit. +## +## The Original Developer is the Initial Developer. The Initial Developer of +## the Original Code is CondeNet, Inc. +## +## All portions of the code written by CondeNet are Copyright (c) 2006-2010 +## CondeNet, Inc. All Rights Reserved. +################################################################################ + +
    + + + + + + + + + + + + + + + + +
    diff --git a/r2/r2/templates/spotlightlisting.html b/r2/r2/templates/spotlightlisting.html index 13e82be444..a066029f4c 100644 --- a/r2/r2/templates/spotlightlisting.html +++ b/r2/r2/templates/spotlightlisting.html @@ -23,20 +23,16 @@ <%namespace file="printablebuttons.html" import="ynbutton"/> <% from r2.lib.template_helpers import static - from r2.lib.promote import get_promoted %>
    <% lookup = dict((t._fullname, t) for t in thing.things) seen = set([]) - promoted = set(get_promoted()) - promoted = [o for o in thing.spotlight_links if o in promoted ] %> %for name in thing.spotlight_links: %if name in seen: @@ -71,9 +67,9 @@

    - ${_("Sometimes reddit needs input from the community to train its spam filter. This is your chance to help.")} + ${_("Sometimes reddit needs to ask the community to help fight spam. This is your chance to take part in that.")} - + ${_("Click here for more info.")}

    diff --git a/r2/r2/templates/subredditinfobar.html b/r2/r2/templates/subredditinfobar.html index 74432ac06a..d58cc2cf4e 100644 --- a/r2/r2/templates/subredditinfobar.html +++ b/r2/r2/templates/subredditinfobar.html @@ -73,16 +73,16 @@

    ${thing.sr.name}

    %if c.user_is_admin: %if thing.sr._spam: - ${state_button("unban", _("unban this reddit"), - "return change_state(this, 'unban');", _("unbanned"), + ${state_button("approve", _("approve this reddit"), + "return change_state(this, 'approve');", _("approved"), hidden_data = dict(id = thing.sr._fullname))} %if thing.sr._spam and hasattr(thing.sr, "banner"): (${strings.banned_by % thing.sr.banner}) %endif %else: - ${state_button("ban", _("ban this reddit"), - "return change_state(this, 'ban');", _("banned"), + ${state_button("remove", _("ban this reddit"), + "return change_state(this, 'remove');", _("banned"), hidden_data = dict(id = thing.sr._fullname))} %endif %endif diff --git a/r2/r2/templates/subredditstylesheet.html b/r2/r2/templates/subredditstylesheet.html index 3ef37dc2bb..93f753da57 100644 --- a/r2/r2/templates/subredditstylesheet.html +++ b/r2/r2/templates/subredditstylesheet.html @@ -37,7 +37,7 @@ + value="http://${g.s3_thumb_bucket}/${c.site._fullname}" />

    ${_("stylesheet")}

    @@ -206,7 +206,7 @@

    ${_("images")}

  • <% if img is not None: - img = "http:/%s%s_%d.png" % \ + img = "http://%s/%s_%d.png" % \ (g.s3_thumb_bucket, c.site._fullname, img) else: img = "/static/kill.png" diff --git a/r2/r2/templates/trafficgraph.html b/r2/r2/templates/trafficgraph.html new file mode 100644 index 0000000000..d6cabda49c --- /dev/null +++ b/r2/r2/templates/trafficgraph.html @@ -0,0 +1,74 @@ +## The contents of this file are subject to the Common Public Attribution +## License Version 1.0. (the "License"); you may not use this file except in +## compliance with the License. You may obtain a copy of the License at +## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public +## License Version 1.1, but Sections 14 and 15 have been added to cover use of +## software over a computer network and provide for limited attribution for the +## Original Developer. In addition, Exhibit A has been modified to be consistent +## with Exhibit B. +## +## Software distributed under the License is distributed on an "AS IS" basis, +## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for +## the specific language governing rights and limitations under the License. +## +## The Original Code is Reddit. +## +## The Original Developer is the Initial Developer. The Initial Developer of +## the Original Code is CondeNet, Inc. +## +## All portions of the code written by CondeNet are Copyright (c) 2006-2010 +## CondeNet, Inc. All Rights Reserved. +################################################################################ +<%! + from random import random + import simplejson + %> + + +<% + _id = str(random()).split('.')[-1] + %> +
+ ${thing.title}
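The new trafficgraph.html derives a per-graph DOM id from random() so several graphs can share one page, and imports simplejson to serialize the series for the client-side plot. A hedged Python 2 sketch of that setup (the container name and sample series are assumptions; the _id expression is the template's own):

    from random import random
    import simplejson

    # fractional digits of random() become a unique DOM id suffix
    _id = str(random()).split('.')[-1]
    container_id = "traffic-%s" % _id        # hypothetical element name
    # traffic series serialized for the client-side graphing code
    payload = simplejson.dumps([[0, 1200], [1, 950], [2, 1400]])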
    + + diff --git a/r2/r2/templates/upgradebuttons.html b/r2/r2/templates/upgradebuttons.html new file mode 100644 index 0000000000..452cb60470 --- /dev/null +++ b/r2/r2/templates/upgradebuttons.html @@ -0,0 +1,149 @@ +## The contents of this file are subject to the Common Public Attribution +## License Version 1.0. (the "License"); you may not use this file except in +## compliance with the License. You may obtain a copy of the License at +## http://code.reddit.com/LICENSE. The License is based on the Mozilla Public +## License Version 1.1, but Sections 14 and 15 have been added to cover use of +## software over a computer network and provide for limited attribution for the +## Original Developer. In addition, Exhibit A has been modified to be consistent +## with Exhibit B. +## +## Software distributed under the License is distributed on an "AS IS" basis, +## WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for +## the specific language governing rights and limitations under the License. +## +## The Original Code is Reddit. +## +## The Original Developer is the Initial Developer. The Initial Developer of +## the Original Code is CondeNet, Inc. +## +## All portions of the code written by CondeNet are Copyright (c) 2006-2010 +## CondeNet, Inc. All Rights Reserved. +################################################################################ +<%! + from r2.lib.template_helpers import get_domain + %> + +<% domain = get_domain(False) %> + +
    +

    You need to upgrade your buttons

    +

+ You were redirected here because the site you came from has out-of-date buttons and was linking directly to our iframe contents rather than our JS (tsk tsk). If you are the site administrator, use the code snippets below to fix it. +

    + +

    ${_('simple interactive button')}

    +

    ${_('put this code on your page:')}

    + + ${capture(draw_interactive,False)} + +

    ${_("and you'll get something like this:")}

    + + ${draw_interactive(False)} + + +

    ${_("more interactive buttons")}

    +
      + %for x in xrange(1,4): + ${demo(capture(draw_interactive, x))} + %endfor +
    + +

    ${_('interactive button advanced settings')}

    +
    +
      +
    • +

      ${_("specify a url")}
      + ${_("useful in places like blogs, where you want to link to the post's permalink")}

      + ${drawoption('url','[URL]')} +
    • +
    • +

      ${_("specify a community to target")}

      + ${drawoption('target','[COMMUNITY]')} +
    • +
    • +

      ${_("specify a title")}

      + ${drawoption('title','[TITLE]')} +
    • +
    • +

      ${_("open links in a new window")}

      + ${drawoption('newwindow','1')} +
    • +
    • +

      ${_("specify the color")}

      + ${drawoption('bgcolor','[COLOR]')} +
    • +
    • +

      ${_("specify a border color")}

      + ${drawoption('bordercolor','[COLOR]')} +
    • +
    +
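Every option above funnels through drawoption(option, val), and the names read like query parameters on the button embed (url, target, title, newwindow, bgcolor, bordercolor). A hedged Python 2 sketch of assembling such a querystring; the parameter names come from the list above, while the helper itself is an assumption, not the template's actual mechanism:

    from urllib import urlencode

    def button_query(**options):
        # e.g. button_query(url='[URL]', target='[COMMUNITY]', newwindow='1')
        # drops empty values so only chosen settings appear in the embed
        return urlencode(dict((k, v) for k, v in options.items() if v))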

    ${_('Example:')}

    +

    ${_('to make this button:')}

    + ${draw_interactive_example()} +

    ${_('use this code:')}

+ + <% + ex = websafe(capture(draw_interactive_example)) + ex = ex.replace("\n", "<br>").replace(" ", "&nbsp;") + %> + ${unsafe(ex)} +
    +
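The example block escapes the captured markup with websafe() and then swaps newlines and spaces for their HTML equivalents so the snippet renders verbatim inside the page. A plain-Python sketch of that display escaping, with cgi.escape standing in for websafe() (an assumption; websafe is reddit's own helper):

    import cgi

    def display_code(markup):
        escaped = cgi.escape(markup, quote=True)  # stand-in for websafe()
        # preserve line breaks and indentation in the rendered HTML
        return escaped.replace("\n", "<br>").replace(" ", "&nbsp;")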
    + +
    + + + +<%def name="demo(content)"> +
  • + ${_("view code")} + ${_("hide code")} + ${unsafe(content)} +
    + + ${content} + +
  • + + +<%def name="draw_point_button(image)"> + + + +<%def name="point_option_example()" buffered="True"> + + + +<%def name="draw_interactive(type)"> +%if type: + +%else: + +%endif + + +<%def name="drawoption(option, val)" buffered="True"> + + + +<%def name="draw_interactive_example()"> + + + diff --git a/r2/r2/templates/utils.html b/r2/r2/templates/utils.html index 1304c6af66..9b73b67f30 100644 --- a/r2/r2/templates/utils.html +++ b/r2/r2/templates/utils.html @@ -439,6 +439,40 @@ +<%def name="reddit_selector(default_sr, sr_searches, subreddits)"> +
    + +
      +
    • nothin
    • +
    +
    + + ${error_field("SUBREDDIT_NOEXIST", "sr", "div")} + ${error_field("SUBREDDIT_NOTALLOWED", "sr", "div")} + ${error_field("SUBREDDIT_REQUIRED", "sr", "div")} + +
    + ${_("popular choices")} +
      + %for name in subreddits: +
    • + ${name} +
    • + %endfor +
    +
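reddit_selector reserves three error slots (SUBREDDIT_NOEXIST, SUBREDDIT_NOTALLOWED, SUBREDDIT_REQUIRED) alongside the text input and the popular-choices list. A hedged Python sketch of the server-side check those slots imply; the lookup and permission helpers are assumptions stubbed as parameters, not the patch's validator:

    def check_subreddit(name, user, lookup_sr, can_submit, errors):
        # mirrors the three error slots reserved in the template
        if not name:
            errors.append("SUBREDDIT_REQUIRED")
            return None
        sr = lookup_sr(name)            # hypothetical name -> subreddit lookup
        if sr is None:
            errors.append("SUBREDDIT_NOEXIST")
        elif not can_submit(sr, user):  # hypothetical permission check
            errors.append("SUBREDDIT_NOTALLOWED")
        return sr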
    + + <%def name="percentage(slice, total)"> %if total is None or total == "" or total == 0 or slice is None or slice == "": -- diff --git a/r2/r2/tests/testfile b/r2/r2/tests/testfile new file mode 100644 index 0000000000..e88600031b --- /dev/null +++ b/r2/r2/tests/testfile @@ -0,0 +1 @@ +testing 4 diff --git a/r2/setup.py b/r2/setup.py index 3f245b4cc8..3315d6ff7c 100644 --- a/r2/setup.py +++ b/r2/setup.py @@ -75,13 +75,13 @@ class null(): pass # we're using a custom build of pylibmc at the moment, so we need to # be sure that we have the right version -pylibmc_version = '1.0-reddit-01' +pylibmc_version = '1.0-reddit-03' try: import pylibmc assert pylibmc.__version__ == pylibmc_version except (ImportError, AssertionError): print "Installing pylibmc" - easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-01.tar.gz"]) + easy_install(["http://github.com/downloads/ketralnis/pylibmc/pylibmc-1.0-reddit-03.tar.gz"]) filtermod = Extension('Cfilters', sources = ['r2/lib/c/filters.c']) @@ -116,6 +116,7 @@ class null(): pass #url="", install_requires=["Routes<=1.8", "Pylons<=0.9.6.2", + "boto", "pytz", "pycrypto", "Babel>=0.9.1",
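setup.py above bumps the pinned pylibmc build to 1.0-reddit-03 and adds boto to install_requires. For orientation, a minimal sketch of an S3 upload through boto's classic API (bucket, key, and credential names are illustrative; this is not the actual r2/lib/s3cp.py):

    from boto.s3.connection import S3Connection

    def s3_upload(access_key, secret_key, bucket_name, key_name, filename):
        conn = S3Connection(access_key, secret_key)
        bucket = conn.get_bucket(bucket_name)
        key = bucket.new_key(key_name)
        key.set_contents_from_filename(filename)  # one PUT, no forked curl
        key.set_acl('public-read')                # optional: make the object public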