Skip to content
This repository has been archived by the owner on Nov 9, 2017. It is now read-only.

Commit

Permalink
* transparency updates to some of the pngs thanks to ytknows
Browse files Browse the repository at this point in the history
 * bugfixes in unicode handling
 * Nag nonparticipating jurors 20% of the time
 * fix thumbnails for sponsored links
   * non admins can't change the thumb of a live link
   * the uploader updates the status properly to 'saved'
  • Loading branch information
KeyserSosa committed May 21, 2010
1 parent d251ba7 commit 1d9b9fe
Show file tree
Hide file tree
Showing 33 changed files with 199 additions and 83 deletions.
2 changes: 1 addition & 1 deletion r2/r2/controllers/front.py
Original file line number Diff line number Diff line change
Expand Up @@ -672,7 +672,7 @@ def GET_traffic(self, article):
comment = None,
content = content).render()

@validate(VAdmin())
@validate(VSponsorAdmin())
def GET_site_traffic(self):
    """Render the site-wide traffic page inside a plain chrome page."""
    page = BoringPage("traffic", content=RedditTraffic())
    return page.render()
Expand Down
35 changes: 19 additions & 16 deletions r2/r2/controllers/promotecontroller.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from r2.lib.strings import strings
from r2.lib.menus import *
from r2.controllers import ListingController
import sha

from r2.controllers.reddit_base import RedditController

Expand Down Expand Up @@ -446,20 +447,22 @@ def GET_link_thumb(self, *a, **kw):
link = VByName('link_id'),
file = VLength('file', 500*1024))
def POST_link_thumb(self, link=None, file=None):
errors = dict(BAD_CSS_NAME = "", IMAGE_ERROR = "")
try:
force_thumbnail(link, file)
except cssfilter.BadImage:
# if the image doesn't clean up nicely, abort
errors["IMAGE_ERROR"] = _("bad image")

if any(errors.values()):
return UploadedImage("", "", "upload", errors = errors).render()
else:
if (not c.user_is_sponsor and not c.user.trusted_sponsor and
not promote.is_unpaid(link)):
promote.unapprove_promotion(link)
return UploadedImage(_('saved'), thumbnail_url(link), "",
errors = errors).render()

if link and (not promote.is_promoted(link) or
c.user_is_sponsor or c.user.trusted_sponsor):
errors = dict(BAD_CSS_NAME = "", IMAGE_ERROR = "")
try:
# thumnails for promoted links can change and therefore expire
force_thumbnail(link, file)
except cssfilter.BadImage:
# if the image doesn't clean up nicely, abort
errors["IMAGE_ERROR"] = _("bad image")
if any(errors.values()):
return UploadedImage("", "", "upload", errors = errors,
form_id = "image-upload").render()
else:
link.thumbnail_version = sha.new(file).hexdigest()
link._commit()
return UploadedImage(_('saved'), thumbnail_url(link), "",
errors = errors,
form_id = "image-upload").render()

8 changes: 4 additions & 4 deletions r2/r2/controllers/validator/validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -691,8 +691,8 @@ def run(self, sr_name):
return None

try:
sr = Subreddit._by_name(sr_name)
except (NotFound, AttributeError):
sr = Subreddit._by_name(str(sr_name))
except (NotFound, AttributeError, UnicodeEncodeError):
self.set_error(errors.SUBREDDIT_NOEXIST)
return None

Expand Down Expand Up @@ -772,8 +772,8 @@ def run(self, url, sr = None):
sr = c.site
elif sr:
try:
sr = Subreddit._by_name(sr)
except NotFound:
sr = Subreddit._by_name(str(sr))
except (NotFound, UnicodeEncodeError):
self.set_error(errors.SUBREDDIT_NOEXIST)
sr = None
else:
Expand Down
2 changes: 1 addition & 1 deletion r2/r2/lib/db/thing.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,7 @@ def _other_self(self):
def _cache_myself(self):
ck = self._cache_key()
if self.__class__.__name__ in ("Link", "Comment", "Subreddit") and not self._t:
log_text ("{} cache", "About to cache {} for %r" % ck, "warning")
raise ValueError("Refusing to cache {} for %r" % ck)
cache.set(ck, self)

def _sync_latest(self):
Expand Down
14 changes: 9 additions & 5 deletions r2/r2/lib/media.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,12 @@

def thumbnail_url(link):
    """Given a link, returns the url for its thumbnail based on its fullname.

    If the link carries a `thumbnail_version` attribute (set when a thumb
    is re-uploaded), it is appended as a ?v= query parameter so cached
    copies of the old image are busted.
    """
    # BUGFIX: the merged diff left the old unconditional `return` in place,
    # making the version-suffix logic below unreachable.
    res = 'http://%s/%s.png' % (s3_thumbnail_bucket, link._fullname)
    if hasattr(link, "thumbnail_version"):
        res += "?v=%s" % link.thumbnail_version
    return res

def upload_thumb(link, image):
def upload_thumb(link, image, never_expire = True):
"""Given a link and an image, uploads the image to s3 into an image
based on the link's fullname"""
f = tempfile.NamedTemporaryFile(suffix = '.png', delete=False)
Expand All @@ -57,7 +60,8 @@ def upload_thumb(link, image):
s3fname = link._fullname + '.png'

log.debug('uploading to s3: %s' % link._fullname)
s3cp.send_file(g.s3_thumb_bucket, s3fname, contents, 'image/png', never_expire=True)
s3cp.send_file(g.s3_thumb_bucket, s3fname, contents, 'image/png',
never_expire=never_expire)
log.debug('thumbnail %s: %s' % (link._fullname, thumbnail_url(link)))
finally:
os.unlink(f.name)
Expand Down Expand Up @@ -93,10 +97,10 @@ def set_media(link, force = False):

update_link(link, thumbnail, media_object)

def force_thumbnail(link, image_data, never_expire = True):
    """Decode `image_data`, prepare it as a thumbnail, upload it, and mark
    the link as having a thumbnail.

    `never_expire` is forwarded to upload_thumb; the default of True keeps
    the original call-site behavior, so existing callers are unaffected.
    """
    # BUGFIX: the merged diff concatenated the old and new `def` lines,
    # which is a syntax error; keep the newer, backward-compatible
    # signature that accepts `never_expire`.
    image = str_to_image(image_data)
    image = prepare_image(image)
    upload_thumb(link, image, never_expire = never_expire)
    update_link(link, thumbnail = True, media_object = None)

def run():
Expand Down
4 changes: 1 addition & 3 deletions r2/r2/lib/pages/pages.py
Original file line number Diff line number Diff line change
Expand Up @@ -2500,9 +2500,7 @@ class RedditTraffic(Traffic):
"""
def __init__(self):
self.has_data = False
ivals = ["hour", "day"]
if c.default_sr:
ivals.append("month")
ivals = ["hour", "day", "month"]

for ival in ivals:
if c.default_sr:
Expand Down
4 changes: 3 additions & 1 deletion r2/r2/lib/promote.py
Original file line number Diff line number Diff line change
Expand Up @@ -512,7 +512,9 @@ def reject_promotion(link, reason = None):
del weighted[k]
set_live_promotions((links, weighted))
promotion_log(link, "dequeued")
emailer.reject_promo(link, reason = reason)
# don't send a rejection email when the rejection was user initiated.
if not c.user or c.user._id != link.author_id:
emailer.reject_promo(link, reason = reason)


def unapprove_promotion(link):
Expand Down
15 changes: 15 additions & 0 deletions r2/r2/lib/template_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -342,6 +342,21 @@ def add_attr(attrs, code, label=None, link=None):
label = _('reddit admin, speaking officially')
if not link:
link = '/help/faq#Whomadereddit'
elif code in ('X', '@'):
priority = 5
cssclass = 'gray'
if not label:
raise ValueError ("Need a label")
elif code == 'V':
priority = 6
cssclass = 'green'
if not label:
raise ValueError ("Need a label")
elif code == 'B':
priority = 7
cssclass = 'wrong'
if not label:
raise ValueError ("Need a label")
elif code.startswith ('trophy:'):
img = (code[7:], '!', 11, 8)
priority = 99
Expand Down
35 changes: 22 additions & 13 deletions r2/r2/lib/utils/trial_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from r2.lib.utils import ip_and_slash16, jury_cache_dict, voir_dire_priv, tup
from r2.lib.memoize import memoize
from r2.lib.log import log_text
import random as rand

# Hardcache lifetime for a trial.
# The regular hardcache reaper should never run on one of these,
Expand Down Expand Up @@ -92,15 +93,15 @@ def update_voting(defendant, koshers, spams):
g.hardcache.set(tk, d, TRIAL_TIME)

# Check to see if a juror is eligible to serve on a jury for a given link.
def voir_dire(account, ip, slash16, defendants_voted_upon, defendant, sr):
def voir_dire(account, ip, slash16, defendants_assigned_to, defendant, sr):
from r2.models import Link

if defendant._deleted:
g.log.debug("%s is deleted" % defendant)
return False

if defendant._id in defendants_voted_upon:
g.log.debug("%s already jury-voted for %s" % (account.name, defendant))
if defendant._id in defendants_assigned_to:
g.log.debug("%s is already assigned to %s" % (account.name, defendant))
return False

if not isinstance(defendant, Link):
Expand All @@ -116,16 +117,13 @@ def voir_dire(account, ip, slash16, defendants_voted_upon, defendant, sr):

return True

def assign_trial(account, ip, slash16):
def assign_trial(account, juries_already_on, ip, slash16):
from r2.models import Jury, Subreddit, Trial
from r2.lib.db import queries

defendants_voted_upon = []
defendants_assigned_to = []
for jury in Jury.by_account(account):
for jury in juries_already_on:
defendants_assigned_to.append(jury._thing2_id)
if jury._name != '0':
defendants_voted_upon.append(jury._thing2_id)

subscribed_sr_ids = Subreddit.user_subreddits(account, ids=True, limit=None)

Expand Down Expand Up @@ -158,19 +156,30 @@ def assign_trial(account, ip, slash16):
for defendant in defs:
sr = srs[defendant.sr_id]

if voir_dire(account, ip, slash16, defendants_voted_upon, defendant, sr):
if defendant._id not in defendants_assigned_to:
j = Jury._new(account, defendant)

if voir_dire(account, ip, slash16, defendants_assigned_to, defendant, sr):
j = Jury._new(account, defendant)
return defendant

return None

def populate_spotlight():
from r2.models import Jury

if not (c.user_is_loggedin and c.user.jury_betatester()):
g.log.debug("not eligible")
return None

juries_already_on = Jury.by_account(c.user)
# If they're already on a jury, and haven't yet voted, re-show
# it every five or so times.
if rand.random() < 0.2:
unvoted = filter(lambda j: j._name == '0', juries_already_on)
defs = [u._thing2 for u in unvoted]
active_trials = trial_info(defs)
for d in defs:
if active_trials.get(d._fullname, False):
return d

if not g.cache.add("global-jury-key", True, 5):
g.log.debug("not yet time to add another juror")
return None
Expand All @@ -188,7 +197,7 @@ def populate_spotlight():
g.cache.delete("global-jury-key")
return None

trial = assign_trial(c.user, ip, slash16)
trial = assign_trial(c.user, juries_already_on, ip, slash16)

if trial is None:
g.log.debug("nothing available")
Expand Down
25 changes: 0 additions & 25 deletions r2/r2/lib/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -492,31 +492,6 @@ def median(l):
i = len(s) / 2
return s[i]

# localpart and domain should be canonicalized
# When true, returns the reason
def is_banned_email(localpart, domain):
from pylons import g

key = "email_banned-%s@%s" % (localpart, domain)
if g.hardcache.get(key):
return "address"

# For abc@foo.bar.com, if foo.bar.com or bar.com is on the
# no-email list, treat the address as unverified.
parts = domain.rstrip(".").split(".")
while len(parts) >= 2:
whole = ".".join(parts)

d = g.hardcache.get("domain-" + whole)

if d and d.get("no_email", None):
return "domain"

parts.pop(0)

return None


def query_string(dict):
pairs = []
for k,v in dict.iteritems():
Expand Down
62 changes: 56 additions & 6 deletions r2/r2/models/account.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from r2.lib.db.userrel import UserRel
from r2.lib.memoize import memoize
from r2.lib.utils import modhash, valid_hash, randstr, timefromnow
from r2.lib.utils import UrlParser, is_banned_email
from r2.lib.utils import UrlParser
from r2.lib.cache import sgm

from pylons import g
Expand Down Expand Up @@ -346,16 +346,68 @@ def quota_baskets(self, kind):

return baskets

# Needs to take the *canonicalized* version of each email.
@classmethod
def which_emails_are_banned(cls, canons):
    """Map each canonical email address in `canons` to its ban reason.

    Returns a dict keyed by the given addresses; the value is "address"
    when the exact address is hardcache-banned, "domain" when its domain
    or any parent domain is marked no_email, and None when not banned.
    """
    banned = g.hardcache.get_multi(canons, prefix="email_banned-")

    # Resolve addresses banned outright; group the rest by domain, e.g.
    #   canons_by_domain["abc.def.com"] =
    #       ["bob@abc.def.com", "sue@abc.def.com"]
    rv = {}
    canons_by_domain = {}
    for canon in canons:
        if banned.get(canon, False):
            rv[canon] = "address"
            continue
        rv[canon] = None

        at_sign = canon.find("@")
        domain = canon[at_sign + 1:]
        canons_by_domain.setdefault(domain, [])
        canons_by_domain[domain].append(canon)

    # Build the list of subdomains to check for ban status; for
    # abc@foo.bar.com we need to check foo.bar.com and bar.com.
    # BUGFIX: the loop variable was named `canons`, shadowing the method
    # parameter; renamed to `addresses`. Also dropped the unused local
    # `banned_subdomains`.
    canons_by_subdomain = {}
    for domain, addresses in canons_by_domain.iteritems():
        parts = domain.rstrip(".").split(".")
        while len(parts) >= 2:
            whole = ".".join(parts)
            canons_by_subdomain.setdefault(whole, [])
            canons_by_subdomain[whole].extend(addresses)
            parts.pop(0)

    sub_dict = g.hardcache.get_multi(canons_by_subdomain.keys(),
                                     prefix="domain-")
    for subdomain, d in sub_dict.iteritems():
        if d and d.get("no_email", None):
            for canon in canons_by_subdomain[subdomain]:
                rv[canon] = "domain"

    return rv

def has_banned_email(self):
    """Return the ban reason for this account's canonical email, or None."""
    addr = self.canonical_email()
    return self.which_emails_are_banned((addr,)).get(addr)

def canonical_email(self):
    """Return a canonical form of this account's email address.

    Lowercases the address, strips dots from the local part, and drops
    any "+tag" suffix, per the gmail-style aliasing the original comment
    describes (a.s.d.f+something@gmail.com -> asdf@gmail.com).  Addresses
    without exactly one "@" canonicalize to "invalid@invalid.invalid".
    """
    email = str(self.email.lower())
    if email.count("@") != 1:
        return "invalid@invalid.invalid"

    localpart, domain = email.split("@")

    # BUGFIX: str.replace returns a new string; the original discarded the
    # result, so dots were never actually removed from the local part.
    localpart = localpart.replace(".", "")
    plus = localpart.find("+")
    # plus > 0 (not >= 0) so a leading "+" doesn't empty the local part.
    if plus > 0:
        localpart = localpart[:plus]

    return localpart + "@" + domain

def cromulent(self):
"""Return whether the user has validated their email address and
Expand All @@ -364,9 +416,7 @@ def cromulent(self):
if not self.email_verified:
return False

t = self.canonical_email()

if is_banned_email(*t):
if self.has_banned_email():
return False

# Otherwise, congratulations; you're cromulent!
Expand Down
Loading

0 comments on commit 1d9b9fe

Please sign in to comment.