This repository has been archived by the owner on Nov 9, 2017. It is now read-only.
/
reddit_base.py
1014 lines (862 loc) · 36.5 KB
/
reddit_base.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2012 reddit
# Inc. All Rights Reserved.
###############################################################################
from mako.filters import url_escape
from pylons import c, g, request
from pylons.controllers.util import redirect_to
from pylons.i18n import _
from pylons.i18n.translation import LanguageError
from r2.lib import pages, utils, filters, amqp, stats
from r2.lib.utils import (
SimpleSillyStub,
UniqueIterator,
http_utils,
is_subdomain,
is_throttled,
)
from r2.lib.cache import LocalCache, make_key, MemcachedError
import random as rand
from r2.models.account import FakeAccount, valid_feed, valid_admin_cookie
from r2.models.subreddit import Subreddit, Frontpage
from r2.models import *
from errors import ErrorSet, BadRequestError, ForbiddenError, errors
from validator import *
from r2.lib.template_helpers import add_sr
from r2.config.extensions import is_api
from r2.lib.translation import set_lang
from r2.lib.contrib import ipaddress
from r2.lib.base import BaseController, proxyurl, abort
from r2.lib.authentication import authenticate_user
from Cookie import CookieError
from copy import copy
from Cookie import CookieError
from datetime import datetime, timedelta
from hashlib import sha1, md5
from urllib import quote, unquote
import simplejson
import locale, socket
import babel.core
from functools import wraps
from r2.lib.tracking import encrypt, decrypt
from pylons import Response
# Sentinel Expires values: NEVER keeps a cookie effectively forever,
# DELETE (epoch + 1s) makes the browser drop it immediately.
NEVER = 'Thu, 31 Dec 2037 23:59:59 GMT'
DELETE = 'Thu, 01-Jan-1970 00:00:01 GMT'

# cookies whose values change what the page cache may serve; consulted in
# MinimalController.request_key() and try_pagecache()
cache_affecting_cookies = ('reddit_first','over18','_options')
class Cookies(dict):
    """Mapping of cookie name -> Cookie, with a helper for safe insertion."""

    def add(self, name, value, *k, **kw):
        """Store a Cookie under the utf-8 encoded form of *name*."""
        encoded_name = name.encode('utf-8')
        self[encoded_name] = Cookie(value, *k, **kw)
class Cookie(object):
    """A single HTTP cookie plus the metadata needed to emit Set-Cookie.

    When no explicit domain is given, the domain is derived from the
    request: on an authorized cname (and not the default sr) the common
    subdomain of the request host and the site's domain is used,
    otherwise the configured g.domain.
    """
    def __init__(self, value, expires=None, domain=None,
                 dirty=True, secure=False, httponly=False):
        self.value = value
        self.expires = expires
        # dirty cookies are the ones written back to the client in post()
        self.dirty = dirty
        self.secure = secure
        self.httponly = httponly
        if domain:
            self.domain = domain
        elif c.authorized_cname and not c.default_sr:
            self.domain = utils.common_subdomain(request.host, c.site.domain)
        else:
            self.domain = g.domain

    def __repr__(self):
        return ("Cookie(value=%r, expires=%r, domain=%r, dirty=%r)"
                % (self.value, self.expires, self.domain, self.dirty))
class UnloggedUser(FakeAccount):
    """Account stand-in for visitors who are not logged in.

    Preferences are persisted in an encrypted per-user options cookie
    instead of the database; only the keys in ``allowed_prefs`` are ever
    read from or written to that cookie.
    """
    _cookie = 'options'
    allowed_prefs = ('pref_content_langs', 'pref_lang',
                     'pref_frame_commentspanel')

    def __init__(self, browser_langs, *a, **kw):
        FakeAccount.__init__(self, *a, **kw)
        if browser_langs:
            lang = browser_langs[0]
            content_langs = list(browser_langs)
            # try to coerce the default language
            if g.lang not in content_langs:
                content_langs.append(g.lang)
            content_langs.sort()
        else:
            lang = g.lang
            content_langs = 'all'
        self._defaults = self._defaults.copy()
        self._defaults['pref_lang'] = lang
        self._defaults['pref_content_langs'] = content_langs
        self._defaults['pref_frame_commentspanel'] = False
        self._load()

    @property
    def name(self):
        # an unlogged user has no account name
        raise NotImplementedError

    def _from_cookie(self):
        """Decrypt and parse prefs from the cookie; {} on malformed data."""
        z = read_user_cookie(self._cookie)
        try:
            d = simplejson.loads(decrypt(z))
            return dict((k, v) for k, v in d.iteritems()
                        if k in self.allowed_prefs)
        except ValueError:
            return {}

    def _to_cookie(self, data):
        """Write only the allowed subset of *data* back to the cookie."""
        data = data.copy()
        for k in list(data.keys()):
            if k not in self.allowed_prefs:
                # FIX: the original `del k` only unbound the loop variable,
                # so disallowed prefs were still serialized into the cookie.
                del data[k]
        set_user_cookie(self._cookie, encrypt(simplejson.dumps(data)))

    def _subscribe(self, sr):
        # anonymous users cannot subscribe; deliberate no-op
        pass

    def _unsubscribe(self, sr):
        # anonymous users cannot unsubscribe; deliberate no-op
        pass

    def valid_hash(self, hash):
        # no modhash exists for anonymous users
        return False

    def _commit(self):
        """Flush dirty pref values into the options cookie."""
        if self._dirty:
            for k, (oldv, newv) in self._dirties.iteritems():
                self._t[k] = newv
            self._to_cookie(self._t)

    def _load(self):
        self._t.update(self._from_cookie())
        self._loaded = True
def read_user_cookie(name):
    """Return the current user's namespaced cookie *name*, or '' if absent.

    Cookie names are prefixed with the account name (empty prefix for
    anonymous visitors) joined by an underscore.
    """
    prefix = c.user.name if c.user_is_loggedin else ""
    cookie = c.cookies.get(prefix + '_' + name)
    return cookie.value if cookie is not None else ''
def set_user_cookie(name, val, **kwargs):
    """Set a cookie namespaced by the logged-in user's name (or '' when anonymous)."""
    prefix = c.user.name if c.user_is_loggedin else ""
    cookie_name = prefix + '_' + name
    c.cookies[cookie_name] = Cookie(value=val, **kwargs)
# matcher validating a comma-joined list of Link fullnames
valid_click_cookie = fullname_regex(Link, True).match

def set_recent_clicks():
    """Load the user's recently clicked links into c.recent_clicks.

    Reads the per-user 'recentclicks2' cookie, keeps at most the five
    most recent unique fullnames, and clears the cookie when it is
    invalid or references deleted links.
    """
    c.recent_clicks = []
    if not c.user_is_loggedin:
        return

    click_cookie = read_user_cookie('recentclicks2')
    if click_cookie:
        if valid_click_cookie(click_cookie):
            names = [x for x in UniqueIterator(click_cookie.split(',')) if x]

            if len(names) > 5:
                names = names[:5]
                set_user_cookie('recentclicks2', ','.join(names))

            #eventually this will look at the user preference
            names = names[:5]

            try:
                c.recent_clicks = Link._by_fullname(names, data=True,
                                                    return_dict=False)
            except NotFound:
                # clear their cookie because it's got bad links in it
                set_user_cookie('recentclicks2', '')
        else:
            #if the cookie wasn't valid, clear it
            set_user_cookie('recentclicks2', '')
def read_mod_cookie():
    """Drain the legacy per-user 'mod' cookie.

    The parsed value is discarded; the cookie is simply cleared whenever
    anything was present in it.
    """
    cook = [s.split('=')[0:2] for s in read_user_cookie('mod').split(':') if s]
    if cook:
        set_user_cookie('mod', '')
def firsttime():
    """Classify the visitor via the 'reddit_first' cookie.

    Returns 'mobile_suggest' for a first visit from an iPhone/Android
    browser, True for any other first visit, and False for returning
    visitors. First visits are recorded in the cookie as a side effect.
    """
    if (request.user_agent and
        ('iphone' in request.user_agent.lower() or
         'android' in request.user_agent.lower()) and
        not get_redditfirst('mobile_suggest')):
        set_redditfirst('mobile_suggest', 'first')
        return 'mobile_suggest'
    elif get_redditfirst('firsttime'):
        return False
    else:
        set_redditfirst('firsttime', 'first')
        return True
def get_redditfirst(key, default=None):
    """Fetch *key* from the JSON 'reddit_first' cookie.

    When the cookie is present and no explicit default was supplied, the
    default becomes True — the cookie's mere presence is meaningful even
    when the requested key is missing.
    """
    try:
        val = c.cookies['reddit_first'].value
        # on cookie presence, return as much
        if default is None:
            default = True
        cookie = simplejson.loads(val)
        return cookie[key]
    except (ValueError, TypeError, KeyError), e:
        # it's not a proper json dict, or the cookie isn't present, or
        # the key isn't part of the cookie; we don't really want a
        # broken cookie to propagate an exception up
        return default
def set_redditfirst(key, val):
    """Merge *key* -> *val* into the long-lived JSON 'reddit_first' cookie."""
    try:
        cookie = simplejson.loads(c.cookies['reddit_first'].value)
        cookie[key] = val
    except (ValueError, TypeError, KeyError), e:
        # invalid JSON data; we'll just construct a new cookie
        cookie = {key: val}

    c.cookies['reddit_first'] = Cookie(simplejson.dumps(cookie),
                                       expires=NEVER)
# this cookie is also accessed by organic.js, so changes to the format
# will have to be made there as well
organic_pos_key = 'organic_pos'

def organic_pos():
    "organic_pos() -> (calc_date = str(), pos = int())"
    # fall back to 0 when the stored value is missing or not an int
    pos = get_redditfirst(organic_pos_key, 0)
    if not isinstance(pos, int):
        pos = 0
    return pos

def set_organic_pos(pos):
    "set_organic_pos(str(), int()) -> None"
    set_redditfirst(organic_pos_key, pos)
def over18():
    """Whether the current user may view over-18 content.

    Admins and users with pref_over_18 always qualify. Anonymous consent
    is remembered via an 'over18' cookie bound to the client IP hash.
    Implicitly returns None (falsy) when no condition matches.
    """
    if c.user.pref_over_18 or c.user_is_admin:
        return True
    else:
        if 'over18' in c.cookies:
            cookie = c.cookies['over18'].value
            if cookie == sha1(request.ip).hexdigest():
                return True
def set_obey_over18():
    """Record whether the API caller asked us to honor over-18 filtering."""
    flag = request.GET.get("obey_over18")
    c.obey_over18 = flag == "true"
def set_subreddit():
    """Resolve the request's subreddit and store it on c.site.

    Defaults to Frontpage. Handles legacy cnames (301 redirect),
    '+'-joined multireddits, and bare domain listings. May redirect to
    subreddit search or abort(404) for unknown names.
    """
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # stale (cached) lookups are only safe for read-only methods
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage

    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        # multireddit: union of the named subreddits
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown name: offer search if it is at least well-formed
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def set_content_type():
    """Configure render style and response content type from the extension.

    Side effects: installs a JS wrapper for embed-style extensions, logs
    in feed users on rss/api/json GETs, toggles mobile/compact rendering
    (persisted via the 'reddit_mobility' cookie), and validates JSONP
    callbacks — forcing an unlogged user for JSONP requests.
    """
    e = request.environ
    c.render_style = e['render_style']
    c.response_content_type = e['content_type']

    if e.has_key('extension'):
        c.extension = ext = e['extension']
        if ext in ('embed', 'wired', 'widget'):
            def to_js(content):
                return utils.to_js(content, callback=request.params.get(
                    "callback", "document.write"))
            c.response_wrappers.append(to_js)
        if ext in ("rss", "api", "json") and request.method.upper() == "GET":
            user = valid_feed(request.GET.get("user"),
                              request.GET.get("feed"),
                              request.path)
            if user and not g.read_only_mode:
                c.user = user
                c.user_is_loggedin = True
        if ext in ("mobile", "m") and not request.GET.get("keep_extension"):
            try:
                if request.cookies['reddit_mobility'] == "compact":
                    c.extension = "compact"
                    c.render_style = "compact"
            except (ValueError, KeyError):
                c.suggest_compact = True
        if ext in ("mobile", "m", "compact"):
            if request.GET.get("keep_extension"):
                c.cookies['reddit_mobility'] = Cookie(ext, expires=NEVER)

    # allow JSONP requests to generate callbacks, but do not allow
    # the user to be logged in for these
    callback = request.GET.get("jsonp")
    if is_api() and request.method.upper() == "GET" and callback:
        if not valid_jsonp_callback(callback):
            abort(BadRequestError(errors.BAD_JSONP_CALLBACK))
        c.allowed_callback = callback
        c.user = UnloggedUser(get_browser_langs())
        c.user_is_loggedin = False
def get_browser_langs():
    """Parse Accept-Language into an ordered list of supported languages.

    Quality parameters (';q=…') are stripped; for a regioned tag like
    'en-us' the bare 'en' is also considered. Only languages present in
    g.languages are kept, in first-seen order without duplicates.
    """
    browser_langs = []
    langs = request.environ.get('HTTP_ACCEPT_LANGUAGE')
    if langs:
        langs = langs.split(',')
        browser_langs = []
        seen_langs = set()
        # extract languages from browser string
        for l in langs:
            if ';' in l:
                l = l.split(';')[0]
            if l not in seen_langs and l in g.languages:
                browser_langs.append(l)
                seen_langs.add(l)
            if '-' in l:
                l = l.split('-')[0]
            if l not in seen_langs and l in g.languages:
                browser_langs.append(l)
                seen_langs.add(l)
    return browser_langs
def set_host_lang():
    """Adopt a language hint supplied by the domain middleware, if any."""
    # try to grab the language from the domain
    preferred = request.environ.get('reddit-prefer-lang')
    if preferred:
        c.host_lang = preferred
def set_iface_lang():
    """Choose the interface language and install its translation.

    The host-derived language (c.host_lang) wins over the user's
    preference; an optional g.lang_override replaces plain English.
    Falls back to g.lang when no translation exists, and records the
    resulting babel locale and RTL flag on c.
    """
    locale.setlocale(locale.LC_ALL, g.locale)
    lang = [g.lang]

    # GET param wins
    if c.host_lang:
        lang = [c.host_lang]
    else:
        lang = [c.user.pref_lang]

    # FIX: getattr with no default raises AttributeError when
    # g.lang_override is not configured; use a None default instead.
    if getattr(g, "lang_override", None) and lang[0] == "en":
        lang.insert(0, g.lang_override)

    #choose the first language
    c.lang = lang[0]

    #then try to overwrite it if we have the translation for another
    #one
    for l in lang:
        try:
            set_lang(l, fallback_lang=g.lang)
            c.lang = l
            break
        except LanguageError:
            #we don't have a translation for that language
            set_lang(g.lang, graceful_fail=True)

    try:
        c.locale = babel.core.Locale.parse(c.lang, sep='-')
    except (babel.core.UnknownLocaleError, ValueError):
        c.locale = babel.core.Locale.parse(g.lang, sep='-')

    #TODO: add exceptions here for rtl languages
    if c.lang in ('ar', 'he', 'fa'):
        c.lang_rtl = True
def set_content_lang():
    """Set c.content_langs from the user's preference ('all' passes through)."""
    prefs = c.user.pref_content_langs
    if prefs == 'all':
        c.content_langs = prefs
    else:
        c.content_langs = sorted(prefs)
def set_cnameframe():
    """Detect cname-framed requests and record cname state on c.

    A request is a cname when the cname GET marker is present or the
    host falls outside g.domain; the marker param is stripped so it does
    not leak into generated URLs.
    """
    if (bool(request.params.get(utils.UrlParser.cname_get))
        or not request.host.split(":")[0].endswith(g.domain)):
        c.cname = True
        request.environ['REDDIT_CNAME'] = 1
        if request.params.has_key(utils.UrlParser.cname_get):
            del request.params[utils.UrlParser.cname_get]
        if request.get.has_key(utils.UrlParser.cname_get):
            del request.get[utils.UrlParser.cname_get]
    c.frameless_cname = request.environ.get('frameless_cname', False)
    if hasattr(c.site, 'domain'):
        c.authorized_cname = request.environ.get('authorized_cname', False)
def set_colors():
    """Read optional bgcolor/bordercolor hex query params (HTML-lite pages)."""
    theme_rx = re.compile(r'')
    # 3- or 6-digit hex color, no leading '#'
    color_rx = re.compile(r'\A([a-fA-F0-9]){3}(([a-fA-F0-9]){3})?\Z')
    c.theme = None
    for attr in ('bgcolor', 'bordercolor'):
        value = request.get.get(attr)
        if color_rx.match(value or ''):
            setattr(c, attr, value)
def ratelimit_agent(agent):
    """Throttle *agent* to roughly one request per second via the cache."""
    key = 'rate_agent_' + agent
    if not g.cache.get(key):
        # first request in this window: mark it for one second
        g.cache.set(key, 't', time=1)
    else:
        request.environ['retry_after'] = 1
        abort(429)
# extracts the app id from an AppEngine fetcher's User-Agent string
appengine_re = re.compile(r'AppEngine-Google; \(\+http://code.google.com/appengine; appid: s~([a-z0-9-]{6,30})\)\Z')

def ratelimit_agents():
    """Apply per-agent rate limits based on the User-Agent header.

    AppEngine fetchers are limited per app id; other agents are limited
    when they match a configured substring in g.agents.
    """
    user_agent = request.user_agent
    if not user_agent:
        return

    # parse out the appid for appengine apps
    appengine_match = appengine_re.match(user_agent)
    if appengine_match:
        appid = appengine_match.group(1)
        ratelimit_agent(appid)
        return

    user_agent = user_agent.lower()
    for s in g.agents:
        if s and user_agent and s in user_agent:
            ratelimit_agent(s)
def ratelimit_throttled():
    """Reject (429) requests coming from throttled IP addresses."""
    client_ip = request.ip.strip()
    if is_throttled(client_ip):
        abort(429)
def paginated_listing(default_page_size=25, max_page_size=100, backend='sql'):
    """Decorator factory: validate and normalize listing pagination params.

    Wraps the handler in @validate for limit/after/before/count/target/
    show, converts 'before' into after+reverse so handlers deal with a
    single direction, and hoists target/show handling for htmllite.
    """
    def decorator(fn):
        @validate(num=VLimit('limit', default=default_page_size,
                             max_limit=max_page_size),
                  after=VByName('after', backend=backend),
                  before=VByName('before', backend=backend),
                  count=VCount('count'),
                  target=VTarget("target"),
                  show=VLength('show', 3))
        @wraps(fn)
        def new_fn(self, before, **env):
            if c.render_style == "htmllite":
                c.link_target = env.get("target")
            elif "target" in env:
                del env["target"]

            if "show" in env and env['show'] == 'all':
                c.ignore_hide_rules = True
            kw = build_arg_list(fn, env)

            #turn before into after/reverse
            kw['reverse'] = False
            if before:
                kw['after'] = before
                kw['reverse'] = True

            return fn(self, **kw)
        return new_fn
    return decorator
#TODO i want to get rid of this function. once the listings in front.py are
#moved into listingcontroller, we shouldn't have a need for this
#anymore
def base_listing(fn):
    """Shorthand for paginated_listing() with all defaults."""
    return paginated_listing()(fn)
def is_trusted_origin(origin):
    """True when *origin*'s hostname falls under one of g.trusted_domains."""
    try:
        parsed = urlparse(origin)
    except ValueError:
        return False

    hostname = parsed.hostname
    for domain in g.trusted_domains:
        if is_subdomain(hostname, domain):
            return True
    return False
def cross_domain(origin_check=is_trusted_origin, **options):
    """Set up cross domain validation and hoisting for a request handler."""
    def cross_domain_wrap(fn):
        # permissions consulted by MinimalController.check_cors()
        cors_perms = {
            "origin_check": origin_check,
            "allow_credentials": bool(options.get("allow_credentials"))
        }

        @wraps(fn)
        def cross_domain_handler(self, *args, **kwargs):
            if request.params.get("hoist") == "cookie":
                # Cookie polling response
                if cors_perms["origin_check"](g.origin):
                    name = request.environ["pylons.routes_dict"]["action_name"]
                    resp = fn(self, *args, **kwargs)
                    # stash the body in a cookie the opener polls for,
                    # and empty the actual response
                    c.cookies.add('hoist_%s' % name, ''.join(resp.content))
                    c.response_content_type = 'text/html'
                    resp.content = ''
                    return resp
                else:
                    abort(403)
            else:
                self.check_cors()
                return fn(self, *args, **kwargs)

        cross_domain_handler.cors_perms = cors_perms
        return cross_domain_handler
    return cross_domain_wrap
def require_https():
    """Abort with HTTPS_REQUIRED unless the request arrived over TLS."""
    if c.secure:
        return
    abort(ForbiddenError(errors.HTTPS_REQUIRED))
def prevent_framing_and_css(allow_cname_frame=False):
    """Decorator: disable custom styles and (usually) forbid framing.

    When *allow_cname_frame* is set, framing is still permitted for
    unauthorized cname requests.
    """
    def wrap(f):
        @wraps(f)
        def no_funny_business(*args, **kwargs):
            c.allow_styles = False
            frame_ok = (allow_cname_frame and c.cname
                        and not c.authorized_cname)
            if not frame_ok:
                c.deny_frames = True
            return f(*args, **kwargs)
        return no_funny_business
    return wrap
def request_timer_name(action):
    """Return the statsd timer key under which *action*'s service time is recorded."""
    return "service_time.web.%s" % action
class MinimalController(BaseController):
    """Base controller: request timing, page cache, and cookie plumbing.

    Does NOT authenticate users or set up language/site access — that is
    RedditController's job.
    """

    allow_stylesheets = False

    def request_key(self):
        """Build the page-cache key for the current request.

        Incorporates everything that changes the rendered output:
        language, host, security, extension, render style and the
        cache-affecting cookies.
        """
        # note that this references the cookie at request time, not
        # the current value of it
        try:
            cookies_key = [(x, request.cookies.get(x, ''))
                           for x in cache_affecting_cookies]
        except CookieError:
            cookies_key = ''

        return make_key('request_key_',
                        c.lang,
                        c.content_langs,
                        request.host,
                        c.secure,
                        c.cname,
                        request.fullpath,
                        c.over18,
                        c.firsttime,
                        c.extension,
                        c.render_style,
                        cookies_key)

    def cached_response(self):
        # placeholder action used when try_pagecache() served from cache
        return c.response

    def pre(self):
        """Per-request setup: timers, caches, rate limits, c.site, cookies."""
        action = request.environ["pylons.routes_dict"].get("action")
        if action:
            c.request_timer = g.stats.get_timer(request_timer_name(action))
        else:
            c.request_timer = SimpleSillyStub()

        c.response_wrappers = []
        c.start_time = datetime.now(g.tz)
        c.request_timer.start()
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.host in g.secure_domains

        #check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False
        c.show_wiki_actions = False

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()

        c.request_timer.intermediate("minimal-pre")

    def try_pagecache(self):
        """Serve a cached page (and its cookies) for anonymous GETs."""
        #check content cache
        if request.method.upper() == 'GET' and not c.user_is_loggedin:
            r = g.pagecache.get(self.request_key())
            if r:
                r, c.cookies = r
                response = c.response
                response.headers = r.headers
                response.content = r.content

                # replay only the cache-affecting cookies from the hit
                for x in r.cookies.keys():
                    if x in cache_affecting_cookies:
                        cookie = r.cookies[x]
                        response.set_cookie(key=x,
                                            value=cookie.value,
                                            domain=cookie.get('domain', None),
                                            expires=cookie.get('expires', None),
                                            path=cookie.get('path', None),
                                            secure=cookie.get('secure', False),
                                            httponly=cookie.get('httponly', False))

                response.status_code = r.status_code
                request.environ['pylons.routes_dict']['action'] = 'cached_response'
                c.request_timer.name = request_timer_name("cached_response")
                # make sure to carry over the content type
                c.response_content_type = r.headers['content-type']
                c.used_cache = True
                # response wrappers have already been applied before cache write
                c.response_wrappers = []

    def post(self):
        """Finalize the response: wrappers, headers, page cache, cookies, stats."""
        c.request_timer.intermediate("action")

        response = c.response
        content = filter(None, response.content)
        if isinstance(content, (list, tuple)):
            content = ''.join(content)
        for w in c.response_wrappers:
            content = w(content)
        response.content = content
        if c.response_content_type:
            response.headers['Content-Type'] = c.response_content_type

        if c.user_is_loggedin and not c.allow_loggedin_cache:
            response.headers['Cache-Control'] = 'no-cache'
            response.headers['Pragma'] = 'no-cache'

        if c.deny_frames:
            response.headers["X-Frame-Options"] = "DENY"

        #return
        #set content cache
        if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_code not in (429, 503)
            and response.content and response.content[0]):
            try:
                g.pagecache.set(self.request_key(),
                                (response, c.cookies),
                                g.page_cache_time)
            except MemcachedError as e:
                # this codepath will actually never be hit as long as
                # the pagecache memcached client is in no_reply mode.
                g.log.warning("Ignored exception (%r) on pagecache "
                              "write for %r", e, request.path)

        # send cookies
        for k, v in c.cookies.iteritems():
            if v.dirty:
                response.set_cookie(key=k,
                                    value=quote(v.value),
                                    domain=v.domain,
                                    expires=v.expires,
                                    secure=getattr(v, 'secure', False),
                                    httponly=getattr(v, 'httponly', False))

        end_time = datetime.now(g.tz)

        # update last_visit
        if (c.user_is_loggedin and not g.disallow_db_writes and
            request.method.upper() != "POST" and
            not c.dont_update_last_visit and
            request.path != '/validuser'):
            c.user.update_last_visit(c.start_time)

        check_request(end_time)

        # this thread is probably going to be reused, but it could be
        # a while before it is. So we might as well dump the cache in
        # the mean time so that we don't have dead objects hanging
        # around taking up memory
        g.reset_caches()

        # push data to statsd
        c.request_timer.stop()
        g.stats.flush()

    def abort404(self):
        abort(404, "not found")

    def abort403(self):
        abort(403, "forbidden")

    def check_cors(self):
        """Emit CORS headers when the target handler grants the request origin."""
        origin = request.headers.get("Origin")
        if not origin:
            return

        method = request.method
        if method == 'OPTIONS':
            # preflight request
            method = request.headers.get("Access-Control-Request-Method")
            if not method:
                self.abort403()

        action = request.environ["pylons.routes_dict"]["action_name"]
        handler = self._get_action_handler(action, method)
        # cors_perms is attached by the cross_domain decorator
        cors = handler and getattr(handler, "cors_perms", None)

        if cors and cors["origin_check"](origin):
            response.headers["Access-Control-Allow-Origin"] = origin
            if cors.get("allow_credentials"):
                response.headers["Access-Control-Allow-Credentials"] = "true"

    def OPTIONS(self):
        """Return empty responses for CORS preflight requests"""
        self.check_cors()

    def sendpng(self, string):
        """Send raw PNG bytes as the response body."""
        c.response_content_type = 'image/png'
        c.response.content = string
        return c.response

    def update_qstring(self, dict):
        """Return the current path with *dict* merged into its querystring."""
        merged = copy(request.get)
        merged.update(dict)
        return request.path + utils.query_string(merged)

    def api_wrapper(self, kw):
        """Serialize *kw* as websafe JSON into the response body."""
        data = simplejson.dumps(kw)
        c.response.content = filters.websafe_json(data)
        return c.response

    def iframe_api_wrapper(self, kw):
        """Serialize *kw* into a script that posts back to the parent frame."""
        data = simplejson.dumps(kw)
        c.response_content_type = 'text/html'
        c.response.content = (
            '<html><head><script type="text/javascript">\n'
            'parent.$.handleResponse().call('
            'parent.$("#" + window.frameElement.id).parent(), %s)\n'
            '</script></head></html>') % filters.websafe_json(data)
        return c.response
class RedditController(MinimalController):
    """Full controller base: authenticates the user and gates site access."""

    @staticmethod
    def login(user, rem=False):
        """Set the session cookie for *user*; persistent when *rem* is True."""
        c.cookies[g.login_cookie] = Cookie(value=user.make_cookie(),
                                           expires=NEVER if rem else None,
                                           httponly=True)
    @staticmethod
    def logout():
        """Expire the session cookie immediately."""
        c.cookies[g.login_cookie] = Cookie(value='', expires=DELETE)
    @staticmethod
    def enable_admin_mode(user, first_login=None):
        """Set the admin-mode cookie for *user*."""
        # no expiration time so the cookie dies with the browser session
        c.cookies[g.admin_cookie] = Cookie(value=user.make_admin_cookie(first_login=first_login))
@staticmethod
def remember_otp(user):
cookie = user.make_otp_cookie()
expiration = datetime.utcnow() + timedelta(seconds=g.OTP_COOKIE_TTL)
expiration = expiration.strftime("%a, %d %b %Y %H:%M:%S GMT")
set_user_cookie(g.otp_cookie,
cookie,
secure=True,
httponly=True,
expires=expiration)
    @staticmethod
    def disable_admin_mode(user):
        """Expire the admin-mode cookie immediately."""
        c.cookies[g.admin_cookie] = Cookie(value='', expires=DELETE)
    def pre(self):
        """Full per-request setup on top of MinimalController.pre().

        Authenticates the user (login cookie or feed), applies the
        admin/OTP cookies, configures language and content preferences,
        resolves random subreddits, and enforces subreddit bans, privacy
        and over-18 gates before the action runs.
        """
        MinimalController.pre(self)

        set_cnameframe()

        # populate c.cookies unless we're on the unsafe media_domain
        if request.host != g.media_domain or g.media_domain == g.domain:
            try:
                for k, v in request.cookies.iteritems():
                    # minimalcontroller can still set cookies
                    if k not in c.cookies:
                        # we can unquote even if it's not quoted
                        c.cookies[k] = Cookie(value=unquote(v), dirty=False)
            except CookieError:
                #pylons or one of the associated retarded libraries
                #can't handle broken cookies
                request.environ['HTTP_COOKIE'] = ''

        c.firsttime = firsttime()

        # the user could have been logged in via one of the feeds
        maybe_admin = False
        is_otpcookie_valid = False

        # no logins for RSS feed unless valid_feed has already been called
        if not c.user:
            if c.extension != "rss":
                authenticate_user()

                admin_cookie = c.cookies.get(g.admin_cookie)
                if c.user_is_loggedin and admin_cookie:
                    maybe_admin, first_login = valid_admin_cookie(admin_cookie.value)

                    if maybe_admin:
                        self.enable_admin_mode(c.user, first_login=first_login)
                    else:
                        self.disable_admin_mode(c.user)

                otp_cookie = read_user_cookie(g.otp_cookie)
                if c.user_is_loggedin and otp_cookie:
                    is_otpcookie_valid = valid_otp_cookie(otp_cookie)

            if not c.user:
                c.user = UnloggedUser(get_browser_langs())
                # patch for fixing mangled language preferences
                if (not isinstance(c.user.pref_lang, basestring) or
                    not all(isinstance(x, basestring)
                            for x in c.user.pref_content_langs)):
                    c.user.pref_lang = g.lang
                    c.user.pref_content_langs = [g.lang]
                    c.user._commit()

        if c.user_is_loggedin:
            if not c.user._loaded:
                c.user._load()
            c.modhash = c.user.modhash()
            if request.method.upper() == 'GET':
                read_mod_cookie()
            if hasattr(c.user, 'msgtime') and c.user.msgtime:
                c.have_messages = c.user.msgtime
            c.show_mod_mail = Subreddit.reverse_moderator_ids(c.user)
            c.have_mod_messages = getattr(c.user, "modmsgtime", False)
            # the admin cookie only grants admin powers to actual admins
            c.user_is_admin = maybe_admin and c.user.name in g.admins
            c.user_special_distinguish = c.user.special_distinguish()
            c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors
            c.otp_cached = is_otpcookie_valid
            if not isinstance(c.site, FakeSubreddit) and not g.disallow_db_writes:
                c.user.update_sr_activity(c.site)

        c.over18 = over18()
        set_obey_over18()

        #set_browser_langs()
        set_host_lang()
        set_iface_lang()
        set_content_lang()
        set_recent_clicks()
        # used for HTML-lite templates
        set_colors()

        # set some environmental variables in case we hit an abort
        if not isinstance(c.site, FakeSubreddit):
            request.environ['REDDIT_NAME'] = c.site.name

        # random reddit trickery -- have to do this after the content lang is set
        if c.site == Random:
            c.site = Subreddit.random_reddit()
            redirect_to("/" + c.site.path.strip('/') + request.path)
        elif c.site == RandomNSFW:
            c.site = Subreddit.random_reddit(over18=True)
            redirect_to("/" + c.site.path.strip('/') + request.path)

        if not request.path.startswith("/api/login/"):
            # is the subreddit banned?
            if c.site.spammy() and not c.user_is_admin and not c.error_page:
                ban_info = getattr(c.site, "ban_info", {})
                if "message" in ban_info:
                    message = ban_info['message']
                else:
                    sitelink = url_escape(add_sr("/"))
                    subject = ("/r/%s has been incorrectly banned" %
                               c.site.name)
                    link = ("/r/redditrequest/submit?url=%s&title=%s" %
                            (sitelink, subject))
                    message = strings.banned_subreddit_message % dict(
                        link=link)

                errpage = pages.RedditError(strings.banned_subreddit_title,
                                            message,
                                            image="subreddit-banned.png")
                request.environ['usable_error_content'] = errpage.render()
                self.abort404()

            # check if the user has access to this subreddit
            if not c.site.can_view(c.user) and not c.error_page:
                public_description = c.site.public_description
                errpage = pages.RedditError(strings.private_subreddit_title,
                                            strings.private_subreddit_message,
                                            image="subreddit-private.png",
                                            sr_description=public_description)
                request.environ['usable_error_content'] = errpage.render()
                self.abort403()

            #check over 18
            if (c.site.over_18 and not c.over18 and
                request.path not in ("/frame", "/over18")
                and c.render_style == 'html'):
                return self.intermediate_redirect("/over18")

        #check whether to allow custom styles
        c.allow_styles = True
        c.can_apply_styles = self.allow_stylesheets
        if g.css_killswitch:
            c.can_apply_styles = False
        #if the preference is set and we're not at a cname
        elif not c.user.pref_show_stylesheets and not c.cname:
            c.can_apply_styles = False
        #if the site has a cname, but we're not using it
        elif c.site.domain and c.site.css_on_cname and not c.cname:
            c.can_apply_styles = False

        c.request_timer.intermediate("base-pre")
    def check_modified(self, thing, action):
        """304-shortcut based on the legacy last_modified record for *thing*."""
        # this is a legacy shim until the old last_modified system is dead
        last_modified = utils.last_modified_date(thing, action)
        return self.abort_if_not_modified(last_modified)
def abort_if_not_modified(self, last_modified, private=True,
max_age=timedelta(0),
must_revalidate=True):
"""Check If-Modified-Since and abort(304) if appropriate."""
if c.user_is_loggedin and not c.allow_loggedin_cache:
return
# HTTP timestamps round to nearest second. truncate this value for
# comparisons.
last_modified = last_modified.replace(microsecond=0)
date_str = http_utils.http_date_str(last_modified)
c.response.headers['last-modified'] = date_str
cache_control = []
if private:
cache_control.append('private')
cache_control.append('max-age=%d' % max_age.total_seconds())
if must_revalidate:
cache_control.append('must-revalidate')
c.response.headers['cache-control'] = ', '.join(cache_control)
modified_since = request.if_modified_since