forked from praw-dev/praw
/
helpers.py
99 lines (91 loc) · 3.77 KB
/
helpers.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
# This file is part of reddit_api.
#
# reddit_api is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# reddit_api is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with reddit_api. If not, see <http://www.gnu.org/licenses/>.
import urllib
import urllib2
from urlparse import urljoin
import settings
from decorators import require_login, sleep_after
from urls import urls
from util import memoize
def _get_section(subpath=""):
    """
    Used by the Redditor class to generate each of the sections (overview,
    comments, submitted).
    """
    def get_section(self, sort="new", time="all",
                    limit=settings.DEFAULT_CONTENT_LIMIT,
                    place_holder=None):
        # Sort order and time window both travel as query-string parameters.
        query = {"sort": sort, "time": time}
        section_url = urljoin(self._url, subpath)
        return self.reddit_session._get_content(section_url,
                                                limit=limit,
                                                url_data=query,
                                                place_holder=place_holder)
    return get_section
def _get_sorter(subpath="", **defaults):
    """
    Used by the Reddit Page classes to generate each of the currently supported
    sorts (hot, top, new, best).

    :param subpath: path fragment appended to the page's base URL.
    :param defaults: default query parameters merged into the request when the
        caller does not supply them explicitly.
    """
    # Named ``sorter`` instead of ``sorted`` so the builtin is not shadowed.
    def sorter(self, limit=settings.DEFAULT_CONTENT_LIMIT,
               place_holder=None, **data):
        for key, value in defaults.items():
            if key == "time":
                # time should be "t" in the API data dict
                key = "t"
            data.setdefault(key, value)
        return self.reddit_session._get_content(urljoin(self._url, subpath),
                                                limit=limit,
                                                url_data=data,
                                                place_holder=place_holder)
    return sorter
def _modify_relationship(relationship, unlink=False):
    """
    Modify the relationship between the current user or subreddit and a target
    thing.

    Used to support friending (user-to-user), as well as moderating,
    contributor creating, and banning (user-to-subreddit).
    """
    # the API uses friend and unfriend to manage all of these relationships
    if unlink:
        url = urls["unfriend"]
    else:
        url = urls["friend"]

    @require_login
    def do_relationship(self, thing):
        request_params = dict(name=thing,
                              container=self.content_id,
                              type=relationship,
                              uh=self.modhash,
                              api_type='json')
        return self._request_json(url, request_params)
    return do_relationship
@memoize
@sleep_after
def _request(reddit_session, page_url, params=None, url_data=None,
             openerdirector=None):
    """
    Fetch ``page_url`` and return the raw response body as a byte string.

    :param reddit_session: session object whose DEFAULT_HEADERS are sent with
        the request.
    :param page_url: URL to fetch; unicode URLs are utf-8 encoded and quoted.
    :param params: optional POST parameters; values are utf-8 encoded.
    :param url_data: optional query-string parameters appended to the URL.
    :param openerdirector: optional urllib2 OpenerDirector used instead of
        the default opener (manages cookies on a per-session basis).
    """
    if url_data:
        page_url += "?" + urllib.urlencode(url_data)
    # urllib2.Request throws a 404 for some reason with data=""
    encoded_params = None
    if params:
        # NOTE: assumes every param value is a text string — TODO confirm.
        params = dict([k, v.encode('utf-8')] for k, v in params.items())
        encoded_params = urllib.urlencode(params)
    if isinstance(page_url, unicode):
        page_url = urllib.quote(page_url.encode('utf-8'), ':/')
    request = urllib2.Request(page_url, data=encoded_params,
                              headers=reddit_session.DEFAULT_HEADERS)
    # The openerdirector manages cookies on a per-session basis
    if openerdirector:
        response = openerdirector.open(request)
    else:
        response = urllib2.urlopen(request)
    # Close the response explicitly so the underlying socket is released
    # promptly instead of waiting for garbage collection.
    try:
        return response.read()
    finally:
        response.close()