Incorporate get_sticky contribution along with some internal changes.
bboe committed Apr 27, 2015
1 parent ab3c6b5 commit 7a2ec7d
Showing 8 changed files with 49 additions and 32 deletions.
8 changes: 5 additions & 3 deletions CHANGES.rst
@@ -33,6 +33,7 @@ Unreleased
* **[CHANGE]** Removed :class:`praw.Config` instance attribute ``is_reddit``.
* **[FEATURE]** Added :meth:`get_message` to fetch a single Message object
by its ID.
* **[FEATURE]** Added :meth:`get_sticky` to get a Subreddit's sticky post.
* **[REDDIT]** Removed ``send_feedback`` as it is no longer supported by
reddit.
* **[REDDIT]** Added ``DeprecationWarning`` to :meth:`login` as reddit will
@@ -60,9 +61,10 @@ PRAW 2.1.20
* **[FEATURE]** Added a ``nsfw`` parameter to :meth:`.get_random_subreddit`
that permits fetching a random NSFW Subreddit. This change also supports
fetching these subreddits via ``get_subreddit('randnsfw')``.
* **[FEATURE]** Added a ``from_sr`` parameter to :meth:`.send_message` to
send the private message from a subreddit you moderate (Like the "From"
dropdown box when composing a message).
* **[FEATURE]** Added a ``from_sr`` parameter to
:meth:`~.PrivateMessagesMixin.send_message` to send the private message from
a subreddit you moderate (Like the "From" dropdown box when composing a
message).
* **[FEATURE]** Added :class:`Multireddit`
* **[FEATURE]** Added :meth:`get_multireddit` to get a single multireddit obj
* **[FEATURE]** Added :meth:`get_my_multireddits` to get all multireddits
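The ``get_sticky`` and ``get_message`` entries above describe simple fetch helpers. A minimal usage sketch against the PRAW 3 alpha in this commit (assuming an installed ``praw`` and network access; everything except ``get_sticky`` and ``errors.NotFound``, which come from this diff, is illustrative):

import praw
from praw import errors

r = praw.Reddit(user_agent='get_sticky example')

# Fetch the stickied Submission of a subreddit. Per the internal.py
# change below, a subreddit without a sticky now raises NotFound.
try:
    sticky = r.get_sticky('redditdev')
    print(sticky.id, sticky.title)
except errors.NotFound:
    print('No sticky post is currently set.')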
12 changes: 6 additions & 6 deletions docs/index.rst
@@ -33,17 +33,17 @@ References And Other Relevant Pages
* `PRAW's Source Code <https://github.com/praw-dev/praw>`_
* `reddit's Source Code <https://github.com/reddit/reddit>`_
* `reddit's API Wiki Page <https://github.com/reddit/reddit/wiki/API>`_
* `reddit's API Documentation <http://www.reddit.com/dev/api>`_
* `reddit's API Documentation <https://www.reddit.com/dev/api>`_

* `reddit Markdown Primer
<http://www.reddit.com/r/reddit.com/comments/6ewgt/reddit_markdown_primer_or
<https://www.reddit.com/r/reddit.com/comments/6ewgt/reddit_markdown_primer_or
_how_do_you_do_all_that/c03nik6>`_
* `reddit.com's FAQ <http://www.reddit.com/help/faq>`_
* `reddit.com's FAQ <https://www.reddit.com/help/faq>`_
* `reddit.com's Status Twitterbot <https://twitter.com/redditstatus/>`_.
Tweets when reddit goes up or down
* `r/changelog <http://www.reddit.com/r/changelog/>`_. Significant changes to
* `r/changelog <https://www.reddit.com/r/changelog/>`_. Significant changes to
reddit's codebase will be announced here in non-developer speak
* `r/redditdev <http://www.reddit.com/r/redditdev>`_. Ask questions about
* `r/redditdev <https://www.reddit.com/r/redditdev>`_. Ask questions about
reddit's codebase, PRAW and other API clients here

.. include:: ../README.rst
@@ -204,7 +204,7 @@ Useful Scripts
`ClockStalker <https://github.com/ClockStalker/clockstalker>`_
Examines a redditor's posting history and creates `a comment with a nice
activity overview
<http://www.reddit.com/r/AskReddit/comments/129lyb/what_fact_about_reality_
<https://www.reddit.com/r/AskReddit/comments/129lyb/what_fact_about_reality_
terrifies_you_or_gives/c6tbgd7?context=1>`_. ClockStalker uses an older
version of PRAW, the ``reddit`` module. It should, but may not, work with
the latest version of PRAW.
24 changes: 10 additions & 14 deletions praw/__init__.py
@@ -367,10 +367,13 @@ def handle_redirect():
url = request.url
while url: # Manually handle 302 redirects
request.url = url
kwargs['_cache_key'] = (normalize_url(request.url),
tuple(key_items))
response = self.handler.request(request=request.prepare(),
proxies=self.http.proxies,
timeout=timeout, **kwargs)
url = _raise_redirect_exceptions(response)
assert url != request.url
return response

request = _prepare_request(self, url, params, data, auth, files)
@@ -386,10 +389,8 @@ def handle_redirect():
key_items.append(tuple(key_value.get_dict().items()))
else:
key_items.append(key_value)
cache_key = (normalize_url(request.url), tuple(key_items))
kwargs = {'_rate_domain': self.config.domain,
'_rate_delay': int(self.config.api_request_delay),
'_cache_key': cache_key,
'_cache_ignore': bool(files) or raw_response,
'_cache_timeout': int(self.config.cache_timeout)}

@@ -803,8 +804,8 @@ def get_info(self, url=None, thing_id=None, limit=None):
:param url: The url to lookup.
:param thing_id: A single thing_id, or a list of thing_ids. A thing_id
can be any one of Comment (t1_), Link (t3_), or Subreddit (t5_) to
lookup by fullname.
can be any one of Comment (``t1_``), Link (``t3_``), or Subreddit
(``t5_``) to lookup by fullname.
:param limit: The maximum number of Submissions to return when looking
up by url. When None, uses account default settings.
:returns: When a single thing_id is provided, return the corresponding
@@ -947,13 +948,8 @@ def get_rising(self, *args, **kwargs):
@decorators.restrict_access(scope='read')
def get_sticky(self, subreddit=None):
"""Return a Submission object for the sticky of the subreddit."""
try:
self.request_json(self.config['sticky'] %
six.text_type(subreddit))
except errors.RedirectException as exc: # This _should_ occur
# TODO: This request 404s if no thread is stickied.
return self.get_submission(exc.response_url)
raise errors.ClientException('Expected exception not raised.')
return objects.Submission.from_json(self.request_json(
self.config['sticky'] % six.text_type(subreddit)))

def get_submission(self, url=None, submission_id=None, comment_limit=0,
comment_sort=None, params=None):
@@ -1298,8 +1294,8 @@ def login(self, username=None, password=None):
were empty get it from stdin. Look for password in parameter, then
praw.ini (but only if username matches that in praw.ini) and finally
if they both are empty get it with getpass. Add the variables user
(username) and pswd (password) to your praw.ini file to allow for auto-
login.
(username) and pswd (password) to your praw.ini file to allow for
auto-login.
A successful login will overwrite any existing authentication.
@@ -2054,7 +2050,7 @@ def get_message(self, message_id, *args, **kwargs):
:param message_id: The ID or Fullname for a Message
The additional parameters are passed into
:meth:`.from_id` of Message, and subsequently into
:meth:`~praw.objects.Message.from_id` of Message, and subsequently into
:meth:`.request_json`.
"""
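The first ``praw/__init__.py`` hunk moves the ``_cache_key`` computation inside the manual 302-redirect loop, so each URL visited during a redirect chain is cached under its own key instead of every response being keyed on the original request URL. A rough standalone illustration of that idea (hypothetical ``follow_redirects`` helper and toy ``fetch`` callable, not PRAW's actual handler API):

def follow_redirects(fetch, url, cache):
    """Fetch ``url``, manually following redirects, caching per URL."""
    response = None
    while url:
        key = url.lower()               # stand-in for normalize_url()
        if key not in cache:
            cache[key] = fetch(url)
        response = cache[key]
        url = response.get('location')  # None once nothing redirects
    return response

# Toy demo: about/sticky redirects to the stickied thread's comments page.
pages = {'r/example/about/sticky': {'location': 'r/example/comments/abc'},
         'r/example/comments/abc': {'location': None, 'body': 'sticky'}}
cache = {}
result = follow_redirects(lambda u: pages[u], 'r/example/about/sticky', cache)
assert result['body'] == 'sticky' and len(cache) == 2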
12 changes: 7 additions & 5 deletions praw/internal.py
@@ -20,18 +20,18 @@

from __future__ import print_function, unicode_literals

from requests import Request
from requests import Request, codes
import re
import six
import sys
from requests.compat import urljoin
from praw.decorators import restrict_access
from praw.errors import (InvalidSubreddit, OAuthException,
from praw.errors import (InvalidSubreddit, NotFound, OAuthException,
OAuthInsufficientScope, OAuthInvalidToken,
RedirectException)


RE_RANDOM = re.compile('rand(om|nsfw)')
RE_REDIRECT = re.compile('(rand(om|nsfw))|about/sticky')


def _get_redditor_listing(subpath=''):
@@ -165,7 +165,7 @@ def _raise_redirect_exceptions(response):
subreddit = new_url.rsplit('=', 1)[1]
raise InvalidSubreddit('`{0}` is not a valid subreddit'
.format(subreddit))
elif not RE_RANDOM.search(response.url):
elif not RE_REDIRECT.search(response.url):
raise RedirectException(response.url, new_url)
return new_url

@@ -180,7 +180,9 @@ def _raise_response_exceptions(response):
raise OAuthInvalidToken('invalid_token', response.url)
else:
raise OAuthException(msg, response.url)
response.raise_for_status()
if response.status_code == codes.not_found:
raise NotFound(response)
response.raise_for_status() # TODO: Map all codes to a PRAWException


def _to_reddit_list(arg):
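Two behaviors change in ``praw/internal.py``: redirects coming from an ``about/sticky`` URL are now followed silently, just like the ``random``/``randnsfw`` endpoints, instead of raising ``RedirectException``, and a 404 response is mapped to PRAW's ``NotFound`` error before falling through to ``raise_for_status``. A small standalone check of the widened pattern (regex copied from the diff; example URLs are illustrative):

import re

RE_REDIRECT = re.compile('(rand(om|nsfw))|about/sticky')

# Redirects from these URLs are followed without raising RedirectException.
for url in ('https://api.reddit.com/r/random/',
            'https://api.reddit.com/r/randnsfw/',
            'https://api.reddit.com/r/redditdev/about/sticky/.json'):
    assert RE_REDIRECT.search(url) is not None

# Any other redirecting URL still raises RedirectException.
assert RE_REDIRECT.search('https://api.reddit.com/r/some_subreddit/') is None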
14 changes: 10 additions & 4 deletions praw/objects.py
@@ -943,6 +943,13 @@ def from_id(reddit_session, subreddit_id):
'permalink': '/comments/{0}'.format(subreddit_id)}
return Submission(reddit_session, pseudo_data)

@staticmethod
def from_json(json_response):
"""Return a submission object from the json response."""
submission = json_response[0]['data']['children'][0]
submission.comments = json_response[1]['data']['children']
return submission

@staticmethod
@restrict_access(scope='read')
def from_url(reddit_session, url, comment_limit=0, comment_sort=None,
@@ -975,11 +982,10 @@ def from_url(reddit_session, url, comment_limit=0, comment_sort=None,
if comment_sort:
params['sort'] = comment_sort

s_info, c_info = reddit_session.request_json(url, params=params)
response = reddit_session.request_json(url, params=params)
if comments_only:
return c_info['data']['children']
submission = s_info['data']['children'][0]
submission.comments = c_info['data']['children']
return response[1]['data']['children']
submission = Submission.from_json(response)
submission._comment_sort = comment_sort # pylint: disable=W0212
submission._params = params # pylint: disable=W0212
return submission
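The new ``Submission.from_json`` static method centralizes parsing that ``from_url`` previously did inline, and that ``get_sticky`` now reuses: reddit's comments endpoint returns a two-element list whose first Listing holds the submission and whose second holds its comments. A simplified sketch of the shape it consumes, with plain dicts standing in for the Submission and Comment objects that ``request_json`` actually builds:

def from_json(json_response):
    """Return the submission element with its comments attached."""
    submission = json_response[0]['data']['children'][0]
    # PRAW's children are real objects, so it assigns an attribute;
    # with plain dicts this sketch attaches a key instead.
    submission['comments'] = json_response[1]['data']['children']
    return submission

# Hypothetical, heavily trimmed response in the two-Listing shape.
response = [
    {'data': {'children': [{'id': '2ujhkr', 'title': 'Sticky post'}]}},
    {'data': {'children': [{'id': 'c1', 'body': 'First comment'}]}},
]
post = from_json(response)
assert post['id'] == '2ujhkr' and len(post['comments']) == 1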
1 change: 1 addition & 0 deletions tests/cassettes/test_get_sticky.json

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions tests/cassettes/test_get_sticky__not_found.json
@@ -0,0 +1 @@
{"http_interactions": [{"request": {"body": {"string": "", "encoding": "utf-8"}, "headers": {"Connection": ["keep-alive"], "Accept-Encoding": ["gzip, deflate"], "Accept": ["*/*"], "User-Agent": ["PRAW_test_suite PRAW/3.0a1 Python/2.7.5 Darwin-13.4.0-x86_64-i386-64bit"]}, "method": "GET", "uri": "https://api.reddit.com/r/reddit_api_test/about/sticky/.json"}, "response": {"body": {"string": "{\"error\": 404}", "encoding": "UTF-8"}, "headers": {"content-length": ["14"], "x-xss-protection": ["1; mode=block"], "x-reddit-tracking": ["https://pixel.redditmedia.com/pixel/of_destiny.png?v=gcFMC%2FmkoFRKQtrhLbpzxBdd%2FWJ%2BaNn4BCTYYVuctWcKMdatKaWO9bh1PgiFYWopiRBROv5P1SRr3JbqVG7d%2B7jlTblVR1pt"], "x-content-type-options": ["nosniff"], "access-control-expose-headers": ["X-Reddit-Tracking, X-Moose"], "set-cookie": ["__cfduid=d8b101ed275e03aa546e9e29e3f497aed1430091390; expires=Mon, 25-Apr-16 23:36:30 GMT; path=/; domain=.reddit.com; HttpOnly"], "server": ["cloudflare-nginx"], "connection": ["keep-alive"], "x-ua-compatible": ["IE=edge"], "cache-control": ["no-cache"], "date": ["Sun, 26 Apr 2015 23:36:30 GMT"], "x-frame-options": ["SAMEORIGIN"], "access-control-allow-origin": ["*"], "x-moose": ["majestic"], "content-type": ["application/json; charset=UTF-8"], "cf-ray": ["1dd61c34a9df1165-DFW"]}, "status": {"message": "Not Found", "code": 404}, "url": "https://api.reddit.com/r/reddit_api_test/about/sticky/.json"}, "recorded_at": "2015-04-26T23:36:30"}], "recorded_with": "betamax/0.4.2"}
9 changes: 9 additions & 0 deletions tests/test_unauthenticated_reddit.py
@@ -154,6 +154,15 @@ def test_get_rising(self):
result = self.r.get_rising(limit=num)
self.assertEqual(num, len(list(result)))

@betamax
def test_get_sticky(self):
self.assertEqual('2ujhkr', self.r.get_sticky('redditdev').id)

@betamax
def test_get_sticky__not_found(self):
subreddit = self.r.get_subreddit(self.sr)
self.assertRaises(errors.NotFound, subreddit.get_sticky)

@betamax
def test_get_submissions(self):
def fullname(url):