Commit 2e87db6

Fix some pylint errors.
bboe committed Apr 14, 2015
1 parent a71dd55 commit 2e87db6
Showing 8 changed files with 89 additions and 94 deletions.
55 changes: 29 additions & 26 deletions praw/__init__.py
@@ -50,14 +50,14 @@
 if os.environ.get('SERVER_SOFTWARE') is not None:
     # Google App Engine information
     # https://developers.google.com/appengine/docs/python/
-    platform_info = os.environ.get('SERVER_SOFTWARE')
+    PLATFORM_INFO = os.environ.get('SERVER_SOFTWARE')
 else:
     # Standard platform information
-    platform_info = platform.platform(True)
+    PLATFORM_INFO = platform.platform(True)

 UA_STRING = '%%s PRAW/%s Python/%s %s' % (__version__,
                                           sys.version.split()[0],
-                                          platform_info)
+                                          PLATFORM_INFO)

 MIN_IMAGE_SIZE = 128
 MAX_IMAGE_SIZE = 512000
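
Note: the platform_info to PLATFORM_INFO rename satisfies pylint's invalid-name
check (C0103), which expects module-level names that are bound once to use the
UPPER_CASE constant style. A minimal sketch of the convention (names
illustrative):

    import platform

    platform_info = platform.platform(True)  # lowercase module-level name: C0103
    PLATFORM_INFO = platform.platform(True)  # UPPER_CASE constant style: clean
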
@@ -71,7 +71,7 @@
     CHR = unichr  # NOQA


-class Config(object):  # pylint: disable-msg=R0903, R0924
+class Config(object):  # pylint: disable=R0903,R0924

     """A class containing the configuration for a reddit site."""

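Note: the bulk of this commit is the mechanical migration from the deprecated
`disable-msg` pragma spelling to `disable`. Both suppress the named checks
(here R0903, too-few-public-methods, and R0924), but newer pylint releases
warn about the old form. A hedged sketch, class name illustrative:

    class Point(object):  # pylint: disable=R0903
        # R0903 is suppressed for this class only; the deprecated spelling
        # 'disable-msg=R0903' behaves identically on old pylint versions.
        def __init__(self, x, y):
            self.x = x
            self.y = y
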
@@ -334,6 +334,9 @@ def _req_error(*_, **__):
             update_check(__name__, __version__)
             self.update_checked = True

+        # Initial values
+        self._use_oauth = False
+
     def _request(self, url, params=None, data=None, files=None, auth=None,
                  timeout=None, raw_response=False, retry_on_error=True):
         """Given a page url and a dict of params, open and return the page.
@@ -481,26 +484,21 @@ def get_content(self, url, params=None, limit=0, place_holder=None,
         else:
             fetch_once = True

-        # When getting posts from a multireddit owned by the authenticated
-        # Redditor, we are redirected to me/m/multi/. Handle that now
-        # instead of catching later.
-        if re.search('user/.*/m/.*', url):
-            redditor = url.split('/')[-4]
-            if self.user and self.user.name.lower() == redditor.lower():
-                url = url.replace("user/"+redditor, 'me')
+        if hasattr(self, '_url_update'):
+            url = self._url_update(url)  # pylint: disable=E1101

         # While we still need to fetch more content to reach our limit, do so.
         while fetch_once or fetch_all or objects_found < limit:
             if _use_oauth:  # Set the necessary _use_oauth value
                 assert self._use_oauth is False
-                self._use_oauth = _use_oauth  # pylint: disable-msg=W0201
+                self._use_oauth = _use_oauth
             try:
                 page_data = self.request_json(url, params=params)
                 if object_filter:
                     page_data = page_data[object_filter]
             finally:  # Restore _use_oauth value
                 if _use_oauth:
-                    self._use_oauth = False  # pylint: disable-msg=W0201
+                    self._use_oauth = False
                 fetch_once = False
             root = page_data.get(root_field, page_data)
             for thing in root[thing_field]:
@@ -551,7 +549,7 @@ def request_json(self, url, params=None, data=None, as_objects=True,
                                  retry_on_error=retry_on_error)
         hook = self._json_reddit_objecter if as_objects else None
         # Request url just needs to be available for the objecter to use
-        self._request_url = url  # pylint: disable-msg=W0201
+        self._request_url = url  # pylint: disable=W0201
         data = json.loads(response, object_hook=hook)
         delattr(self, '_request_url')
         # Update the modhash
@@ -1149,7 +1147,6 @@ def __init__(self, *args, **kwargs):
         #  * True mean login authenticated
         #  * set(...) means OAuth authenticated with the scopes in the set
         self._authentication = None
-        self._use_oauth = False  # Updated on a request by request basis
         self.access_token = None
         self.refresh_token = None
         self.user = None
@@ -1164,6 +1161,16 @@ def __str__(self):
         else:
             return 'Unauthenticated reddit sesssion'

+    def _url_update(self, url):
+        # When getting posts from a multireddit owned by the authenticated
+        # Redditor, we are redirected to me/m/multi/. Handle that now
+        # instead of catching later.
+        if re.search('user/.*/m/.*', url):
+            redditor = url.split('/')[-4]
+            if self.user and self.user.name.lower() == redditor.lower():
+                url = url.replace("user/"+redditor, 'me')
+        return url
+
     @decorators.restrict_access(scope=None, login=True)
     def accept_moderator_invite(self, subreddit):
         """Accept a moderator invite to the given subreddit.
@@ -1175,7 +1182,7 @@ def accept_moderator_invite(self, subreddit):
         """
         data = {'r': six.text_type(subreddit)}
         # Clear moderated subreddits and cache
-        self.user._mod_subs = None  # pylint: disable-msg=W0212
+        self.user._mod_subs = None  # pylint: disable=W0212
         self.evict(self.config['my_mod_subreddits'])
         return self.request_json(self.config['accept_mod_invite'], data=data)

@@ -1225,7 +1232,7 @@ def edit_wiki_page(self, subreddit, page, content, reason=''):
                                               page.lower()))
         return self.request_json(self.config['wiki_edit'], data=data)

-    def get_access_information(self, code,  # pylint: disable-msg=W0221
+    def get_access_information(self, code,  # pylint: disable=W0221
                                update_session=True):
         """Return the access information for an OAuth2 authorization grant.
@@ -1319,7 +1326,7 @@ def login(self, username=None, password=None):
         self.user = self.get_redditor(user)
         self.user.__class__ = objects.LoggedInRedditor

-    def refresh_access_information(self,  # pylint: disable-msg=W0221
+    def refresh_access_information(self,  # pylint: disable=W0221
                                    refresh_token=None,
                                    update_session=True):
         """Return updated access information for an OAuth2 authorization grant.
@@ -1580,7 +1587,7 @@ def upload_image(self, subreddit, image_path, name=None, header=False):
         json_end = response.find(']]')
         try:
             image_errors = dict(json.loads(response[json_start:json_end + 2]))
-        except Exception:  # pylint: disable-msg=W0703
+        except Exception:  # pylint: disable=W0703
             warn_explicit('image_upload parsing issue', UserWarning, '', 0)
             return False
         if image_errors['BAD_CSS_NAME']:
@@ -1826,7 +1833,7 @@ def get_contributors(self, subreddit, *args, **kwargs):
           subreddits only access is required. See issue #246.

         """
-        # pylint: disable-msg=W0613
+        # pylint: disable=W0613
         def get_contributors_helper(self, subreddit):
             # It is necessary to have the 'self' argument as it's needed in
             # restrict_access to determine what class the decorator is
@@ -1984,13 +1991,11 @@ def get_my_moderation(self, *args, **kwargs):
                                 **kwargs)

     @decorators.restrict_access(scope='mysubreddits')
-    def get_my_multireddits(self, *args, **kwargs):
+    def get_my_multireddits(self):
         """Return a list of the authenticated Redditor's Multireddits."""
-        # The JSON data for multireddits is returned from Reddit as a list
-        # Therefore, we cannot use :meth:`get_content` to retrieve the objects
-        url = self.config['my_multis']
-        response = self.request_json(url)
-        return response
+        return self.request_json(self.config['my_multis'])

     @decorators.restrict_access(scope='mysubreddits')
     def get_my_subreddits(self, *args, **kwargs):
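
Note: dropping the unused *args and **kwargs from get_my_multireddits fixes
W0613 (unused-argument) at the source instead of suppressing it, and the body
collapses into a single request_json call. A tiny sketch of the same fix,
names illustrative:

    class Client(object):
        def fetch(self, *args, **kwargs):  # W0613: args and kwargs never used
            return []

        def fetch_fixed(self):  # removing the dead parameters clears the warning
            return []
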
@@ -2200,8 +2205,6 @@ def submit(self, subreddit, title, text=None, url=None, captcha=None,
         # Clear the OAuth setting when attempting to fetch the submission
         if self._use_oauth:
             self._use_oauth = False
-            # TODO Verify this hack
-            # Hack until reddit/627 is resolved
             if url.startswith(self.config.oauth_url):
                 url = self.config.api_url + url[len(self.config.oauth_url):]
         try:
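
Note: the TODO comments are removed but the workaround stays: a submission
fetched right after an OAuth submit must be requested from the plain API host.
A standalone sketch of the swap (these URL values are illustrative, not PRAW's
actual configuration):

    OAUTH_URL = 'https://oauth.reddit.com/'
    API_URL = 'https://api.reddit.com/'

    url = 'https://oauth.reddit.com/comments/abc123'
    if url.startswith(OAUTH_URL):
        url = API_URL + url[len(OAUTH_URL):]
    assert url == 'https://api.reddit.com/comments/abc123'
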
6 changes: 3 additions & 3 deletions praw/decorators.py
@@ -315,10 +315,10 @@ def is_mod_of_all(user, subreddit):
             obj = getattr(cls, 'reddit_session', cls)
             # This function sets _use_oauth for one time use only.
             # Verify that statement is actually true.
-            assert not obj._use_oauth  # pylint: disable-msg=W0212
+            assert not obj._use_oauth  # pylint: disable=W0212

             if scope and obj.has_scope(scope):
-                obj._use_oauth = True  # pylint: disable-msg=W0212
+                obj._use_oauth = True  # pylint: disable=W0212
             elif oauth_only:
                 raise errors.OAuthScopeRequired(function.__name__, scope)
             elif login and obj.is_logged_in():
@@ -338,7 +338,7 @@ def is_mod_of_all(user, subreddit):
             try:
                 return function(cls, *args, **kwargs)
             finally:
-                obj._use_oauth = False  # pylint: disable-msg=W0212
+                obj._use_oauth = False  # pylint: disable=W0212
         return function if IS_SPHINX_BUILD else wrapped
     return wrap

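Note: `_use_oauth` is a one-shot flag: restrict_access sets it for a single
wrapped call, and the finally block guarantees it is cleared even when the
call raises. The W0212 (protected-access) suppressions stay because the
decorator writes another object's underscore attribute. A minimal sketch of
the pattern, names illustrative:

    from functools import wraps

    def restrict(function):
        @wraps(function)
        def wrapped(obj, *args, **kwargs):
            assert not obj._use_oauth   # pylint: disable=W0212
            obj._use_oauth = True       # pylint: disable=W0212
            try:
                return function(obj, *args, **kwargs)
            finally:
                # Restore the flag no matter how the call exits.
                obj._use_oauth = False  # pylint: disable=W0212
        return wrapped
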
6 changes: 3 additions & 3 deletions praw/handlers.py
@@ -56,7 +56,7 @@ def wrapped(cls, _rate_domain, _rate_delay, **kwargs):
         return wrapped

     @classmethod
-    def evict(cls, urls):  # pylint: disable-msg=W0613
+    def evict(cls, urls):  # pylint: disable=W0613
         """Method utilized to evict entries for the given urls.

         :param urls: An iterable containing normalized urls.
@@ -204,7 +204,7 @@ def _relay(self, **kwargs):
             cPickle.dump(kwargs, sock_fp, cPickle.HIGHEST_PROTOCOL)
             sock_fp.flush()
             retval = cPickle.load(sock_fp)
-        except:  # pylint: disable-msg=W0702
+        except:  # pylint: disable=W0702
             exc_type, exc, _ = sys.exc_info()
             socket_error = exc_type is socket.error
             if socket_error and exc.errno == 111:  # Connection refused
@@ -227,7 +227,7 @@ def _relay(self, **kwargs):
                 sock_fp.close()
                 sock.close()
         if isinstance(retval, Exception):
-            raise retval  # pylint: disable-msg=E0702
+            raise retval  # pylint: disable=E0702
         return retval

     def evict(self, urls):
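Note: _relay pickles whatever the multiprocess server produced, including
exceptions, and re-raises them on the client side. The bare except keeps its
W0702 suppression because every failure must be forwarded, and the raise keeps
E0702 (raising-bad-type) because pylint cannot infer that retval is an
exception instance. A minimal standalone sketch:

    import pickle

    def relay(payload):
        retval = pickle.loads(payload)
        if isinstance(retval, Exception):
            # The isinstance guard makes this safe at runtime, but pylint
            # cannot prove the type statically.
            raise retval  # pylint: disable=E0702
        return retval

    print(relay(pickle.dumps(42)))               # 42
    # relay(pickle.dumps(ValueError('boom')))    # would raise ValueError
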
9 changes: 4 additions & 5 deletions praw/helpers.py
@@ -56,7 +56,7 @@ def comment_stream(reddit_session, subreddit, limit=None, verbosity=1):
     """
     get_function = partial(reddit_session.get_comments,
                            six.text_type(subreddit))
-    return _stream_generator(get_function, reddit_session, limit, verbosity)
+    return _stream_generator(get_function, limit, verbosity)


 def submission_stream(reddit_session, subreddit, limit=None, verbosity=1):
@@ -88,8 +88,7 @@ def submission_stream(reddit_session, subreddit, limit=None, verbosity=1):
         limit = 1000
     if not hasattr(subreddit, 'reddit_session'):
         subreddit = reddit_session.get_subreddit(subreddit)
-    return _stream_generator(subreddit.get_new, reddit_session, limit,
-                             verbosity)
+    return _stream_generator(subreddit.get_new, limit, verbosity)


 def valid_redditors(redditors, sub):
@@ -111,7 +110,7 @@ def valid_redditors(redditors, sub):
             if resp['ok']]


-def _stream_generator(get_function, reddit_session, limit=None, verbosity=1):
+def _stream_generator(get_function, limit=None, verbosity=1):
     def debug(msg, level):
         if verbosity >= level:
             sys.stderr.write(msg + '\n')
@@ -177,7 +176,7 @@ def b36_id(item):
             yield item
         # Sleep if necessary
         if sleep:
-            sleep_time, msg, msg_level = sleep
+            sleep_time, msg, msg_level = sleep  # pylint: disable=W0633
             debug(msg.format(sleep_time), msg_level)
             time.sleep(sleep_time)

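Note: two distinct fixes land in this file: _stream_generator drops its unused
reddit_session parameter (W0613), with both callers updated to match, and the
unpacking of sleep gains a W0633 (unbalanced-tuple-unpacking) suppression
because the value may be None rather than a three-item tuple. A minimal sketch
of the second case, values illustrative:

    import time

    def maybe_sleep(backoff):
        # Either None or a (seconds, message, verbosity) tuple, mirroring the
        # shape _stream_generator works with.
        return (0.1, 'sleeping for {0} seconds', 1) if backoff else None

    sleep = maybe_sleep(True)
    if sleep:
        # pylint cannot infer that a truthy value is always a 3-tuple here.
        sleep_time, msg, msg_level = sleep  # pylint: disable=W0633
        print(msg.format(sleep_time), msg_level)
        time.sleep(sleep_time)
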
4 changes: 2 additions & 2 deletions praw/internal.py
@@ -50,7 +50,7 @@ def _listing(self, sort='new', time='all', *args, **kwargs):
         kwargs.setdefault('params', {})
         kwargs['params'].setdefault('sort', sort)
         kwargs['params'].setdefault('t', time)
-        url = urljoin(self._url, subpath)  # pylint: disable-msg=W0212
+        url = urljoin(self._url, subpath)  # pylint: disable=W0212
         return self.reddit_session.get_content(url, *args, **kwargs)
     return _listing

@@ -69,7 +69,7 @@ def _sorted(self, *args, **kwargs):
             kwargs['params'] = {}
         for key, value in six.iteritems(defaults):
             kwargs['params'].setdefault(key, value)
-        url = urljoin(self._url, subpath)  # pylint: disable-msg=W0212
+        url = urljoin(self._url, subpath)  # pylint: disable=W0212
         return self.reddit_session.get_content(url, *args, **kwargs)
     return _sorted

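Note: only the pragma spelling changes here; the W0212 (protected-access)
suppressions themselves remain because these factory helpers read self._url,
an underscore attribute of another class, which pylint flags even though the
helpers are later bound as methods of that class. A minimal sketch, names
illustrative:

    class Thing(object):
        def __init__(self, url):
            self._url = url

    def _listing(self):
        # A module-level function later attached as a method still looks like
        # it touches a protected member of a foreign object.
        return self._url  # pylint: disable=W0212

    print(_listing(Thing('https://www.reddit.com/r/python/')))
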
12 changes: 6 additions & 6 deletions praw/multiprocess.py
@@ -12,7 +12,7 @@


 class ThreadingTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
-    # pylint: disable-msg=R0903,W0232
+    # pylint: disable=R0903,W0232

     """A TCP server that creates new threads per connection."""

@@ -32,7 +32,7 @@ def handle_error(_, client_addr):


 class RequestHandler(socketserver.StreamRequestHandler):
-    # pylint: disable-msg=W0232
+    # pylint: disable=W0232

     """A class that handles incoming requests.
@@ -66,18 +66,18 @@ def do_request(self, request, proxies, timeout, **_):

     def handle(self):
         """Parse the RPC, make the call, and pickle up the return value."""
-        data = cPickle.load(self.rfile)  # pylint: disable-msg=E1101
+        data = cPickle.load(self.rfile)  # pylint: disable=E1101
         method = data.pop('method')
         try:
             retval = getattr(self, 'do_{0}'.format(method))(**data)
         except Timeout as retval:
             # TODO: Remove this hack once my urllib3 PR is pushed downstream to
             # requests: https://github.com/shazow/urllib3/issues/174
             retval.message.url = None
-        except Exception as retval:  # pylint: disable-msg=W0703
+        except Exception as retval:  # pylint: disable=W0703
             # All exceptions should be passed to the client
             pass
-        cPickle.dump(retval, self.wfile,  # pylint: disable-msg=E1101
+        cPickle.dump(retval, self.wfile,  # pylint: disable=E1101
                      cPickle.HIGHEST_PROTOCOL)


@@ -100,7 +100,7 @@ def run():
         sys.exit(1)
     print('Listening on {0} port {1}'.format(options.addr, options.port))
     try:
-        server.serve_forever()  # pylint: disable-msg=E1101
+        server.serve_forever()  # pylint: disable=E1101
     except KeyboardInterrupt:
         server.socket.close()
         RequestHandler.http.close()
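
Note: the E1101 (no-member) suppressions in this file cover attributes pylint
cannot see statically: rfile and wfile are attached at request time by the
socketserver base class, and serve_forever on the composed server object is
similarly opaque to pylint of that era. A minimal sketch, assuming Python 3's
socketserver module (PRAW imported it in a six-compatible way):

    import socketserver

    class EchoHandler(socketserver.StreamRequestHandler):
        def handle(self):
            # rfile/wfile exist only at runtime, so older pylint reported
            # E1101 (no-member) on them.
            data = self.rfile.readline()
            self.wfile.write(data)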