Skip to content

Commit

Permalink
migrate to Python 3 compatibility with futurize
Browse files Browse the repository at this point in the history
generated by running:

```
futurize -w -n --no-diffs *.py tests/*.py scripts/*.py
```

...and then cleaning up a few things by hand, mostly removing unnecessary `list(...)` calls.
  • Loading branch information
snarfed committed Sep 25, 2019
1 parent 2a499ae commit cc43943
Show file tree
Hide file tree
Showing 40 changed files with 377 additions and 274 deletions.
19 changes: 11 additions & 8 deletions app.py
Expand Up @@ -2,13 +2,16 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
from builtins import str
import datetime
import itertools
import json
import logging
import string
import urllib
import urlparse
import urllib.request, urllib.parse, urllib.error
import urllib.parse

import appengine_config

Expand Down Expand Up @@ -208,9 +211,9 @@ def get(self, source_short_name, id):
self.source = cls.lookup(id)

if not self.source:
id = urllib.unquote(id).decode('utf-8')
id = urllib.parse.unquote(id).decode('utf-8')
key = cls.query(ndb.OR(*[ndb.GenericProperty(prop) == id for prop in
'domains', 'inferred_username', 'name', 'username'])
('domains', 'inferred_username', 'name', 'username')])
).get(keys_only=True)
if key:
return self.redirect(cls(key=key).bridgy_path(), permanent=True)
Expand Down Expand Up @@ -409,7 +412,7 @@ def get_paging_param(param):
w.pretty_source = util.pretty_link(
w.source_url(), attrs={'class': 'original-post'}, new_tab=True)
try:
target_is_source = (urlparse.urlparse(w.target_url()).netloc in
target_is_source = (urllib.parse.urlparse(w.target_url()).netloc in
self.source.domains)
except BaseException:
target_is_source = False
Expand Down Expand Up @@ -490,7 +493,7 @@ def post(self):
code, body = util.interpret_http_exception(e)
if not code and util.is_connection_failure(e):
code = '-'
body = unicode(e)
body = str(e)
if code:
self.messages.add('%s API error %s: %s' % (source.GR_CLASS.NAME, code, body))
self.redirect(source.bridgy_url(self))
Expand Down Expand Up @@ -623,7 +626,7 @@ def post(self):
# validate URL, find silo post
url = util.get_required_param(self, 'url')
domain = util.domain_from_link(url)
path = urlparse.urlparse(url).path
path = urllib.parse.urlparse(url).path
msg = 'Discovering now. Refresh in a minute to see the results!'

if domain == source.GR_CLASS.DOMAIN:
Expand Down Expand Up @@ -662,7 +665,7 @@ def content_type(self):

def post(self):
source = self.load_source()
redirect_url = '%s?%s' % (self.request.path, urllib.urlencode({
redirect_url = '%s?%s' % (self.request.path, urllib.parse.urlencode({
'source_key': source.key.urlsafe(),
}))

Expand Down
14 changes: 9 additions & 5 deletions blog_webmention.py
Expand Up @@ -2,9 +2,13 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
from builtins import next
from past.builtins import basestring
import logging
import json
import urlparse
import urllib.parse

import appengine_config

Expand All @@ -30,8 +34,8 @@ class BlogWebmentionHandler(webmention.WebmentionHandler):
def post(self, source_short_name):
logging.info('Params: %self', self.request.params.items())
# strip fragments from source and target url
self.source_url = urlparse.urldefrag(util.get_required_param(self, 'source'))[0]
self.target_url = urlparse.urldefrag(util.get_required_param(self, 'target'))[0]
self.source_url = urllib.parse.urldefrag(util.get_required_param(self, 'source'))[0]
self.target_url = urllib.parse.urldefrag(util.get_required_param(self, 'target'))[0]

# follow target url through any redirects, strip utm_* query params
resp = util.follow_redirects(self.target_url)
Expand Down Expand Up @@ -76,7 +80,7 @@ def post(self, source_short_name):
(source_cls.GR_CLASS.NAME, domain))

# check that the target URL path is supported
target_path = urlparse.urlparse(self.target_url).path
target_path = urllib.parse.urlparse(self.target_url).path
if target_path in ('', '/'):
return self.error('Home page webmentions are not currently supported.',
status=202)
Expand Down Expand Up @@ -182,7 +186,7 @@ def find_mention_item(self, items):
text = content.get('html') or content.get('value')

for type in 'in-reply-to', 'like', 'like-of', 'repost', 'repost-of':
urls = [urlparse.urldefrag(u)[0] for u in
urls = [urllib.parse.urldefrag(u)[0] for u in
microformats2.get_string_urls(props.get(type, []))]
if self.any_target_in(urls):
break
Expand Down
12 changes: 8 additions & 4 deletions blogger.py
Expand Up @@ -22,10 +22,14 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import zip
import collections
import logging
import re
import urlparse
import urllib.parse

import appengine_config

Expand Down Expand Up @@ -57,7 +61,7 @@ class Blogger(models.Source):

def feed_url(self):
# https://support.google.com/blogger/answer/97933?hl=en
return urlparse.urljoin(self.url, '/feeds/posts/default') # Atom
return urllib.parse.urljoin(self.url, '/feeds/posts/default') # Atom

def silo_url(self):
return self.url
Expand Down Expand Up @@ -134,7 +138,7 @@ def create_comment(self, post_url, author_name, author_url, content, client=None
client = self.auth_entity.get().api()

# extract the post's path and look up its post id
path = urlparse.urlparse(post_url).path
path = urllib.parse.urlparse(post_url).path
logging.info('Looking up post id for %s', path)
feed = client.get_posts(self.key.id(), query=Query(path=path))

Expand All @@ -153,7 +157,7 @@ def create_comment(self, post_url, author_name, author_url, content, client=None
post_id, content.encode('utf-8'))
try:
comment = client.add_comment(self.key.id(), post_id, content)
except Error, e:
except Error as e:
msg = str(e)
if ('Internal error:' in msg):
# known errors. e.g. https://github.com/snarfed/bridgy/issues/175
Expand Down
18 changes: 11 additions & 7 deletions cron.py
@@ -1,7 +1,11 @@
"""Cron jobs. Currently just minor cleanup tasks.
"""
from __future__ import unicode_literals
from __future__ import division

from future import standard_library
standard_library.install_aliases()
from builtins import range
import datetime
import itertools
import json
Expand All @@ -13,7 +17,7 @@
from google.appengine.api import urlfetch
from google.appengine.ext import ndb
from google.appengine.ext.ndb import metadata
import httplib
import http.client

import models
from models import Source
Expand Down Expand Up @@ -55,15 +59,15 @@ def get(self):
return

# just auth as me or the first user. TODO: use app-only auth instead.
auther = sources.get('schnarfed') or sources.values()[0]
usernames = sources.keys()
auther = sources.get('schnarfed') or list(sources.values())[0]
usernames = list(sources.keys())
users = []
for i in range(0, len(usernames), TWITTER_USERS_PER_LOOKUP):
username_batch = usernames[i:i + TWITTER_USERS_PER_LOOKUP]
url = TWITTER_API_USER_LOOKUP % ','.join(username_batch)
try:
users += auther.gr_source.urlopen(url)
except Exception, e:
except Exception as e:
code, body = util.interpret_http_exception(e)
if not (code == '404' and len(username_batch) == 1):
# 404 for a single user means they deleted their account. otherwise...
Expand Down Expand Up @@ -112,13 +116,13 @@ class UpdateInstagramPictures(UpdatePictures):
SOURCE_CLS = Instagram
FREQUENCY = datetime.timedelta(hours=1)
WEEK = datetime.timedelta(days=7)
BATCH = float(WEEK.total_seconds()) / FREQUENCY.total_seconds()
BATCH = float(WEEK.total_seconds()) / FREQUENCY.total_seconds()

def source_query(self):
now = util.now_fn()
since_sun = (now.weekday() * datetime.timedelta(days=1) +
(now - now.replace(hour=0, minute=0, second=0)))
batch = float(Instagram.query().count()) / self.BATCH
batch = float(Instagram.query().count()) / self.BATCH
offset = batch * float(since_sun.total_seconds()) / self.FREQUENCY.total_seconds()
return Instagram.query().fetch(offset=int(math.floor(offset)),
limit=int(math.ceil(batch)))
Expand Down Expand Up @@ -188,7 +192,7 @@ def get(self):
payload=json.dumps(request),
method=urlfetch.POST,
headers=headers)
if result.status_code == httplib.OK:
if result.status_code == http.client.OK:
logging.info(result.content)
else:
logging.error(result.content)
Expand Down
19 changes: 11 additions & 8 deletions facebook.py
Expand Up @@ -21,14 +21,17 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
from builtins import str
import datetime
import heapq
import itertools
import json
import logging
import urllib
import urllib2
import urlparse
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import urllib.parse

import appengine_config
from google.appengine.ext import ndb
Expand Down Expand Up @@ -156,7 +159,7 @@ def get_activities_response(self, **kwargs):

try:
activities = super(FacebookPage, self).get_activities_response(**kwargs)
except urllib2.HTTPError as e:
except urllib.error.HTTPError as e:
code, body = util.interpret_http_exception(e)
# use a function so any new exceptions (JSON decoding, missing keys) don't
# clobber the original exception so we can re-raise it below.
Expand Down Expand Up @@ -225,8 +228,8 @@ def canonicalize_url(self, url, activity=None, **kwargs):
def post_url(id):
return 'https://www.facebook.com/%s/posts/%s' % (self.key.id(), id)

parsed = urlparse.urlparse(url)
params = urlparse.parse_qs(parsed.query)
parsed = urllib.parse.urlparse(url)
params = urllib.parse.parse_qs(parsed.query)
path = parsed.path.strip('/').split('/')
url_id = self.gr_source.post_id(url)

Expand Down Expand Up @@ -345,7 +348,7 @@ def infer_profile_url(self, url):
"""
domain = util.domain_from_link(url)
if domain == self.gr_source.DOMAIN:
username = urlparse.urlparse(url).path.strip('/')
username = urllib.parse.urlparse(url).path.strip('/')
if '/' not in username:
user = FacebookPage.query(ndb.OR(
FacebookPage.username == username,
Expand Down Expand Up @@ -426,7 +429,7 @@ def finish_oauth_flow(self, auth_entity, state):

# ask the user for their web site if we don't already have one.
if source and not source.domains:
self.redirect('/edit-websites?' + urllib.urlencode({
self.redirect('/edit-websites?' + urllib.parse.urlencode({
'source_key': source.key.urlsafe(),
}))

Expand Down
16 changes: 9 additions & 7 deletions facebook_test_live.py
Expand Up @@ -26,11 +26,13 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
import logging
import sys
import unittest
import urllib
import urlparse
import urllib.request, urllib.parse, urllib.error
import urllib.parse

from requests import adapters, sessions
orig_HTTPAdapter = adapters.HTTPAdapter
Expand Down Expand Up @@ -65,7 +67,7 @@ def test_live(self):
self.assertEqual(302, resp.status_int)
to = resp.headers['Location']
self.assertTrue(to.startswith('https://www.facebook.com/v2.10/dialog/oauth?'), to)
params = urlparse.parse_qs(urlparse.urlparse(to).query)
params = urllib.parse.parse_qs(urllib.parse.urlparse(to).query)
redirect = params['redirect_uri'][0]
state = params['state'][0]
self.dot()
Expand All @@ -75,7 +77,7 @@ def test_live(self):
self.expect_urlopen(oauth_facebook.GET_ACCESS_TOKEN_URL % {
'client_id': appengine_config.FACEBOOK_APP_ID,
'client_secret': appengine_config.FACEBOOK_APP_SECRET,
'redirect_uri': urllib.quote_plus(redirect),
'redirect_uri': urllib.parse.quote_plus(redirect),
'auth_code': 'fake_code',
},
'{"access_token": "%s"}' % appengine_config.FACEBOOK_TEST_USER_TOKEN,
Expand All @@ -85,7 +87,7 @@ def test_live(self):
resp = facebook.application.get_response(
util.add_query_params(redirect, {
'code': 'fake_code',
'state': urllib.unquote(state),
'state': urllib.parse.unquote(state),
}))
self.assertEqual(302, resp.status_int)
source = facebook.FacebookPage.get_by_id(TEST_USER_ID)
Expand Down Expand Up @@ -143,13 +145,13 @@ def submit_form(html):
data = {input['name']: input['value'] for input in form.find_all('input')
if input.get('name') and input.get('value')}
return facebook.application.get_response(
form['action'], method=form['method'].upper(), body=urllib.urlencode(data))
form['action'], method=form['method'].upper(), body=urllib.parse.urlencode(data))

@staticmethod
def run_task(task):
"""Runs a task queue task."""
return tasks.application.get_response(
task['url'], method='POST', body=urllib.urlencode(testutil.get_task_params(task)))
task['url'], method='POST', body=urllib.parse.urlencode(testutil.get_task_params(task)))


if __name__ == '__main__':
Expand Down
6 changes: 4 additions & 2 deletions instagram.py
Expand Up @@ -18,10 +18,12 @@
"""
from __future__ import unicode_literals

from future import standard_library
standard_library.install_aliases()
import datetime
import json
import logging
import urlparse
import urllib.parse

import appengine_config
from granary import instagram as gr_instagram
Expand Down Expand Up @@ -136,7 +138,7 @@ def finish(self, auth_entity, state=None):
logging.info('rel-mes: %s', urls)
for url in util.trim_nulls(urls):
if util.domain_from_link(url) == gr_instagram.Instagram.DOMAIN:
username = urlparse.urlparse(url).path.strip('/')
username = urllib.parse.urlparse(url).path.strip('/')
break
else:
self.messages.add(
Expand Down
4 changes: 2 additions & 2 deletions logs.py
Expand Up @@ -9,8 +9,8 @@

class LogHandler(logs.LogHandler):
MODULE_VERSIONS = \
[('default', ver) for ver in '2', '3', '4', '5', '6', '7', '8'] + \
[('background', ver) for ver in '7', '8']
[('default', ver) for ver in ('2', '3', '4', '5', '6', '7', '8')] + \
[('background', ver) for ver in ('7', '8')]


application = webapp2.WSGIApplication([
Expand Down
1 change: 1 addition & 0 deletions medium.py
Expand Up @@ -11,6 +11,7 @@
"""
from __future__ import unicode_literals

from builtins import str
import collections
import json
import logging
Expand Down

0 comments on commit cc43943

Please sign in to comment.