Merge pull request #6 from josephw/syntax-changes-for-python-3-without-print

Apply some syntax changes accepted by Python 2 as well as Python 3
josephw committed Sep 18, 2016
2 parents 2704fd7 + d87eda0 commit 849deba
Showing 48 changed files with 411 additions and 412 deletions.
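
All of the rewrites in this commit use spellings that Python 2.6+ and Python 3 both accept, so the code keeps running on the old interpreter while becoming parseable by the new one. The most frequent change is the exception-handling syntax; a minimal sketch of the pattern (validate_feed is a hypothetical helper for illustration; ValidationFailure is the codebase's own exception):

    # Old spelling, Python 2 only -- a SyntaxError on Python 3:
    #     except ValidationFailure, vfv:
    # New spelling, accepted by Python 2.6+ and Python 3:
    try:
        events = validate_feed(url)       # hypothetical helper, illustration only
    except ValidationFailure as vfv:      # 'as' binds the caught exception
        events = [vfv.event]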
4 changes: 2 additions & 2 deletions check.cgi
@@ -230,7 +230,7 @@ def checker_app(environ, start_response):
       events = params['loggedEvents']
       feedType = params['feedType']
       goon = 1
-    except ValidationFailure, vfv:
+    except ValidationFailure as vfv:
       yield applyTemplate('header.tmpl', {'title':'Feed Validator Results: %s' % escapeURL(url)})
       yield applyTemplate('manual.tmpl', {'rawdata':escapeURL(url)})
       output = Formatter([vfv.event], None)
@@ -249,7 +249,7 @@ def checker_app(environ, start_response):
       rawdata = params['rawdata']
       feedType = params['feedType']
       goon = 1
-    except ValidationFailure, vfv:
+    except ValidationFailure as vfv:
       yield applyTemplate('header.tmpl', {'title':'Feed Validator Results: %s' % escapeURL(url)})
       yield applyTemplate('index.tmpl', {'value':escapeURL(url)})
       output = Formatter([vfv.event], None)
22 changes: 11 additions & 11 deletions fcgi.py
@@ -470,7 +470,7 @@ def _recvall(sock, length):
     while length:
         try:
             data = sock.recv(length)
-        except socket.error, e:
+        except socket.error as e:
             if e[0] == errno.EAGAIN:
                 select.select([sock], [], [])
                 continue
@@ -527,7 +527,7 @@ def _sendall(sock, data):
     while length:
         try:
             sent = sock.send(data)
-        except socket.error, e:
+        except socket.error as e:
             if e[0] == errno.EPIPE:
                 return # Don't bother raising an exception. Just ignore.
             elif e[0] == errno.EAGAIN:
@@ -666,7 +666,7 @@ def run(self):
                 self.process_input()
             except EOFError:
                 break
-            except (select.error, socket.error), e:
+            except (select.error, socket.error) as e:
                 if e[0] == errno.EBADF: # Socket was closed by Request.
                     break
                 raise
@@ -991,7 +991,7 @@ def _setupSocket(self):
                                  socket.SOCK_STREAM)
             try:
                 sock.getpeername()
-            except socket.error, e:
+            except socket.error as e:
                 if e[0] == errno.ENOTSOCK:
                     # Not a socket, assume CGI context.
                     isFCGI = False
@@ -1072,15 +1072,15 @@ def run(self, timeout=1.0):
         while self._keepGoing:
             try:
                 r, w, e = select.select([sock], [], [], timeout)
-            except select.error, e:
+            except select.error as e:
                 if e[0] == errno.EINTR:
                     continue
                 raise
 
             if r:
                 try:
                     clientSock, addr = sock.accept()
-                except socket.error, e:
+                except socket.error as e:
                     if e[0] in (errno.EINTR, errno.EAGAIN):
                         continue
                     raise
@@ -1150,7 +1150,7 @@ def __init__(self, application, environ=None, multithreaded=True, **kw):
         Set multithreaded to False if your application is not MT-safe.
         """
-        if kw.has_key('handler'):
+        if 'handler' in kw:
             del kw['handler'] # Doesn't make sense to let this through
         super(WSGIServer, self).__init__(**kw)
 
@@ -1274,9 +1274,9 @@ def start_response(status, response_headers, exc_info=None):
 
     def _sanitizeEnv(self, environ):
         """Ensure certain values are present, if required by WSGI."""
-        if not environ.has_key('SCRIPT_NAME'):
+        if 'SCRIPT_NAME' not in environ:
             environ['SCRIPT_NAME'] = ''
-        if not environ.has_key('PATH_INFO'):
+        if 'PATH_INFO' not in environ:
             environ['PATH_INFO'] = ''
 
         # If any of these are missing, it probably signifies a broken
@@ -1285,7 +1285,7 @@ def _sanitizeEnv(self, environ):
                               ('SERVER_NAME', 'localhost'),
                               ('SERVER_PORT', '80'),
                               ('SERVER_PROTOCOL', 'HTTP/1.0')]:
-            if not environ.has_key(name):
+            if name not in environ:
                 environ['wsgi.errors'].write('%s: missing FastCGI param %s '
                                              'required by WSGI!\n' %
                                              (self.__class__.__name__, name))
@@ -1304,7 +1304,7 @@ def test_app(environ, start_response):
     names.sort()
     for name in names:
         yield '<tr><td>%s</td><td>%s</td></tr>\n' % (
-            name, cgi.escape(`environ[name]`))
+            name, cgi.escape(repr(environ[name])))
 
     form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ,
                             keep_blank_values=1)
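
Beyond the except rewrites, fcgi.py drops two more Python-2-only spellings: dict.has_key(), which Python 3 removed in favour of the in operator, and the backtick repr shorthand, which Python 3 removed outright. A minimal sketch of both replacements, using an illustrative dictionary:

    kw = {'handler': None, 'multiplexed': False}

    # was: if kw.has_key('handler'):   -- has_key() no longer exists on Python 3
    if 'handler' in kw:
        del kw['handler']

    # was: `kw`   -- backticks are a SyntaxError on Python 3
    label = repr(kw)

One thing the commit leaves untouched: the e[0] indexing of socket.error seen above is itself Python 2 only, since Python 3 exceptions are not subscriptable; the portable form is e.errno.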
12 changes: 6 additions & 6 deletions feedfinder.py
@@ -111,7 +111,7 @@ def __init__(self):
 
     def _getrp(self, url):
         protocol, domain = urlparse.urlparse(url)[:2]
-        if self.rpcache.has_key(domain):
+        if domain in self.rpcache:
             return self.rpcache[domain]
         baseurl = '%s://%s' % (protocol, domain)
         robotsurl = urlparse.urljoin(baseurl, 'robots.txt')
@@ -158,7 +158,7 @@ def cleanattr(v):
 
     def do_base(self, attrs):
         attrsD = dict(self.normalize_attrs(attrs))
-        if not attrsD.has_key('href'): return
+        if 'href' not in attrsD: return
         self.baseuri = attrsD['href']
 
     def error(self, *a, **kw): pass # we're not picky
@@ -171,17 +171,17 @@ class LinkParser(BaseParser):
                   'application/x-atom+xml')
     def do_link(self, attrs):
         attrsD = dict(self.normalize_attrs(attrs))
-        if not attrsD.has_key('rel'): return
+        if 'rel' not in attrsD: return
         rels = attrsD['rel'].split()
         if 'alternate' not in rels: return
         if attrsD.get('type') not in self.FEED_TYPES: return
-        if not attrsD.has_key('href'): return
+        if 'href' not in attrsD: return
         self.links.append(urlparse.urljoin(self.baseuri, attrsD['href']))
 
 class ALinkParser(BaseParser):
     def start_a(self, attrs):
         attrsD = dict(self.normalize_attrs(attrs))
-        if not attrsD.has_key('href'): return
+        if 'href' not in attrsD: return
         self.links.append(urlparse.urljoin(self.baseuri, attrsD['href']))
 
 def makeFullURI(uri):
@@ -298,7 +298,7 @@ def feeds(uri, all=False, querySyndic8=False):
         # still no luck, search Syndic8 for feeds (requires xmlrpclib)
         _debuglog('still no luck, searching Syndic8')
         feeds.extend(getFeedsFromSyndic8(uri))
-    if hasattr(__builtins__, 'set') or __builtins__.has_key('set'):
+    if hasattr(__builtins__, 'set') or 'set' in __builtins__:
         feeds = list(set(feeds))
     return feeds
 
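
The last feedfinder.py hunk rewrites an unusual guard. In CPython, __builtins__ is a module in the main script but a plain dict inside imported modules, and the set builtin only appeared in Python 2.4, hence the two-pronged test. Short-circuit evaluation keeps the rewritten form safe either way:

    # Module case: hasattr() finds 'set', the right-hand side never runs.
    # Dict case: hasattr() is False, so 'set' in __builtins__ runs as a
    # plain dict membership test (was: __builtins__.has_key('set')).
    if hasattr(__builtins__, 'set') or 'set' in __builtins__:
        feeds = list(set(feeds))  # de-duplicate the collected feed URLs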
2 changes: 1 addition & 1 deletion src/demo.py
@@ -29,7 +29,7 @@
       events = feedvalidator.validateStream(urllib.urlopen(link), firstOccurrenceOnly=1,base=link.replace(basedir,"http://www.feedvalidator.org/"))['loggedEvents']
     else:
       events = feedvalidator.validateURL(link, firstOccurrenceOnly=1)['loggedEvents']
-  except feedvalidator.logging.ValidationFailure, vf:
+  except feedvalidator.logging.ValidationFailure as vf:
     events = [vf.event]
 
   # (optional) arg 2 is compatibility level
28 changes: 14 additions & 14 deletions src/feedvalidator/__init__.py
@@ -7,18 +7,18 @@
   socket.setdefaulttimeout(10)
   Timeout = socket.timeout
 else:
-  import timeoutsocket
+  from . import timeoutsocket
   timeoutsocket.setDefaultSocketTimeout(10)
   Timeout = timeoutsocket.Timeout
 
 import urllib2
-import logging
-from logging import *
+from . import logging
+from .logging import *
 from xml.sax import SAXException
 from xml.sax.xmlreader import InputSource
 import re
-import xmlEncoding
-import mediaTypes
+from . import xmlEncoding
+from . import mediaTypes
 from httplib import BadStatusLine
 
 MAXDATALENGTH = 2000000
@@ -40,7 +40,7 @@ def sniffPossibleFeed(rawdata):
 def _validate(aString, firstOccurrenceOnly, loggedEvents, base, encoding, selfURIs=None, mediaType=None):
   """validate RSS from string, returns validator object"""
   from xml.sax import make_parser, handler
-  from base import SAXDispatcher
+  from .base import SAXDispatcher
   from exceptions import UnicodeError
   from cStringIO import StringIO
 
@@ -69,7 +69,7 @@ def _validate(aString, firstOccurrenceOnly, loggedEvents, base, encoding, selfURIs=None, mediaType=None):
   validator.rssCharData = [s.find('&#x')>=0 for s in aString.split('\n')]
 
   xmlver = re.match("^<\?\s*xml\s+version\s*=\s*['\"]([-a-zA-Z0-9_.:]*)['\"]",aString)
-  if xmlver and xmlver.group(1)<>'1.0':
+  if xmlver and xmlver.group(1) != '1.0':
     validator.log(logging.BadXmlVersion({"version":xmlver.group(1)}))
 
   try:
@@ -194,7 +194,7 @@ def validateURL(url, firstOccurrenceOnly=1, wantRawData=0):
     raise ValidationFailure(logging.ValidatorLimit({'limit': 'feed length > ' + str(MAXDATALENGTH) + ' bytes'}))
 
   # check for temporary redirects
-  if usock.geturl()<>request.get_full_url():
+  if usock.geturl() != request.get_full_url():
     from urlparse import urlsplit
     (scheme, netloc, path, query, fragment) = urlsplit(url)
     if scheme == 'http':
@@ -204,13 +204,13 @@ def validateURL(url, firstOccurrenceOnly=1, wantRawData=0):
         conn=HTTPConnection(netloc)
         conn.request("GET", requestUri)
         resp=conn.getresponse()
-        if resp.status<>301:
+        if resp.status != 301:
           loggedEvents.append(TempRedirect({}))
 
-      except BadStatusLine, status:
+      except BadStatusLine as status:
         raise ValidationFailure(logging.HttpError({'status': status.__class__}))
 
-  except urllib2.HTTPError, status:
+  except urllib2.HTTPError as status:
     rawdata = status.read()
     if len(rawdata) < 512 or 'content-encoding' in status.headers:
       loggedEvents.append(logging.HttpError({'status': status}))
@@ -224,11 +224,11 @@ def validateURL(url, firstOccurrenceOnly=1, wantRawData=0):
       usock = status
     else:
       raise ValidationFailure(logging.HttpError({'status': status}))
-  except urllib2.URLError, x:
+  except urllib2.URLError as x:
     raise ValidationFailure(logging.HttpError({'status': x.reason}))
-  except Timeout, x:
+  except Timeout as x:
     raise ValidationFailure(logging.IOError({"message": 'Server timed out', "exception":x}))
-  except Exception, x:
+  except Exception as x:
     raise ValidationFailure(logging.IOError({"message": x.__class__.__name__,
                                              "exception":x}))
 
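
Two further patterns appear in this file. A bare import xmlEncoding is an implicit relative import, which Python 3 reinterprets as absolute and would fail to find the sibling module; the explicit from . import xmlEncoding is understood by Python 2.5+ and Python 3 alike. The <> comparison operator is likewise Python 2 only, while != means the same thing on both. A sketch, assuming it sits in a module inside the feedvalidator package:

    # was: import xmlEncoding         -- implicit relative import, Python 2 only
    from . import xmlEncoding         # explicit relative import, Python 2.5+ and 3

    xmlver = '1.1'
    # was: if xmlver <> '1.0':        -- '<>' is a SyntaxError on Python 3
    if xmlver != '1.0':
        version_ok = False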
6 changes: 3 additions & 3 deletions src/feedvalidator/author.py
@@ -2,8 +2,8 @@
 __version__ = "$Revision$"
 __copyright__ = "Copyright (c) 2002 Sam Ruby and Mark Pilgrim"
 
-from base import validatorBase
-from validators import *
+from .base import validatorBase
+from .validators import *
 
 #
 # author element.
@@ -41,7 +41,7 @@ def do_foaf_firstName(self):
     return text()
 
   def do_xhtml_div(self):
-    from content import diveater
+    from .content import diveater
     return diveater()
 
 # RSS/Atom support
