Some progress towards making things work on python3 with 2to3 but no
other modifications.

Needs a lot of work (currently breaks things on 2.6 as well), but I
wanted to go ahead and checkpoint the ioloop changes before I forget
what needs to change.
1 parent d47c453 commit 82b770f704e6e83d94044c7d004082355a1d0160 @bdarnell committed Oct 21, 2010
Showing with 47 additions and 29 deletions.
  1. +12 −4 tornado/escape.py
  2. +7 −6 tornado/httpserver.py
  3. +4 −2 tornado/httputil.py
  4. +7 −5 tornado/ioloop.py
  5. +3 −2 tornado/iostream.py
  6. +5 −2 tornado/options.py
  7. +5 −4 tornado/simple_httpclient.py
  8. +4 −4 tornado/web.py
tornado/escape.py
@@ -21,6 +21,14 @@
import xml.sax.saxutils
import urllib
+import sys
+if sys.version_info[0] < 3:
+    bytes_type = str
+    string_type = unicode
+else:
+    bytes_type = bytes
+    string_type = str
+
# json module is in the standard library as of python 2.6; fall back to
# simplejson if present for older versions.
try:
@@ -89,9 +97,9 @@ def url_unescape(value):
def utf8(value):
-    if isinstance(value, unicode):
+    if isinstance(value, string_type):
        return value.encode("utf-8")
-    assert isinstance(value, str)
+    assert isinstance(value, bytes_type)
    return value
@@ -180,9 +188,9 @@ def make_link(m):
def _unicode(value):
-    if isinstance(value, str):
+    if isinstance(value, bytes_type):
        return value.decode("utf-8")
-    assert isinstance(value, unicode)
+    assert isinstance(value, string_type)
    return value
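
With the bytes_type/string_type aliases above, utf8() and _unicode() become the two normalization points for the rest of the codebase. A rough sketch of the intended behavior (illustration only, not part of the commit; assumes an interpreter that accepts both b"" and u"" literals):

from tornado.escape import utf8, _unicode

# utf8() always hands back encoded bytes, _unicode() always hands back text,
# regardless of which type the caller started with.
assert utf8(u"abc") == b"abc"        # text in, UTF-8 bytes out
assert utf8(b"abc") == b"abc"        # bytes pass through unchanged
assert _unicode(b"abc") == u"abc"    # bytes in, text out
assert _unicode(u"abc") == u"abc"    # text passes through unchanged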
tornado/httpserver.py
@@ -24,6 +24,7 @@
import time
import urlparse
+from tornado.escape import utf8, _unicode
from tornado import httputil
from tornado import ioloop
from tornado import iostream
@@ -282,7 +283,7 @@ def __init__(self, stream, address, request_callback, no_keep_alive=False,
        self.xheaders = xheaders
        self._request = None
        self._request_finished = False
-        self.stream.read_until("\r\n\r\n", self._on_headers)
+        self.stream.read_until(utf8("\r\n\r\n"), self._on_headers)

    def write(self, chunk):
        assert self._request, "Request closed"
@@ -316,13 +317,13 @@ def _finish_request(self):
        if disconnect:
            self.stream.close()
            return
-        self.stream.read_until("\r\n\r\n", self._on_headers)
+        self.stream.read_until(utf8("\r\n\r\n"), self._on_headers)

    def _on_headers(self, data):
-        eol = data.find("\r\n")
+        eol = data.find(utf8("\r\n"))
        start_line = data[:eol]
-        method, uri, version = start_line.split(" ")
-        if not version.startswith("HTTP/"):
+        method, uri, version = start_line.split(utf8(" "))
+        if not version.startswith(utf8("HTTP/")):
            raise Exception("Malformed HTTP version in HTTP Request-Line")
        headers = httputil.HTTPHeaders.parse(data[eol:])
        self._request = HTTPRequest(
@@ -443,7 +444,7 @@ def __init__(self, method, uri, version="HTTP/1.0", headers=None,
        self._start_time = time.time()
        self._finish_time = None

-        scheme, netloc, path, query, fragment = urlparse.urlsplit(uri)
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(_unicode(uri))
        self.path = path
        self.query = query
        arguments = cgi.parse_qs(query)
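
The utf8() wrappers around the delimiter and split strings matter because on Python 3 the data handed to _on_headers is bytes, and bytes and str no longer mix. A small illustration (mine, not part of the commit):

data = b"GET /path HTTP/1.1\r\nHost: example.com\r\n\r\n"
eol = data.find(b"\r\n")                       # works: bytes pattern on bytes data
method, uri, version = data[:eol].split(b" ")  # b"GET", b"/path", b"HTTP/1.1"
# data.find("\r\n") would raise TypeError on Python 3:
# a str pattern cannot be searched for in a bytes object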
tornado/httputil.py
@@ -16,6 +16,8 @@
"""HTTP utility code shared by clients and servers."""
+from tornado.escape import utf8
+
class HTTPHeaders(dict):
"""A dictionary that maintains Http-Header-Case for all keys.
@@ -85,7 +87,7 @@ def parse_line(self, line):
        >>> h.get('content-type')
        'text/html'
        """
-        name, value = line.split(":", 1)
+        name, value = line.split(utf8(":"), 1)
        self.add(name, value.strip())

    @classmethod
@@ -132,7 +134,7 @@ def _normalize_name(name):
        >>> HTTPHeaders._normalize_name("coNtent-TYPE")
        'Content-Type'
        """
-        return "-".join([w.capitalize() for w in name.split("-")])
+        return utf8("-").join([w.capitalize() for w in utf8(name).split(utf8("-"))])


def doctests():
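
Same idea for header parsing: once lines come off the socket as bytes, the colon separator has to be bytes as well. A hypothetical usage sketch (not from the commit):

from tornado.escape import utf8

line = b"Content-Type: text/html"
name, value = line.split(utf8(":"), 1)   # split on a byte colon
# name == b"Content-Type", value.strip() == b"text/html"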
tornado/ioloop.py
@@ -25,6 +25,7 @@
import traceback

from tornado import stack_context
+from tornado.escape import utf8

try:
    import signal
@@ -324,7 +325,7 @@ def add_callback(self, callback):
    def _wake(self):
        try:
-            self._waker_writer.write("x")
+            self._waker_writer.write(utf8("x"))
        except IOError:
            pass
@@ -351,7 +352,8 @@ def handle_callback_exception(self, callback):
    def _read_waker(self, fd, events):
        try:
            while True:
-                self._waker_reader.read()
+                result = self._waker_reader.read()
+                if not result: break
        except IOError:
            pass
@@ -374,9 +376,9 @@ def __init__(self, deadline, callback):
        self.deadline = deadline
        self.callback = callback

-    def __cmp__(self, other):
-        return cmp((self.deadline, id(self.callback)),
-                   (other.deadline, id(other.callback)))
+    def __lt__(self, other):
+        return (self.deadline < other.deadline or
+                id(self.callback) < id(other.callback))


class PeriodicCallback(object):
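
Python 3's heapq relies on __lt__ rather than __cmp__, hence the switch above. The old cmp() compared (deadline, id(callback)) tuples lexicographically, so id(callback) was only consulted when two deadlines were equal; a sketch of a __lt__ that preserves exactly that ordering (my own sketch, not the code in this commit):

class _Timeout(object):
    """Sketch only: a timeout ordered the same way the old __cmp__ ordered it."""
    def __init__(self, deadline, callback):
        self.deadline = deadline
        self.callback = callback

    def __lt__(self, other):
        # Tuple comparison reproduces cmp((deadline, id(cb)), ...):
        # id(callback) only breaks ties between equal deadlines.
        return ((self.deadline, id(self.callback)) <
                (other.deadline, id(other.callback)))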
tornado/iostream.py
@@ -20,6 +20,7 @@
import logging
import socket
+from tornado.escape import utf8
from tornado import ioloop
from tornado import stack_context
@@ -79,8 +80,8 @@ def __init__(self, socket, io_loop=None, max_buffer_size=104857600,
        self.io_loop = io_loop or ioloop.IOLoop.instance()
        self.max_buffer_size = max_buffer_size
        self.read_chunk_size = read_chunk_size
-        self._read_buffer = ""
-        self._write_buffer = ""
+        self._read_buffer = utf8("")
+        self._write_buffer = utf8("")
        self._read_delimiter = None
        self._read_bytes = None
        self._read_callback = None
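
Starting the buffers as utf8("") yields b"" on both interpreters, so data read from the socket can be appended without mixing types. Illustration only, not commit code:

from tornado.escape import utf8

buf = utf8("")        # b"" on Python 2 and 3
buf += b"chunk"       # appending socket data stays bytes-on-bytes
# starting with buf = "" would raise TypeError on Python 3 as soon as
# bytes were concatenated onto it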
tornado/options.py
@@ -55,12 +55,15 @@ def connect():
import sys
import time

+from tornado.escape import utf8, _unicode
+
# For pretty log messages, if available
try:
    import curses
except:
    curses = None
-
+if sys.version_info[0] >= 3:
+    curses = None

def define(name, default=None, type=str, help=None, metavar=None,
           multiple=False):
@@ -295,7 +298,7 @@ def _parse_bool(self, value):
        return value.lower() not in ("false", "0", "f")

    def _parse_string(self, value):
-        return value.decode("utf-8")
+        return _unicode(value)


class Error(Exception):
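
Using _unicode() instead of a bare .decode() lets _parse_string accept whatever the interpreter hands it: byte strings from sys.argv on Python 2, already-decoded str on Python 3. A quick sketch of the intended behavior (my assumption, not part of the commit):

from tornado.escape import _unicode

assert _unicode(b"8888") == u"8888"   # Python 2 style argv value (bytes)
assert _unicode(u"8888") == u"8888"   # Python 3 style argv value (text)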
tornado/simple_httpclient.py
@@ -2,6 +2,7 @@
from __future__ import with_statement
from cStringIO import StringIO
+from tornado.escape import utf8, _unicode
from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError
from tornado.httputil import HTTPHeaders
from tornado.ioloop import IOLoop
@@ -67,7 +68,7 @@ def __init__(self, io_loop, request, callback):
        self.headers = None
        self.chunks = None
        with stack_context.StackContext(self.cleanup):
-            parsed = urlparse.urlsplit(self.request.url)
+            parsed = urlparse.urlsplit(_unicode(self.request.url))
            if ":" in parsed.netloc:
                host, _, port = parsed.netloc.partition(":")
                port = int(port)
@@ -107,10 +108,10 @@ def _on_connect(self, parsed):
        if logging.getLogger().isEnabledFor(logging.DEBUG):
            for line in request_lines:
                logging.debug(line)
-        self.stream.write("\r\n".join(request_lines) + "\r\n\r\n")
+        self.stream.write(utf8("\r\n".join(request_lines) + "\r\n\r\n"))
        if has_body:
-            self.stream.write(self.request.body)
-        self.stream.read_until("\r\n\r\n", self._on_headers)
+            self.stream.write(utf8(self.request.body))
+        self.stream.read_until(utf8("\r\n\r\n"), self._on_headers)

    @contextlib.contextmanager
    def cleanup(self):
tornado/web.py
@@ -521,7 +521,7 @@ def flush(self, include_footers=False):
        if self.application._wsgi:
            raise Exception("WSGI applications do not support flush()")

-        chunk = "".join(self._write_buffer)
+        chunk = _utf8("").join(self._write_buffer)
        self._write_buffer = []
        if not self._headers_written:
            self._headers_written = True
@@ -540,7 +540,7 @@ def flush(self, include_footers=False):
            return

        if headers or chunk:
-            self.request.write(headers + chunk)
+            self.request.write(escape.utf8(headers) + escape.utf8(chunk))

    def finish(self, chunk=None):
        """Finishes this response, ending the HTTP request."""
@@ -833,8 +833,8 @@ def _execute(self, transforms, *args, **kwargs):
                    self.finish()

    def _generate_headers(self):
-        lines = [self.request.version + " " + str(self._status_code) + " " +
-                 httplib.responses[self._status_code]]
+        lines = [str(self.request.version) + " " + str(self._status_code) + " " +
+                 str(httplib.responses[self._status_code])]
        lines.extend(["%s: %s" % (n, v) for n, v in self._headers.iteritems()])
        for cookie_dict in getattr(self, "_new_cookies", []):
            for cookie in cookie_dict.values():
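
The web.py changes push the encoding to the boundary: buffered chunks are joined with a byte separator and both headers and body go through utf8() right before request.write(). Roughly (a sketch under my own assumptions, not the commit's code):

write_buffer = [b"<html>", b"hello", b"</html>"]
chunk = b"".join(write_buffer)              # what _utf8("").join(...) produces
headers = "HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n"
payload = headers.encode("utf-8") + chunk   # utf8() applied to each piece
# request.write(payload) then hands pure bytes down to the stream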
