Permalink
Browse files

Environment.lex returns unicode tokens now, even if the input data was a bytestring.

--HG--
branch : trunk
  • Loading branch information...
1 parent 6df604e commit 5c047ea2c3cd5e3a744e2298e032f41929aa2974 @mitsuhiko mitsuhiko committed May 23, 2008
Showing with 10 additions and 5 deletions.
  1. +2 −1 jinja2-debug.py
  2. +7 −2 jinja2/ext.py
  3. +1 −2 jinja2/lexer.py
View
@@ -13,7 +13,8 @@
import jinja2
from werkzeug import script
-env = jinja2.Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do'])
+env = jinja2.Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do',
+ 'jinja2.ext.loopcontrols'])
def shell_init_func():
def _compile(x):
View
@@ -25,7 +25,7 @@
class ExtensionRegistry(type):
- """Gives the extension a unique identifier."""
+ """Gives the extension an unique identifier."""
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
@@ -95,13 +95,18 @@ def call_method(self, name, args=None, kwargs=None, dyn_args=None,
dyn_args, dyn_kwargs, lineno=lineno)
+@contextfunction
+def _gettext_alias(context, string):
+ return context.resolve('gettext')(string)
+
+
class InternationalizationExtension(Extension):
"""This extension adds gettext support to Jinja2."""
tags = set(['trans'])
def __init__(self, environment):
Extension.__init__(self, environment)
- environment.globals['_'] = contextfunction(lambda c, x: c['gettext'](x))
+ environment.globals['_'] = _gettext_alias
environment.extend(
install_gettext_translations=self._install,
install_null_translations=self._install_null,
View
@@ -371,7 +371,6 @@ def tokenize(self, source, name=None, filename=None):
converted into types and postprocessed. For example comments are removed,
integers and floats converted, strings unescaped etc.
"""
- source = unicode(source)
def generate():
for lineno, token, value in self.tokeniter(source, name, filename):
if token in ('comment_begin', 'comment', 'comment_end',
@@ -425,7 +424,7 @@ def tokeniter(self, source, name, filename=None):
wants. The parser uses the `tokenize` function which returns a
`TokenStream` and postprocessed tokens.
"""
- source = '\n'.join(source.splitlines())
+ source = u'\n'.join(unicode(source).splitlines())
pos = 0
lineno = 1
stack = ['root']

0 comments on commit 5c047ea

Please sign in to comment.