Commit 5c047ea
Environment.lex returns unicode tokens now, even if the input data was a bytestring.

--HG--
branch : trunk
mitsuhiko committed May 23, 2008
1 parent 6df604e commit 5c047ea
Showing 3 changed files with 10 additions and 5 deletions.
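
A minimal sketch of the behavior this commit guarantees, assuming the Jinja2 2.x
API on Python 2 (Environment.lex yields (lineno, token_type, value) tuples):

# Sketch (assumed Jinja2 2.x on Python 2): after this commit the token
# values yielded by Environment.lex are unicode even when the template
# source is a bytestring.
from jinja2 import Environment

env = Environment()
for lineno, token, value in env.lex('Hello {{ name }}!'):
    assert isinstance(value, unicode)   # bytestring in, unicode tokens out
    print lineno, token, repr(value)
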
jinja2-debug.py (3 changes: 2 additions & 1 deletion)

@@ -13,7 +13,8 @@
 import jinja2
 from werkzeug import script

-env = jinja2.Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do'])
+env = jinja2.Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do',
+                                     'jinja2.ext.loopcontrols'])

 def shell_init_func():
     def _compile(x):
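
For context, jinja2.ext.loopcontrols (newly registered above) adds {% break %}
and {% continue %} tags inside for loops; a small usage sketch, assuming the
Jinja2 2.x API on Python 2:

# Sketch (assumed Jinja2 2.x on Python 2): loopcontrols enables
# {% break %} and {% continue %} inside {% for %} blocks.
from jinja2 import Environment

env = Environment(extensions=['jinja2.ext.loopcontrols'])
tmpl = env.from_string('{% for i in range(5) %}'
                       '{% if i == 3 %}{% break %}{% endif %}'
                       '{{ i }}{% endfor %}')
print tmpl.render()   # -> 012
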
jinja2/ext.py (9 changes: 7 additions & 2 deletions)

@@ -25,7 +25,7 @@


 class ExtensionRegistry(type):
-    """Gives the extension a unique identifier."""
+    """Gives the extension an unique identifier."""

     def __new__(cls, name, bases, d):
         rv = type.__new__(cls, name, bases, d)

@@ -95,13 +95,18 @@ def call_method(self, name, args=None, kwargs=None, dyn_args=None,
                          dyn_args, dyn_kwargs, lineno=lineno)


+@contextfunction
+def _gettext_alias(context, string):
+    return context.resolve('gettext')(string)
+
+
 class InternationalizationExtension(Extension):
     """This extension adds gettext support to Jinja2."""
     tags = set(['trans'])

     def __init__(self, environment):
         Extension.__init__(self, environment)
-        environment.globals['_'] = contextfunction(lambda c, x: c['gettext'](x))
+        environment.globals['_'] = _gettext_alias
         environment.extend(
             install_gettext_translations=self._install,
             install_null_translations=self._install_null,
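
A hedged reading of the ext.py change: the module-level @contextfunction
replaces a per-environment lambda, and context.resolve looks 'gettext' up in
the active render context with a fallback to the environment globals, so _()
picks up whatever translator is installed at render time. A small sketch of
that behavior (the stand-in translator is illustrative only):

# Sketch (assumed Jinja2 2.x on Python 2): the alias resolves 'gettext'
# from the active context each time _() is called.
from jinja2 import Environment, contextfunction

@contextfunction
def gettext_alias(context, string):
    # mirrors the _gettext_alias added in this commit
    return context.resolve('gettext')(string)

env = Environment()
env.globals['gettext'] = lambda s: s.upper()   # stand-in translator
env.globals['_'] = gettext_alias
print env.from_string("{{ _('hello') }}").render()   # -> HELLO
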
jinja2/lexer.py (3 changes: 1 addition & 2 deletions)

@@ -371,7 +371,6 @@ def tokenize(self, source, name=None, filename=None):
         converted into types and postprocessed. For example comments are removed,
         integers and floats converted, strings unescaped etc.
         """
-        source = unicode(source)
         def generate():
             for lineno, token, value in self.tokeniter(source, name, filename):
                 if token in ('comment_begin', 'comment', 'comment_end',

@@ -425,7 +424,7 @@ def tokeniter(self, source, name, filename=None):
         wants. The parser uses the `tokenize` function with returns a
         `TokenStream` and postprocessed tokens.
         """
-        source = '\n'.join(source.splitlines())
+        source = u'\n'.join(unicode(source).splitlines())
         pos = 0
         lineno = 1
         stack = ['root']
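
The moved line both coerces the source to unicode and normalizes line endings,
presumably so that callers reaching tokeniter without going through tokenize
(such as Environment.lex) also get unicode tokens. A tiny sketch of the
Python 2 semantics:

# Sketch (Python 2 semantics): coerce to unicode first, then normalize
# '\r\n' and '\r' line endings to '\n'.
source = 'foo\r\nbar\rbaz'            # a bytestring with mixed newlines
normalized = u'\n'.join(unicode(source).splitlines())
print repr(normalized)                # -> u'foo\nbar\nbaz'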
