Fixed #4164, #4171 -- Reworked some of the template lexer logic to ensure we don't get caught out by a couple of corner cases.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@5104 bcc190cf-cafb-0310-a4f2-bffc1f526a37
commit 54f11ee63c619b9348dbbb3b4493157442ddf6d8 (parent: 6fe5235)
Malcolm Tredinnick authored on April 27, 2007
django/template/__init__.py  (47 changed lines)
@@ -193,18 +193,27 @@ def __init__(self, template_string, origin):
 
     def tokenize(self):
         "Return a list of tokens from a given template_string"
-        # remove all empty strings, because the regex has a tendency to add them
-        bits = filter(None, tag_re.split(self.template_string))
-        return map(self.create_token, bits)
-
-    def create_token(self,token_string):
-        "Convert the given token string into a new Token object and return it"
-        if token_string.startswith(VARIABLE_TAG_START):
-            token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
-        elif token_string.startswith(BLOCK_TAG_START):
-            token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
-        elif token_string.startswith(COMMENT_TAG_START):
-            token = Token(TOKEN_COMMENT, '')
+        in_tag = False
+        result = []
+        for bit in tag_re.split(self.template_string):
+            if bit:
+                result.append(self.create_token(bit, in_tag))
+            in_tag = not in_tag
+        return result
+
+    def create_token(self, token_string, in_tag=False):
+        """
+        Convert the given token string into a new Token object and return it.
+        If in_tag is True, we are processing something that matched a tag,
+        otherwise it should be treated as a literal string.
+        """
+        if in_tag:
+            if token_string.startswith(VARIABLE_TAG_START):
+                token = Token(TOKEN_VAR, token_string[len(VARIABLE_TAG_START):-len(VARIABLE_TAG_END)].strip())
+            elif token_string.startswith(BLOCK_TAG_START):
+                token = Token(TOKEN_BLOCK, token_string[len(BLOCK_TAG_START):-len(BLOCK_TAG_END)].strip())
+            elif token_string.startswith(COMMENT_TAG_START):
+                token = Token(TOKEN_COMMENT, '')
         else:
             token = Token(TOKEN_TEXT, token_string)
         return token
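
The key to the reworked Lexer.tokenize() is that tag_re contains a single capturing group, so re.split() returns the tag matches themselves and the resulting bits strictly alternate literal text / tag / literal text / tag. Below is a minimal, self-contained sketch of that behaviour using a simplified stand-in for tag_re (the pattern here is an assumption; Django builds the real one from the *_TAG_START and *_TAG_END constants). The in_tag flag is toggled even for empty bits, so adjacent tags don't throw the alternation off, and a bit that merely starts with "{{" but never actually matched the regex is now kept as literal text.

import re

# Simplified stand-in for Django's tag_re (assumption; the real pattern is built
# from the *_TAG_START/*_TAG_END constants). The single capturing group is what
# makes re.split() hand back the matched tags as well as the text between them.
tag_re = re.compile(r'({%.*?%}|{{.*?}}|{#.*?#})')

def tokenize_sketch(template_string):
    # Mirrors the new Lexer.tokenize(): bits alternate text/tag/text/tag...
    # Empty bits are skipped, but in_tag is still toggled so the alternation
    # stays in step when two tags are adjacent.
    in_tag = False
    result = []
    for bit in tag_re.split(template_string):
        if bit:
            result.append(('tag' if in_tag else 'text', bit))
        in_tag = not in_tag
    return result

print(tokenize_sketch("x {{ cow }} y"))
# [('text', 'x '), ('tag', '{{ cow }}'), ('text', ' y')]

print(tokenize_sketch("{{ moo #}"))
# [('text', '{{ moo #}')] -- never matched the regex, so it stays plain text;
# the old startswith() check would have turned it into a bogus variable token.
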
@@ -215,21 +224,21 @@ def __init__(self, template_string, origin):
 
     def tokenize(self):
         "Return a list of tokens from a given template_string"
-        token_tups, upto = [], 0
+        result, upto = [], 0
         for match in tag_re.finditer(self.template_string):
             start, end = match.span()
             if start > upto:
-                token_tups.append( (self.template_string[upto:start], (upto, start)) )
+                result.append(self.create_token(self.template_string[upto:start], (upto, start), False))
                 upto = start
-            token_tups.append( (self.template_string[start:end], (start,end)) )
+            result.append(self.create_token(self.template_string[start:end], (start, end), True))
             upto = end
         last_bit = self.template_string[upto:]
         if last_bit:
-            token_tups.append( (last_bit, (upto, upto + len(last_bit))) )
-        return [self.create_token(tok, (self.origin, loc)) for tok, loc in token_tups]
+            result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), False))
+        return result
 
-    def create_token(self, token_string, source):
-        token = super(DebugLexer, self).create_token(token_string)
+    def create_token(self, token_string, source, in_tag):
+        token = super(DebugLexer, self).create_token(token_string, in_tag)
         token.source = source
         return token
 
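
DebugLexer.tokenize() can't use the split trick because it also needs the character offsets of each token, so it walks tag_re.finditer() instead and passes in_tag=True only for the spans that actually matched the regex. A rough sketch of the same logic, again with the simplified tag_re standing in for Django's real pattern:

import re

# Same simplified stand-in for tag_re as above (assumption).
tag_re = re.compile(r'({%.*?%}|{{.*?}}|{#.*?#})')

def debug_tokenize_sketch(template_string):
    # Mirrors the new DebugLexer.tokenize(): every chunk carries its
    # (start, end) offsets, and only chunks produced by an actual regex
    # match are flagged as tags.
    result, upto = [], 0
    for match in tag_re.finditer(template_string):
        start, end = match.span()
        if start > upto:
            # Literal text between the previous match and this one.
            result.append((template_string[upto:start], (upto, start), False))
            upto = start
        # The matched tag itself.
        result.append((template_string[start:end], (start, end), True))
        upto = end
    last_bit = template_string[upto:]
    if last_bit:
        result.append((last_bit, (upto, upto + len(last_bit)), False))
    return result

print(debug_tokenize_sketch("x {{ cow }} y"))
# [('x ', (0, 2), False), ('{{ cow }}', (2, 11), True), (' y', (11, 13), False)]

print(debug_tokenize_sketch("a {{ moo %} b"))
# [('a {{ moo %} b', (0, 13), False)] -- nothing matched, so the whole string
# is a single literal-text chunk, matching what the non-debug Lexer produces.
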
tests/regressiontests/templates/tests.py  (12 changed lines)
@@ -127,6 +127,18 @@ def test_templates(self):
             # Fail silently when accessing a non-simple method
             'basic-syntax20': ("{{ var.method2 }}", {"var": SomeClass()}, ("","INVALID")),
 
+            # Don't get confused when parsing something that is almost, but not
+            # quite, a template tag.
+            'basic-syntax21': ("a {{ moo %} b", {}, "a {{ moo %} b"),
+            'basic-syntax22': ("{{ moo #}", {}, "{{ moo #}"),
+
+            # Will try to treat "moo #} {{ cow" as the variable. Not ideal, but
+            # costly to work around, so this triggers an error.
+            'basic-syntax23': ("{{ moo #} {{ cow }}", {"cow": "cow"}, template.TemplateSyntaxError),
+
+            # Embedded newlines make it not-a-tag.
+            'basic-syntax24': ("{{ moo\n }}", {}, "{{ moo\n }}"),
+
             # List-index syntax allows a template to access a certain item of a subscriptable object.
             'list-index01': ("{{ var.1 }}", {"var": ["first item", "second item"]}, "second item"),
 
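
The new regression tests pin down the behaviour for near-miss tags. With the same simplified tag_re assumption as in the sketches above, a quick check shows why basic-syntax23 is expected to raise TemplateSyntaxError while basic-syntax24 falls through as plain text:

import re

# Simplified stand-in for tag_re (assumption), as in the sketches above.
tag_re = re.compile(r'({%.*?%}|{{.*?}}|{#.*?#})')

# basic-syntax23: the non-greedy match runs from the first "{{" to the first
# "}}", swallowing everything in between.
m = tag_re.search("{{ moo #} {{ cow }}")
print(m.group())                    # {{ moo #} {{ cow }}
print(m.group()[2:-2].strip())      # moo #} {{ cow  -- not a valid variable,
                                    # hence the TemplateSyntaxError in the test.

# basic-syntax24: '.' does not match newlines (no re.DOTALL), so an embedded
# newline means no match at all and the template renders as literal text.
print(tag_re.search("{{ moo\n }}"))  # None
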
