Skip to content

Commit

Permalink
Merge 32081bd into eb146ad
Browse files Browse the repository at this point in the history
  • Loading branch information
hatamov committed Mar 6, 2015
2 parents eb146ad + 32081bd commit db3fdfa
Show file tree
Hide file tree
Showing 3 changed files with 22 additions and 2 deletions.
1 change: 1 addition & 0 deletions AUTHORS.txt
Expand Up @@ -32,5 +32,6 @@ Albertas Agejevas (@alga)
Savor d'Isavano (@KenetJervet) <newelevenken@163.com>
Phillip Berndt (@phillipberndt) <phillip.berndt@gmail.com>
Ian Lee (@IanLee1521) <IanLee1521@gmail.com>
Farkhad Khatamov (@hatamov) <comsgn@gmail.com>

Note: (@user) means a github user name.
4 changes: 2 additions & 2 deletions jedi/parser/tokenize.py
Expand Up @@ -68,7 +68,7 @@ def maybe(*choices):
single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
triple = group("[bB]?[rR]?'''", '[bB]?[rR]?"""')
triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
# Single-line ' or " string.

# Because of leftmost-then-longest match semantics, be sure to put the
Expand Down Expand Up @@ -126,7 +126,7 @@ def _compile(expr):
for t in ("'", '"',
"r'", 'r"', "R'", 'R"',
"b'", 'b"', "B'", 'B"',
"u'", 'u""', "U'", 'U"',
"u'", 'u"', "U'", 'U"',
"br'", 'br"', "Br'", 'Br"',
"bR'", 'bR"', "BR'", 'BR"'):
single_quoted[t] = t
Expand Down
19 changes: 19 additions & 0 deletions test/test_parser/test_tokenize.py
Expand Up @@ -6,6 +6,7 @@
from jedi._compatibility import u, is_py3
from jedi.parser.token import NAME, OP, NEWLINE, STRING, INDENT
from jedi import parser
from token import STRING


from ..helpers import unittest
Expand Down Expand Up @@ -95,6 +96,24 @@ def 我あφ():
# They will be ignored in the parser, that's ok.
assert unicode_token[0] == OP

def test_quoted_strings(self):
    """u/U-prefixed literals (single- and triple-quoted) must tokenize as STRING."""
    samples = (
        'u"test"',
        'u"""test"""',
        'U"""test"""',
        "u'''test'''",
        "U'''test'''",
    )
    for literal in samples:
        # Parse a minimal assignment and inspect the tokens of its statement.
        parsed = parser.Parser(u('a = %s\n' % literal))
        tokens = parsed.module.statements[0]._token_list
        self.assertEqual(len(tokens), 3)
        last = tokens[2]
        self.assertIsInstance(last, parser.tokenize.Token)
        self.assertEqual(last.type, STRING)


def test_tokenizer_with_string_literal_backslash():
import jedi
Expand Down

0 comments on commit db3fdfa

Please sign in to comment.