
Remove unicode designators in commented code and remove some commented code.
jaraco committed Jun 9, 2022
1 parent 3e17ced commit 5231042
Showing 1 changed file with 5 additions and 18 deletions.
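
For context on the change below: the u'' designators being removed are redundant on Python 3, where every string literal is already Unicode. A quick check, runnable in any Python 3 interpreter (nothing here is specific to cssutils):

# In Python 3 the u prefix is accepted for backwards compatibility
# but has no effect: both literals are plain str with the same value.
assert u"c\\olor" == "c\\olor"
assert type(u"c\\olor") is str and type("c\\olor") is str
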
23 changes: 5 additions & 18 deletions cssutils/tests/test_tokenize2.py
@@ -74,21 +74,13 @@ class TestTokenizer:
' "" ': [('S', ' ', 1, 1), ('STRING', '""', 1, 2), ('S', ' ', 1, 4)],
' "\'" ': [('S', ' ', 1, 1), ('STRING', '"\'"', 1, 2), ('S', ' ', 1, 5)],
" '' ": [('S', ' ', 1, 1), ('STRING', "''", 1, 2), ('S', ' ', 1, 4)],
# until 0.9.5.x
# u"'\\\n'": [('STRING', u"'\\\n'", 1, 1)],
# u"'\\\n\\\n\\\n'": [('STRING', u"'\\\n\\\n\\\n'", 1, 1)],
# u"'\\\f'": [('STRING', u"'\\\f'", 1, 1)],
# u"'\\\r'": [('STRING', u"'\\\r'", 1, 1)],
# u"'\\\r\n'": [('STRING', u"'\\\r\n'", 1, 1)],
# u"'1\\\n2'": [('STRING', u"'1\\\n2'", 1, 1)],
# from 0.9.6a0 escaped nl is removed from string
"'\\\n'": [('STRING', "''", 1, 1)],
"'\\\n\\\n\\\n'": [('STRING', "''", 1, 1)],
"'\\\f'": [('STRING', "''", 1, 1)],
"'\\\r'": [('STRING', "''", 1, 1)],
"'1\\\n2'": [('STRING', "'12'", 1, 1)],
"'1\\\r\n2'": [('STRING', "'12'", 1, 1)],
# ur'"\0020|\0020"': [('STRING', u'"\\0020|\\0020"', 1, 1)],
# r'"\0020|\0020"': [('STRING', '"\\0020|\\0020"', 1, 1)],
r'"\61|\0061"': [('STRING', '"a|a"', 1, 1)],
# HASH
' #a ': [('S', ' ', 1, 1), ('HASH', '#a', 1, 2), ('S', ' ', 1, 4)],
@@ -102,8 +94,8 @@ class TestTokenizer:
' .0 ': [('S', ' ', 1, 1), ('NUMBER', '.0', 1, 2), ('S', ' ', 1, 4)],
' -0 ': [
('S', ' ', 1, 1),
# ('CHAR', u'-', 1, 2),
# ('NUMBER', u'0', 1, 3),
# ('CHAR', '-', 1, 2),
# ('NUMBER', '0', 1, 3),
('NUMBER', '-0', 1, 2),
('S', ' ', 1, 4),
],
@@ -215,8 +207,8 @@ class TestTokenizer:
],
# specials
'c\\olor': [('IDENT', 'c\\olor', 1, 1)],
# u'-1': [('CHAR', u'-', 1, 1), ('NUMBER', u'1', 1, 2)],
# u'-1px': [('CHAR', u'-', 1, 1), ('DIMENSION', u'1px', 1, 2)],
# '-1': [('CHAR', '-', 1, 1), ('NUMBER', '1', 1, 2)],
# '-1px': [('CHAR', '-', 1, 1), ('DIMENSION', '1px', 1, 2)],
'-1': [('NUMBER', '-1', 1, 1)],
'-1px': [('DIMENSION', '-1px', 1, 1)],
# ATKEYWORD
@@ -256,11 +248,6 @@ class TestTokenizer:
' *= ': [('S', ' ', 1, 1), ('SUBSTRINGMATCH', '*=', 1, 2), ('S', ' ', 1, 4)],
'*==': [('SUBSTRINGMATCH', '*=', 1, 1), ('CHAR', '=', 1, 3)],
# BOM only at start
# u'\xFEFF ': [('BOM', u'\xfeFF', 1, 1),
# ('S', u' ', 1, 1)],
# u' \xFEFF ': [('S', u' ', 1, 1),
# ('IDENT', u'\xfeFF', 1, 2),
# ('S', u' ', 1, 5)],
'\xfe\xff ': [('BOM', '\xfe\xff', 1, 1), ('S', ' ', 1, 1)],
' \xfe\xff ': [('S', ' ', 1, 1), ('IDENT', '\xfe\xff', 1, 2), ('S', ' ', 1, 4)],
'\xef\xbb\xbf ': [('BOM', '\xef\xbb\xbf', 1, 1), ('S', ' ', 1, 1)],
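
Each entry in the test mapping above pairs a CSS source string with the token tuples the tokenizer is expected to produce, in the form (type, value, line, col). As a rough illustration of how one of these cases could be exercised (a minimal sketch that assumes cssutils is installed, that Tokenizer in cssutils.tokenize2 can be constructed with its defaults, and that its tokenize() method yields exactly those tuples):

from cssutils.tokenize2 import Tokenizer

def check(css, expected):
    # Collect all tokens yielded for one CSS source string and compare
    # them with the expected (type, value, line, col) tuples.
    tokens = list(Tokenizer().tokenize(css))
    assert tokens == expected, (tokens, expected)

# From 0.9.6a0 an escaped newline is dropped from a STRING token,
# so a string containing only a backslash-newline tokenizes to ''.
check("'\\\n'", [('STRING', "''", 1, 1)])
# Escaped code points are resolved: \61 and \0061 both decode to 'a'.
check(r'"\61|\0061"', [('STRING', '"a|a"', 1, 1)])
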