Skip to content

Commit

Permalink
Enhance verbose output with -vvv and -vvvv. Remove wrong comment about identifiers being ASCII in Python 3.
Browse files Browse the repository at this point in the history
  • Loading branch information
florentx committed Sep 8, 2010
1 parent 5674c1f commit dd9cd8f
Show file tree
Hide file tree
Showing 7 changed files with 36 additions and 6 deletions.
19 changes: 13 additions & 6 deletions pep8.py
Expand Up @@ -120,7 +120,7 @@ def blank_lines(logical_line, blank_lines, indent_level, line_number)
WHITESPACE_AROUND_OPERATOR_REGEX = re.compile(' \W+|\W+ |\t\W+|\W+\t')
EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]')
WHITESPACE_AROUND_NAMED_PARAMETER_REGEX = \
re.compile(r'[()]|\s=[^=]|[^=!<>]=\s')
re.compile(r'[()]|\s=[^=]|[^=!<>]=\s')


WHITESPACE = ' \t'
Expand Down Expand Up @@ -709,8 +709,9 @@ def python_3000_backticks(logical_line):
def readlines(filename):
    """Read and return all lines of *filename* (Python 2 branch).

    Uses the platform default text mode; decoding is left to the caller.
    """
    # Use a context manager so the file handle is closed deterministically
    # instead of leaking until garbage collection.
    with open(filename) as f:
        return f.readlines()
else:
# Python 3: decode to latin-1 so every possible byte sequence is readable.
# This function is lazy: it does not read the encoding declaration.
# XXX: use tokenize.detect_encoding() to honour the coding cookie instead.
def readlines(filename):
    """Read and return all lines of *filename*, decoded as latin-1."""
    # Close the file deterministically (avoids ResourceWarning on Python 3)
    # rather than relying on garbage collection.
    with open(filename, encoding='latin-1') as f:
        return f.readlines()

Expand Down Expand Up @@ -902,8 +903,8 @@ def check_logical(self):
if options.verbose >= 2:
print(self.logical_line[:80].rstrip())
for name, check, argument_names in options.logical_checks:
if options.verbose >= 3:
print(' ', name)
if options.verbose >= 4:
print(' ' + name)
result = self.run_check(check, argument_names)
if result is not None:
offset, text = result
Expand Down Expand Up @@ -933,7 +934,13 @@ def check_all(self):
self.tokens = []
parens = 0
for token in tokenize.generate_tokens(self.readline_check_physical):
# print(tokenize.tok_name[token[0]], repr(token))
if options.verbose >= 3:
if token[2][0] == token[3][0]:
pos = '[%s:%s]' % (token[2][1] or '', token[3][1])
else:
pos = 'l.%s' % token[3][0]
print('l.%s\t%s\t%s\t%r' %
(token[2][0], pos, tokenize.tok_name[token[0]], token[1]))
self.tokens.append(token)
token_type, text = token[0:2]
if token_type == tokenize.OP and text in '([{':
Expand Down
1 change: 1 addition & 0 deletions testsuite/E211c.py
@@ -0,0 +1 @@
dict['key'] ['subkey'] = list[index]
10 changes: 10 additions & 0 deletions testsuite/E211not.py
@@ -1,2 +1,12 @@
spam(1)
dict['key'] = list[index]


def squares(n):
return (i**2 for i in range(n))


# This is not prohibited by PEP8, but avoid it.
class Foo (Bar, Baz):
pass

3 changes: 3 additions & 0 deletions testsuite/E221c.py
@@ -0,0 +1,3 @@
x[0] = 1
x[1] = 2
long_variable = 3
3 changes: 3 additions & 0 deletions testsuite/E221d.py
@@ -0,0 +1,3 @@
x = f(x) + 1
y = long_variable + 2
z = x[0] + 3
3 changes: 3 additions & 0 deletions testsuite/E222b.py
@@ -0,0 +1,3 @@
x = -1
y = -2
long_variable = 3
3 changes: 3 additions & 0 deletions testsuite/E222c.py
@@ -0,0 +1,3 @@
x[0] = 1
x[1] = 2
long_variable = 3

0 comments on commit dd9cd8f

Please sign in to comment.