Skip to content

Commit

Permalink
Merge f102ccc into 20f93ae
Browse files Browse the repository at this point in the history
  • Loading branch information
asottile committed Oct 2, 2018
2 parents 20f93ae + f102ccc commit f09b63b
Show file tree
Hide file tree
Showing 3 changed files with 12 additions and 61 deletions.
1 change: 1 addition & 0 deletions changelog/4066.bugfix.rst
@@ -0,0 +1 @@
Fix source reindenting by using ``textwrap.dedent`` directly.
48 changes: 2 additions & 46 deletions src/_pytest/_code/source.py
Expand Up @@ -7,6 +7,7 @@
import sys
import six
import inspect
import textwrap
import tokenize
import py

Expand All @@ -23,7 +24,6 @@ class Source(object):
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get("deindent", True)
rstrip = kwargs.get("rstrip", True)
for part in parts:
if not part:
partlines = []
Expand All @@ -33,11 +33,6 @@ def __init__(self, *parts, **kwargs):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, six.string_types):
partlines = part.split("\n")
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
Expand Down Expand Up @@ -269,46 +264,7 @@ def getsource(obj, **kwargs):


def deindent(lines, offset=None):
    """Remove the common leading whitespace from every line in *lines*.

    :param lines: sequence of source-code lines (without trailing newlines).
    :param offset: accepted for backward compatibility only and ignored;
        the common indent is determined by ``textwrap.dedent`` from all
        non-blank lines.
    :returns: a new list of lines with the shared indentation stripped.

    NOTE: ``textwrap.dedent`` normalizes lines consisting solely of
    whitespace to empty strings, and lines whose indentation does not
    match the common prefix are left untouched — this is the intended
    behavior (see changelog entry 4066.bugfix.rst).
    """
    return textwrap.dedent("\n".join(lines)).splitlines()


def get_statement_startend2(lineno, node):
Expand Down
24 changes: 9 additions & 15 deletions testing/code/test_source.py
Expand Up @@ -27,16 +27,7 @@ def test_source_str_function():
x = Source(
"""
3
""",
rstrip=False,
)
assert str(x) == "\n3\n "

x = Source(
"""
3
""",
rstrip=True,
)
assert str(x) == "\n3"

Expand Down Expand Up @@ -400,10 +391,13 @@ def f():
pass
"""

assert (
str(_pytest._code.Source(f)).strip()
== 'def f():\n c = """while True:\n pass\n"""'
)
expected = '''\
def f():
c = """while True:
pass
"""
'''
assert str(_pytest._code.Source(f)) == expected.rstrip()


def test_deindent():
Expand All @@ -417,15 +411,15 @@ def f():
"""

lines = deindent(inspect.getsource(f).splitlines())
assert lines == ["def f():", ' c = """while True:', " pass", '"""']
assert lines == [" def f():", ' c = """while True:', " pass", '"""']

source = """
def f():
def g():
pass
"""
lines = deindent(source.splitlines())
assert lines == ["", "def f():", " def g():", " pass", " "]
assert lines == ["", "def f():", " def g():", " pass"]


def test_source_of_class_at_eof_without_newline(tmpdir):
Expand Down

0 comments on commit f09b63b

Please sign in to comment.