
Commit

fixed tests
wolph committed Jan 20, 2017
1 parent 27ebcc7 commit 611c8b8
Showing 6 changed files with 13 additions and 6 deletions.
7 changes: 4 additions & 3 deletions formatter2/formatter.py
@@ -1,3 +1,5 @@
+ from __future__ import print_function
+
import os
import sys
import logging
@@ -25,12 +27,11 @@ def __call__(self, input_file, seek=True):
else:
with open(input_file) as fh:
file_ = _stringio.StringIO()
- file_.write(fh.read())
+ print(fh.read(), file=file_)
file_.seek(0)
tokens = Tokens.from_readline(file_.readline)

formatted = tokens()

# Test if we didn't break anything
try:
compile(formatted, '', 'exec')
@@ -74,5 +75,5 @@ def format_file(self, name):
def format_string(cls, string):
formatter = Formatter()
fh = _stringio.StringIO()
- fh.write(string)
+ print(string, file=fh)
return formatter(fh)
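
The core of the fix in formatter.py appears to be the switch from fh.write(...) to print(..., file=fh), enabled by the new from __future__ import print_function: print appends a trailing newline, so the buffered source always ends in '\n' before it reaches the tokenizer and the compile(...) sanity check. A minimal sketch of the difference, using Python 3's io.StringIO for illustration rather than the project's _stringio wrapper:

# Sketch only - not part of the commit. Shows how print(..., file=fh)
# differs from fh.write(...): print appends a newline to the buffer.
import io

source = "x = 1"  # hypothetical snippet with no trailing newline

old_style = io.StringIO()
old_style.write(source)          # stores the text verbatim

new_style = io.StringIO()
print(source, file=new_style)    # stores the text plus '\n'

assert old_style.getvalue() == "x = 1"
assert new_style.getvalue() == "x = 1\n"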
6 changes: 4 additions & 2 deletions formatter2/offsets.py
@@ -252,8 +252,8 @@ def get_token_offsets():
keywords['except'].post = 1
keywords['import'].post = 1
keywords['for'].post = 1
- keywords['while'].post = 1
- keywords['del'].post = 1
+ # keywords['while'].post = 1
+ # keywords['del'].post = 1
keywords['if'].post = 1
keywords['if'].pre_collapse = False
# keywords['if'].post_collapse = False
@@ -411,4 +411,6 @@ def get_token_offsets():

return token_offsets


TOKEN_OFFSETS = get_token_offsets()

2 changes: 1 addition & 1 deletion formatter2/tokens.py
@@ -104,7 +104,7 @@ def __tab_to_space(match):
while True:
try:
yield re.sub('^\s+', __tab_to_space, readline())
- except StopIteration:
+ except StopIteration:  # pragma: no cover
break

readline = iter(_tab_to_space(readline)).next
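
The only change to tokens.py is the # pragma: no cover marker, which is coverage.py's standard exclusion comment: the except StopIteration branch is left out of the coverage report instead of counting as a missed line. A hypothetical illustration of the same pattern, not taken from the project:

# Hypothetical example of coverage.py's "# pragma: no cover" exclusion.
def drain(readline):
    """Collect lines from a readline-style callable until it is exhausted."""
    lines = []
    while True:
        try:
            line = readline()
        except StopIteration:  # pragma: no cover - rarely hit, excluded from coverage
            break
        if not line:
            break
        lines.append(line)
    return lines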
2 changes: 2 additions & 0 deletions formatter2/types.py
@@ -143,4 +143,6 @@ def get_token_types():
token_types.register(IndentTokenType)
return token_types


TOKEN_TYPES = get_token_types()
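
offsets.py and types.py each gain a second blank line before their module-level assignment. Presumably this satisfies pycodestyle's E305 check ("expected 2 blank lines after class or function definition"), which the flake8 run configured in pytest.ini would otherwise report; the commit itself does not say so. A small illustrative layout under that assumption:

# Illustrative only - mirrors the layout change, not the project's real code.
def get_things():
    return ['spam', 'eggs']


THINGS = get_things()  # two blank lines above the module-level call (E305)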

1 change: 1 addition & 0 deletions pytest.ini
@@ -16,5 +16,6 @@ flake8-ignore =
formatter2/tokenize_fork.py ALL
formatter2/_stringio.py ALL
tests/samples/*.py F811
tests/samples/dictionaries.py F999
*.py W391

1 change: 1 addition & 0 deletions tests/samples/generators.py
@@ -1,6 +1,7 @@
def spam():
yield 'eggs'


spam = ('egg %d' % egg for egg in range(5))


