Skip to content

Commit

Permalink
Fix linting
Browse files Browse the repository at this point in the history
Need to escape the backslash in docstring regexes.
It is an error in Python 3.8

See PyCQA/pycodestyle#854 for more details.
  • Loading branch information
has2k1 committed Jun 30, 2019
1 parent d8ca85f commit 18607fc
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 5 deletions.
2 changes: 1 addition & 1 deletion plydata/helper_verbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -1655,7 +1655,7 @@ class group_by_at(_at):
group columns.
>>> def double(s): return s + s
>>> df >> group_by_at(dict(matches=r'\w+eta$'), double)
>>> df >> group_by_at(dict(matches=r'\\w+eta$'), double)
groups: ['beta', 'theta']
alpha beta theta x y z
0 a bb cc 1 6 7
Expand Down
2 changes: 1 addition & 1 deletion plydata/one_table_verbs.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,7 @@ class select(DataOperator):
0 1 1 1
1 2 2 2
2 3 3 3
>>> df >> select('bell', matches='\w+tle$')
>>> df >> select('bell', matches=r'\\w+tle$')
bell whistle
0 1 1
1 2 2
Expand Down
6 changes: 3 additions & 3 deletions plydata/tests/test_dataframe.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,13 +149,13 @@ def test_select():
assert len(result.columns) == 3

result = df >> select('caracal', endswith='ar', contains='ee',
matches='\w+opa')
matches=r'\w+opa')
assert len(result.columns) == 5

result = df >> select(contains=['ee', 'ion', '23'])
assert len(result.columns) == 2

result = df >> select(matches=('\w+opa', '\w+r$'))
result = df >> select(matches=(r'\w+opa', r'\w+r$'))
assert len(result.columns) == 4

# grouped on columns are never dropped
Expand All @@ -176,7 +176,7 @@ def test_select():
df[123] = 1
df[456] = 2
df[789] = 3
pattern = re.compile('\w+opa')
pattern = re.compile(r'\w+opa')
result = df >> select(startswith='t', matches=pattern)
assert len(result.columns) == 2

Expand Down

0 comments on commit 18607fc

Please sign in to comment.