Merged
39 changes: 0 additions & 39 deletions pydocstringformatter/formatting.py

This file was deleted.

11 changes: 11 additions & 0 deletions pydocstringformatter/formatting/__init__.py
@@ -0,0 +1,11 @@
__all__ = ["FORMATTERS"]

from pydocstringformatter.formatting.formatter import (
    BeginningQuotesFormatter,
    ClosingQuotesFormatter,
)

FORMATTERS = [
    BeginningQuotesFormatter(),
    ClosingQuotesFormatter(),
]
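
For context, and not part of the diff: the new FORMATTERS registry is meant to be iterated by the runner, so applying every formatter to a docstring token becomes a simple loop. A minimal sketch under that assumption (the sample source and token handling below are illustrative, not taken from the PR):

import tokenize
from io import StringIO

from pydocstringformatter.formatting import FORMATTERS

SOURCE = '''def func():
    """
    A docstring whose opening quotes are on their own line."""
'''

# Tokenize the sample and pick out the docstring STRING token.
tokens = list(tokenize.generate_tokens(StringIO(SOURCE).readline))
docstring = next(tok for tok in tokens if tok.type == tokenize.STRING)

# Apply every registered formatter in order; each returns a new TokenInfo.
for formatter in FORMATTERS:
    docstring = formatter.treat_token(docstring)

print(docstring.string)  # '"""A docstring whose opening quotes are on their own line."""'
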
59 changes: 59 additions & 0 deletions pydocstringformatter/formatting/formatter.py
@@ -0,0 +1,59 @@
import abc
import re
import tokenize


class Formatter:
    """Base class for docstring formatter"""

    @abc.abstractmethod
    def treat_token(self, tokeninfo: tokenize.TokenInfo) -> tokenize.TokenInfo:
        """Return a modified token"""


class StringFormatter(Formatter):
    """Base class for formatter that only modifies the string content"""

    @abc.abstractmethod
    def _treat_string(self, tokeninfo: tokenize.TokenInfo) -> str:
        """Return a modified string"""

    def treat_token(self, tokeninfo: tokenize.TokenInfo) -> tokenize.TokenInfo:
        return tokenize.TokenInfo(
            tokeninfo.type,
            self._treat_string(tokeninfo),
            tokeninfo.start,
            tokeninfo.end,
            tokeninfo.line,
        )


class BeginningQuotesFormatter(StringFormatter):
    """Fix the position of the opening quotes"""

    def _treat_string(self, tokeninfo: tokenize.TokenInfo) -> str:
        new_string = tokeninfo.string
        if new_string[3] == "\n":
            new_string = re.sub(r"\n *", "", new_string, 1)
        return new_string


class ClosingQuotesFormatter(StringFormatter):
    """Fix the position of the closing quotes"""

    def _treat_string(self, tokeninfo: tokenize.TokenInfo) -> str:
        """Fix the position of end quotes for multi-line docstrings"""
        new_string = tokeninfo.string
        if "\n" not in new_string:
            # Not a multiline docstring, nothing to do
            return new_string
        good_end = f"{(tokeninfo.start[1]) * ' '}{(new_string[0]) * 3}"
        split_string = new_string.split("\n")

        # Add new line with only quotes
        if not new_string.endswith("\n" + good_end):
            new_string = new_string[:-3] + "\n" + good_end
        # Remove line with only quotes for potential single line string
        elif len(split_string) == 2 and split_string[-1] == good_end:
            new_string = "\n".join(split_string[:-1]) + tokeninfo.string[0] * 3
        return new_string
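
Not part of the PR, but to illustrate the extension point these base classes create: a new rule only needs to subclass StringFormatter, implement _treat_string, and be added to FORMATTERS. The TrailingWhitespaceFormatter below is a hypothetical example invented for this sketch:

import tokenize

from pydocstringformatter.formatting.formatter import StringFormatter


class TrailingWhitespaceFormatter(StringFormatter):
    """Hypothetical formatter: strip trailing whitespace from every docstring line."""

    def _treat_string(self, tokeninfo: tokenize.TokenInfo) -> str:
        # Strip trailing spaces and tabs from each line while keeping the line breaks.
        return "\n".join(line.rstrip() for line in tokeninfo.string.split("\n"))

Registering it would then be a one-line addition to FORMATTERS in formatting/__init__.py.
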
5 changes: 2 additions & 3 deletions pydocstringformatter/run.py
@@ -48,9 +48,8 @@ def _format_file(self, filename: Path) -> bool:
            new_tokeninfo = tokeninfo

            if utils._is_docstring(new_tokeninfo, tokens[index - 1]):
-               new_tokeninfo = formatting._format_beginning_quotes(new_tokeninfo)
-               new_tokeninfo = formatting._format_closing_quotes(new_tokeninfo)
-
+               for formatter in formatting.FORMATTERS:
+                   new_tokeninfo = formatter.treat_token(new_tokeninfo)
            changed_tokens.append(new_tokeninfo)

            if tokeninfo != new_tokeninfo:
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -24,7 +24,8 @@ py-version="3.8"

[tool.pylint.'MESSAGES CONTROL']
disable = [
-   "missing-module-docstring"
+   "missing-module-docstring",
+   "too-few-public-methods",
]
enable = [
    "c-extension-no-member",