diff --git a/CHANGES.rst b/CHANGES.rst
index 86c1ddb88..009723737 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -32,6 +32,8 @@ Unreleased
 - On Python 3.12+, we now disable SQLite writing journal files, which should
   be a little faster.
 
+- The new 3.12 soft keyword ``type`` is properly bolded in HTML reports.
+
 .. _issue 1605: https://github.com/nedbat/coveragepy/pull/1605
 .. _issue 1684: https://github.com/nedbat/coveragepy/issues/1684
 .. _pull 1685: https://github.com/nedbat/coveragepy/pull/1685
diff --git a/coverage/phystokens.py b/coverage/phystokens.py
index d5659268d..5fd5dacc5 100644
--- a/coverage/phystokens.py
+++ b/coverage/phystokens.py
@@ -77,19 +77,25 @@ def _phys_tokens(toks: TokenInfos) -> TokenInfos:
         last_lineno = elineno
 
 
-class MatchCaseFinder(ast.NodeVisitor):
-    """Helper for finding match/case lines."""
+class SoftKeywordFinder(ast.NodeVisitor):
+    """Helper for finding lines with soft keywords, like match/case lines."""
     def __init__(self, source: str) -> None:
-        # This will be the set of line numbers that start match or case statements.
-        self.match_case_lines: Set[TLineNo] = set()
+        # This will be the set of line numbers that start with a soft keyword.
+        self.soft_key_lines: Set[TLineNo] = set()
         self.visit(ast.parse(source))
 
     if sys.version_info >= (3, 10):
         def visit_Match(self, node: ast.Match) -> None:
             """Invoked by ast.NodeVisitor.visit"""
-            self.match_case_lines.add(node.lineno)
+            self.soft_key_lines.add(node.lineno)
             for case in node.cases:
-                self.match_case_lines.add(case.pattern.lineno)
+                self.soft_key_lines.add(case.pattern.lineno)
+            self.generic_visit(node)
+
+    if sys.version_info >= (3, 12):
+        def visit_TypeAlias(self, node: ast.TypeAlias) -> None:
+            """Invoked by ast.NodeVisitor.visit"""
+            self.soft_key_lines.add(node.lineno)
             self.generic_visit(node)
 
 
@@ -117,7 +123,7 @@ def source_token_lines(source: str) -> TSourceTokenLines:
     tokgen = generate_tokens(source)
 
     if env.PYBEHAVIOR.soft_keywords:
-        match_case_lines = MatchCaseFinder(source).match_case_lines
+        soft_key_lines = SoftKeywordFinder(source).soft_key_lines
 
     for ttype, ttext, (sline, scol), (_, ecol), _ in _phys_tokens(tokgen):
         mark_start = True
@@ -152,7 +158,7 @@ def source_token_lines(source: str) -> TSourceTokenLines:
                                     is_start_of_line = True
                                 else:
                                     is_start_of_line = False
-                                if is_start_of_line and sline in match_case_lines:
+                                if is_start_of_line and sline in soft_key_lines:
                                     tok_class = "key"
                     line.append((tok_class, part))
                     mark_end = True
diff --git a/tests/test_phystokens.py b/tests/test_phystokens.py
index 0312189df..ca1efeae5 100644
--- a/tests/test_phystokens.py
+++ b/tests/test_phystokens.py
@@ -7,6 +7,7 @@
 
 import os.path
 import re
+import sys
 import textwrap
 import warnings
 
@@ -118,7 +119,7 @@ class SoftKeywordTest(CoverageTest):
 
     run_in_temp_dir = False
 
-    def test_soft_keywords(self) -> None:
+    def test_soft_keywords_match_case(self) -> None:
         source = textwrap.dedent("""\
             match re.match(something):
                 case ["what"]:
@@ -147,6 +148,16 @@ def match():
         assert tokens[10][2] == ("nam", "match")
         assert tokens[11][3] == ("nam", "case")
 
+    @pytest.mark.skipif(sys.version_info < (3, 12), reason="type is a soft keyword in 3.12")
+    def test_soft_keyword_type(self) -> None:
+        source = textwrap.dedent("""\
+            type Point = tuple[float, float]
+            type(int)
+            """)
+        tokens = list(source_token_lines(source))
+        assert tokens[0][0] == ("key", "type")
+        assert tokens[1][0] == ("nam", "type")
+
 
 # The default source file encoding.
 DEF_ENCODING = "utf-8"