Skip to content

Commit

Permalink
Lexing tuning.
Browse files Browse the repository at this point in the history
- Add separate TestCaseSettingLexer and KeywordSettingLexer and remove
  TestOrKeywordSettingLexer.
- Remove unnecessary TestOrKeywordContext base class.
- Add ABC as an explicit base class in more places.

To some extent related to #4740.
  • Loading branch information
pekkaklarck committed May 3, 2023
1 parent 558fb75 commit 23c0573
Show file tree
Hide file tree
Showing 3 changed files with 45 additions and 33 deletions.
34 changes: 18 additions & 16 deletions src/robot/parsing/lexer/blocklexers.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,27 +13,29 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from abc import ABC
from collections.abc import Iterator

from robot.utils import normalize_whitespace

from .context import (FileContext, LexingContext, SuiteFileContext,
TestOrKeywordContext)
from .context import (FileContext, KeywordContext, LexingContext, SuiteFileContext,
TestCaseContext)
from .statementlexers import (BreakLexer, CommentLexer, CommentSectionHeaderLexer,
ContinueLexer, ElseHeaderLexer, ElseIfHeaderLexer,
EndLexer, ExceptHeaderLexer, FinallyHeaderLexer,
ForHeaderLexer, IfHeaderLexer, ImplicitCommentLexer,
InlineIfHeaderLexer, InvalidSectionHeaderLexer,
KeywordCallLexer, KeywordSectionHeaderLexer, Lexer,
ReturnLexer, SettingLexer, SettingSectionHeaderLexer,
StatementTokens, SyntaxErrorLexer, TaskSectionHeaderLexer,
TestCaseSectionHeaderLexer, TestOrKeywordSettingLexer,
KeywordCallLexer, KeywordSectionHeaderLexer,
KeywordSettingLexer, Lexer, ReturnLexer, SettingLexer,
SettingSectionHeaderLexer, StatementTokens,
SyntaxErrorLexer, TaskSectionHeaderLexer,
TestCaseSectionHeaderLexer, TestCaseSettingLexer,
TryHeaderLexer, VariableLexer, VariableSectionHeaderLexer,
WhileHeaderLexer)
from .tokens import Token


class BlockLexer(Lexer):
class BlockLexer(Lexer, ABC):

def __init__(self, ctx: LexingContext):
super().__init__(ctx)
Expand Down Expand Up @@ -86,7 +88,7 @@ def lexer_classes(self) -> 'tuple[type[Lexer], ...]':
InvalidSectionLexer, ImplicitCommentSectionLexer)


class SectionLexer(BlockLexer):
class SectionLexer(BlockLexer, ABC):
ctx: FileContext

def accepts_more(self, statement: StatementTokens) -> bool:
Expand Down Expand Up @@ -165,7 +167,7 @@ def lexer_classes(self) -> 'tuple[type[Lexer], ...]':
return (InvalidSectionHeaderLexer, CommentLexer)


class TestOrKeywordLexer(BlockLexer):
class TestOrKeywordLexer(BlockLexer, ABC):
name_type: str
_name_seen = False

Expand Down Expand Up @@ -196,10 +198,10 @@ def __init__(self, ctx: SuiteFileContext):
super().__init__(ctx.test_case_context())

def lex(self):
self._lex_with_priority(priority=TestOrKeywordSettingLexer)
self._lex_with_priority(priority=TestCaseSettingLexer)

def lexer_classes(self) -> 'tuple[type[Lexer], ...]':
return (TestOrKeywordSettingLexer, ForLexer, InlineIfLexer, IfLexer,
return (TestCaseSettingLexer, ForLexer, InlineIfLexer, IfLexer,
TryLexer, WhileLexer, SyntaxErrorLexer, KeywordCallLexer)


Expand All @@ -210,14 +212,14 @@ def __init__(self, ctx: FileContext):
super().__init__(ctx.keyword_context())

def lexer_classes(self) -> 'tuple[type[Lexer], ...]':
return (TestOrKeywordSettingLexer, ForLexer, InlineIfLexer, IfLexer,
ReturnLexer, TryLexer, WhileLexer, SyntaxErrorLexer, KeywordCallLexer)
return (KeywordSettingLexer, ForLexer, InlineIfLexer, IfLexer, ReturnLexer,
TryLexer, WhileLexer, SyntaxErrorLexer, KeywordCallLexer)


class NestedBlockLexer(BlockLexer):
ctx: TestOrKeywordContext
class NestedBlockLexer(BlockLexer, ABC):
ctx: 'TestCaseContext|KeywordContext'

def __init__(self, ctx: TestOrKeywordContext):
def __init__(self, ctx: 'TestCaseContext|KeywordContext'):
super().__init__(ctx)
self._block_level = 0

Expand Down
16 changes: 6 additions & 10 deletions src/robot/parsing/lexer/context.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,21 +129,17 @@ def _get_invalid_section_error(self, header: str) -> str:
f"'Settings', 'Variables', 'Keywords' and 'Comments'.")


# TODO: Try removing base class
class TestOrKeywordContext(LexingContext):
    """Shared base for test case and keyword lexing contexts.

    Provides a ``template_set`` default of ``False``; a subclass that
    actually supports templates is expected to override the property.
    """

    @property
    def template_set(self) -> bool:
        # Default: no template configured for this context.
        return False


class TestCaseContext(TestOrKeywordContext):
class TestCaseContext(LexingContext):
    """Lexing context used while lexing an individual test case."""

    # Settings of the current test case; assigned by the surrounding
    # lexing machinery — TODO confirm exactly where it is set.
    settings: TestCaseSettings

    @property
    def template_set(self) -> bool:
        # Delegates to the test's settings: True when they report
        # that a template has been set.
        return self.settings.template_set


class KeywordContext(TestOrKeywordContext):
class KeywordContext(LexingContext):
settings: KeywordSettings

@property
def template_set(self) -> bool:
return False
28 changes: 21 additions & 7 deletions src/robot/parsing/lexer/statementlexers.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,13 @@
from robot.utils import normalize_whitespace
from robot.variables import is_assign

from .context import FileContext, LexingContext, TestOrKeywordContext
from .context import FileContext, LexingContext, KeywordContext, TestCaseContext
from .tokens import Token


StatementTokens = List[Token]


# TODO: Try making generic.
class Lexer(ABC):

def __init__(self, ctx: LexingContext):
Expand All @@ -52,7 +51,7 @@ def lex(self):
class StatementLexer(Lexer, ABC):
token_type: str

def __init__(self, ctx: 'FileContext|TestOrKeywordContext'):
def __init__(self, ctx: LexingContext):
super().__init__(ctx)
self.statement: StatementTokens = []

Expand Down Expand Up @@ -153,25 +152,41 @@ def lex(self):


class SettingLexer(StatementLexer):
    """Lexes a setting statement using a file-level context."""

    # Narrowed from LexingContext: settings at this level belong to the file.
    ctx: FileContext

    def lex(self):
        # Delegate tokenization of the whole statement to the context,
        # which knows the valid setting names at file level.
        self.ctx.lex_setting(self.statement)


# TODO: Try splitting to TestSettingLexer and KeywordSettingLexer. Same with Context.
class TestOrKeywordSettingLexer(SettingLexer):
class TestCaseSettingLexer(StatementLexer):
    """Lexes settings of an individual test case."""

    ctx: TestCaseContext

    def lex(self):
        """Delegate tokenization of the setting to the test case context."""
        self.ctx.lex_setting(self.statement)

    def handles(self, statement: StatementTokens) -> bool:
        """A statement is a test setting when its first value is bracketed
        like ``[...]``."""
        first = statement[0].value
        if not first:
            return False
        return first.startswith('[') and first.endswith(']')


class KeywordSettingLexer(StatementLexer):
    """Lexes settings of an individual user keyword."""

    ctx: KeywordContext

    def lex(self):
        """Have the keyword context tokenize the setting statement."""
        self.ctx.lex_setting(self.statement)

    def handles(self, statement: StatementTokens) -> bool:
        """Keyword settings are recognized by a ``[...]`` bracketed
        first value."""
        value = statement[0].value
        return bool(value) and value[0] == '[' and value[-1] == ']'


class VariableLexer(TypeAndArguments):
    """Lexes a variable definition statement."""

    # Variables are defined at file level.
    ctx: FileContext
    # Token type applied by the base class — presumably to the statement's
    # name token; TODO confirm against TypeAndArguments.
    token_type = Token.VARIABLE


class KeywordCallLexer(StatementLexer):
ctx: TestOrKeywordContext
ctx: 'TestCaseContext|KeywordContext'

def lex(self):
if self.ctx.template_set:
Expand All @@ -196,7 +211,6 @@ def _lex_as_keyword_call(self):


class ForHeaderLexer(StatementLexer):
ctx: TestOrKeywordContext
separators = ('IN', 'IN RANGE', 'IN ENUMERATE', 'IN ZIP')

def handles(self, statement: StatementTokens) -> bool:
Expand Down

0 comments on commit 23c0573

Please sign in to comment.