Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions parser/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ default = ["location"]
location = ["rustpython-ast/location", "rustpython-parser-core/location"]
serde = ["dep:serde", "rustpython-parser-core/serde"]
all-nodes-with-ranges = ["rustpython-ast/all-nodes-with-ranges"]
full-lexer = []

[build-dependencies]
anyhow = { workspace = true }
Expand Down
31 changes: 27 additions & 4 deletions parser/src/lexer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -450,6 +450,7 @@ where
}

/// Lex a single comment.
#[cfg(feature = "full-lexer")]
fn lex_comment(&mut self) -> LexResult {
let start_pos = self.get_pos();
let mut value = String::new();
Expand All @@ -465,6 +466,20 @@ where
}
}

/// Consume and discard a comment when the `full-lexer` feature is disabled.
///
/// Unlike the `full-lexer` variant, no `Tok::Comment` is produced: the
/// characters are simply skipped. The terminating line break (or EOF) is
/// left in the window for the caller to handle.
#[cfg(not(feature = "full-lexer"))]
fn lex_comment(&mut self) {
    // Advance past every character up to — but not including — the next
    // line terminator. Running out of input ends the comment as well.
    while let Some(c) = self.window[0] {
        if c == '\n' || c == '\r' {
            return;
        }
        // The lookahead window just reported a character, so consuming
        // one cannot fail here.
        self.next_char().unwrap();
    }
}

/// Lex a string literal.
fn lex_string(&mut self, kind: StringKind) -> LexResult {
let start_pos = self.get_pos();
Expand Down Expand Up @@ -611,8 +626,9 @@ where
tabs += 1;
}
Some('#') => {
let comment = self.lex_comment()?;
self.emit(comment);
let _comment = self.lex_comment();
#[cfg(feature = "full-lexer")]
self.emit(_comment?);
spaces = 0;
tabs = 0;
}
Expand Down Expand Up @@ -753,8 +769,9 @@ where
self.emit(number);
}
'#' => {
let comment = self.lex_comment()?;
self.emit(comment);
let _comment = self.lex_comment();
#[cfg(feature = "full-lexer")]
self.emit(_comment?);
}
'"' | '\'' => {
let string = self.lex_string(StringKind::String)?;
Expand Down Expand Up @@ -1101,6 +1118,7 @@ where
self.at_begin_of_line = true;
self.emit((Tok::Newline, TextRange::new(tok_start, tok_end)));
} else {
#[cfg(feature = "full-lexer")]
self.emit((Tok::NonLogicalNewline, TextRange::new(tok_start, tok_end)));
}
}
Expand Down Expand Up @@ -1408,6 +1426,7 @@ mod tests {
($($name:ident: $eol:expr,)*) => {
$(
#[test]
#[cfg(feature = "full-lexer")]
fn $name() {
let source = format!(r"99232 # {}", $eol);
let tokens = lex_source(&source);
Expand All @@ -1428,6 +1447,7 @@ mod tests {
($($name:ident: $eol:expr,)*) => {
$(
#[test]
#[cfg(feature = "full-lexer")]
fn $name() {
let source = format!("123 # Foo{}456", $eol);
let tokens = lex_source(&source);
Expand Down Expand Up @@ -1607,6 +1627,7 @@ mod tests {
($($name:ident: $eol:expr,)*) => {
$(
#[test]
#[cfg(feature = "full-lexer")]
fn $name() {
let source = r"x = [

Expand Down Expand Up @@ -1669,6 +1690,7 @@ mod tests {
}

#[test]
#[cfg(feature = "full-lexer")]
fn test_non_logical_newline_in_string_continuation() {
let source = r"(
'a'
Expand Down Expand Up @@ -1698,6 +1720,7 @@ mod tests {
}

#[test]
#[cfg(feature = "full-lexer")]
fn test_logical_newline_line_comment() {
let source = "#Hello\n#World";
let tokens = lex_source(source);
Expand Down
7 changes: 4 additions & 3 deletions parser/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -190,9 +190,10 @@ pub fn parse_tokens(
source_path: &str,
) -> Result<ast::Mod, ParseError> {
let marker_token = (Tok::start_marker(mode), Default::default());
let lexer = iter::once(Ok(marker_token))
.chain(lxr)
.filter_ok(|(tok, _)| !matches!(tok, Tok::Comment { .. } | Tok::NonLogicalNewline));
let lexer = iter::once(Ok(marker_token)).chain(lxr);
#[cfg(feature = "full-lexer")]
let lexer =
lexer.filter_ok(|(tok, _)| !matches!(tok, Tok::Comment { .. } | Tok::NonLogicalNewline));
python::TopParser::new()
.parse(
lexer
Expand Down
2 changes: 1 addition & 1 deletion parser/src/python.lalrpop
Original file line number Diff line number Diff line change
Expand Up @@ -1743,6 +1743,6 @@ extern {
name => token::Tok::Name { name: <String> },
"\n" => token::Tok::Newline,
";" => token::Tok::Semi,
"#" => token::Tok::Comment(_),
// "#" => token::Tok::Comment(_),
}
}
Loading