Update parser API to merge lexing and parsing
dhruvmanila committed May 23, 2024
1 parent 9b33c47 commit 9e13da6
Showing 9 changed files with 370 additions and 514 deletions.
1 change: 1 addition & 0 deletions crates/ruff_python_parser/Cargo.toml
@@ -14,6 +14,7 @@ license = { workspace = true }
 
 [dependencies]
 ruff_python_ast = { workspace = true }
+ruff_python_trivia = { workspace = true }
 ruff_text_size = { workspace = true }
 
 anyhow = { workspace = true }
21 changes: 4 additions & 17 deletions crates/ruff_python_parser/src/lexer.rs
@@ -9,23 +9,6 @@
 //! as a `Result<Spanned, LexicalError>`, where [`Spanned`] is a tuple containing the
 //! start and end [`TextSize`] and a [`Tok`] denoting the token.
 //!
-//! # Example
-//!
-//! ```
-//! use ruff_python_parser::{lexer::lex, Tok, Mode};
-//!
-//! let source = "x = 'RustPython'";
-//! let tokens = lex(source, Mode::Module)
-//!     .map(|tok| tok.expect("Failed to lex"))
-//!     .collect::<Vec<_>>();
-//!
-//! for (token, range) in tokens {
-//!     println!(
-//!         "{token:?}@{range:?}",
-//!     );
-//! }
-//! ```
-//!
 //! [Lexical analysis]: https://docs.python.org/3/reference/lexical_analysis.html
 
 use std::{char, cmp::Ordering, str::FromStr};
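The doc example removed above used the standalone `lex` iterator, which no longer matches the merged API: with lexing folded into parsing, the token stream comes back as part of the parse result. A minimal sketch of the replacement usage, assuming the crate exposes `parse_module` and that the parse result offers `tokens()`, `kind()`, and `range()` accessors (exact names and return types may differ at this commit):

```rust
use ruff_python_parser::parse_module;

fn main() {
    let source = "x = 'RustPython'";

    // Assumed entry point of the merged API: parse once, and the tokens
    // collected while lexing come back with the parse result.
    let parsed = parse_module(source).expect("Failed to parse");

    // Iterate the token stream the parser produced as a by-product.
    for token in parsed.tokens().iter() {
        println!("{:?}@{:?}", token.kind(), token.range());
    }
}
```

Unlike the removed example, there is no per-token `Result` to unwrap; lexical errors are expected to surface through the parse result instead.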
@@ -1381,6 +1364,10 @@ impl Token {
         self.kind
     }
 
+    pub(crate) const fn is_comment(self) -> bool {
+        matches!(self.kind, TokenKind::Comment)
+    }
+
     pub(crate) const fn is_trivia(self) -> bool {
         matches!(self.kind, TokenKind::Comment | TokenKind::NonLogicalNewline)
     }