Upgrade RustPython to match new flattened exports (#3141)
charliermarsh committed Feb 22, 2023
1 parent ba61bb6 commit 2f9de33
Showing 49 changed files with 195 additions and 235 deletions.
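
Note on the change itself: RustPython's parser crate flattened its re-exports, so `Mode`, `StringKind`, and `Tok` now come from the crate root rather than the `mode`/`token` submodules, and the lexer entry points `make_tokenizer`/`make_tokenizer_located` were renamed to `lex`/`lex_located`. A minimal sketch of the new surface against the pinned rev (illustrative only, not part of the diff; `dump_tokens` is a hypothetical helper):

```rust
// Old surface (rev 6d71f75...):
//     use rustpython_parser::lexer;
//     use rustpython_parser::lexer::Tok;
//     use rustpython_parser::mode::Mode;
//     use rustpython_parser::token::StringKind;
//     lexer::make_tokenizer(source, Mode::Module)

// New surface (rev edf5995...): flattened re-exports, renamed entry points.
use rustpython_parser::{lexer, Mode};

fn dump_tokens(source: &str) {
    // Each item is a `LexResult`; `flatten` keeps the `Ok` triples of
    // (start `Location`, `Tok`, end `Location`).
    for (start, tok, end) in lexer::lex(source, Mode::Module).flatten() {
        println!("{start:?}..{end:?}  {tok:?}");
    }
}

fn main() {
    dump_tokens("x = 1  # comment");
}
```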
8 changes: 4 additions & 4 deletions Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -13,8 +13,8 @@ libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a87
once_cell = { version = "1.16.0" }
regex = { version = "1.6.0" }
rustc-hash = { version = "1.1.0" }
-rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "6d71f758170d504817cc47720762c41d9031506d" }
-rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "6d71f758170d504817cc47720762c41d9031506d" }
+rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "edf5995a1e4c366976304ca05432dd27c913054e" }
+rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "edf5995a1e4c366976304ca05432dd27c913054e" }
schemars = { version = "0.8.11" }
serde = { version = "1.0.147", features = ["derive"] }
serde_json = { version = "1.0.87" }
4 changes: 4 additions & 0 deletions crates/ruff/resources/test/fixtures/flake8_return/RET503.py
@@ -289,3 +289,7 @@ def x(y):
            return 1
        case 1:
            print()  # error
+
+
+def foo(baz: str) -> str:
+    return baz
29 changes: 12 additions & 17 deletions crates/ruff/src/ast/helpers.rs
@@ -9,10 +9,7 @@ use rustpython_parser::ast::{
    Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword, KeywordData,
    Located, Location, MatchCase, Pattern, PatternKind, Stmt, StmtKind,
};
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
-use rustpython_parser::token::StringKind;
+use rustpython_parser::{lexer, Mode, StringKind, Tok};
use smallvec::{smallvec, SmallVec};

use crate::ast::types::{Binding, BindingKind, CallPath, Range};
@@ -656,7 +653,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {

/// Returns `true` if a [`Range`] includes at least one comment.
pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
-    for tok in lexer::make_tokenizer_located(locator.slice(&range), Mode::Module, range.location) {
+    for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
        match tok {
            Ok((_, tok, _)) => {
                if matches!(tok, Tok::Comment(..)) {
@@ -871,8 +868,7 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
    let mut fix_start = None;
    let mut fix_end = None;
    let mut count: usize = 0;
-    for (start, tok, end) in lexer::make_tokenizer_located(contents, Mode::Module, start).flatten()
-    {
+    for (start, tok, end) in lexer::lex_located(contents, Mode::Module, start).flatten() {
        if matches!(tok, Tok::Lpar) {
            if count == 0 {
                fix_start = Some(start);
@@ -904,8 +900,7 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
            | StmtKind::AsyncFunctionDef { .. }
    ) {
        let contents = locator.slice(&Range::from_located(stmt));
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt.location).flatten()
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
        {
            if matches!(tok, Tok::Name { .. }) {
                return Range::new(start, end);
@@ -937,7 +932,7 @@ pub fn find_names<'a, T, U>(
    locator: &'a Locator,
) -> impl Iterator<Item = Range> + 'a {
    let contents = locator.slice(&Range::from_located(located));
-    lexer::make_tokenizer_located(contents, Mode::Module, located.location)
+    lexer::lex_located(contents, Mode::Module, located.location)
        .flatten()
        .filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
        .map(|(start, _, end)| Range {
@@ -955,7 +950,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
        (Some(_), Some(type_)) => {
            let type_end_location = type_.end_location.unwrap();
            let contents = locator.slice(&Range::new(type_end_location, body[0].location));
-            let range = lexer::make_tokenizer_located(contents, Mode::Module, type_end_location)
+            let range = lexer::lex_located(contents, Mode::Module, type_end_location)
                .flatten()
                .tuple_windows()
                .find(|(tok, next_tok)| {
@@ -982,7 +977,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
        location: handler.location,
        end_location: end,
    });
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, handler.location)
+    let range = lexer::lex_located(contents, Mode::Module, handler.location)
        .flatten()
        .find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
        .map(|(location, _, end_location)| Range {
@@ -996,7 +991,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
/// Find f-strings that don't contain any formatted values in a `JoinedStr`.
pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
    let contents = locator.slice(&Range::from_located(expr));
-    lexer::make_tokenizer_located(contents, Mode::Module, expr.location)
+    lexer::lex_located(contents, Mode::Module, expr.location)
        .flatten()
        .filter_map(|(location, tok, end_location)| match tok {
            Tok::String {
@@ -1050,7 +1045,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
            .expect("Expected orelse to be non-empty")
            .location,
    });
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, body_end)
+    let range = lexer::lex_located(contents, Mode::Module, body_end)
        .flatten()
        .find(|(_, kind, _)| matches!(kind, Tok::Else))
        .map(|(location, _, end_location)| Range {
@@ -1066,7 +1061,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
/// Return the `Range` of the first `Tok::Colon` token in a `Range`.
pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
    let contents = locator.slice(&range);
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, range.location)
+    let range = lexer::lex_located(contents, Mode::Module, range.location)
        .flatten()
        .find(|(_, kind, _)| matches!(kind, Tok::Colon))
        .map(|(location, _, end_location)| Range {
@@ -1096,7 +1091,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
        _ => return None,
    };
    let contents = locator.slice(&Range::new(start, end));
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, start)
+    let range = lexer::lex_located(contents, Mode::Module, start)
        .flatten()
        .find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
        .map(|(location, _, end_location)| Range {
@@ -1212,8 +1207,8 @@ pub fn is_logger_candidate(func: &Expr) -> bool {
#[cfg(test)]
mod tests {
    use anyhow::Result;
+    use rustpython_parser as parser;
    use rustpython_parser::ast::Location;
-    use rustpython_parser::parser;

    use crate::ast::helpers::{
        elif_else_range, else_range, first_colon_range, identifier_range, match_trailing_content,
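All of the `helpers.rs` hunks above share one pattern: slice the file down to a node's `Range`, then re-lex just that slice with `lex_located`, passing the slice's absolute start `Location` so the emitted token spans stay file-relative. A standalone sketch of that pattern under the new names (ruff's `Locator` and `Range` wrappers are omitted; `first_colon` is a hypothetical stand-in for `first_colon_range` above):

```rust
use rustpython_parser::ast::Location;
use rustpython_parser::{lexer, Mode, Tok};

/// Return the absolute span of the first `:` token in `snippet`, where
/// `at` is the absolute location of the snippet's first character.
fn first_colon(snippet: &str, at: Location) -> Option<(Location, Location)> {
    lexer::lex_located(snippet, Mode::Module, at)
        .flatten()
        .find(|(_, tok, _)| matches!(tok, Tok::Colon))
        .map(|(start, _, end)| (start, end))
}

fn main() {
    // Pretend the snippet starts at row 10, column 4 of some file.
    let span = first_colon("case 1:", Location::new(10, 4));
    println!("{span:?}");
}
```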
8 changes: 2 additions & 6 deletions crates/ruff/src/ast/operations.rs
@@ -1,9 +1,7 @@
use bitflags::bitflags;
use rustc_hash::FxHashMap;
use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located, Stmt, StmtKind};
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
+use rustpython_parser::{lexer, Mode, Tok};

use crate::ast::helpers::any_over_expr;
use crate::ast::types::{BindingKind, Scope};
@@ -285,9 +283,7 @@ pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
/// `CPython` doesn't either. This method iterates over the token stream and
/// re-identifies [`Cmpop`] nodes, annotating them with valid ranges.
pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
-    let mut tok_iter = lexer::make_tokenizer(contents, Mode::Module)
-        .flatten()
-        .peekable();
+    let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
    let mut ops: Vec<LocatedCmpop> = vec![];
    let mut count: usize = 0;
    loop {
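Context for the `.peekable()` in the new one-liner: `locate_cmpops` needs one-token lookahead because `not in` and `is not` arrive from the lexer as two tokens. A rough sketch of that lookahead (simplified: the real function also tracks bracket depth and returns `LocatedCmpop`s with ranges rather than bare `Cmpop`s):

```rust
use rustpython_parser::ast::Cmpop;
use rustpython_parser::{lexer, Mode, Tok};

/// Re-identify comparison operators from the token stream (sketch).
fn cmpop_kinds(contents: &str) -> Vec<Cmpop> {
    let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
    let mut ops = Vec::new();
    while let Some((_, tok, _)) = tok_iter.next() {
        match tok {
            // `not in` is two tokens: peek, then consume the `in`.
            Tok::Not if matches!(tok_iter.peek(), Some((_, Tok::In, _))) => {
                tok_iter.next();
                ops.push(Cmpop::NotIn);
            }
            // Likewise `is not`.
            Tok::Is if matches!(tok_iter.peek(), Some((_, Tok::Not, _))) => {
                tok_iter.next();
                ops.push(Cmpop::IsNot);
            }
            Tok::In => ops.push(Cmpop::In),
            Tok::Is => ops.push(Cmpop::Is),
            Tok::EqEqual => ops.push(Cmpop::Eq),
            Tok::NotEqual => ops.push(Cmpop::NotEq),
            _ => {}
        }
    }
    ops
}

fn main() {
    // Expect [NotIn, IsNot] for this snippet.
    println!("{:?}", cmpop_kinds("x not in y or x is not z"));
}
```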
18 changes: 5 additions & 13 deletions crates/ruff/src/autofix/helpers.rs
@@ -4,9 +4,7 @@ use libcst_native::{
    Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
};
use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, StmtKind};
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
+use rustpython_parser::{lexer, Mode, Tok};

use crate::ast::helpers;
use crate::ast::helpers::to_absolute;
@@ -372,9 +370,7 @@ pub fn remove_argument(
    if n_arguments == 1 {
        // Case 1: there is only one argument.
        let mut count: usize = 0;
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
            if matches!(tok, Tok::Lpar) {
                if count == 0 {
                    fix_start = Some(if remove_parentheses {
@@ -406,9 +402,7 @@
    {
        // Case 2: argument or keyword is _not_ the last node.
        let mut seen_comma = false;
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
            if seen_comma {
                if matches!(tok, Tok::NonLogicalNewline) {
                    // Also delete any non-logical newlines after the comma.
@@ -431,9 +425,7 @@
    } else {
        // Case 3: argument or keyword is the last node, so we have to find the last
        // comma in the stmt.
-        for (start, tok, _) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
            if start == expr_at {
                fix_end = Some(expr_end);
                break;
@@ -455,8 +447,8 @@
#[cfg(test)]
mod tests {
    use anyhow::Result;
+    use rustpython_parser as parser;
    use rustpython_parser::ast::Location;
-    use rustpython_parser::parser;

    use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
    use crate::source_code::Locator;
11 changes: 5 additions & 6 deletions crates/ruff/src/checkers/ast.rs
@@ -6,19 +6,18 @@ use std::path::Path;
use itertools::Itertools;
use log::error;
use nohash_hasher::IntMap;
+use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
+use ruff_python::typing::TYPING_EXTENSIONS;
use rustc_hash::{FxHashMap, FxHashSet};
use rustpython_common::cformat::{CFormatError, CFormatErrorType};
+use rustpython_parser as parser;
use rustpython_parser::ast::{
    Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
    ExprKind, KeywordData, Located, Location, Operator, Pattern, PatternKind, Stmt, StmtKind,
    Suite,
};
-use rustpython_parser::parser;
use smallvec::smallvec;

-use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
-use ruff_python::typing::TYPING_EXTENSIONS;
-
use crate::ast::helpers::{
    binding_range, collect_call_path, extract_handler_names, from_relative_import, to_module_path,
};
@@ -2060,8 +2059,8 @@ where
                value,
                ..
            } => {
-                // If we're in a class or module scope, then the annotation needs to be available
-                // at runtime.
+                // If we're in a class or module scope, then the annotation needs to be
+                // available at runtime.
                // See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
                if !self.annotations_future_enabled
                    && matches!(
13 changes: 6 additions & 7 deletions crates/ruff/src/checkers/logical_lines.rs
@@ -152,9 +152,8 @@ pub fn check_logical_lines(

#[cfg(test)]
mod tests {
-    use rustpython_parser::lexer;
    use rustpython_parser::lexer::LexResult;
-    use rustpython_parser::mode::Mode;
+    use rustpython_parser::{lexer, Mode};

    use crate::checkers::logical_lines::iter_logical_lines;
    use crate::source_code::Locator;
@@ -165,7 +164,7 @@ mod tests {
x = 1
y = 2
z = x + 1"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
        let locator = Locator::new(contents);
        let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
            .into_iter()
@@ -186,7 +185,7 @@ x = [
]
y = 2
z = x + 1"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
        let locator = Locator::new(contents);
        let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
            .into_iter()
@@ -200,7 +199,7 @@ z = x + 1"#;
        assert_eq!(actual, expected);

        let contents = "x = 'abc'";
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
        let locator = Locator::new(contents);
        let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
            .into_iter()
@@ -213,7 +212,7 @@ z = x + 1"#;
def f():
    x = 1
f()"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
        let locator = Locator::new(contents);
        let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
            .into_iter()
@@ -228,7 +227,7 @@ def f():
    # Comment goes here.
    x = 1
f()"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
        let locator = Locator::new(contents);
        let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
            .into_iter()
3 changes: 2 additions & 1 deletion crates/ruff/src/checkers/tokens.rs
@@ -1,6 +1,7 @@
//! Lint rules based on token traversal.

-use rustpython_parser::lexer::{LexResult, Tok};
+use rustpython_parser::lexer::LexResult;
+use rustpython_parser::Tok;

use crate::lex::docstring_detection::StateMachine;
use crate::registry::{Diagnostic, Rule};
