Use SimpleTokenizer instead of BackwardsTokenizer
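Context for the change: this affects the autofix for PIE800 (unnecessary spread `**` of a dict literal, e.g. `{**foo, **{bar: 10}}`). Previously the fix located the `**` token by scanning backwards from the start of the inner dict with `BackwardsTokenizer`; it now scans forwards from the end of the previous dict item with `SimpleTokenizer`, and the fix construction moves into a dedicated `unnecessary_spread_fix` helper. A minimal sketch of the forward scan, using the same calls that appear in the diff below (`source` and `prev_end` are stand-in names, not code from this commit):

    use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
    use ruff_text_size::TextSize;

    /// Find the start of the `**` that introduces a spread entry by scanning
    /// forward from `prev_end`, the end of the previous dict item.
    fn find_doublestar(source: &str, prev_end: TextSize) -> Option<TextSize> {
        for tok in SimpleTokenizer::starts_at(prev_end, source) {
            if let SimpleTokenKind::DoubleStar = tok.kind() {
                return Some(tok.range.start());
            }
        }
        None
    }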
alanhdu committed Nov 15, 2023
1 parent c003bbf commit 9f37011
Showing 3 changed files with 56 additions and 63 deletions.
8 changes: 2 additions & 6 deletions crates/ruff_linter/src/checkers/ast/analyze/expression.rs
@@ -937,11 +937,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
             }
         }
         Expr::Dict(
-            dict @ ast::ExprDict {
-                keys,
-                values,
-                range: _,
-            },
+            dict
         ) => {
             if checker.any_enabled(&[
                 Rule::MultiValueRepeatedKeyLiteral,
@@ -950,7 +946,7 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                 pyflakes::rules::repeated_keys(checker, dict);
             }
             if checker.enabled(Rule::UnnecessarySpread) {
-                flake8_pie::rules::unnecessary_spread(checker, keys, values);
+                flake8_pie::rules::unnecessary_spread(checker, dict);
             }
         }
         Expr::Set(ast::ExprSet { elts, range: _ }) => {
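The caller now passes the whole dict node, and the rule reads `dict.keys`, `dict.values`, and `dict.range` itself. For orientation, this is the node shape implied by the destructuring removed above; the field types are an assumption, since only the field names appear in this diff:

    use ruff_python_ast::Expr;
    use ruff_text_size::TextRange;

    // Assumed shape of the dict AST node at this commit:
    pub struct ExprDict {
        pub range: TextRange,        // span of the whole `{...}` literal
        pub keys: Vec<Option<Expr>>, // `None` marks a `**` spread entry
        pub values: Vec<Expr>,       // parallel to `keys`
    }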
98 changes: 47 additions & 51 deletions crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_spread.rs
@@ -1,8 +1,8 @@
-use ruff_python_ast::Expr;
+use ruff_python_ast::{self as ast, Expr};
 
 use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_trivia::{BackwardsTokenizer, SimpleTokenKind};
+use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
 use ruff_text_size::{Ranged, TextSize};
 
 use crate::checkers::ast::Checker;
@@ -45,64 +45,60 @@ impl Violation for UnnecessarySpread {
 }
 
 /// PIE800
-pub(crate) fn unnecessary_spread(checker: &mut Checker, keys: &[Option<Expr>], values: &[Expr]) {
-    for item in keys.iter().zip(values.iter()) {
+pub(crate) fn unnecessary_spread(checker: &mut Checker, dict: &ast::ExprDict) {
+    let mut prev_end = dict.range.start() + TextSize::from(1);
+    for item in dict.keys.iter().zip(dict.values.iter()) {
         if let (None, value) = item {
             // We only care about when the key is None which indicates a spread `**`
             // inside a dict.
-            if let Expr::Dict(dict) = value {
+            if let Expr::Dict(inner) = value {
                 let mut diagnostic = Diagnostic::new(UnnecessarySpread, value.range());
                 if checker.settings.preview.is_enabled() {
-                    // Delete the `**{`
-                    let tokenizer = BackwardsTokenizer::up_to(
-                        dict.range.start(),
-                        checker.locator().contents(),
-                        &[],
-                    );
-                    let mut start = None;
-                    for tok in tokenizer {
-                        if let SimpleTokenKind::DoubleStar = tok.kind() {
-                            start = Some(tok.range.start());
-                            break;
-                        }
-                    }
-                    // unwrap is ok, b/c item.0 can't be None without a DoubleStar
-                    let first =
-                        Edit::deletion(start.unwrap(), dict.range.start() + TextSize::from(1));
-
-                    // Delete the `}` (and possibly a trailing comma) but preserve comments
-                    let mut edits = Vec::with_capacity(1);
-                    let mut end = dict.range.end();
-
-                    let tokenizer = BackwardsTokenizer::up_to(
-                        dict.range.end() - TextSize::from(1),
-                        checker.locator().contents(),
-                        &[],
-                    );
-                    for tok in tokenizer {
-                        match tok.kind() {
-                            SimpleTokenKind::Comment => {
-                                if tok.range.end() != end {
-                                    edits.push(Edit::deletion(tok.range.end(), end));
-                                }
-                                end = tok.range.start();
-                            }
-                            SimpleTokenKind::Comma
-                            | SimpleTokenKind::Whitespace
-                            | SimpleTokenKind::Newline
-                            | SimpleTokenKind::Continuation => {}
-                            _ => {
-                                if tok.range.end() != end {
-                                    edits.push(Edit::deletion(tok.range.end(), end));
-                                }
-                                break;
-                            }
-                        }
-                    }
-                    diagnostic.set_fix(Fix::safe_edits(first, edits));
+                    diagnostic.set_fix(unnecessary_spread_fix(checker, inner, prev_end));
                 }
                 checker.diagnostics.push(diagnostic);
             }
         }
+        prev_end = item.1.end();
     }
 }
+
+fn unnecessary_spread_fix(checker: &mut Checker, inner: &ast::ExprDict, prev_end: TextSize) -> Fix {
+    let tokenizer = SimpleTokenizer::starts_at(prev_end, checker.locator().contents());
+    let mut start = None;
+    for tok in tokenizer {
+        if let SimpleTokenKind::DoubleStar = tok.kind() {
+            start = Some(tok.range.start());
+            break;
+        }
+    }
+    // unwrap is ok, b/c item.0 can't be None without a DoubleStar
+    let doublestar = start.unwrap();
+
+    if let Some(last) = inner.values.last() {
+        let tokenizer =
+            SimpleTokenizer::starts_at(last.range().end(), checker.locator().contents());
+        let mut edits = vec![];
+        for tok in tokenizer.skip_trivia() {
+            match tok.kind() {
+                SimpleTokenKind::Comma => {
+                    edits.push(Edit::range_deletion(tok.range()));
+                }
+                SimpleTokenKind::RBrace => {
+                    edits.push(Edit::range_deletion(tok.range));
+                    break;
+                }
+                _ => {}
+            }
+        }
+        Fix::safe_edits(
+            // Delete the first `**{`
+            Edit::deletion(doublestar, inner.start() + TextSize::from(1)),
+            // Delete the trailing `}`
+            edits,
+        )
+    } else {
+        // Can just delete the entire thing
+        Fix::safe_edit(Edit::deletion(doublestar, inner.end()))
+    }
+}
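Taken together, the new fix makes at most three deletions: `**{` (from the `**` token through the inner dict's opening brace), the trailing comma after the last inner value if present, and the inner closing `}`; an empty inner dict is deleted outright in a single edit. A self-contained illustration of the resulting text transformation (plain string slicing, not ruff code; the offsets are hard-coded for this one input):

    fn main() {
        let source = "{**foo, **{bar: 10}}";
        // Offsets the tokenizer finds in the real implementation:
        let doublestar = 8; // start of the second `**`
        let inner_open_end = 11; // just past the inner `{`
        let inner_close = 18; // the inner `}`
        // Delete `**{` and the inner `}`, keeping everything between them.
        let fixed = format!(
            "{}{}{}",
            &source[..doublestar],                 // "{**foo, "
            &source[inner_open_end..inner_close],  // "bar: 10"
            &source[inner_close + 1..],            // "}"
        );
        assert_eq!(fixed, "{**foo, bar: 10}");
    }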
(Third changed file: the PIE800 preview snapshot under crates/ruff_linter/src/rules/flake8_pie/snapshots/; the exact filename was not captured.)
@@ -101,13 +101,14 @@ PIE800.py:12:7: PIE800 [*] Unnecessary spread `**`
 12    |-    **{
    12 |+
 13 13 |         # all
-14 14 |         "bar": 10,  # the
-15    |-        # comments
+14    |-        "bar": 10,  # the
+   14 |+        "bar": 10  # the
+15 15 |         # comments
 16    |-    },
-   15 |+        # comments,
-17 16 | }
-18 17 | 
-19 18 | {**foo, **buzz, **{bar: 10}}  # PIE800
+   16 |+    ,
+17 17 | }
+18 18 | 
+19 19 | {**foo, **buzz, **{bar: 10}}  # PIE800
 
 PIE800.py:19:19: PIE800 [*] Unnecessary spread `**`
   |
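The snapshot diff shows the user-visible improvement on the commented fixture: the old fix deleted everything between the trailing comment and the closing brace, which spliced the dict's trailing comma onto the comment line (`# comments,`), effectively commenting it out. The new fix instead removes the comma after `"bar": 10` and deletes only the `}` token, so the outer comma survives on its own line. Condensed fixed output, reassembled from the snapshot lines above:

    # old fix output (comma absorbed into the comment):
            # all
            "bar": 10,  # the
            # comments,
    }

    # new fix output (comma kept as code):
            # all
            "bar": 10  # the
            # comments
        ,
    }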
