[flake8-bugbear] Add fix for duplicate-value (B033) (#9510)
## Summary

Adds autofix for
[B033](https://docs.astral.sh/ruff/rules/duplicate-value/)
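
For orientation, a minimal before/after sketch of what the new fix is intended to do (illustrative only, not taken from the diff; the fix is preview-gated and marked safe, and the exact edits come from `remove_member` in `duplicate_value.rs` below):

```python
# Before: B033 flags the second `1` as a duplicate set item.
incorrect_set = {1, 1, 2}

# After the autofix: the duplicate element and its separating comma are removed.
incorrect_set = {1, 2}
```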

## Test Plan

`cargo test`
diceroll123 committed Jan 14, 2024
1 parent 953d48b commit 0c0d3db
Showing 6 changed files with 342 additions and 12 deletions.
18 changes: 18 additions & 0 deletions crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B033.py
@@ -2,10 +2,28 @@
# Errors.
###
incorrect_set = {"value1", 23, 5, "value1"}
incorrect_set = {1, 1, 2}
incorrect_set_multiline = {
"value1",
23,
5,
"value1",
# B033
}
incorrect_set = {1, 1}
incorrect_set = {1, 1,}
incorrect_set = {0, 1, 1,}
incorrect_set = {0, 1, 1}
incorrect_set = {
    0,
    1,
    1,
}

###
# Non-errors.
###
correct_set = {"value1", 23, 5}
correct_set = {5, "5"}
correct_set = {5}
correct_set = {}
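
As a reading aid, here is why the "Non-errors" cases above should stay clean under this rule, based on my reading of the check in `duplicate_value.rs` below (it only compares literal elements of `Expr::Set` nodes):

```python
correct_set = {"value1", 23, 5}  # all elements are distinct literals
correct_set = {5, "5"}           # the int 5 and the string "5" are different literals, not duplicates
correct_set = {5}                # a single element has nothing to duplicate
correct_set = {}                 # empty `{}` is a dict literal, not a set, so Expr::Set never matches
```
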
4 changes: 2 additions & 2 deletions crates/ruff_linter/src/checkers/ast/analyze/expression.rs
@@ -980,9 +980,9 @@ pub(crate) fn expression(expr: &Expr, checker: &mut Checker) {
                flake8_pie::rules::unnecessary_spread(checker, dict);
            }
        }
        Expr::Set(ast::ExprSet { elts, range: _ }) => {
        Expr::Set(set) => {
            if checker.enabled(Rule::DuplicateValue) {
                flake8_bugbear::rules::duplicate_value(checker, elts);
                flake8_bugbear::rules::duplicate_value(checker, set);
            }
        }
        Expr::Yield(_) => {
19 changes: 19 additions & 0 deletions crates/ruff_linter/src/rules/flake8_bugbear/mod.rs
@@ -12,6 +12,7 @@ mod tests {

    use crate::assert_messages;
    use crate::registry::Rule;
    use crate::settings::types::PreviewMode;
    use crate::settings::LinterSettings;
    use crate::test::test_path;

@@ -69,6 +70,24 @@ mod tests {
        Ok(())
    }

    #[test_case(Rule::DuplicateValue, Path::new("B033.py"))]
    fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
        let snapshot = format!(
            "preview__{}_{}",
            rule_code.noqa_code(),
            path.to_string_lossy()
        );
        let diagnostics = test_path(
            Path::new("flake8_bugbear").join(path).as_path(),
            &LinterSettings {
                preview: PreviewMode::Enabled,
                ..LinterSettings::for_rule(rule_code)
            },
        )?;
        assert_messages!(snapshot, diagnostics);
        Ok(())
    }

    #[test]
    fn zip_without_explicit_strict() -> Result<()> {
        let snapshot = "B905.py";
crates/ruff_linter/src/rules/flake8_bugbear/rules/duplicate_value.rs
@@ -1,9 +1,11 @@
use ruff_python_ast::Expr;
use anyhow::{Context, Result};
use rustc_hash::FxHashSet;

use ruff_diagnostics::{Diagnostic, Violation};
use ruff_diagnostics::{Diagnostic, Edit, Fix, FixAvailability, Violation};
use ruff_macros::{derive_message_formats, violation};
use ruff_python_ast as ast;
use ruff_python_ast::comparable::ComparableExpr;
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::Ranged;

use crate::checkers::ast::Checker;
@@ -31,28 +33,82 @@ pub struct DuplicateValue {
}

impl Violation for DuplicateValue {
    const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

    #[derive_message_formats]
    fn message(&self) -> String {
        let DuplicateValue { value } = self;
        format!("Sets should not contain duplicate item `{value}`")
    }

    fn fix_title(&self) -> Option<String> {
        Some(format!("Remove duplicate item"))
    }
}

/// B033
pub(crate) fn duplicate_value(checker: &mut Checker, elts: &Vec<Expr>) {
pub(crate) fn duplicate_value(checker: &mut Checker, set: &ast::ExprSet) {
    let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
    for elt in elts {
    for (index, elt) in set.elts.iter().enumerate() {
        if elt.is_literal_expr() {
            let comparable_value: ComparableExpr = elt.into();

            if !seen_values.insert(comparable_value) {
                checker.diagnostics.push(Diagnostic::new(
                let mut diagnostic = Diagnostic::new(
                    DuplicateValue {
                        value: checker.generator().expr(elt),
                    },
                    elt.range(),
                ));
                );

                if checker.settings.preview.is_enabled() {
                    diagnostic.try_set_fix(|| {
                        remove_member(set, index, checker.locator().contents()).map(Fix::safe_edit)
                    });
                }

                checker.diagnostics.push(diagnostic);
            }
        };
    }
}

/// Remove the member at the given index from the [`ast::ExprSet`].
fn remove_member(set: &ast::ExprSet, index: usize, source: &str) -> Result<Edit> {
    if index < set.elts.len() - 1 {
        // Case 1: the expression is _not_ the last node, so delete from the start of the
        // expression to the end of the subsequent comma.
        // Ex) Delete `"a"` in `{"a", "b", "c"}`.
        let mut tokenizer = SimpleTokenizer::starts_at(set.elts[index].end(), source);

        // Find the trailing comma.
        tokenizer
            .find(|token| token.kind == SimpleTokenKind::Comma)
            .context("Unable to find trailing comma")?;

        // Find the next non-whitespace token.
        let next = tokenizer
            .find(|token| {
                token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
            })
            .context("Unable to find next token")?;

        Ok(Edit::deletion(set.elts[index].start(), next.start()))
    } else if index > 0 {
        // Case 2: the expression is the last node, but not the _only_ node, so delete from the
        // start of the previous comma to the end of the expression.
        // Ex) Delete `"c"` in `{"a", "b", "c"}`.
        let mut tokenizer = SimpleTokenizer::starts_at(set.elts[index - 1].end(), source);

        // Find the trailing comma.
        let comma = tokenizer
            .find(|token| token.kind == SimpleTokenKind::Comma)
            .context("Unable to find trailing comma")?;

        Ok(Edit::deletion(comma.start(), set.elts[index].end()))
    } else {
        // Case 3: expression is the only node, so delete it.
        // Ex) Delete `"a"` in `{"a"}`.
        Ok(Edit::range_deletion(set.elts[index].range()))
    }
}
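
To make the deletion cases concrete, here is a hedged sketch of the edits I would expect on small Python inputs, derived from the case descriptions above. (Case 3, the single-element set, cannot actually be reached from B033, since a flagged duplicate always has an earlier occurrence, but `remove_member` handles it for completeness.)

```python
# Case 1: the duplicate is not the last element -> delete it plus its trailing comma.
s = {1, 1, 2}                    # before: the second `1` is flagged
s = {1, 2}                       # after the fix

# Case 2: the duplicate is the last element -> delete the preceding comma plus the element.
s = {"value1", 23, 5, "value1"}  # before: the second `"value1"` is flagged
s = {"value1", 23, 5}            # after the fix
```
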
crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B033_B033.py.snap
@@ -7,17 +7,85 @@ B033.py:4:35: B033 Sets should not contain duplicate item `"value1"`
3 | ###
4 | incorrect_set = {"value1", 23, 5, "value1"}
| ^^^^^^^^ B033
5 | incorrect_set = {1, 1}
5 | incorrect_set = {1, 1, 2}
6 | incorrect_set_multiline = {
|
= help: Remove duplicate item

B033.py:5:21: B033 Sets should not contain duplicate item `1`
|
3 | ###
4 | incorrect_set = {"value1", 23, 5, "value1"}
5 | incorrect_set = {1, 1}
5 | incorrect_set = {1, 1, 2}
| ^ B033
6 |
7 | ###
6 | incorrect_set_multiline = {
7 | "value1",
|
= help: Remove duplicate item

B033.py:10:5: B033 Sets should not contain duplicate item `"value1"`
|
8 | 23,
9 | 5,
10 | "value1",
| ^^^^^^^^ B033
11 | # B033
12 | }
|
= help: Remove duplicate item

B033.py:13:21: B033 Sets should not contain duplicate item `1`
|
11 | # B033
12 | }
13 | incorrect_set = {1, 1}
| ^ B033
14 | incorrect_set = {1, 1,}
15 | incorrect_set = {0, 1, 1,}
|
= help: Remove duplicate item

B033.py:14:21: B033 Sets should not contain duplicate item `1`
|
12 | }
13 | incorrect_set = {1, 1}
14 | incorrect_set = {1, 1,}
| ^ B033
15 | incorrect_set = {0, 1, 1,}
16 | incorrect_set = {0, 1, 1}
|
= help: Remove duplicate item

B033.py:15:24: B033 Sets should not contain duplicate item `1`
|
13 | incorrect_set = {1, 1}
14 | incorrect_set = {1, 1,}
15 | incorrect_set = {0, 1, 1,}
| ^ B033
16 | incorrect_set = {0, 1, 1}
17 | incorrect_set = {
|
= help: Remove duplicate item

B033.py:16:24: B033 Sets should not contain duplicate item `1`
|
14 | incorrect_set = {1, 1,}
15 | incorrect_set = {0, 1, 1,}
16 | incorrect_set = {0, 1, 1}
| ^ B033
17 | incorrect_set = {
18 | 0,
|
= help: Remove duplicate item

B033.py:20:5: B033 Sets should not contain duplicate item `1`
|
18 | 0,
19 | 1,
20 | 1,
| ^ B033
21 | }
|
= help: Remove duplicate item

