diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF068.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF068.py new file mode 100644 index 0000000000..e7e798c619 --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF068.py @@ -0,0 +1,41 @@ +import typing + + +class A: ... + + +class B: ... + + +# Good +__all__ = "A" + "B" +__all__: list[str] = ["A", "B"] +__all__: typing.Any = ("A", "B") +__all__ = ["A", "B"] +__all__ = [A, "B", "B"] +__all__ += ["A", "B"] +__all__.extend(["A", "B"]) + +# Bad +__all__: list[str] = ["A", "B", "A"] +__all__: typing.Any = ("A", "B", "B") +__all__ = ["A", "B", "A"] +__all__ = ["A", "A", "B", "B"] +__all__ = [ + "A", + "A", + "B", + "B" +] +__all__ += ["B", "B"] +__all__.extend(["B", "B"]) + +# Bad, unsafe +__all__ = [ + "A", + "A", + "B", + # Comment + "B", # 2 + # 3 +] diff --git a/crates/ruff_linter/src/checkers/ast/analyze/expression.rs b/crates/ruff_linter/src/checkers/ast/analyze/expression.rs index b066678ff4..c9cb6bb116 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/expression.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/expression.rs @@ -1245,6 +1245,9 @@ pub(crate) fn expression(expr: &Expr, checker: &Checker) { if checker.is_rule_enabled(Rule::UnsortedDunderAll) { ruff::rules::sort_dunder_all_extend_call(checker, call); } + if checker.is_rule_enabled(Rule::DuplicateEntryInDunderAll) { + ruff::rules::duplicate_entry_in_dunder_all_extend_call(checker, call); + } if checker.is_rule_enabled(Rule::DefaultFactoryKwarg) { ruff::rules::default_factory_kwarg(checker, call); } diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs index 4b0e39bab8..d22ae4ad40 100644 --- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs +++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs @@ -966,6 +966,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if 
checker.is_rule_enabled(Rule::UnsortedDunderAll) { ruff::rules::sort_dunder_all_aug_assign(checker, aug_assign); } + if checker.is_rule_enabled(Rule::DuplicateEntryInDunderAll) { + ruff::rules::duplicate_entry_in_dunder_all_aug_assign(checker, aug_assign); + } } Stmt::If( if_ @ ast::StmtIf { @@ -1434,6 +1437,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if checker.is_rule_enabled(Rule::UnsortedDunderAll) { ruff::rules::sort_dunder_all_assign(checker, assign); } + if checker.is_rule_enabled(Rule::DuplicateEntryInDunderAll) { + ruff::rules::duplicate_entry_in_dunder_all_assign(checker, assign); + } if checker.source_type.is_stub() { if checker.any_rule_enabled(&[ Rule::UnprefixedTypeParam, @@ -1525,6 +1531,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) { if checker.is_rule_enabled(Rule::UnsortedDunderAll) { ruff::rules::sort_dunder_all_ann_assign(checker, assign_stmt); } + if checker.is_rule_enabled(Rule::DuplicateEntryInDunderAll) { + ruff::rules::duplicate_entry_in_dunder_all_ann_assign(checker, assign_stmt); + } if checker.source_type.is_stub() { if let Some(value) = value { if checker.is_rule_enabled(Rule::AssignmentDefaultInStub) { diff --git a/crates/ruff_linter/src/codes.rs b/crates/ruff_linter/src/codes.rs index d7e46216ea..a87be1a320 100644 --- a/crates/ruff_linter/src/codes.rs +++ b/crates/ruff_linter/src/codes.rs @@ -1061,6 +1061,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> { (Ruff, "065") => rules::ruff::rules::LoggingEagerConversion, (Ruff, "066") => rules::ruff::rules::PropertyWithoutReturn, (Ruff, "067") => rules::ruff::rules::NonEmptyInitModule, + (Ruff, "068") => rules::ruff::rules::DuplicateEntryInDunderAll, (Ruff, "100") => rules::ruff::rules::UnusedNOQA, (Ruff, "101") => rules::ruff::rules::RedirectedNOQA, diff --git a/crates/ruff_linter/src/fix/edits.rs b/crates/ruff_linter/src/fix/edits.rs index b5420e4ee1..521d37dd9d 100644 --- a/crates/ruff_linter/src/fix/edits.rs +++ 
b/crates/ruff_linter/src/fix/edits.rs @@ -284,6 +284,46 @@ pub(crate) fn add_argument(argument: &str, arguments: &Arguments, tokens: &Token) { } } +/// Remove the member at the given index from a sequence of expressions. +pub(crate) fn remove_member(elts: &[ast::Expr], index: usize, source: &str) -> Result<Edit> { + if index < elts.len() - 1 { + // Case 1: the expression is _not_ the last node, so delete from the start of the + // expression to the end of the subsequent comma. + // Ex) Delete `"a"` in `{"a", "b", "c"}`. + let mut tokenizer = SimpleTokenizer::starts_at(elts[index].end(), source); + + // Find the trailing comma. + tokenizer + .find(|token| token.kind == SimpleTokenKind::Comma) + .context("Unable to find trailing comma")?; + + // Find the next non-whitespace token. + let next = tokenizer + .find(|token| { + token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline + }) + .context("Unable to find next token")?; + + Ok(Edit::deletion(elts[index].start(), next.start())) + } else if index > 0 { + // Case 2: the expression is the last node, but not the _only_ node, so delete from the + // start of the previous comma to the end of the expression. + // Ex) Delete `"c"` in `{"a", "b", "c"}`. + let mut tokenizer = SimpleTokenizer::starts_at(elts[index - 1].end(), source); + + // Find the trailing comma. + let comma = tokenizer + .find(|token| token.kind == SimpleTokenKind::Comma) + .context("Unable to find trailing comma")?; + + Ok(Edit::deletion(comma.start(), elts[index].end())) + } else { + // Case 3: expression is the only node, so delete it. + // Ex) Delete `"a"` in `{"a"}`. + Ok(Edit::range_deletion(elts[index].range())) + } +} + +/// Generic function to add a (regular) parameter to a function definition.
pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &str) -> Edit { if let Some(last) = parameters.args.iter().rfind(|arg| arg.default.is_none()) { diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/duplicate_value.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/duplicate_value.rs index 2d6c79e804..0cb37df459 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/duplicate_value.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/duplicate_value.rs @@ -1,15 +1,15 @@ -use anyhow::{Context, Result}; +use ruff_diagnostics::Fix; use rustc_hash::FxHashMap; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; use ruff_python_ast::Expr; use ruff_python_ast::comparable::HashableExpr; -use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer}; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; -use crate::{Edit, Fix, FixAvailability, Violation}; +use crate::fix::edits; +use crate::{FixAvailability, Violation}; /// ## What it does /// Checks for set literals that contain duplicate items. @@ -70,49 +70,10 @@ pub(crate) fn duplicate_value(checker: &Checker, set: &ast::ExprSet) { ); diagnostic.try_set_fix(|| { - remove_member(set, index, checker.locator().contents()).map(Fix::safe_edit) + edits::remove_member(&set.elts, index, checker.locator().contents()) + .map(Fix::safe_edit) }); } } } } - -/// Remove the member at the given index from the [`ast::ExprSet`]. -fn remove_member(set: &ast::ExprSet, index: usize, source: &str) -> Result<Edit> { - if index < set.len() - 1 { - // Case 1: the expression is _not_ the last node, so delete from the start of the - // expression to the end of the subsequent comma. - // Ex) Delete `"a"` in `{"a", "b", "c"}`. - let mut tokenizer = SimpleTokenizer::starts_at(set.elts[index].end(), source); - - // Find the trailing comma.
- tokenizer - .find(|token| token.kind == SimpleTokenKind::Comma) - .context("Unable to find trailing comma")?; - - // Find the next non-whitespace token. - let next = tokenizer - .find(|token| { - token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline - }) - .context("Unable to find next token")?; - - Ok(Edit::deletion(set.elts[index].start(), next.start())) - } else if index > 0 { - // Case 2: the expression is the last node, but not the _only_ node, so delete from the - // start of the previous comma to the end of the expression. - // Ex) Delete `"c"` in `{"a", "b", "c"}`. - let mut tokenizer = SimpleTokenizer::starts_at(set.elts[index - 1].end(), source); - - // Find the trailing comma. - let comma = tokenizer - .find(|token| token.kind == SimpleTokenKind::Comma) - .context("Unable to find trailing comma")?; - - Ok(Edit::deletion(comma.start(), set.elts[index].end())) - } else { - // Case 3: expression is the only node, so delete it. - // Ex) Delete `"a"` in `{"a"}`. 
- Ok(Edit::range_deletion(set.elts[index].range())) - } -} diff --git a/crates/ruff_linter/src/rules/ruff/mod.rs b/crates/ruff_linter/src/rules/ruff/mod.rs index f20fd77020..71cd109c2a 100644 --- a/crates/ruff_linter/src/rules/ruff/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/mod.rs @@ -117,6 +117,7 @@ mod tests { #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_0.py"))] #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_1.py"))] #[test_case(Rule::PropertyWithoutReturn, Path::new("RUF066.py"))] + #[test_case(Rule::DuplicateEntryInDunderAll, Path::new("RUF068.py"))] #[test_case(Rule::RedirectedNOQA, Path::new("RUF101_0.py"))] #[test_case(Rule::RedirectedNOQA, Path::new("RUF101_1.py"))] #[test_case(Rule::InvalidRuleCode, Path::new("RUF102.py"))] diff --git a/crates/ruff_linter/src/rules/ruff/rules/duplicate_entry_in_dunder_all.rs b/crates/ruff_linter/src/rules/ruff/rules/duplicate_entry_in_dunder_all.rs new file mode 100644 index 0000000000..fceb6ddf74 --- /dev/null +++ b/crates/ruff_linter/src/rules/ruff/rules/duplicate_entry_in_dunder_all.rs @@ -0,0 +1,180 @@ +use rustc_hash::{FxBuildHasher, FxHashMap}; + +use ruff_diagnostics::{Applicability, Fix}; +use ruff_macros::{ViolationMetadata, derive_message_formats}; +use ruff_python_ast as ast; +use ruff_text_size::Ranged; + +use crate::checkers::ast::Checker; +use crate::fix::edits; +use crate::{FixAvailability, Violation}; + +/// ## What it does +/// Detects duplicate elements in `__all__` definitions. +/// +/// ## Why is this bad? +/// Duplicate elements in `__all__` serve no purpose and can indicate copy-paste errors or +/// incomplete refactoring. 
+/// +/// ## Example +/// ```python +/// __all__ = [ +/// "DatabaseConnection", +/// "Product", +/// "User", +/// "DatabaseConnection", # Duplicate +/// ] +/// ``` +/// +/// Use instead: +/// ```python +/// __all__ = [ +/// "DatabaseConnection", +/// "Product", +/// "User", +/// ] +/// ``` +/// +/// ## Fix Safety +/// This rule's fix is marked as unsafe if the replacement would remove comments attached to the +/// original expression, potentially losing important context or documentation. +/// +/// For example: +/// ```python +/// __all__ = [ +/// "PublicAPI", +/// # TODO: Remove this in v2.0 +/// "PublicAPI", # Deprecated alias +/// ] +/// ``` +#[derive(ViolationMetadata)] +#[violation_metadata(preview_since = "0.14.14")] +pub(crate) struct DuplicateEntryInDunderAll; + +impl Violation for DuplicateEntryInDunderAll { + const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes; + + #[derive_message_formats] + fn message(&self) -> String { + "`__all__` contains duplicate entries".to_string() + } + + fn fix_title(&self) -> Option<String> { + Some("Remove duplicate entries from `__all__`".to_string()) + } +} + +/// Apply RUF068 to `StmtAssign` AST node. For example: `__all__ = ["a", "b", "a"]`. +pub(crate) fn duplicate_entry_in_dunder_all_assign( + checker: &Checker, + ast::StmtAssign { value, targets, .. }: &ast::StmtAssign, +) { + if let [expr] = targets.as_slice() { + duplicate_entry_in_dunder_all(checker, expr, value); + } +} + +/// Apply RUF068 to `StmtAugAssign` AST node. For example: `__all__ += ["a", "b", "a"]`. +pub(crate) fn duplicate_entry_in_dunder_all_aug_assign( + checker: &Checker, + node: &ast::StmtAugAssign, +) { + if node.op.is_add() { + duplicate_entry_in_dunder_all(checker, &node.target, &node.value); + } +} + +/// Apply RUF068 to `__all__.extend()`. +pub(crate) fn duplicate_entry_in_dunder_all_extend_call( + checker: &Checker, + ast::ExprCall { + func, + arguments: ast::Arguments { args, keywords, .. }, + ..
+ }: &ast::ExprCall, +) { + let ([value_passed], []) = (&**args, &**keywords) else { + return; + }; + let ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) = &**func else { + return; + }; + if attr == "extend" { + duplicate_entry_in_dunder_all(checker, value, value_passed); + } +} + +/// Apply RUF068 to a `StmtAnnAssign` AST node. +/// For example: `__all__: list[str] = ["a", "b", "a"]`. +pub(crate) fn duplicate_entry_in_dunder_all_ann_assign( + checker: &Checker, + node: &ast::StmtAnnAssign, +) { + if let Some(value) = &node.value { + duplicate_entry_in_dunder_all(checker, &node.target, value); + } +} + +/// RUF068 +/// This routine checks whether `__all__` contains duplicated entries, and emits +/// a violation if it does. +fn duplicate_entry_in_dunder_all(checker: &Checker, target: &ast::Expr, value: &ast::Expr) { + let ast::Expr::Name(ast::ExprName { id, .. }) = target else { + return; + }; + + if id != "__all__" { + return; + } + + // We're only interested in `__all__` in the global scope + if !checker.semantic().current_scope().kind.is_module() { + return; + } + + let elts = match value { + ast::Expr::List(ast::ExprList { elts, .. }) => elts, + ast::Expr::Tuple(ast::ExprTuple { elts, .. 
}) => elts, + _ => return, + }; + + // It's impossible to have duplicates if there is one or no element + if elts.len() <= 1 { + return; + } + + let mut deduplicated_elts = FxHashMap::with_capacity_and_hasher(elts.len(), FxBuildHasher); + let source = checker.locator().contents(); + + for (index, expr) in elts.iter().enumerate() { + let Some(string_value) = expr.as_string_literal_expr() else { + // In the example below we're ignoring `foo`: + // __all__ = [foo, "bar", "bar"] + continue; + }; + + let name = string_value.value.to_str(); + + if let Some(previous_expr) = deduplicated_elts.insert(name, expr) { + let mut diagnostic = checker.report_diagnostic(DuplicateEntryInDunderAll, expr.range()); + + diagnostic.secondary_annotation( + format_args!("previous occurrence of `{name}` here"), + previous_expr, + ); + + diagnostic.set_primary_message(format_args!("`{name}` duplicated here")); + + diagnostic.try_set_fix(|| { + edits::remove_member(elts, index, source).map(|edit| { + let applicability = if checker.comment_ranges().intersects(edit.range()) { + Applicability::Unsafe + } else { + Applicability::Safe + }; + Fix::applicable_edit(edit, applicability) + }) + }); + } + } +} diff --git a/crates/ruff_linter/src/rules/ruff/rules/mod.rs b/crates/ruff_linter/src/rules/ruff/rules/mod.rs index 61c43b5af2..3791399968 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/mod.rs @@ -8,6 +8,7 @@ pub(crate) use collection_literal_concatenation::*; pub(crate) use dataclass_enum::*; pub(crate) use decimal_from_float_literal::*; pub(crate) use default_factory_kwarg::*; +pub(crate) use duplicate_entry_in_dunder_all::*; pub(crate) use explicit_f_string_type_conversion::*; pub(crate) use falsy_dict_get_fallback::*; pub(crate) use function_call_in_dataclass_default::*; @@ -76,6 +77,7 @@ mod confusables; mod dataclass_enum; mod decimal_from_float_literal; mod default_factory_kwarg; +mod duplicate_entry_in_dunder_all; mod 
explicit_f_string_type_conversion; mod falsy_dict_get_fallback; mod function_call_in_dataclass_default; diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF068_RUF068.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF068_RUF068.py.snap new file mode 100644 index 0000000000..a518626974 --- /dev/null +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF068_RUF068.py.snap @@ -0,0 +1,263 @@ +--- +source: crates/ruff_linter/src/rules/ruff/mod.rs +--- +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:15:15 + | +13 | __all__: typing.Any = ("A", "B") +14 | __all__ = ["A", "B"] +15 | __all__ = [A, "B", "B"] + | --- ^^^ `B` duplicated here + | | + | previous occurrence of `B` here +16 | __all__ += ["A", "B"] +17 | __all__.extend(["A", "B"]) + | +help: Remove duplicate entries from `__all__` +12 | __all__: list[str] = ["A", "B"] +13 | __all__: typing.Any = ("A", "B") +14 | __all__ = ["A", "B"] + - __all__ = [A, "B", "B"] +15 + __all__ = [A, "B"] +16 | __all__ += ["A", "B"] +17 | __all__.extend(["A", "B"]) +18 | + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:20:23 + | +19 | # Bad +20 | __all__: list[str] = ["A", "B", "A"] + | --- ^^^ `A` duplicated here + | | + | previous occurrence of `A` here +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] + | +help: Remove duplicate entries from `__all__` +17 | __all__.extend(["A", "B"]) +18 | +19 | # Bad + - __all__: list[str] = ["A", "B", "A"] +20 + __all__: list[str] = ["A", "B"] +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] +23 | __all__ = ["A", "A", "B", "B"] + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:21:29 + | +19 | # Bad +20 | __all__: list[str] = ["A", "B", "A"] +21 | __all__: typing.Any = ("A", "B", "B") + | --- ^^^ `B` duplicated here + | | + | previous occurrence of `B` here +22 | __all__ = ["A", "B", 
"A"] +23 | __all__ = ["A", "A", "B", "B"] + | +help: Remove duplicate entries from `__all__` +18 | +19 | # Bad +20 | __all__: list[str] = ["A", "B", "A"] + - __all__: typing.Any = ("A", "B", "B") +21 + __all__: typing.Any = ("A", "B") +22 | __all__ = ["A", "B", "A"] +23 | __all__ = ["A", "A", "B", "B"] +24 | __all__ = [ + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:22:12 + | +20 | __all__: list[str] = ["A", "B", "A"] +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] + | --- ^^^ `A` duplicated here + | | + | previous occurrence of `A` here +23 | __all__ = ["A", "A", "B", "B"] +24 | __all__ = [ + | +help: Remove duplicate entries from `__all__` +19 | # Bad +20 | __all__: list[str] = ["A", "B", "A"] +21 | __all__: typing.Any = ("A", "B", "B") + - __all__ = ["A", "B", "A"] +22 + __all__ = ["A", "B"] +23 | __all__ = ["A", "A", "B", "B"] +24 | __all__ = [ +25 | "A", + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:23:12 + | +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] +23 | __all__ = ["A", "A", "B", "B"] + | --- ^^^ `A` duplicated here + | | + | previous occurrence of `A` here +24 | __all__ = [ +25 | "A", + | +help: Remove duplicate entries from `__all__` +20 | __all__: list[str] = ["A", "B", "A"] +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] + - __all__ = ["A", "A", "B", "B"] +23 + __all__ = ["A", "B", "B"] +24 | __all__ = [ +25 | "A", +26 | "A", + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:23:22 + | +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] +23 | __all__ = ["A", "A", "B", "B"] + | --- ^^^ `B` duplicated here + | | + | previous occurrence of `B` here +24 | __all__ = [ +25 | "A", + | +help: Remove duplicate entries from `__all__` +20 | __all__: list[str] = ["A", "B", "A"] +21 | __all__: typing.Any = ("A", "B", "B") +22 | __all__ = ["A", "B", "A"] + - __all__ = ["A", "A", "B", "B"] +23 + 
__all__ = ["A", "A", "B"] +24 | __all__ = [ +25 | "A", +26 | "A", + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:25:5 + | +23 | __all__ = ["A", "A", "B", "B"] +24 | __all__ = [ +25 | "A", + | --- previous occurrence of `A` here +26 | "A", + | ^^^ `A` duplicated here +27 | "B", +28 | "B" + | +help: Remove duplicate entries from `__all__` +23 | __all__ = ["A", "A", "B", "B"] +24 | __all__ = [ +25 | "A", + - "A", +26 | "B", +27 | "B" +28 | ] + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:27:5 + | +25 | "A", +26 | "A", +27 | "B", + | --- previous occurrence of `B` here +28 | "B" + | ^^^ `B` duplicated here +29 | ] +30 | __all__ += ["B", "B"] + | +help: Remove duplicate entries from `__all__` +24 | __all__ = [ +25 | "A", +26 | "A", + - "B", +27 | "B" +28 | ] +29 | __all__ += ["B", "B"] + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:30:13 + | +28 | "B" +29 | ] +30 | __all__ += ["B", "B"] + | --- ^^^ `B` duplicated here + | | + | previous occurrence of `B` here +31 | __all__.extend(["B", "B"]) + | +help: Remove duplicate entries from `__all__` +27 | "B", +28 | "B" +29 | ] + - __all__ += ["B", "B"] +30 + __all__ += ["B"] +31 | __all__.extend(["B", "B"]) +32 | +33 | # Bad, unsafe + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:31:17 + | +29 | ] +30 | __all__ += ["B", "B"] +31 | __all__.extend(["B", "B"]) + | --- ^^^ `B` duplicated here + | | + | previous occurrence of `B` here +32 | +33 | # Bad, unsafe + | +help: Remove duplicate entries from `__all__` +28 | "B" +29 | ] +30 | __all__ += ["B", "B"] + - __all__.extend(["B", "B"]) +31 + __all__.extend(["B"]) +32 | +33 | # Bad, unsafe +34 | __all__ = [ + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:35:5 + | +33 | # Bad, unsafe +34 | __all__ = [ +35 | "A", + | --- previous occurrence of `A` here +36 | "A", + | ^^^ `A` duplicated here +37 | "B", +38 | # Comment + | +help: Remove duplicate entries from `__all__` +33 | # Bad, unsafe 
+34 | __all__ = [ +35 | "A", + - "A", +36 | "B", +37 | # Comment +38 | "B", # 2 + +RUF068 [*] `__all__` contains duplicate entries + --> RUF068.py:37:5 + | +35 | "A", +36 | "A", +37 | "B", + | --- previous occurrence of `B` here +38 | # Comment +39 | "B", # 2 + | ^^^ `B` duplicated here +40 | # 3 +41 | ] + | +help: Remove duplicate entries from `__all__` +34 | __all__ = [ +35 | "A", +36 | "A", + - "B", + - # Comment +37 | "B", # 2 +38 | # 3 +39 | ] +note: This is an unsafe fix and may change runtime behavior diff --git a/ruff.schema.json b/ruff.schema.json index 03fa58c683..0269943882 100644 --- a/ruff.schema.json +++ b/ruff.schema.json @@ -4088,6 +4088,7 @@ "RUF065", "RUF066", "RUF067", + "RUF068", "RUF1", "RUF10", "RUF100",