mirror of https://github.com/astral-sh/ruff
Merge a5694f1ef0 into b0bc990cbf
This commit is contained in: commit 002edcdf78
@@ -205,3 +205,27 @@ def func():
     if a.endswith("foo"):
         a = a[: -len("foo")]
     print(a)
+
+
+class Seq:
+    def __init__(self, inner):
+        self.inner = inner
+
+    def startswith(self, prefix):
+        return tuple(self.inner[: len(prefix)]) == tuple(prefix)
+
+    def __getitem__(self, item):
+        return type(self)(self.inner[item])
+
+
+def tuple_affix_should_be_suppressed(text: str) -> str:
+    prefix = ("123",)
+    if text.startswith(prefix):
+        text = text[len(prefix):]
+    return text
+
+
+def custom_sequence_fix_is_unsafe(seq: Seq) -> Seq:
+    if seq.startswith("123"):
+        seq = seq[3:]
+    return seq
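The new fixture cases pin down two edges of FURB188: a tuple affix, for which `str.removeprefix()` has no equivalent, and a duck-typed sequence, for which the rewrite would change behavior. A minimal Python sketch of the tuple case (illustrative only, not part of the diff):

text = "123abc"
prefix = ("123",)

assert text.startswith(prefix)   # startswith() accepts a tuple of candidate prefixes
try:
    text.removeprefix(prefix)    # removeprefix() only accepts a single str
except TypeError:
    pass                         # so there is no direct removeprefix() rewrite here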
@@ -1,6 +1,8 @@
+use ruff_diagnostics::Applicability;
 use ruff_macros::{ViolationMetadata, derive_message_formats};
 use ruff_python_ast::{self as ast, PythonVersion};
-use ruff_python_semantic::SemanticModel;
+use ruff_python_semantic::analyze::typing::{find_binding_value, is_bytes, is_string};
+use ruff_python_semantic::{Binding, SemanticModel};
 use ruff_text_size::Ranged;
 
 use crate::Locator;
@@ -79,6 +81,10 @@ pub(crate) fn slice_to_remove_affix_expr(checker: &Checker, if_expr: &ast::ExprI
             let kind = removal_data.affix_query.kind;
             let text = removal_data.text;
 
+            let Some(applicability) = affix_applicability(&removal_data, checker.semantic()) else {
+                return;
+            };
+
             let mut diagnostic = checker.report_diagnostic(
                 SliceToRemovePrefixOrSuffix {
                     affix_kind: kind,
@@ -89,11 +95,8 @@ pub(crate) fn slice_to_remove_affix_expr(checker: &Checker, if_expr: &ast::ExprI
             let replacement =
                 generate_removeaffix_expr(text, &removal_data.affix_query, checker.locator());
 
-            diagnostic.set_fix(Fix::safe_edit(Edit::replacement(
-                replacement,
-                if_expr.start(),
-                if_expr.end(),
-            )));
+            let edit = Edit::replacement(replacement, if_expr.start(), if_expr.end());
+            diagnostic.set_fix(Fix::applicable_edit(edit, applicability));
         }
     }
 }
@@ -108,6 +111,10 @@ pub(crate) fn slice_to_remove_affix_stmt(checker: &Checker, if_stmt: &ast::StmtI
             let kind = removal_data.affix_query.kind;
             let text = removal_data.text;
 
+            let Some(applicability) = affix_applicability(&removal_data, checker.semantic()) else {
+                return;
+            };
+
             let mut diagnostic = checker.report_diagnostic(
                 SliceToRemovePrefixOrSuffix {
                     affix_kind: kind,
@@ -122,11 +129,8 @@ pub(crate) fn slice_to_remove_affix_stmt(checker: &Checker, if_stmt: &ast::StmtI
                 checker.locator(),
             );
 
-            diagnostic.set_fix(Fix::safe_edit(Edit::replacement(
-                replacement,
-                if_stmt.start(),
-                if_stmt.end(),
-            )));
+            let edit = Edit::replacement(replacement, if_stmt.start(), if_stmt.end());
+            diagnostic.set_fix(Fix::applicable_edit(edit, applicability));
         }
     }
 }
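Both code paths now attach the edit with `Fix::applicable_edit(edit, applicability)` instead of always using `Fix::safe_edit`, so the rewrite should only be auto-applied when the receiver and the affix are known to be `str` or `bytes`; otherwise it is offered as an unsafe fix. A rough Python sketch of why that matters, mirroring the fixture's `Seq` class (illustrative only, not part of the diff):

class Seq:
    # Duck-typed sequence from the fixture: supports startswith() and slicing,
    # but has no removeprefix()/removesuffix().
    def __init__(self, inner):
        self.inner = inner

    def startswith(self, prefix):
        return tuple(self.inner[: len(prefix)]) == tuple(prefix)

    def __getitem__(self, item):
        return type(self)(self.inner[item])


seq = Seq("123abc")
if seq.startswith("123"):
    seq = seq[3:]                      # original pattern works on any such sequence

try:
    Seq("123abc").removeprefix("123")  # the suggested rewrite would raise AttributeError
except AttributeError:
    pass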
@@ -501,3 +505,90 @@ struct RemoveAffixData<'a> {
     /// Contains the prefix or suffix used in `text.startswith(prefix)` or `text.endswith(suffix)`
     affix_query: AffixQuery<'a>,
 }
+
+/// Determines the applicability of the affix removal fix based on type information.
+///
+/// Returns:
+/// - `None` if the fix should be suppressed (incompatible types like tuple affixes)
+/// - `Some(Applicability::Safe)` if both text and affix are deterministically typed as str or bytes
+/// - `Some(Applicability::Unsafe)` if type information is unknown or uncertain
+fn affix_applicability(data: &RemoveAffixData, semantic: &SemanticModel) -> Option<Applicability> {
+    // Check for tuple affixes - these should be suppressed
+    if is_tuple_affix(data.affix_query.affix, semantic) {
+        return None;
+    }
+
+    let text_is_str = is_expr_string(data.text, semantic);
+    let text_is_bytes = is_expr_bytes(data.text, semantic);
+    let affix_is_str = is_expr_string(data.affix_query.affix, semantic);
+    let affix_is_bytes = is_expr_bytes(data.affix_query.affix, semantic);
+
+    match (text_is_str, text_is_bytes, affix_is_str, affix_is_bytes) {
+        // Both are deterministically str
+        (true, false, true, false) => Some(Applicability::Safe),
+        // Both are deterministically bytes
+        (false, true, false, true) => Some(Applicability::Safe),
+        // Type mismatch - suppress the fix
+        (true, false, false, true) | (false, true, true, false) => None,
+        // Unknown or ambiguous types - mark as unsafe
+        _ => Some(Applicability::Unsafe),
+    }
+}
+
+/// Check if an expression is a tuple or a variable bound to a tuple.
+fn is_tuple_affix(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
+    if matches!(expr, ast::Expr::Tuple(_)) {
+        return true;
+    }
+
+    if let ast::Expr::Name(name) = expr {
+        if let Some(binding_id) = semantic.only_binding(name) {
+            let binding = semantic.binding(binding_id);
+            if let Some(value) = find_binding_value(binding, semantic) {
+                return matches!(value, ast::Expr::Tuple(_));
+            }
+        }
+    }
+
+    false
+}
+
+/// Check if an expression is deterministically a string literal or a variable bound to a string.
+fn is_expr_string(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
+    is_expr_type(
+        expr,
+        semantic,
+        |expr| expr.is_string_literal_expr() || expr.is_f_string_expr(),
+        is_string,
+    )
+}
+
+/// Check if an expression is deterministically a bytes literal or a variable bound to bytes.
+fn is_expr_bytes(expr: &ast::Expr, semantic: &SemanticModel) -> bool {
+    is_expr_type(
+        expr,
+        semantic,
+        |expr| matches!(expr, ast::Expr::BytesLiteral(_)),
+        is_bytes,
+    )
+}
+
+fn is_expr_type(
+    expr: &ast::Expr,
+    semantic: &SemanticModel,
+    literal_check: impl Fn(&ast::Expr) -> bool,
+    binding_check: impl Fn(&Binding, &SemanticModel) -> bool,
+) -> bool {
+    if literal_check(expr) {
+        return true;
+    }
+
+    if let ast::Expr::Name(name) = expr {
+        if let Some(binding_id) = semantic.only_binding(name) {
+            let binding = semantic.binding(binding_id);
+            return binding_check(binding, semantic);
+        }
+    }
+
+    false
+}
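Assuming the literal and binding checks above pick up parameter annotations the way the doc comment describes, the three outcomes map onto Python roughly as follows (hypothetical function names, not from the fixture):

def safe_case(text: str) -> str:
    # text and the affix are both known to be str -> fix is marked Safe
    if text.startswith("pre"):
        text = text[len("pre"):]
    return text


def unsafe_case(text):
    # no type information for `text` -> fix is offered but marked Unsafe
    if text.startswith("pre"):
        text = text[len("pre"):]
    return text


def suppressed_case(text: str) -> str:
    # str text with a bytes affix -> mismatched types, the rule bails out
    # entirely (this would be a TypeError at runtime anyway)
    if text.startswith(b"pre"):
        text = text[len(b"pre"):]
    return text

Unsafe fixes are still reported; ruff only applies them when the user opts in (for example with `--unsafe-fixes`).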
@@ -130,6 +130,7 @@ help: Use removesuffix instead of ternary expression conditional upon endswith.
 147 |
 148 | def remove_prefix_comparable_literal_expr() -> None:
 149 |     return ("abc" "def")[3:] if ("abc" "def").startswith("abc") else "abc" "def"
+note: This is an unsafe fix and may change runtime behavior
 
 FURB188 [*] Prefer `str.removeprefix()` over conditionally replacing with slice.
   --> FURB188.py:149:12
@@ -213,6 +214,7 @@ help: Use removeprefix instead of assignment conditional upon startswith.
 161 |     if text.startswith("!"):
 162 |         text = text[1::None]
 163 |     print(text)
+note: This is an unsafe fix and may change runtime behavior
 
 FURB188 [*] Prefer `str.removeprefix()` over conditionally replacing with slice.
   --> FURB188.py:162:5
@@ -234,6 +236,7 @@ help: Use removeprefix instead of assignment conditional upon startswith.
 163 |     print(text)
 164 |
 165 |
+note: This is an unsafe fix and may change runtime behavior
 
 FURB188 [*] Prefer `str.removeprefix()` over conditionally replacing with slice.
   --> FURB188.py:183:5
@@ -254,6 +257,7 @@ help: Use removeprefix instead of assignment conditional upon startswith.
 184 |
 185 |
 186 | def handle_surrogates():
+note: This is an unsafe fix and may change runtime behavior
 
 FURB188 [*] Prefer `str.removeprefix()` over conditionally replacing with slice.
   --> FURB188.py:190:5
@@ -298,6 +302,7 @@ help: Use removeprefix instead of assignment conditional upon startswith.
 194 |
 195 |     # should not be linted
 196 |     text = "\ud800\udc00heythere"
+note: This is an unsafe fix and may change runtime behavior
 
 FURB188 [*] Prefer `str.removesuffix()` over conditionally replacing with slice.
   --> FURB188.py:205:5
@@ -317,3 +322,24 @@ help: Use removesuffix instead of assignment conditional upon endswith.
     -         a = a[: -len("foo")]
 205 +     a = a.removesuffix("foo")
 206 |     print(a)
+207 |
+208 |
+note: This is an unsafe fix and may change runtime behavior
+
+FURB188 [*] Prefer `str.removeprefix()` over conditionally replacing with slice.
+  --> FURB188.py:229:5
+    |
+228 | def custom_sequence_fix_is_unsafe(seq: Seq) -> Seq:
+229 | /     if seq.startswith("123"):
+230 | |         seq = seq[3:]
+    | |_____________________^
+231 |       return seq
+    |
+help: Use removeprefix instead of assignment conditional upon startswith.
+226 |
+227 |
+228 | def custom_sequence_fix_is_unsafe(seq: Seq) -> Seq:
+    -     if seq.startswith("123"):
+    -         seq = seq[3:]
+229 +     seq = seq.removeprefix("123")
+230 |     return seq
+note: This is an unsafe fix and may change runtime behavior