mirror of https://github.com/astral-sh/ruff
feat: rollback to SimpleTokenizer in remove_argument
This commit is contained in:
parent e00dd646d8
commit 03be8b9454
@@ -8,7 +8,9 @@ use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Par
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_python_trivia::textwrap::dedent_to;
-use ruff_python_trivia::{PythonWhitespace, has_leading_content, is_python_whitespace};
+use ruff_python_trivia::{
+    PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content, is_python_whitespace,
+};
 use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
 
@@ -205,6 +207,7 @@ pub(crate) fn remove_argument<T: Ranged>(
     argument: &T,
     arguments: &Arguments,
     parentheses: Parentheses,
+    source: &str,
     tokens: &Tokens,
 ) -> Result<Edit> {
     // Partition into arguments before and after the argument to remove.

@@ -226,20 +229,17 @@ pub(crate) fn remove_argument<T: Ranged>(
     if !after.is_empty() {
         // Case 1: argument or keyword is _not_ the last node, so delete from the start of the
         // argument to the end of the subsequent comma.
-        let mut tokens_after = tokens.after(argument.end()).iter();
+        let mut tokenizer = SimpleTokenizer::starts_at(argument.end(), source);
 
         // Find the trailing comma.
-        tokens_after
-            .find(|token| token.kind() == TokenKind::Comma)
+        tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
             .context("Unable to find trailing comma")?;
 
         // Find the next non-whitespace token.
-        let next = tokens_after
+        let next = tokenizer
             .find(|token| {
-                !matches!(
-                    token.kind(),
-                    TokenKind::Newline | TokenKind::NonLogicalNewline
-                )
+                token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
             })
             .context("Unable to find next token")?;
 

@@ -247,11 +247,11 @@ pub(crate) fn remove_argument<T: Ranged>(
     } else if let Some(previous) = before.iter().map(Ranged::end).max() {
         // Case 2: argument or keyword is the last node, so delete from the start of the
         // previous comma to the end of the argument.
-        let mut tokens_after = tokens.after(previous).iter();
+        let mut tokenizer = SimpleTokenizer::starts_at(previous, source);
 
         // Find the trailing comma.
-        let comma = tokens_after
-            .find(|token| token.kind() == TokenKind::Comma)
+        let comma = tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
             .context("Unable to find trailing comma")?;
 
         Ok(Edit::deletion(comma.start(), parenthesized_range.end()))

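The hunks above replace the token-stream scan with ruff_python_trivia's SimpleTokenizer, which works directly on the source text; that is why remove_argument gains a source: &str parameter and every caller in the hunks below now threads the source through. What follows is a minimal, self-contained sketch (not part of the commit) of that scanning pattern, assuming the ruff_python_trivia and ruff_text_size crates as dependencies; the example source string, offset, main function, and expect messages are illustrative only.

use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::{Ranged, TextSize};

fn main() {
    // A call whose first argument (`a`) is being removed; `a` ends at offset 3.
    let source = "f(a, b)";
    let argument_end = TextSize::new(3);

    // Scan forward from the end of the removed argument, as the new
    // remove_argument does.
    let mut tokenizer = SimpleTokenizer::starts_at(argument_end, source);

    // First, locate the trailing comma that follows the argument.
    let comma = tokenizer
        .find(|token| token.kind == SimpleTokenKind::Comma)
        .expect("trailing comma");

    // Then find the next token that is neither whitespace nor a newline:
    // the start of the following argument, where the deletion should stop.
    let next = tokenizer
        .find(|token| {
            token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
        })
        .expect("next token");

    println!(
        "comma at {:?}, next argument starts at {:?}",
        comma.start(),
        next.start()
    );
}
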
@@ -91,6 +91,7 @@ pub(crate) fn fastapi_redundant_response_model(checker: &Checker, function_def:
             response_model_arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::unsafe_edit)

@@ -115,7 +115,13 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall
         let mut diagnostic = checker.report_diagnostic(ExcInfoOutsideExceptHandler, exc_info.range);
 
         diagnostic.try_set_fix(|| {
-            let edit = remove_argument(exc_info, arguments, Parentheses::Preserve, checker.tokens())?;
+            let edit = remove_argument(
+                exc_info,
+                arguments,
+                Parentheses::Preserve,
+                checker.source(),
+                checker.tokens(),
+            )?;
             Ok(Fix::unsafe_edit(edit))
         });
     }

@@ -129,6 +129,7 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) {
             keyword,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -76,6 +76,7 @@ pub(crate) fn unnecessary_range_start(checker: &Checker, call: &ast::ExprCall) {
             start,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -159,9 +159,16 @@ fn generate_fix(
     checker: &Checker,
 ) -> anyhow::Result<Fix> {
     let locator = checker.locator();
+    let source = locator.contents();
     let tokens = checker.tokens();
 
-    let deletion = remove_argument(generic_base, arguments, Parentheses::Preserve, tokens)?;
+    let deletion = remove_argument(
+        generic_base,
+        arguments,
+        Parentheses::Preserve,
+        source,
+        tokens,
+    )?;
     let insertion = add_argument(locator.slice(generic_base), arguments, tokens);
 
     Ok(Fix::unsafe_edits(deletion, [insertion]))

@@ -768,6 +768,7 @@ fn check_fixture_decorator(checker: &Checker, func_name: &str, decorator: &Decor
             keyword,
             arguments,
             edits::Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::unsafe_edit)

@@ -106,7 +106,13 @@ pub(crate) fn path_constructor_current_directory(
             diagnostic.set_fix(Fix::applicable_edit(edit, applicability(parent_range)));
         }
         None => diagnostic.try_set_fix(|| {
-            let edit = remove_argument(arg, arguments, Parentheses::Preserve, checker.tokens())?;
+            let edit = remove_argument(
+                arg,
+                arguments,
+                Parentheses::Preserve,
+                checker.source(),
+                checker.tokens(),
+            )?;
             Ok(Fix::applicable_edit(edit, applicability(call.range())))
         }),
     }

@@ -123,8 +123,14 @@ fn convert_inplace_argument_to_assignment(
     );
 
     // Remove the `inplace` argument.
-    let remove_argument =
-        remove_argument(keyword, &call.arguments, Parentheses::Preserve, tokens).ok()?;
+    let remove_argument = remove_argument(
+        keyword,
+        &call.arguments,
+        Parentheses::Preserve,
+        locator.contents(),
+        tokens,
+    )
+    .ok()?;
 
     Some(Fix::unsafe_edits(insert_assignment, [remove_argument]))
 }

@@ -94,18 +94,23 @@ pub(crate) fn duplicate_bases(checker: &Checker, name: &str, arguments: Option<&
                 base.range(),
             );
             diagnostic.try_set_fix(|| {
-                remove_argument(base, arguments, Parentheses::Remove, checker.tokens()).map(
-                    |edit| {
-                        Fix::applicable_edit(
-                            edit,
-                            if checker.comment_ranges().intersects(arguments.range()) {
-                                Applicability::Unsafe
-                            } else {
-                                Applicability::Safe
-                            },
-                        )
-                    },
-                )
+                remove_argument(
+                    base,
+                    arguments,
+                    Parentheses::Remove,
+                    checker.locator().contents(),
+                    checker.tokens(),
+                )
+                .map(|edit| {
+                    Fix::applicable_edit(
+                        edit,
+                        if checker.comment_ranges().intersects(arguments.range()) {
+                            Applicability::Unsafe
+                        } else {
+                            Applicability::Safe
+                        },
+                    )
+                })
             });
         }
     }

@@ -203,6 +203,7 @@ pub(crate) fn non_pep695_generic_class(checker: &Checker, class_def: &StmtClassD
             generic_expr,
             arguments,
             Parentheses::Remove,
+            checker.source(),
             checker.tokens(),
         )?;
         Ok(Fix::unsafe_edits(

@@ -97,7 +97,15 @@ pub(crate) fn replace_stdout_stderr(checker: &Checker, call: &ast::ExprCall) {
 
         let mut diagnostic = checker.report_diagnostic(ReplaceStdoutStderr, call.range());
         if call.arguments.find_keyword("capture_output").is_none() {
-            diagnostic.try_set_fix(|| generate_fix(stdout, stderr, call, checker.tokens()));
+            diagnostic.try_set_fix(|| {
+                generate_fix(
+                    stdout,
+                    stderr,
+                    call,
+                    checker.locator().contents(),
+                    checker.tokens(),
+                )
+            });
         }
     }
 }

@@ -107,6 +115,7 @@ fn generate_fix(
     stdout: &Keyword,
     stderr: &Keyword,
     call: &ast::ExprCall,
+    source: &str,
     tokens: &Tokens,
 ) -> Result<Fix> {
     let (first, second) = if stdout.start() < stderr.start() {

@@ -121,6 +130,7 @@ fn generate_fix(
             second,
             &call.arguments,
             Parentheses::Preserve,
+            source,
             tokens,
         )?],
     ))

@@ -77,6 +77,7 @@ pub(crate) fn replace_universal_newlines(checker: &Checker, call: &ast::ExprCall
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -187,6 +187,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -204,6 +205,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -228,6 +230,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -245,6 +248,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -65,8 +65,13 @@ pub(crate) fn useless_class_metaclass_type(checker: &Checker, class_def: &StmtCl
         );
 
         diagnostic.try_set_fix(|| {
-            let edit =
-                remove_argument(keyword, arguments, Parentheses::Remove, checker.tokens())?;
+            let edit = remove_argument(
+                keyword,
+                arguments,
+                Parentheses::Remove,
+                checker.locator().contents(),
+                checker.tokens(),
+            )?;
 
             let range = edit.range();
             let applicability = if checker.comment_ranges().intersects(range) {

@@ -68,7 +68,13 @@ pub(crate) fn useless_object_inheritance(checker: &Checker, class_def: &ast::Stm
         );
 
         diagnostic.try_set_fix(|| {
-            let edit = remove_argument(base, arguments, Parentheses::Remove, checker.tokens())?;
+            let edit = remove_argument(
+                base,
+                arguments,
+                Parentheses::Remove,
+                checker.locator().contents(),
+                checker.tokens(),
+            )?;
 
             let range = edit.range();
             let applicability = if checker.comment_ranges().intersects(range) {

@@ -162,6 +162,7 @@ fn convert_type_vars(
         generic_base,
         class_arguments,
         Parentheses::Remove,
+        source,
         checker.tokens(),
     )?;
     let replace_type_params =

@@ -151,6 +151,7 @@ fn convert_to_positional(
         default_factory,
         &call.arguments,
         Parentheses::Preserve,
+        locator.contents(),
        tokens,
     )?;
 

@@ -127,6 +127,7 @@ pub(crate) fn falsy_dict_get_fallback(checker: &Checker, expr: &Expr) {
             &fallback_arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(|edit| Fix::applicable_edit(edit, applicability))

@@ -151,6 +151,7 @@ fn fix_unnecessary_literal_in_deque(
             &iterable,
             &deque.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )?
     };