mirror of https://github.com/astral-sh/ruff

feat: rollback to SimpleTokenizer in remove_argument

parent e00dd646d8
commit 03be8b9454
@@ -8,7 +8,9 @@ use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Par
 use ruff_python_codegen::Stylist;
 use ruff_python_index::Indexer;
 use ruff_python_trivia::textwrap::dedent_to;
-use ruff_python_trivia::{PythonWhitespace, has_leading_content, is_python_whitespace};
+use ruff_python_trivia::{
+    PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content, is_python_whitespace,
+};
 use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines};
 use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

@@ -205,6 +207,7 @@ pub(crate) fn remove_argument<T: Ranged>(
     argument: &T,
     arguments: &Arguments,
     parentheses: Parentheses,
+    source: &str,
     tokens: &Tokens,
 ) -> Result<Edit> {
     // Partition into arguments before and after the argument to remove.

@@ -226,20 +229,17 @@ pub(crate) fn remove_argument<T: Ranged>(
     if !after.is_empty() {
         // Case 1: argument or keyword is _not_ the last node, so delete from the start of the
         // argument to the end of the subsequent comma.
-        let mut tokens_after = tokens.after(argument.end()).iter();
+        let mut tokenizer = SimpleTokenizer::starts_at(argument.end(), source);

         // Find the trailing comma.
-        tokens_after
-            .find(|token| token.kind() == TokenKind::Comma)
+        tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
             .context("Unable to find trailing comma")?;

         // Find the next non-whitespace token.
-        let next = tokens_after
+        let next = tokenizer
             .find(|token| {
-                !matches!(
-                    token.kind(),
-                    TokenKind::Newline | TokenKind::NonLogicalNewline
-                )
+                token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
             })
             .context("Unable to find next token")?;

@@ -247,11 +247,11 @@ pub(crate) fn remove_argument<T: Ranged>(
     } else if let Some(previous) = before.iter().map(Ranged::end).max() {
         // Case 2: argument or keyword is the last node, so delete from the start of the
         // previous comma to the end of the argument.
-        let mut tokens_after = tokens.after(previous).iter();
+        let mut tokenizer = SimpleTokenizer::starts_at(previous, source);

         // Find the trailing comma.
-        let comma = tokens_after
-            .find(|token| token.kind() == TokenKind::Comma)
+        let comma = tokenizer
+            .find(|token| token.kind == SimpleTokenKind::Comma)
             .context("Unable to find trailing comma")?;

         Ok(Edit::deletion(comma.start(), parenthesized_range.end()))
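For reference, the comma-scanning pattern introduced above reduces to the following minimal sketch. It uses only the `ruff_python_trivia`, `ruff_text_size`, and `anyhow` APIs already visible in the hunks; the free function `next_token_after_comma` is illustrative and not part of the change.

use anyhow::{Context, Result};
use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer};
use ruff_text_size::{Ranged, TextSize};

// Illustrative helper: scan forward from `offset` in `source`, consume the
// trailing comma, and return the start of the next non-trivia token.
fn next_token_after_comma(offset: TextSize, source: &str) -> Result<TextSize> {
    let mut tokenizer = SimpleTokenizer::starts_at(offset, source);

    // Skip ahead to the trailing comma.
    tokenizer
        .find(|token| token.kind == SimpleTokenKind::Comma)
        .context("Unable to find trailing comma")?;

    // Take the next token that is neither whitespace nor a newline.
    let next = tokenizer
        .find(|token| {
            token.kind != SimpleTokenKind::Whitespace && token.kind != SimpleTokenKind::Newline
        })
        .context("Unable to find next token")?;

    Ok(next.start())
}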
@@ -91,6 +91,7 @@ pub(crate) fn fastapi_redundant_response_model(checker: &Checker, function_def:
             response_model_arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::unsafe_edit)

@@ -115,7 +115,13 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall
     let mut diagnostic = checker.report_diagnostic(ExcInfoOutsideExceptHandler, exc_info.range);

     diagnostic.try_set_fix(|| {
-        let edit = remove_argument(exc_info, arguments, Parentheses::Preserve, checker.tokens())?;
+        let edit = remove_argument(
+            exc_info,
+            arguments,
+            Parentheses::Preserve,
+            checker.source(),
+            checker.tokens(),
+        )?;
         Ok(Fix::unsafe_edit(edit))
     });
 }
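Every call site now threads the module source in as the new fourth argument, between `parentheses` and `tokens`. Depending on what the rule already has in scope, this is written as `checker.source()`, `checker.locator().contents()`, or a local `source`/`locator.contents()` binding; the hunks in this commit use them interchangeably for the same parameter. A representative call, assuming `checker`, `keyword`, and `call` come from the enclosing rule:

// Illustrative only; the enclosing rule provides `checker`, `keyword`, and `call`.
let edit = remove_argument(
    keyword,
    &call.arguments,
    Parentheses::Preserve,
    checker.source(), // used interchangeably with checker.locator().contents() below
    checker.tokens(),
)?;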
@@ -129,6 +129,7 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) {
             keyword,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -76,6 +76,7 @@ pub(crate) fn unnecessary_range_start(checker: &Checker, call: &ast::ExprCall) {
             start,
             &call.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -159,9 +159,16 @@ fn generate_fix(
     checker: &Checker,
 ) -> anyhow::Result<Fix> {
     let locator = checker.locator();
+    let source = locator.contents();
     let tokens = checker.tokens();

-    let deletion = remove_argument(generic_base, arguments, Parentheses::Preserve, tokens)?;
+    let deletion = remove_argument(
+        generic_base,
+        arguments,
+        Parentheses::Preserve,
+        source,
+        tokens,
+    )?;
     let insertion = add_argument(locator.slice(generic_base), arguments, tokens);

     Ok(Fix::unsafe_edits(deletion, [insertion]))

@@ -768,6 +768,7 @@ fn check_fixture_decorator(checker: &Checker, func_name: &str, decorator: &Decor
             keyword,
             arguments,
             edits::Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )
         .map(Fix::unsafe_edit)

@@ -106,7 +106,13 @@ pub(crate) fn path_constructor_current_directory(
             diagnostic.set_fix(Fix::applicable_edit(edit, applicability(parent_range)));
         }
         None => diagnostic.try_set_fix(|| {
-            let edit = remove_argument(arg, arguments, Parentheses::Preserve, checker.tokens())?;
+            let edit = remove_argument(
+                arg,
+                arguments,
+                Parentheses::Preserve,
+                checker.source(),
+                checker.tokens(),
+            )?;
             Ok(Fix::applicable_edit(edit, applicability(call.range())))
         }),
     }

@@ -123,8 +123,14 @@ fn convert_inplace_argument_to_assignment(
     );

     // Remove the `inplace` argument.
-    let remove_argument =
-        remove_argument(keyword, &call.arguments, Parentheses::Preserve, tokens).ok()?;
+    let remove_argument = remove_argument(
+        keyword,
+        &call.arguments,
+        Parentheses::Preserve,
+        locator.contents(),
+        tokens,
+    )
+    .ok()?;

     Some(Fix::unsafe_edits(insert_assignment, [remove_argument]))
 }

@@ -94,18 +94,23 @@ pub(crate) fn duplicate_bases(checker: &Checker, name: &str, arguments: Option<&
             base.range(),
         );
         diagnostic.try_set_fix(|| {
-            remove_argument(base, arguments, Parentheses::Remove, checker.tokens()).map(
-                |edit| {
-                    Fix::applicable_edit(
-                        edit,
-                        if checker.comment_ranges().intersects(arguments.range()) {
-                            Applicability::Unsafe
-                        } else {
-                            Applicability::Safe
-                        },
-                    )
-                },
+            remove_argument(
+                base,
+                arguments,
+                Parentheses::Remove,
+                checker.locator().contents(),
+                checker.tokens(),
             )
+            .map(|edit| {
+                Fix::applicable_edit(
+                    edit,
+                    if checker.comment_ranges().intersects(arguments.range()) {
+                        Applicability::Unsafe
+                    } else {
+                        Applicability::Safe
+                    },
+                )
+            })
         });
     }
 }
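Where the fix's applicability depends on comments inside the argument list, as in the hunk above, the reworked call keeps the same decision. A compressed sketch of that pattern, not the literal rule code, assuming `diagnostic`, `checker`, `base`, and `arguments` from the enclosing rule:

// Compressed sketch of the pattern above.
diagnostic.try_set_fix(|| {
    remove_argument(
        base,
        arguments,
        Parentheses::Remove,
        checker.locator().contents(),
        checker.tokens(),
    )
    .map(|edit| {
        // Mark the fix unsafe when comments overlap the argument list, since
        // the deletion could drop them.
        let applicability = if checker.comment_ranges().intersects(arguments.range()) {
            Applicability::Unsafe
        } else {
            Applicability::Safe
        };
        Fix::applicable_edit(edit, applicability)
    })
});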
@@ -203,6 +203,7 @@ pub(crate) fn non_pep695_generic_class(checker: &Checker, class_def: &StmtClassD
             generic_expr,
             arguments,
             Parentheses::Remove,
+            checker.source(),
             checker.tokens(),
         )?;
         Ok(Fix::unsafe_edits(

@@ -97,7 +97,15 @@ pub(crate) fn replace_stdout_stderr(checker: &Checker, call: &ast::ExprCall) {

         let mut diagnostic = checker.report_diagnostic(ReplaceStdoutStderr, call.range());
         if call.arguments.find_keyword("capture_output").is_none() {
-            diagnostic.try_set_fix(|| generate_fix(stdout, stderr, call, checker.tokens()));
+            diagnostic.try_set_fix(|| {
+                generate_fix(
+                    stdout,
+                    stderr,
+                    call,
+                    checker.locator().contents(),
+                    checker.tokens(),
+                )
+            });
         }
     }
 }

@@ -107,6 +115,7 @@ fn generate_fix(
     stdout: &Keyword,
     stderr: &Keyword,
     call: &ast::ExprCall,
+    source: &str,
     tokens: &Tokens,
 ) -> Result<Fix> {
     let (first, second) = if stdout.start() < stderr.start() {

@@ -121,6 +130,7 @@ fn generate_fix(
             second,
             &call.arguments,
             Parentheses::Preserve,
+            source,
             tokens,
         )?],
     ))

@@ -77,6 +77,7 @@ pub(crate) fn replace_universal_newlines(checker: &Checker, call: &ast::ExprCall
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -187,6 +187,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -204,6 +205,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -228,6 +230,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             kwarg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -245,6 +248,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) {
             arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(Fix::safe_edit)

@@ -65,8 +65,13 @@ pub(crate) fn useless_class_metaclass_type(checker: &Checker, class_def: &StmtCl
         );

         diagnostic.try_set_fix(|| {
-            let edit =
-                remove_argument(keyword, arguments, Parentheses::Remove, checker.tokens())?;
+            let edit = remove_argument(
+                keyword,
+                arguments,
+                Parentheses::Remove,
+                checker.locator().contents(),
+                checker.tokens(),
+            )?;

             let range = edit.range();
             let applicability = if checker.comment_ranges().intersects(range) {

@@ -68,7 +68,13 @@ pub(crate) fn useless_object_inheritance(checker: &Checker, class_def: &ast::Stm
         );

         diagnostic.try_set_fix(|| {
-            let edit = remove_argument(base, arguments, Parentheses::Remove, checker.tokens())?;
+            let edit = remove_argument(
+                base,
+                arguments,
+                Parentheses::Remove,
+                checker.locator().contents(),
+                checker.tokens(),
+            )?;

             let range = edit.range();
             let applicability = if checker.comment_ranges().intersects(range) {

@@ -162,6 +162,7 @@ fn convert_type_vars(
             generic_base,
             class_arguments,
             Parentheses::Remove,
+            source,
             checker.tokens(),
         )?;
         let replace_type_params =

@@ -151,6 +151,7 @@ fn convert_to_positional(
             default_factory,
             &call.arguments,
             Parentheses::Preserve,
+            locator.contents(),
             tokens,
         )?;

@@ -127,6 +127,7 @@ pub(crate) fn falsy_dict_get_fallback(checker: &Checker, expr: &Expr) {
             &fallback_arg,
             &call.arguments,
             Parentheses::Preserve,
+            checker.locator().contents(),
             checker.tokens(),
         )
         .map(|edit| Fix::applicable_edit(edit, applicability))

@@ -151,6 +151,7 @@ fn fix_unnecessary_literal_in_deque(
             &iterable,
             &deque.arguments,
             Parentheses::Preserve,
+            checker.source(),
             checker.tokens(),
         )?
     };