Move `UP034` to use `TokenKind` instead of `Tok` (#11424)

## Summary

This PR follows up on #11420, moving `UP034` to use `TokenKind` instead of `Tok`.

The main reason for a separate PR is to keep the review easy. This rule required
more extensive updates because it used an index (`i`) to keep track of the current
position in the token vector. Now that the tokens are exposed as an iterator, we
simply call `next` to advance it and extract the relevant information.
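
To illustrate the shape of that change, here is a minimal sketch of the two traversal styles (illustrative only; the `Kind` enum and the empty bodies are placeholders, not the rule's actual code):

```rust
#[derive(Clone, Copy, PartialEq)]
enum Kind {
    Lpar,
    Name,
    Rpar,
}

fn main() {
    let tokens = [Kind::Lpar, Kind::Name, Kind::Rpar];

    // Before: an index `i` tracks the current position in a token slice, and
    // every lookahead has to bounds-check and advance `i` by hand.
    let mut i = 0;
    while i < tokens.len() {
        if tokens[i] == Kind::Lpar {
            // ... scan ahead from `i`, then manually move `i` past the match ...
        }
        i += 1;
    }

    // After: the tokens are consumed through an iterator; `next` both advances
    // the position and yields the current token, so a helper that borrows the
    // iterator leaves it positioned just past whatever it consumed.
    let mut iter = tokens.iter().copied();
    while let Some(kind) = iter.next() {
        if kind == Kind::Lpar {
            // ... hand `&mut iter` to a helper that consumes tokens as it matches ...
        }
    }
}
```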

This is part of https://github.com/astral-sh/ruff/issues/11401

## Test Plan

`cargo test`
Dhruv Manilawala authored on 2024-05-14 22:58:04 +05:30; committed by GitHub.
parent bb1c107afd · commit 96f6288622
2 changed files with 67 additions and 99 deletions

```diff
@@ -128,7 +128,7 @@ pub(crate) fn check_tokens(
     }
 
     if settings.rules.enabled(Rule::ExtraneousParentheses) {
-        pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator);
+        pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens.kinds(), locator);
     }
 
     if source_type.is_stub() && settings.rules.enabled(Rule::TypeCommentInStub) {
```

@ -1,6 +1,5 @@
use ruff_python_parser::lexer::LexResult; use ruff_python_parser::{TokenKind, TokenKindIter};
use ruff_python_parser::Tok; use ruff_text_size::TextRange;
use ruff_text_size::{Ranged, TextRange};
use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix}; use ruff_diagnostics::{AlwaysFixableViolation, Diagnostic, Edit, Fix};
use ruff_macros::{derive_message_formats, violation}; use ruff_macros::{derive_message_formats, violation};
```diff
@@ -37,93 +36,70 @@ impl AlwaysFixableViolation for ExtraneousParentheses {
 }
 
 // See: https://github.com/asottile/pyupgrade/blob/97ed6fb3cf2e650d4f762ba231c3f04c41797710/pyupgrade/_main.py#L148
-fn match_extraneous_parentheses(tokens: &[LexResult], mut i: usize) -> Option<(usize, usize)> {
-    i += 1;
-
-    loop {
-        if i >= tokens.len() {
-            return None;
-        }
-        let Ok((tok, _)) = &tokens[i] else {
-            return None;
-        };
-        match tok {
-            Tok::Comment(..) | Tok::NonLogicalNewline => {
-                i += 1;
-            }
-            Tok::Lpar => {
-                break;
+fn match_extraneous_parentheses(tokens: &mut TokenKindIter) -> Option<(TextRange, TextRange)> {
+    // Store the location of the extraneous opening parenthesis.
+    let start_range = loop {
+        let (token, range) = tokens.next()?;
+        match token {
+            TokenKind::Comment | TokenKind::NonLogicalNewline => {
+                continue;
+            }
+            TokenKind::Lpar => {
+                break range;
             }
             _ => {
                 return None;
             }
         }
-    }
+    };
 
-    // Store the location of the extraneous opening parenthesis.
-    let start = i;
+    // Verify that we're not in an empty tuple.
+    let mut empty_tuple = true;
 
     // Verify that we're not in a tuple or coroutine.
     let mut depth = 1u32;
-    while depth > 0 {
-        i += 1;
-        if i >= tokens.len() {
-            return None;
-        }
-        let Ok((tok, _)) = &tokens[i] else {
-            return None;
-        };
-
-        // If we find a comma or a yield at depth 1 or 2, it's a tuple or coroutine.
-        if depth == 1 && matches!(tok, Tok::Comma | Tok::Yield) {
-            return None;
-        } else if matches!(tok, Tok::Lpar | Tok::Lbrace | Tok::Lsqb) {
-            depth = depth.saturating_add(1);
-        } else if matches!(tok, Tok::Rpar | Tok::Rbrace | Tok::Rsqb) {
-            depth = depth.saturating_sub(1);
-        }
-    }
 
     // Store the location of the extraneous closing parenthesis.
-    let end = i;
+    let end_range = loop {
+        let (token, range) = tokens.next()?;
 
-    // Verify that we're not in an empty tuple.
-    if (start + 1..i).all(|i| {
-        matches!(
-            tokens[i],
-            Ok((Tok::Comment(..) | Tok::NonLogicalNewline, _))
-        )
-    }) {
+        // If we find a comma or a yield at depth 1 or 2, it's a tuple or coroutine.
+        if depth == 1 && matches!(token, TokenKind::Comma | TokenKind::Yield) {
+            return None;
+        } else if matches!(token, TokenKind::Lpar | TokenKind::Lbrace | TokenKind::Lsqb) {
+            depth = depth.saturating_add(1);
+        } else if matches!(token, TokenKind::Rpar | TokenKind::Rbrace | TokenKind::Rsqb) {
+            depth = depth.saturating_sub(1);
+        }
+
+        if depth == 0 {
+            break range;
+        }
+
+        if !matches!(token, TokenKind::Comment | TokenKind::NonLogicalNewline) {
+            empty_tuple = false;
+        }
+    };
+
+    if empty_tuple {
         return None;
     }
 
     // Find the next non-coding token.
-    i += 1;
-    loop {
-        if i >= tokens.len() {
-            return None;
-        }
-        let Ok((tok, _)) = &tokens[i] else {
-            return None;
-        };
-        match tok {
-            Tok::Comment(..) | Tok::NonLogicalNewline => {
-                i += 1;
-            }
-            _ => {
-                break;
-            }
-        }
-    }
-
-    if i >= tokens.len() {
-        return None;
-    }
-    let Ok((tok, _)) = &tokens[i] else {
-        return None;
+    let token = loop {
+        let (token, _) = tokens.next()?;
+
+        match token {
+            TokenKind::Comment | TokenKind::NonLogicalNewline => continue,
+            _ => {
+                break token;
+            }
+        }
     };
-    if matches!(tok, Tok::Rpar) {
-        Some((start, end))
+
+    if matches!(token, TokenKind::Rpar) {
+        Some((start_range, end_range))
     } else {
         None
     }
```

```diff
@@ -132,20 +108,18 @@ fn match_extraneous_parentheses(tokens: &[LexResult], mut i: usize) -> Option<(usize, usize)> {
 /// UP034
 pub(crate) fn extraneous_parentheses(
     diagnostics: &mut Vec<Diagnostic>,
-    tokens: &[LexResult],
+    mut tokens: TokenKindIter,
     locator: &Locator,
 ) {
-    let mut i = 0;
-    while i < tokens.len() {
-        if matches!(tokens[i], Ok((Tok::Lpar, _))) {
-            if let Some((start, end)) = match_extraneous_parentheses(tokens, i) {
-                i = end + 1;
-                let Ok((_, start_range)) = &tokens[start] else {
-                    return;
-                };
-                let Ok((.., end_range)) = &tokens[end] else {
-                    return;
-                };
-                let mut diagnostic = Diagnostic::new(
-                    ExtraneousParentheses,
-                    TextRange::new(start_range.start(), end_range.end()),
+    while let Some((token, _)) = tokens.next() {
+        if !matches!(token, TokenKind::Lpar) {
+            continue;
+        }
+
+        let Some((start_range, end_range)) = match_extraneous_parentheses(&mut tokens) else {
+            continue;
+        };
+
+        let mut diagnostic = Diagnostic::new(
+            ExtraneousParentheses,
+            TextRange::new(start_range.start(), end_range.end()),
@@ -157,11 +131,5 @@ pub(crate) fn extraneous_parentheses(
-                    end_range.end(),
-                )));
-                diagnostics.push(diagnostic);
-            } else {
-                i += 1;
-            }
-        } else {
-            i += 1;
-        }
+            end_range.end(),
+        )));
+        diagnostics.push(diagnostic);
     }
 }
```