Use a consistent argument ordering for `Indexer` (#5200)

Charlie Marsh 2023-06-19 22:59:51 -04:00 committed by GitHub
parent 62aa77df31
commit a797e05602
11 changed files with 14 additions and 14 deletions
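
The convention this commit enforces: when a function takes ruff's source-analysis helpers, they appear in a fixed order, `Locator`, then `Stylist`, then `Indexer`, with rule-specific inputs before them and configuration last. Below is a minimal, self-contained sketch of that ordering; the stub types and the `delete_unused` helper are hypothetical stand-ins for illustration, not ruff's actual definitions.

```rust
/// Stand-in for ruff's `Locator` (maps offsets back to source text).
struct Locator;
/// Stand-in for ruff's `Stylist` (detected code style such as quotes and indentation).
struct Stylist;
/// Stand-in for ruff's `Indexer` (token-level facts such as comment ranges).
struct Indexer;

/// Hypothetical helper following the convention:
/// rule-specific arguments first, then `Locator` -> `Stylist` -> `Indexer`.
fn delete_unused(name: &str, locator: &Locator, stylist: &Stylist, indexer: &Indexer) {
    // A real fix would consult all three helpers; this sketch only shows the order.
    let _ = (locator, stylist, indexer);
    println!("would delete `{name}`");
}

fn main() {
    // Call sites mirror the signature, so arguments line up the same way
    // at every caller.
    delete_unused("os", &Locator, &Stylist, &Indexer);
}
```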

@@ -60,8 +60,8 @@ pub(crate) fn remove_unused_imports<'a>(
     stmt: &Stmt,
     parent: Option<&Stmt>,
     locator: &Locator,
-    indexer: &Indexer,
     stylist: &Stylist,
+    indexer: &Indexer,
 ) -> Result<Edit> {
     match codemods::remove_imports(unused_imports, stmt, locator, stylist)? {
         None => Ok(delete_stmt(stmt, parent, locator, indexer)),

@@ -109,7 +109,7 @@ pub(crate) fn check_tokens(
     // ERA001
     if enforce_commented_out_code {
         diagnostics.extend(eradicate::rules::commented_out_code(
-            indexer, locator, settings,
+            locator, indexer, settings,
         ));
     }

@@ -141,7 +141,7 @@ pub(crate) fn check_tokens(
     // E701, E702, E703
     if enforce_compound_statements {
         diagnostics.extend(
-            pycodestyle::rules::compound_statements(tokens, indexer, locator, settings)
+            pycodestyle::rules::compound_statements(tokens, locator, indexer, settings)
                 .into_iter()
                 .filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
         );

@@ -187,7 +187,7 @@ pub(crate) fn check_tokens(
     // PYI033
     if enforce_type_comment_in_stub && is_stub {
-        diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(indexer, locator));
+        diagnostics.extend(flake8_pyi::rules::type_comment_in_stub(locator, indexer));
     }

     // TD001, TD002, TD003, TD004, TD005, TD006, TD007

@@ -204,7 +204,7 @@ pub(crate) fn check_tokens(
             .collect();
         diagnostics.extend(
-            flake8_todos::rules::todos(&todo_comments, indexer, locator, settings)
+            flake8_todos::rules::todos(&todo_comments, locator, indexer, settings)
                 .into_iter()
                 .filter(|diagnostic| settings.rules.enabled(diagnostic.kind.rule())),
         );
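
The same relative order holds when no `Stylist` is involved: `Locator` still precedes `Indexer`, and `settings` stays last, as in the `commented_out_code` and `todos` calls above. A sketch under the same stub-type assumptions (the rule and its body are hypothetical):

```rust
struct Locator;
struct Indexer;
struct Settings;
struct Diagnostic;

/// Hypothetical token-based rule: rule inputs, then `Locator` before `Indexer`,
/// then `Settings`, mirroring `todos(&todo_comments, locator, indexer, settings)`.
fn example_rule(
    comments: &[String],
    locator: &Locator,
    indexer: &Indexer,
    settings: &Settings,
) -> Vec<Diagnostic> {
    // A real rule would scan the inputs and push diagnostics here.
    let _ = (comments, locator, indexer, settings);
    Vec::new()
}

fn main() {
    assert!(example_rule(&[], &Locator, &Indexer, &Settings).is_empty());
}
```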

@@ -48,8 +48,8 @@ fn is_standalone_comment(line: &str) -> bool {
 /// ERA001
 pub(crate) fn commented_out_code(
-    indexer: &Indexer,
     locator: &Locator,
+    indexer: &Indexer,
     settings: &Settings,
 ) -> Vec<Diagnostic> {
     let mut diagnostics = vec![];

@@ -34,7 +34,7 @@ impl Violation for TypeCommentInStub {
 }

 /// PYI033
-pub(crate) fn type_comment_in_stub(indexer: &Indexer, locator: &Locator) -> Vec<Diagnostic> {
+pub(crate) fn type_comment_in_stub(locator: &Locator, indexer: &Indexer) -> Vec<Diagnostic> {
     let mut diagnostics = vec![];

     for range in indexer.comment_ranges() {

@@ -236,8 +236,8 @@ static ISSUE_LINK_REGEX_SET: Lazy<RegexSet> = Lazy::new(|| {
 pub(crate) fn todos(
     todo_comments: &[TodoComment],
-    indexer: &Indexer,
     locator: &Locator,
+    indexer: &Indexer,
     settings: &Settings,
 ) -> Vec<Diagnostic> {
     let mut diagnostics: Vec<Diagnostic> = vec![];

@@ -211,8 +211,8 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
         stmt,
         parent,
         checker.locator,
-        checker.indexer,
         checker.stylist,
+        checker.indexer,
     )?;

     // Step 2) Add the import to the top-level.
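
At call sites the convention shows up as field accesses in the same order (`checker.locator`, `checker.stylist`, `checker.indexer`), so definitions and callers read identically. A small sketch with a hypothetical `Checker` owning the three helpers; the field names follow the diff, but the struct itself is a stand-in:

```rust
struct Locator;
struct Stylist;
struct Indexer;

/// Stand-in for the checker state that owns the three helpers.
struct Checker {
    locator: Locator,
    stylist: Stylist,
    indexer: Indexer,
}

/// Hypothetical fix helper with the conventional parameter order.
fn remove_import(_locator: &Locator, _stylist: &Stylist, _indexer: &Indexer) {}

fn fix_imports(checker: &Checker) {
    // Fields are threaded through in signature order, as in the hunks above.
    remove_import(&checker.locator, &checker.stylist, &checker.indexer);
}

fn main() {
    fix_imports(&Checker {
        locator: Locator,
        stylist: Stylist,
        indexer: Indexer,
    });
}
```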

@@ -435,8 +435,8 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
         stmt,
         parent,
         checker.locator,
-        checker.indexer,
         checker.stylist,
+        checker.indexer,
     )?;

     // Step 2) Add the import to a `TYPE_CHECKING` block.

@@ -101,8 +101,8 @@ impl AlwaysAutofixableViolation for UselessSemicolon {
 /// E701, E702, E703
 pub(crate) fn compound_statements(
     lxr: &[LexResult],
-    indexer: &Indexer,
     locator: &Locator,
+    indexer: &Indexer,
     settings: &Settings,
 ) -> Vec<Diagnostic> {
     let mut diagnostics = vec![];

@@ -234,8 +234,8 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
         stmt,
         parent,
         checker.locator,
-        checker.indexer,
         checker.stylist,
+        checker.indexer,
     )?;
     Ok(Fix::automatic(edit).isolate(checker.isolation(parent)))
 }

@@ -137,8 +137,8 @@ pub(crate) fn unnecessary_builtin_import(
             stmt,
             parent,
             checker.locator,
-            checker.indexer,
             checker.stylist,
+            checker.indexer,
         )?;
         Ok(Fix::suggested(edit).isolate(checker.isolation(parent)))
     });

@@ -122,8 +122,8 @@ pub(crate) fn unnecessary_future_import(checker: &mut Checker, stmt: &Stmt, name
             stmt,
             parent,
             checker.locator,
-            checker.indexer,
             checker.stylist,
+            checker.indexer,
         )?;
         Ok(Fix::suggested(edit).isolate(checker.isolation(parent)))
     });