Pass `Range` struct by value (#3376)

Charlie Marsh 2023-03-07 09:53:31 -05:00 committed by GitHub
parent bced58ce40
commit a5d302fcbf
54 changed files with 116 additions and 116 deletions
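The pattern repeated across all 54 files is mechanical: `Locator::slice` now takes its `Range` argument by value instead of by reference, so every call site drops the leading `&` (e.g. `locator.slice(&range)` becomes `locator.slice(range)`), and helpers such as `collect_comments` take `range: Range` rather than `range: &Range`. A minimal sketch of the idea follows; it is not the actual ruff source: the `location`/`end_location` fields mirror how `Range` is constructed in the diff, while the `Copy` derive is an assumption implied by the by-value change (a `Range` is just a pair of `Location`s, so copying it is as cheap as passing a pointer).

#[derive(Clone, Copy)]
struct Location {
    row: usize,
    column: usize,
}

#[derive(Clone, Copy)]
struct Range {
    location: Location,
    end_location: Location,
}

struct Locator<'a> {
    contents: &'a str,
}

impl<'a> Locator<'a> {
    // Before: `pub fn slice(&self, range: &Range) -> &'a str`
    // After: the range is taken by value, so callers write
    // `locator.slice(range)` rather than `locator.slice(&range)`.
    fn slice(&self, range: Range) -> &'a str {
        // Placeholder body: the real implementation maps row/column
        // locations to byte offsets via a line index before slicing.
        let _ = (range.location.row, range.end_location.column);
        self.contents
    }
}

fn main() {
    let locator = Locator { contents: "x = 1" };
    let range = Range {
        location: Location { row: 1, column: 0 },
        end_location: Location { row: 1, column: 5 },
    };
    // Because `Range` is assumed `Copy`, passing it by value does not move it
    // away; `range` stays usable after the call.
    let _snippet = locator.slice(range);
    let _again = locator.slice(range);
}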

View File

@@ -652,7 +652,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
 /// Returns `true` if a [`Range`] includes at least one comment.
 pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
-for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
+for tok in lexer::lex_located(locator.slice(range), Mode::Module, range.location) {
 match tok {
 Ok((_, tok, _)) => {
 if matches!(tok, Tok::Comment(..)) {
@@ -854,7 +854,7 @@ pub fn to_relative(absolute: Location, base: Location) -> Location {
 /// Return `true` if a [`Located`] has leading content.
 pub fn match_leading_content<T>(located: &Located<T>, locator: &Locator) -> bool {
 let range = Range::new(Location::new(located.location.row(), 0), located.location);
-let prefix = locator.slice(&range);
+let prefix = locator.slice(range);
 prefix.chars().any(|char| !char.is_whitespace())
 }
@@ -864,7 +864,7 @@ pub fn match_trailing_content<T>(located: &Located<T>, locator: &Locator) -> boo
 located.end_location.unwrap(),
 Location::new(located.end_location.unwrap().row() + 1, 0),
 );
-let suffix = locator.slice(&range);
+let suffix = locator.slice(range);
 for char in suffix.chars() {
 if char == '#' {
 return false;
@@ -882,7 +882,7 @@ pub fn match_trailing_comment<T>(located: &Located<T>, locator: &Locator) -> Opt
 located.end_location.unwrap(),
 Location::new(located.end_location.unwrap().row() + 1, 0),
 );
-let suffix = locator.slice(&range);
+let suffix = locator.slice(range);
 for (i, char) in suffix.chars().enumerate() {
 if char == '#' {
 return Some(i);
@@ -940,7 +940,7 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
 | StmtKind::FunctionDef { .. }
 | StmtKind::AsyncFunctionDef { .. }
 ) {
-let contents = locator.slice(&Range::from_located(stmt));
+let contents = locator.slice(Range::from_located(stmt));
 for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
 {
 if matches!(tok, Tok::Name { .. }) {
@@ -972,7 +972,7 @@ pub fn find_names<'a, T, U>(
 located: &'a Located<T, U>,
 locator: &'a Locator,
 ) -> impl Iterator<Item = Range> + 'a {
-let contents = locator.slice(&Range::from_located(located));
+let contents = locator.slice(Range::from_located(located));
 lexer::lex_located(contents, Mode::Module, located.location)
 .flatten()
 .filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
@@ -990,7 +990,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
 match (name, type_) {
 (Some(_), Some(type_)) => {
 let type_end_location = type_.end_location.unwrap();
-let contents = locator.slice(&Range::new(type_end_location, body[0].location));
+let contents = locator.slice(Range::new(type_end_location, body[0].location));
 let range = lexer::lex_located(contents, Mode::Module, type_end_location)
 .flatten()
 .tuple_windows()
@@ -1014,7 +1014,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
 .expect("Expected body to be non-empty")
 .location
 };
-let contents = locator.slice(&Range {
+let contents = locator.slice(Range {
 location: handler.location,
 end_location: end,
 });
@@ -1031,7 +1031,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
 /// Find f-strings that don't contain any formatted values in a `JoinedStr`.
 pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
-let contents = locator.slice(&Range::from_located(expr));
+let contents = locator.slice(Range::from_located(expr));
 lexer::lex_located(contents, Mode::Module, expr.location)
 .flatten()
 .filter_map(|(location, tok, end_location)| match tok {
@@ -1039,7 +1039,7 @@ pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Ran
 kind: StringKind::FString | StringKind::RawFString,
 ..
 } => {
-let first_char = locator.slice(&Range {
+let first_char = locator.slice(Range {
 location,
 end_location: Location::new(location.row(), location.column() + 1),
 });
@@ -1079,7 +1079,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
 .expect("Expected body to be non-empty")
 .end_location
 .unwrap();
-let contents = locator.slice(&Range {
+let contents = locator.slice(Range {
 location: body_end,
 end_location: orelse
 .first()
@@ -1101,7 +1101,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
 /// Return the `Range` of the first `Tok::Colon` token in a `Range`.
 pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
-let contents = locator.slice(&range);
+let contents = locator.slice(range);
 let range = lexer::lex_located(contents, Mode::Module, range.location)
 .flatten()
 .find(|(_, kind, _)| matches!(kind, Tok::Colon))
@@ -1157,7 +1157,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
 [stmt, ..] => stmt.location,
 _ => return None,
 };
-let contents = locator.slice(&Range::new(start, end));
+let contents = locator.slice(Range::new(start, end));
 let range = lexer::lex_located(contents, Mode::Module, start)
 .flatten()
 .find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))

View File

@@ -8,7 +8,7 @@ use crate::source_code::Locator;
 /// Extract the leading indentation from a line.
 pub fn indentation<'a, T>(locator: &'a Locator, located: &'a Located<T>) -> Option<&'a str> {
 let range = Range::from_located(located);
-let indentation = locator.slice(&Range::new(
+let indentation = locator.slice(Range::new(
 Location::new(range.location.row(), 0),
 Location::new(range.location.row(), range.location.column()),
 ));

View File

@@ -227,7 +227,7 @@ pub fn remove_unused_imports<'a>(
 indexer: &Indexer,
 stylist: &Stylist,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(stmt));
+let module_text = locator.slice(Range::from_located(stmt));
 let mut tree = match_module(module_text)?;
 let Some(Statement::Simple(body)) = tree.body.first_mut() else {

View File

@@ -54,7 +54,7 @@ fn apply_fixes<'a>(
 }
 // Add all contents from `last_pos` to `fix.location`.
-let slice = locator.slice(&Range::new(last_pos, fix.location));
+let slice = locator.slice(Range::new(last_pos, fix.location));
 output.push_str(slice);
 // Add the patch itself.
@@ -78,7 +78,7 @@ pub(crate) fn apply_fix(fix: &Fix, locator: &Locator) -> String {
 let mut output = String::with_capacity(locator.len());
 // Add all contents from `last_pos` to `fix.location`.
-let slice = locator.slice(&Range::new(Location::new(1, 0), fix.location));
+let slice = locator.slice(Range::new(Location::new(1, 0), fix.location));
 output.push_str(slice);
 // Add the patch itself.

View File

@@ -5282,8 +5282,8 @@ impl<'a> Checker<'a> {
 // Extract a `Docstring` from a `Definition`.
 let expr = definition.docstring.unwrap();
-let contents = self.locator.slice(&Range::from_located(expr));
-let indentation = self.locator.slice(&Range::new(
+let contents = self.locator.slice(Range::from_located(expr));
+let indentation = self.locator.slice(Range::new(
 Location::new(expr.location.row(), 0),
 Location::new(expr.location.row(), expr.location.column()),
 ));

View File

@@ -57,7 +57,7 @@ pub fn check_logical_lines(
 // Extract the indentation level.
 let start_loc = line.mapping[0].1;
-let start_line = locator.slice(&Range::new(Location::new(start_loc.row(), 0), start_loc));
+let start_line = locator.slice(Range::new(Location::new(start_loc.row(), 0), start_loc));
 let indent_level = expand_indent(start_line);
 let indent_size = 4;

View File

@@ -74,9 +74,9 @@ impl Source {
 } else {
 Location::new(diagnostic.end_location.row() + 1, 0)
 };
-let source = locator.slice(&Range::new(location, end_location));
+let source = locator.slice(Range::new(location, end_location));
 let num_chars_in_range = locator
-.slice(&Range::new(diagnostic.location, diagnostic.end_location))
+.slice(Range::new(diagnostic.location, diagnostic.end_location))
 .chars()
 .count();
 Source {

View File

@@ -126,7 +126,7 @@ pub fn rule_is_ignored(
 locator: &Locator,
 ) -> bool {
 let noqa_lineno = noqa_line_for.get(&lineno).unwrap_or(&lineno);
-let line = locator.slice(&Range::new(
+let line = locator.slice(Range::new(
 Location::new(*noqa_lineno, 0),
 Location::new(noqa_lineno + 1, 0),
 ));

View File

@@ -55,7 +55,7 @@ pub fn commented_out_code(
 ) -> Option<Diagnostic> {
 let location = Location::new(start.row(), 0);
 let end_location = Location::new(end.row() + 1, 0);
-let line = locator.slice(&Range::new(location, end_location));
+let line = locator.slice(Range::new(location, end_location));
 // Verify that the comment is on its own line, and that it contains code.
 if is_standalone_comment(line) && comment_contains_code(line, &settings.task_tags[..]) {

View File

@@ -9,7 +9,7 @@ use crate::source_code::Locator;
 /// ANN204
 pub fn add_return_none_annotation(locator: &Locator, stmt: &Stmt) -> Result<Fix> {
 let range = Range::from_located(stmt);
-let contents = locator.slice(&range);
+let contents = locator.slice(range);
 // Find the colon (following the `def` keyword).
 let mut seen_lpar = false;

View File

@@ -308,7 +308,7 @@ pub fn trailing_commas(
 // rather than just inserting a comma at the end. This prevents the UP034 autofix
 // removing any brackets in the same linter pass - doing both at the same time could
 // lead to a syntax error.
-let contents = locator.slice(&Range::new(missing_comma.0, missing_comma.2));
+let contents = locator.slice(Range::new(missing_comma.0, missing_comma.2));
 diagnostic.amend(Fix::replacement(
 format!("{contents},"),
 missing_comma.0,

View File

@@ -35,7 +35,7 @@ pub fn fix_unnecessary_generator_list(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(Call(GeneratorExp)))) -> Expr(ListComp)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -82,7 +82,7 @@ pub fn fix_unnecessary_generator_set(
 parent: Option<&rustpython_parser::ast::Expr>,
 ) -> Result<Fix> {
 // Expr(Call(GeneratorExp)))) -> Expr(SetComp)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -139,7 +139,7 @@ pub fn fix_unnecessary_generator_dict(
 expr: &rustpython_parser::ast::Expr,
 parent: Option<&rustpython_parser::ast::Expr>,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -213,7 +213,7 @@ pub fn fix_unnecessary_list_comprehension_set(
 ) -> Result<Fix> {
 // Expr(Call(ListComp)))) ->
 // Expr(SetComp)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -257,7 +257,7 @@ pub fn fix_unnecessary_list_comprehension_dict(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -356,7 +356,7 @@ pub fn fix_unnecessary_literal_set(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(Call(List|Tuple)))) -> Expr(Set)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let mut call = match_call(body)?;
@@ -407,7 +407,7 @@ pub fn fix_unnecessary_literal_dict(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(Call(List|Tuple)))) -> Expr(Dict)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -480,7 +480,7 @@ pub fn fix_unnecessary_collection_call(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(Call("list" | "tuple" | "dict")))) -> Expr(List|Tuple|Dict)
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -595,7 +595,7 @@ pub fn fix_unnecessary_literal_within_tuple_call(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -654,7 +654,7 @@ pub fn fix_unnecessary_literal_within_list_call(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -716,7 +716,7 @@ pub fn fix_unnecessary_list_call(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(Call(List|Tuple)))) -> Expr(List|Tuple)))
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;
@@ -746,7 +746,7 @@ pub fn fix_unnecessary_call_around_sorted(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let outer_call = match_call(body)?;
@@ -873,7 +873,7 @@ pub fn fix_unnecessary_double_cast_or_process(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let body = match_expr(&mut tree)?;
 let mut outer_call = match_call(body)?;
@@ -912,7 +912,7 @@ pub fn fix_unnecessary_comprehension(
 stylist: &Stylist,
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
@@ -988,7 +988,7 @@ pub fn fix_unnecessary_map(
 parent: Option<&rustpython_parser::ast::Expr>,
 kind: &str,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
 let call = match_call(body)?;

View File

@@ -13,7 +13,7 @@ pub fn fix_unnecessary_comprehension_any_all(
 expr: &rustpython_parser::ast::Expr,
 ) -> Result<Fix> {
 // Expr(ListComp) -> Expr(GeneratorExp)
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;

View File

@@ -57,16 +57,16 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
 ExprKind::Constant {
 value: Constant::Str(..),
 ..
-} => return checker.locator.slice(&Range::from_located(default)).len() <= 50,
+} => return checker.locator.slice(Range::from_located(default)).len() <= 50,
 ExprKind::Constant {
 value: Constant::Bytes(..),
 ..
-} => return checker.locator.slice(&Range::from_located(default)).len() <= 50,
+} => return checker.locator.slice(Range::from_located(default)).len() <= 50,
 ExprKind::Constant {
 value: Constant::Int(..),
 ..
 } => {
-return checker.locator.slice(&Range::from_located(default)).len() <= 10;
+return checker.locator.slice(Range::from_located(default)).len() <= 10;
 }
 ExprKind::UnaryOp {
 op: Unaryop::USub,
@@ -77,7 +77,7 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
 ..
 } = &operand.node
 {
-return checker.locator.slice(&Range::from_located(operand)).len() <= 10;
+return checker.locator.slice(Range::from_located(operand)).len() <= 10;
 }
 }
 ExprKind::BinOp {
@@ -101,7 +101,7 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
 ..
 } = &left.node
 {
-return checker.locator.slice(&Range::from_located(left)).len() <= 10;
+return checker.locator.slice(Range::from_located(left)).len() <= 10;
 } else if let ExprKind::UnaryOp {
 op: Unaryop::USub,
 operand,
@@ -114,7 +114,7 @@ fn is_valid_default_value_with_annotation(default: &Expr, checker: &Checker) ->
 ..
 } = &operand.node
 {
-return checker.locator.slice(&Range::from_located(operand)).len() <= 10;
+return checker.locator.slice(Range::from_located(operand)).len() <= 10;
 }
 }
 }

View File

@@ -328,7 +328,7 @@ fn fix_composite_condition(stmt: &Stmt, locator: &Locator, stylist: &Stylist) ->
 };
 // Extract the module text.
-let contents = locator.slice(&Range::new(
+let contents = locator.slice(Range::new(
 Location::new(stmt.location.row(), 0),
 Location::new(stmt.end_location.unwrap().row() + 1, 0),
 ));

View File

@@ -264,7 +264,7 @@ fn docstring(
 ) -> Option<Diagnostic> {
 let quotes_settings = &settings.flake8_quotes;
-let text = locator.slice(&Range::new(start, end));
+let text = locator.slice(Range::new(start, end));
 let trivia: Trivia = text.into();
 if trivia
@@ -309,7 +309,7 @@ fn strings(
 let trivia = sequence
 .iter()
 .map(|(start, end)| {
-let text = locator.slice(&Range::new(*start, *end));
+let text = locator.slice(Range::new(*start, *end));
 let trivia: Trivia = text.into();
 trivia
 })

View File

@@ -39,7 +39,7 @@ pub(crate) fn fix_nested_if_statements(
 };
 // Extract the module text.
-let contents = locator.slice(&Range::new(
+let contents = locator.slice(Range::new(
 Location::new(stmt.location.row(), 0),
 Location::new(stmt.end_location.unwrap().row() + 1, 0),
 ));

View File

@@ -20,7 +20,7 @@ pub(crate) fn fix_multiple_with_statements(
 };
 // Extract the module text.
-let contents = locator.slice(&Range::new(
+let contents = locator.slice(Range::new(
 Location::new(stmt.location.row(), 0),
 Location::new(stmt.end_location.unwrap().row() + 1, 0),
 ));

View File

@@ -47,8 +47,8 @@ fn key_in_dict(checker: &mut Checker, left: &Expr, right: &Expr, range: Range) {
 }
 // Slice exact content to preserve formatting.
-let left_content = checker.locator.slice(&Range::from_located(left));
-let value_content = checker.locator.slice(&Range::from_located(value));
+let left_content = checker.locator.slice(Range::from_located(left));
+let value_content = checker.locator.slice(Range::from_located(value));
 let mut diagnostic = Diagnostic::new(
 KeyInDict {

View File

@@ -62,7 +62,7 @@ fn is_constant_like(expr: &Expr) -> bool {
 /// Generate a fix to reverse a comparison.
 fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Result<String> {
 let range = Range::from_located(expr);
-let contents = locator.slice(&range);
+let contents = locator.slice(range);
 let mut expression = match_expression(contents)?;
 let mut comparison = match_comparison(&mut expression)?;

View File

@@ -14,7 +14,7 @@ pub struct Comment<'a> {
 }
 /// Collect all comments in an import block.
-pub fn collect_comments<'a>(range: &Range, locator: &'a Locator) -> Vec<Comment<'a>> {
+pub fn collect_comments<'a>(range: Range, locator: &'a Locator) -> Vec<Comment<'a>> {
 let contents = locator.slice(range);
 lexer::lex_located(contents, Mode::Module, range.location)
 .flatten()

View File

@@ -9,7 +9,7 @@ use crate::source_code::Locator;
 /// Return `true` if a `StmtKind::ImportFrom` statement ends with a magic
 /// trailing comma.
 pub fn trailing_comma(stmt: &Stmt, locator: &Locator) -> TrailingComma {
-let contents = locator.slice(&Range::from_located(stmt));
+let contents = locator.slice(Range::from_located(stmt));
 let mut count: usize = 0;
 let mut trailing_comma = TrailingComma::Absent;
 for (_, tok, _) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten() {

View File

@@ -83,7 +83,7 @@ pub fn organize_imports(
 autofix: flags::Autofix,
 package: Option<&Path>,
 ) -> Option<Diagnostic> {
-let indentation = locator.slice(&extract_indentation_range(&block.imports));
+let indentation = locator.slice(extract_indentation_range(&block.imports));
 let indentation = leading_space(indentation);
 let range = extract_range(&block.imports);
@@ -98,7 +98,7 @@ pub fn organize_imports(
 // Extract comments. Take care to grab any inline comments from the last line.
 let comments = comments::collect_comments(
-&Range::new(
+Range::new(
 range.location,
 Location::new(range.end_location.row() + 1, 0),
 ),
@@ -148,7 +148,7 @@ pub fn organize_imports(
 Location::new(range.location.row(), 0),
 Location::new(range.end_location.row() + 1 + num_trailing_lines, 0),
 );
-let actual = locator.slice(&range);
+let actual = locator.slice(range);
 if matches_ignoring_indentation(actual, &expected) {
 None
 } else {

View File

@@ -50,7 +50,7 @@ pub fn fix_inplace_argument(
 // Apply the deletion step.
 // TODO(charlie): Find a way to
-let contents = locator.slice(&Range::new(expr.location, expr.end_location.unwrap()));
+let contents = locator.slice(Range::new(expr.location, expr.end_location.unwrap()));
 let output = apply_fix(&fix_me, &Locator::new(contents));
 // Obtain the name prefix.

View File

@@ -89,7 +89,7 @@ fn build_line<'a>(
 s = format!("\"{}\"", "x".repeat(value.len()).clone());
 &s
 } else {
-locator.slice(&Range {
+locator.slice(Range {
 location: *start,
 end_location: *end,
 })
@@ -97,7 +97,7 @@ fn build_line<'a>(
 if let Some(prev) = prev {
 if prev.row() != start.row() {
-let prev_text = locator.slice(&Range {
+let prev_text = locator.slice(Range {
 location: Location::new(prev.row(), prev.column() - 1),
 end_location: Location::new(prev.row(), prev.column()),
 });
@@ -109,7 +109,7 @@ fn build_line<'a>(
 length += 1;
 }
 } else if prev.column() != start.column() {
-let prev_text = locator.slice(&Range {
+let prev_text = locator.slice(Range {
 location: *prev,
 end_location: *start,
 });

View File

@@ -52,7 +52,7 @@ pub fn invalid_escape_sequence(
 ) -> Vec<Diagnostic> {
 let mut diagnostics = vec![];
-let text = locator.slice(&Range::new(start, end));
+let text = locator.slice(Range::new(start, end));
 // Determine whether the string is single- or triple-quoted.
 let Ok(quote) = extract_quote(text) else {

View File

@@ -55,7 +55,7 @@ pub fn lambda_assignment(checker: &mut Checker, target: &Expr, value: &Expr, stm
 && !match_leading_content(stmt, checker.locator)
 && !match_trailing_content(stmt, checker.locator)
 {
-let first_line = checker.locator.slice(&Range::new(
+let first_line = checker.locator.slice(Range::new(
 Location::new(stmt.location.row(), 0),
 Location::new(stmt.location.row() + 1, 0),
 ));

View File

@@ -59,7 +59,7 @@ pub fn whitespace_before_comment(
 let mut prev_end = Location::new(0, 0);
 for (start, tok, end) in tokens {
 if let Tok::Comment(text) = tok {
-let line = locator.slice(&Range::new(
+let line = locator.slice(Range::new(
 Location::new(start.row(), 0),
 Location::new(start.row(), start.column()),
 ));

View File

@@ -73,7 +73,7 @@ pub fn blank_before_after_class(checker: &mut Checker, docstring: &Docstring) {
 {
 let before = checker
 .locator
-.slice(&Range::new(parent.location, docstring.expr.location));
+.slice(Range::new(parent.location, docstring.expr.location));
 let blank_lines_before = before
 .lines()
@@ -133,7 +133,7 @@ pub fn blank_before_after_class(checker: &mut Checker, docstring: &Docstring) {
 .rules
 .enabled(&Rule::OneBlankLineAfterClass)
 {
-let after = checker.locator.slice(&Range::new(
+let after = checker.locator.slice(Range::new(
 docstring.expr.end_location.unwrap(),
 parent.end_location.unwrap(),
 ));

View File

@@ -64,7 +64,7 @@ pub fn blank_before_after_function(checker: &mut Checker, docstring: &Docstring)
 {
 let before = checker
 .locator
-.slice(&Range::new(parent.location, docstring.expr.location));
+.slice(Range::new(parent.location, docstring.expr.location));
 let blank_lines_before = before
 .lines()
@@ -95,7 +95,7 @@ pub fn blank_before_after_function(checker: &mut Checker, docstring: &Docstring)
 .rules
 .enabled(&Rule::NoBlankLineAfterFunction)
 {
-let after = checker.locator.slice(&Range::new(
+let after = checker.locator.slice(Range::new(
 docstring.expr.end_location.unwrap(),
 parent.end_location.unwrap(),
 ));

View File

@@ -105,7 +105,7 @@ pub fn multi_line_summary_start(checker: &mut Checker, docstring: &Docstring) {
 | DefinitionKind::NestedFunction(parent)
 | DefinitionKind::Method(parent) = &docstring.kind
 {
-let parent_indentation = checker.locator.slice(&Range::new(
+let parent_indentation = checker.locator.slice(Range::new(
 Location::new(parent.location.row(), 0),
 Location::new(parent.location.row(), parent.location.column()),
 ));

View File

@@ -16,7 +16,7 @@ pub fn remove_unused_format_arguments_from_dict(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<Fix> {
-let module_text = locator.slice(&Range::from_located(stmt));
+let module_text = locator.slice(Range::from_located(stmt));
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
@@ -67,7 +67,7 @@ pub fn remove_unused_keyword_arguments_from_format_call(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<Fix> {
-let module_text = locator.slice(&location);
+let module_text = locator.slice(location);
 let mut tree = match_module(module_text)?;
 let mut body = match_expr(&mut tree)?;
@@ -114,7 +114,7 @@ pub fn remove_exception_handler_assignment(
 excepthandler: &Excepthandler,
 locator: &Locator,
 ) -> Result<Fix> {
-let contents = locator.slice(&Range::from_located(excepthandler));
+let contents = locator.slice(Range::from_located(excepthandler));
 let mut fix_start = None;
 let mut fix_end = None;

View File

@@ -56,7 +56,7 @@ fn fix_f_string_missing_placeholders(
 tok_range: &Range,
 checker: &mut Checker,
 ) -> Fix {
-let content = checker.locator.slice(&Range::new(
+let content = checker.locator.slice(Range::new(
 prefix_range.end_location,
 tok_range.end_location,
 ));

View File

@@ -61,7 +61,7 @@ pub fn invalid_literal_comparison(
 comparators: &[Expr],
 location: Range,
 ) {
-let located = Lazy::new(|| locate_cmpops(checker.locator.slice(&location)));
+let located = Lazy::new(|| locate_cmpops(checker.locator.slice(location)));
 let mut left = left;
 for (index, (op, right)) in izip!(ops, comparators).enumerate() {
 if matches!(op, Cmpop::Is | Cmpop::IsNot)

View File

@@ -243,7 +243,7 @@ fn is_valid_dict(
 /// PLE1307
 pub fn bad_string_format_type(checker: &mut Checker, expr: &Expr, right: &Expr) {
 // Grab each string segment (in case there's an implicit concatenation).
-let content = checker.locator.slice(&Range::from_located(expr));
+let content = checker.locator.slice(Range::from_located(expr));
 let mut strings: Vec<(Location, Location)> = vec![];
 for (start, tok, end) in lexer::lex_located(content, Mode::Module, expr.location).flatten() {
 if matches!(tok, Tok::String { .. }) {
@@ -262,7 +262,7 @@ pub fn bad_string_format_type(checker: &mut Checker, expr: &Expr, right: &Expr)
 // Parse each string segment.
 let mut format_strings = vec![];
 for (start, end) in &strings {
-let string = checker.locator.slice(&Range::new(*start, *end));
+let string = checker.locator.slice(Range::new(*start, *end));
 let (Some(leader), Some(trailer)) = (leading_quote(string), trailing_quote(string)) else {
 return;
 };

View File

@@ -24,7 +24,7 @@ pub fn collapsible_else_if(orelse: &[Stmt], locator: &Locator) -> Option<Diagnos
 if matches!(first.node, StmtKind::If { .. }) {
 // Determine whether this is an `elif`, or an `if` in an `else` block.
 if locator
-.slice(&Range {
+.slice(Range {
 location: first.location,
 end_location: first.end_location.unwrap(),
 })

View File

@@ -19,7 +19,7 @@ pub fn adjust_indentation(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<String> {
-let contents = locator.slice(&range);
+let contents = locator.slice(range);
 let module_text = format!("def f():{}{contents}", stylist.line_ending().as_str());
@@ -68,7 +68,7 @@ pub fn remove_class_def_base(
 /// Generate a fix to remove arguments from a `super` call.
 pub fn remove_super_arguments(locator: &Locator, stylist: &Stylist, expr: &Expr) -> Option<Fix> {
 let range = Range::from_located(expr);
-let contents = locator.slice(&range);
+let contents = locator.slice(range);
 let mut tree = libcst_native::parse_module(contents, None).ok()?;
@@ -150,7 +150,7 @@ pub fn remove_import_members(contents: &str, members: &[&str]) -> String {
 // It's possible that `last_pos` is after `fix.location`, if we're removing the
 // first _two_ members.
 if start_location > last_pos {
-let slice = locator.slice(&Range::new(last_pos, start_location));
+let slice = locator.slice(Range::new(last_pos, start_location));
 output.push_str(slice);
 }

View File

@@ -138,7 +138,7 @@ pub fn extraneous_parentheses(
 let mut diagnostic =
 Diagnostic::new(ExtraneousParentheses, Range::new(*start, *end));
 if autofix.into() && settings.rules.should_fix(&Rule::ExtraneousParentheses) {
-let contents = locator.slice(&Range::new(*start, *end));
+let contents = locator.slice(Range::new(*start, *end));
 diagnostic.amend(Fix::replacement(
 contents[1..contents.len() - 1].to_string(),
 *start,

View File

@@ -45,7 +45,7 @@ impl<'a> FormatSummaryValues<'a> {
 let mut extracted_kwargs: FxHashMap<&str, String> = FxHashMap::default();
 if let ExprKind::Call { args, keywords, .. } = &expr.node {
 for arg in args {
-let arg = checker.locator.slice(&Range::from_located(arg));
+let arg = checker.locator.slice(Range::from_located(arg));
 if contains_invalids(arg) {
 return None;
 }
@@ -54,7 +54,7 @@ impl<'a> FormatSummaryValues<'a> {
 for keyword in keywords {
 let KeywordData { arg, value } = &keyword.node;
 if let Some(key) = arg {
-let kwarg = checker.locator.slice(&Range::from_located(value));
+let kwarg = checker.locator.slice(Range::from_located(value));
 if contains_invalids(kwarg) {
 return None;
 }
@@ -125,7 +125,7 @@ fn try_convert_to_f_string(checker: &Checker, expr: &Expr) -> Option<String> {
 return None;
 };
-let contents = checker.locator.slice(&Range::from_located(value));
+let contents = checker.locator.slice(Range::from_located(value));
 // Tokenize: we need to avoid trying to fix implicit string concatenations.
 if lexer::lex(contents, Mode::Module)
@@ -258,7 +258,7 @@ pub(crate) fn f_strings(checker: &mut Checker, summary: &FormatSummary, expr: &E
 };
 // Avoid refactors that increase the resulting string length.
-let existing = checker.locator.slice(&Range::from_located(expr));
+let existing = checker.locator.slice(Range::from_located(expr));
 if contents.len() > existing.len() {
 return;
 }

View File

@@ -86,7 +86,7 @@ fn generate_call(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<String> {
-let module_text = locator.slice(&Range::from_located(expr));
+let module_text = locator.slice(Range::from_located(expr));
 let mut expression = match_expression(module_text)?;
 let mut call = match_call(&mut expression)?;

View File

@@ -350,7 +350,7 @@ impl<'a> ImportReplacer<'a> {
 let matched = ImportReplacer::format_import_from(&matched_names, target);
 let unmatched = fixes::remove_import_members(
-self.locator.slice(&Range::from_located(self.stmt)),
+self.locator.slice(Range::from_located(self.stmt)),
 &matched_names
 .iter()
 .map(|name| name.name.as_str())

View File

@@ -116,7 +116,7 @@ pub fn native_literals(
 // rust-python merges adjacent string/bytes literals into one node, but we can't
 // safely remove the outer call in this situation. We're following pyupgrade
 // here and skip.
-let arg_code = checker.locator.slice(&Range::from_located(arg));
+let arg_code = checker.locator.slice(Range::from_located(arg));
 if lexer::lex_located(arg_code, Mode::Module, arg.location)
 .flatten()
 .filter(|(_, tok, _)| matches!(tok, Tok::String { .. }))

View File

@@ -56,7 +56,7 @@ fn metadata<T>(locator: &Locator, located: &Located<T>) -> Option<BlockMetadata>
 // Start the selection at the start-of-line. This ensures consistent indentation
 // in the token stream, in the event that the entire block is indented.
-let text = locator.slice(&Range::new(
+let text = locator.slice(Range::new(
 Location::new(located.location.row(), 0),
 located.end_location.unwrap(),
 ));
@@ -198,7 +198,7 @@ fn fix_py2_block(
 Some(Fix::replacement(
 checker
 .locator
-.slice(&Range::new(start.location, end.end_location.unwrap()))
+.slice(Range::new(start.location, end.end_location.unwrap()))
 .to_string(),
 stmt.location,
 stmt.end_location.unwrap(),
@@ -265,7 +265,7 @@ fn fix_py3_block(
 Some(Fix::replacement(
 checker
 .locator
-.slice(&Range::new(start.location, end.end_location.unwrap()))
+.slice(Range::new(start.location, end.end_location.unwrap()))
 .to_string(),
 stmt.location,
 stmt.end_location.unwrap(),
@@ -297,7 +297,7 @@ fn fix_py3_block(
 // Replace the `elif` with an `else, preserve the body of the elif, and remove
 // the rest.
 let end = body.last().unwrap();
-let text = checker.locator.slice(&Range::new(
+let text = checker.locator.slice(Range::new(
 test.end_location.unwrap(),
 end.end_location.unwrap(),
 ));

View File

@@ -142,7 +142,7 @@ fn percent_to_format(format_string: &CFormatString) -> String {
 fn clean_params_tuple(checker: &mut Checker, right: &Expr) -> String {
 let mut contents = checker
 .locator
-.slice(&Range::from_located(right))
+.slice(Range::from_located(right))
 .to_string();
 if let ExprKind::Tuple { elts, .. } = &right.node {
 if elts.len() == 1 {
@@ -196,7 +196,7 @@ fn clean_params_dictionary(checker: &mut Checker, right: &Expr) -> Option<String
 }
 }
-let value_string = checker.locator.slice(&Range::from_located(value));
+let value_string = checker.locator.slice(Range::from_located(value));
 arguments.push(format!("{key_string}={value_string}"));
 } else {
 // If there are any non-string keys, abort.
@@ -204,7 +204,7 @@ fn clean_params_dictionary(checker: &mut Checker, right: &Expr) -> Option<String
 }
 }
 None => {
-let value_string = checker.locator.slice(&Range::from_located(value));
+let value_string = checker.locator.slice(Range::from_located(value));
 arguments.push(format!("**{value_string}"));
 }
 }
@@ -320,7 +320,7 @@ pub(crate) fn printf_string_formatting(
 let mut strings: Vec<(Location, Location)> = vec![];
 let mut extension = None;
 for (start, tok, end) in lexer::lex_located(
-checker.locator.slice(&Range::from_located(expr)),
+checker.locator.slice(Range::from_located(expr)),
 Mode::Module,
 expr.location,
 )
@@ -345,7 +345,7 @@ pub(crate) fn printf_string_formatting(
 // Parse each string segment.
 let mut format_strings = vec![];
 for (start, end) in &strings {
-let string = checker.locator.slice(&Range::new(*start, *end));
+let string = checker.locator.slice(Range::new(*start, *end));
 let (Some(leader), Some(trailer)) = (leading_quote(string), trailing_quote(string)) else {
 return;
 };
@@ -383,10 +383,10 @@ pub(crate) fn printf_string_formatting(
 // Add the content before the string segment.
 match prev {
 None => {
-contents.push_str(checker.locator.slice(&Range::new(expr.location, *start)));
+contents.push_str(checker.locator.slice(Range::new(expr.location, *start)));
 }
 Some(prev) => {
-contents.push_str(checker.locator.slice(&Range::new(prev, *start)));
+contents.push_str(checker.locator.slice(Range::new(prev, *start)));
 }
 }
 // Add the string itself.
@@ -395,7 +395,7 @@ pub(crate) fn printf_string_formatting(
 }
 if let Some((.., end)) = extension {
-contents.push_str(checker.locator.slice(&Range::new(prev.unwrap(), end)));
+contents.push_str(checker.locator.slice(Range::new(prev.unwrap(), end)));
 }
 // Add the `.format` call.

View File

@@ -134,7 +134,7 @@ fn create_check(
 }
 fn create_remove_param_fix(locator: &Locator, expr: &Expr, mode_param: &Expr) -> Result<Fix> {
-let content = locator.slice(&Range::new(expr.location, expr.end_location.unwrap()));
+let content = locator.slice(Range::new(expr.location, expr.end_location.unwrap()));
 // Find the last comma before mode_param and create a deletion fix
 // starting from the comma and ending after mode_param.
 let mut fix_start: Option<Location> = None;

View File

@@ -81,7 +81,7 @@ fn generate_fix(
 };
 let mut contents = String::from("capture_output=True");
 if let Some(middle) =
-extract_middle(locator.slice(&Range::new(first.end_location.unwrap(), last.location)))
+extract_middle(locator.slice(Range::new(first.end_location.unwrap(), last.location)))
 {
 if middle.multi_line {
 let Some(indent) = indentation(locator, first) else {

View File

@@ -24,7 +24,7 @@ impl AlwaysAutofixableViolation for RewriteCElementTree {
 fn add_check_for_node<T>(checker: &mut Checker, node: &Located<T>) {
 let mut diagnostic = Diagnostic::new(RewriteCElementTree, Range::from_located(node));
 if checker.patch(diagnostic.kind.rule()) {
-let contents = checker.locator.slice(&Range::from_located(node));
+let contents = checker.locator.slice(Range::from_located(node));
 diagnostic.amend(Fix::replacement(
 contents.replacen("cElementTree", "ElementTree", 1),
 node.location,

View File

@@ -143,7 +143,7 @@ fn format_import(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<String> {
-let module_text = locator.slice(&Range::from_located(stmt));
+let module_text = locator.slice(Range::from_located(stmt));
 let mut tree = match_module(module_text)?;
 let mut import = match_import(&mut tree)?;
@@ -177,7 +177,7 @@ fn format_import_from(
 locator: &Locator,
 stylist: &Stylist,
 ) -> Result<String> {
-let module_text = locator.slice(&Range::from_located(stmt));
+let module_text = locator.slice(Range::from_located(stmt));
 let mut tree = match_module(module_text).unwrap();
 let mut import = match_import_from(&mut tree)?;

View File

@@ -174,7 +174,7 @@ pub fn rewrite_yield_from(checker: &mut Checker, stmt: &Stmt) {
 let mut diagnostic = Diagnostic::new(RewriteYieldFrom, Range::from_located(item.stmt));
 if checker.patch(diagnostic.kind.rule()) {
-let contents = checker.locator.slice(&Range::from_located(item.iter));
+let contents = checker.locator.slice(Range::from_located(item.iter));
 let contents = format!("yield from {contents}");
 diagnostic.amend(Fix::replacement(
 contents,

View File

@@ -100,7 +100,7 @@ fn replace_with_bytes_literal(
 ) -> Diagnostic {
 let mut diagnostic = Diagnostic::new(UnnecessaryEncodeUTF8, Range::from_located(expr));
 if patch {
-let content = locator.slice(&Range::new(
+let content = locator.slice(Range::new(
 constant.location,
 constant.end_location.unwrap(),
 ));

View File

@@ -97,7 +97,7 @@ pub fn unpack_list_comprehension(checker: &mut Checker, targets: &[Expr], value:
 let mut diagnostic =
 Diagnostic::new(RewriteListComprehension, Range::from_located(value));
 if checker.patch(diagnostic.kind.rule()) {
-let existing = checker.locator.slice(&Range::from_located(value));
+let existing = checker.locator.slice(Range::from_located(value));
 let mut content = String::with_capacity(existing.len());
 content.push('(');

View File

@@ -1693,7 +1693,7 @@ pub fn ambiguous_unicode_character(
 ) -> Vec<Diagnostic> {
 let mut diagnostics = vec![];
-let text = locator.slice(&Range::new(start, end));
+let text = locator.slice(Range::new(start, end));
 let mut col_offset = 0;
 let mut row_offset = 0;

View File

@@ -125,7 +125,7 @@ impl<'a> Locator<'a> {
 }
 /// Take the source code between the given [`Range`].
-pub fn slice(&self, range: &Range) -> &'a str {
+pub fn slice(&self, range: Range) -> &'a str {
 let index = self.get_or_init_index();
 let start = truncate(range.location, index, self.contents);
 let end = truncate(range.end_location, index, self.contents);

View File

@@ -167,7 +167,7 @@ fn detect_indentation(contents: &str, locator: &Locator) -> Option<Indentation>
 for (_start, tok, end) in lexer::lex(contents, Mode::Module).flatten() {
 if let Tok::Indent { .. } = tok {
 let start = Location::new(end.row(), 0);
-let whitespace = locator.slice(&Range::new(start, end));
+let whitespace = locator.slice(Range::new(start, end));
 return Some(Indentation(whitespace.to_string()));
 }
 }
@@ -178,7 +178,7 @@ fn detect_indentation(contents: &str, locator: &Locator) -> Option<Indentation>
 fn detect_quote(contents: &str, locator: &Locator) -> Option<Quote> {
 for (start, tok, end) in lexer::lex(contents, Mode::Module).flatten() {
 if let Tok::String { .. } = tok {
-let content = locator.slice(&Range::new(start, end));
+let content = locator.slice(Range::new(start, end));
 if let Some(pattern) = leading_quote(content) {
 if pattern.contains("\"\"\"") {
 continue;