mirror of https://github.com/astral-sh/ruff
Improve top-of-file insertions for required imports (#3779)
parent cb588d1d6d
commit 2e6eddc7bd
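Scope of the change, as reflected in the hunks below: isort's required-import fix previously computed a single splice `Location` via `find_splice_location` (the end of any leading docstring, comments, and whitespace) and prepended a newline to the inserted import. It now calls a new `top_of_file_insertion` helper that returns an `Insertion` of `prefix`, `location`, and `suffix`, so the import is written on its own line at the start of the following row (or inline after a trailing semicolon). A rough before/after sketch of the fix payload for a file that starts with a docstring, using a hypothetical `FixEdit` stand-in rather than ruff's real `Edit` type (the values are taken from the updated snapshots below):

```rust
// Hypothetical stand-in for a fix edit; ruff's real `Edit` type is not shown
// in this diff, so this only illustrates the shape of the payload.
struct FixEdit {
    content: String,
    row: usize,
    column: usize,
}

fn main() {
    // Before: a leading newline, spliced in at the end of the docstring
    // (row 1, column 19 for `"""Hello, world!"""`).
    let before = FixEdit {
        content: "\nfrom __future__ import annotations".to_string(),
        row: 1,
        column: 19,
    };

    // After: the import on its own line with a trailing newline, inserted at
    // the start of the next row (row 2, column 0).
    let after = FixEdit {
        content: "from __future__ import annotations\n".to_string(),
        row: 2,
        column: 0,
    };

    println!(
        "before: insert {:?} at ({}, {})",
        before.content, before.row, before.column
    );
    println!(
        "after:  insert {:?} at ({}, {})",
        after.content, after.row, after.column
    );
}
```
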
@@ -3,9 +3,9 @@ use rustpython_parser::{lexer, Mode, Tok};
 
 use ruff_python_ast::helpers::is_docstring_stmt;
 use ruff_python_ast::newlines::StrExt;
-use ruff_python_ast::source_code::Locator;
+use ruff_python_ast::source_code::{Locator, Stylist};
 
-use super::types::TrailingComma;
+use crate::rules::isort::types::TrailingComma;
 
 /// Return `true` if a `StmtKind::ImportFrom` statement ends with a magic
 /// trailing comma.
@@ -102,23 +102,73 @@ fn match_docstring_end(body: &[Stmt]) -> Option<Location> {
     Some(stmt.end_location.unwrap())
 }
 
-/// Find the end of the first token that isn't a docstring, comment, or
-/// whitespace.
-pub fn find_splice_location(body: &[Stmt], locator: &Locator) -> Location {
-    // Find the first AST node that isn't a docstring.
-    let mut splice = match_docstring_end(body).unwrap_or_default();
-
-    // Find the first token that isn't a comment or whitespace.
-    let contents = locator.skip(splice);
-    for (.., tok, end) in lexer::lex_located(contents, Mode::Module, splice).flatten() {
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub(super) struct Insertion {
+    /// The content to add before the insertion.
+    pub prefix: &'static str,
+    /// The location at which to insert.
+    pub location: Location,
+    /// The content to add after the insertion.
+    pub suffix: &'static str,
+}
+
+impl Insertion {
+    pub fn new(prefix: &'static str, location: Location, suffix: &'static str) -> Self {
+        Self {
+            prefix,
+            location,
+            suffix,
+        }
+    }
+}
+
+/// Find the location at which a "top-of-file" import should be inserted,
+/// along with a prefix and suffix to use for the insertion.
+///
+/// For example, given the following code:
+///
+/// ```python
+/// """Hello, world!"""
+///
+/// import os
+/// ```
+///
+/// The location returned will be the start of the `import os` statement,
+/// along with a trailing newline suffix.
+pub(super) fn top_of_file_insertion(
+    body: &[Stmt],
+    locator: &Locator,
+    stylist: &Stylist,
+) -> Insertion {
+    // Skip over any docstrings.
+    let mut location = if let Some(location) = match_docstring_end(body) {
+        // If the first token after the docstring is a semicolon, insert after the semicolon as an
+        // inline statement;
+        let first_token = lexer::lex_located(locator.skip(location), Mode::Module, location)
+            .flatten()
+            .next();
+        if let Some((.., Tok::Semi, end)) = first_token {
+            return Insertion::new(" ", end, ";");
+        }
+
+        // Otherwise, advance to the next row.
+        Location::new(location.row() + 1, 0)
+    } else {
+        Location::default()
+    };
+
+    // Skip over any comments and empty lines.
+    for (.., tok, end) in
+        lexer::lex_located(locator.skip(location), Mode::Module, location).flatten()
+    {
         if matches!(tok, Tok::Comment(..) | Tok::Newline) {
-            splice = end;
+            location = Location::new(end.row() + 1, 0);
         } else {
             break;
         }
     }
 
-    splice
+    return Insertion::new("", location, stylist.line_ending().as_str());
 }
 
 #[cfg(test)]
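Taken together with its doc comment and the tests below, `top_of_file_insertion` yields one of two shapes: an own-line insertion (empty prefix, a location at column 0 of the row after the docstring and comments, and the source's line ending as suffix), or, when the docstring is followed by a semicolon, an inline insertion (a `" "` prefix, a location just past the semicolon, and `";"` as suffix). A minimal standalone sketch of how a caller composes those into the inserted text, mirroring the `format!("{prefix}{required_import}{suffix}")` call that the rule now uses; the `Insertion` here is a simplified stand-in, not ruff's type:

```rust
/// Simplified stand-in for the `Insertion` struct introduced in this commit.
struct Insertion {
    prefix: &'static str,
    /// (row, column), mirroring `Location::new(row, column)`.
    location: (usize, usize),
    suffix: &'static str,
}

/// Mirrors the rule's `format!("{prefix}{required_import}{suffix}")`.
fn compose(insertion: &Insertion, required_import: &str) -> String {
    format!("{}{}{}", insertion.prefix, required_import, insertion.suffix)
}

fn main() {
    let required_import = "import os";

    // `"""Hello, world!"""` followed by a newline: insert on the next row,
    // terminated by the source file's line ending.
    let own_line = Insertion { prefix: "", location: (2, 0), suffix: "\n" };
    assert_eq!(compose(&own_line, required_import), "import os\n");

    // `"""Hello, world!"""; x = 1`: insert inline, right after the semicolon.
    let inline = Insertion { prefix: " ", location: (1, 20), suffix: ";" };
    assert_eq!(compose(&inline, required_import), " import os;");

    println!(
        "own-line insertion at {:?}, inline insertion at {:?}",
        own_line.location, inline.location
    );
}
```
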
@@ -126,79 +176,120 @@ mod tests {
     use anyhow::Result;
     use rustpython_parser as parser;
     use rustpython_parser::ast::Location;
+    use rustpython_parser::lexer::LexResult;
 
-    use ruff_python_ast::source_code::Locator;
+    use ruff_python_ast::source_code::{LineEnding, Locator, Stylist};
 
-    use super::find_splice_location;
+    use crate::rules::isort::helpers::{top_of_file_insertion, Insertion};
 
-    fn splice_contents(contents: &str) -> Result<Location> {
+    fn insert(contents: &str) -> Result<Insertion> {
         let program = parser::parse_program(contents, "<filename>")?;
+        let tokens: Vec<LexResult> = ruff_rustpython::tokenize(contents);
         let locator = Locator::new(contents);
-        Ok(find_splice_location(&program, &locator))
+        let stylist = Stylist::from_tokens(&tokens, &locator);
+        Ok(top_of_file_insertion(&program, &locator, &stylist))
     }
 
     #[test]
-    fn splice() -> Result<()> {
+    fn top_of_file_insertions() -> Result<()> {
         let contents = "";
-        assert_eq!(splice_contents(contents)?, Location::new(1, 0));
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(1, 0), LineEnding::default().as_str())
+        );
+
+        let contents = r#"
+"""Hello, world!""""#
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(2, 0), LineEnding::default().as_str())
+        );
 
         let contents = r#"
 """Hello, world!"""
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 19));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(2, 0), "\n")
+        );
 
         let contents = r#"
 """Hello, world!"""
 """Hello, world!"""
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(2, 19));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(3, 0), "\n")
+        );
 
         let contents = r#"
 x = 1
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 0));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(1, 0), "\n")
+        );
 
         let contents = r#"
 #!/usr/bin/env python3
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 22));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(2, 0), "\n")
+        );
 
         let contents = r#"
 #!/usr/bin/env python3
 """Hello, world!"""
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(2, 19));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(3, 0), "\n")
+        );
 
         let contents = r#"
 """Hello, world!"""
 #!/usr/bin/env python3
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(2, 22));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(3, 0), "\n")
+        );
 
         let contents = r#"
 """%s""" % "Hello, world!"
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 0));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new("", Location::new(1, 0), "\n")
+        );
 
         let contents = r#"
 """Hello, world!"""; x = 1
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 19));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new(" ", Location::new(1, 20), ";")
+        );
 
         let contents = r#"
 """Hello, world!"""; x = 1; y = \
     2
 "#
-        .trim();
-        assert_eq!(splice_contents(contents)?, Location::new(1, 19));
+        .trim_start();
+        assert_eq!(
+            insert(contents)?,
+            Insertion::new(" ", Location::new(1, 20), ";")
+        );
 
         Ok(())
     }
@@ -11,9 +11,9 @@ use ruff_python_ast::source_code::{Locator, Stylist};
 use ruff_python_ast::types::Range;
 
 use crate::registry::Rule;
+use crate::rules::isort::helpers::{top_of_file_insertion, Insertion};
 use crate::settings::{flags, Settings};
 
-use super::super::helpers;
 use super::super::track::Block;
 
 /// ## What it does
@@ -169,32 +169,15 @@ fn add_required_import(
         Range::new(Location::default(), Location::default()),
     );
     if autofix.into() && settings.rules.should_fix(Rule::MissingRequiredImport) {
-        // Determine the location at which the import should be inserted.
-        let splice = helpers::find_splice_location(python_ast, locator);
-
-        // Generate the edit.
-        let mut contents = String::with_capacity(required_import.len() + 1);
-
-        // Newline (LF/CRLF)
-        let line_sep = stylist.line_ending().as_str();
-
-        // If we're inserting beyond the start of the file, we add
-        // a newline _before_, since the splice represents the _end_ of the last
-        // irrelevant token (e.g., the end of a comment or the end of
-        // docstring). This ensures that we properly handle awkward cases like
-        // docstrings that are followed by semicolons.
-        if splice > Location::default() {
-            contents.push_str(line_sep);
-        }
-        contents.push_str(&required_import);
-
-        // If we're inserting at the start of the file, add a trailing newline instead.
-        if splice == Location::default() {
-            contents.push_str(line_sep);
-        }
-
-        // Construct the fix.
-        diagnostic.set_fix(Edit::insertion(contents, splice));
+        let Insertion {
+            prefix,
+            location,
+            suffix,
+        } = top_of_file_insertion(python_ast, locator, stylist);
+        diagnostic.set_fix(Edit::insertion(
+            format!("{prefix}{required_import}{suffix}"),
+            location,
+        ));
     }
     Some(diagnostic)
 }
@@ -224,8 +207,8 @@ pub fn add_required_imports(
                 );
                 return vec![];
             }
-
-            match &body[0].node {
+            let stmt = &body[0];
+            match &stmt.node {
                 StmtKind::ImportFrom {
                     module,
                     names,
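The snapshot updates that follow record this change in the generated fix: the edit content now carries a trailing newline rather than a leading one, and its location moves from the end of the docstring (for example, row 1, column 19) to the start of the next row (row 2, column 0). As a rough illustration of what an insertion at "row N, column 0" does, here is a hypothetical line-based sketch; ruff itself applies `Edit`s through its own fix machinery, not a helper like this:

```rust
// A hypothetical, line-based sketch of applying the new fix payload.
fn insert_at_row_start(source: &str, row: usize, text: &str) -> String {
    // `row` is 1-based; column 0 means "the very start of that row".
    let lines: Vec<&str> = source.split_inclusive('\n').collect();
    let mut out = String::new();
    for (i, line) in lines.iter().enumerate() {
        if i + 1 == row {
            out.push_str(text);
        }
        out.push_str(line);
    }
    // If the target row lies past the last line, append at the end.
    if row > lines.len() {
        out.push_str(text);
    }
    out
}

fn main() {
    let source = "\"\"\"Hello, world!\"\"\"\nx = 1\n";
    let fixed = insert_at_row_start(source, 2, "from __future__ import annotations\n");
    assert_eq!(
        fixed,
        "\"\"\"Hello, world!\"\"\"\nfrom __future__ import annotations\nx = 1\n"
    );
    println!("{fixed}");
}
```
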
@@ -15,13 +15,13 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import annotations"
+      - content: "from __future__ import annotations\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 - kind:
     name: MissingRequiredImport
@@ -36,12 +36,12 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import generator_stop"
+      - content: "from __future__ import generator_stop\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 
@@ -15,12 +15,12 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import annotations"
+      - content: "from __future__ import annotations\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 
@@ -15,12 +15,12 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import annotations"
+      - content: "from __future__ import annotations\n"
         location:
-          row: 3
-          column: 3
+          row: 4
+          column: 0
         end_location:
-          row: 3
-          column: 3
+          row: 4
+          column: 0
   parent: ~
 
@@ -15,13 +15,13 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import annotations"
+      - content: "from __future__ import annotations\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 - kind:
     name: MissingRequiredImport
@@ -36,12 +36,12 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nfrom __future__ import generator_stop"
+      - content: "from __future__ import generator_stop\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 
@@ -15,12 +15,12 @@ expression: diagnostics
     column: 0
   fix:
     edits:
-      - content: "\nimport os"
+      - content: "import os\n"
         location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
         end_location:
-          row: 1
-          column: 19
+          row: 2
+          column: 0
   parent: ~
 