Address TODOs

Charlie Marsh 2022-09-04 17:15:58 -04:00
parent 7e35c18946
commit da9a8cfd34
10 changed files with 474 additions and 193 deletions

Cargo.lock generated

@ -1786,7 +1786,7 @@ dependencies = [
[[package]]
name = "rustpython-ast"
version = "0.1.0"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=1613f6c6990011a4bc559e79aaf28d715f9f729b#1613f6c6990011a4bc559e79aaf28d715f9f729b"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=ff6112d7c70729be76eaad8c9e5b8ef24d13de99#ff6112d7c70729be76eaad8c9e5b8ef24d13de99"
dependencies = [
"num-bigint",
"rustpython-compiler-core",
@ -1795,7 +1795,7 @@ dependencies = [
[[package]]
name = "rustpython-compiler-core"
version = "0.1.2"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=1613f6c6990011a4bc559e79aaf28d715f9f729b#1613f6c6990011a4bc559e79aaf28d715f9f729b"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=ff6112d7c70729be76eaad8c9e5b8ef24d13de99#ff6112d7c70729be76eaad8c9e5b8ef24d13de99"
dependencies = [
"bincode",
"bitflags",
@ -1812,7 +1812,7 @@ dependencies = [
[[package]]
name = "rustpython-parser"
version = "0.1.2"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=1613f6c6990011a4bc559e79aaf28d715f9f729b#1613f6c6990011a4bc559e79aaf28d715f9f729b"
source = "git+https://github.com/charliermarsh/RustPython.git?rev=ff6112d7c70729be76eaad8c9e5b8ef24d13de99#ff6112d7c70729be76eaad8c9e5b8ef24d13de99"
dependencies = [
"ahash",
"anyhow",


@ -24,7 +24,7 @@ notify = { version = "4.0.17" }
once_cell = { version = "1.13.1" }
rayon = { version = "1.5.3" }
regex = { version = "1.6.0" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "1613f6c6990011a4bc559e79aaf28d715f9f729b" }
rustpython-parser = { features = ["lalrpop"], git = "https://github.com/charliermarsh/RustPython.git", rev = "ff6112d7c70729be76eaad8c9e5b8ef24d13de99" }
serde = { version = "1.0.143", features = ["derive"] }
serde_json = { version = "1.0.83" }
toml = { version = "0.5.9" }


@ -22,8 +22,20 @@ def f():
...
class F(
object,
):
...
class G(
object, # )
):
...
object = A
class F(object):
class H(object):
...


@ -4,49 +4,74 @@ use std::path::Path;
use anyhow::Result;
use rustpython_parser::ast::Location;
use crate::checks::Check;
use crate::checks::{Check, Fix};
pub enum Mode {
Generate,
Apply,
None,
}
impl From<bool> for Mode {
fn from(value: bool) -> Self {
match value {
true => Mode::Apply,
false => Mode::None,
}
}
}
/// Auto-fix errors in a file, and write the fixed source code to disk.
pub fn fix_file(checks: &mut [Check], contents: &str, path: &Path, mode: &Mode) -> Result<()> {
if !matches!(mode, Mode::Apply) {
return Ok(());
};
// TODO(charlie): This should take Vec<Fix>.
// TODO(charlie): Add tests.
pub fn apply_fixes(checks: &mut Vec<Check>, contents: &str, path: &Path) -> Result<()> {
if checks.iter().all(|check| check.fix.is_none()) {
return Ok(());
}
let output = apply_fixes(
checks.iter_mut().filter_map(|check| check.fix.as_mut()),
contents,
);
fs::write(path, output).map_err(|e| e.into())
}
/// Apply a series of fixes.
fn apply_fixes<'a>(fixes: impl Iterator<Item = &'a mut Fix>, contents: &str) -> String {
let lines: Vec<&str> = contents.lines().collect();
let mut last_pos: Location = Location::new(0, 0);
let mut output = "".to_string();
let mut last_pos: Location = Location::new(0, 0);
for check in checks {
if let Some(fix) = &check.fix {
if last_pos.row() > fix.start.row()
|| (last_pos.row()) == fix.start.row() && last_pos.column() > fix.start.column()
{
continue;
}
if fix.start.row() > last_pos.row() {
if last_pos.row() > 0 || last_pos.column() > 0 {
output.push_str(&lines[last_pos.row() - 1][last_pos.column() - 1..]);
output.push('\n');
}
for line in &lines[last_pos.row()..fix.start.row() - 1] {
output.push_str(line);
output.push('\n');
}
output.push_str(&lines[fix.start.row() - 1][..fix.start.column() - 1]);
output.push_str(&fix.content);
} else {
output.push_str(
&lines[last_pos.row() - 1][last_pos.column() - 1..fix.start.column() - 1],
);
output.push_str(&fix.content);
}
last_pos = fix.end;
check.fixed = true;
for fix in fixes {
// Best-effort approach: if this fix overlaps with a fix we've already applied, skip it.
if last_pos > fix.start {
continue;
}
if fix.start.row() > last_pos.row() {
if last_pos.row() > 0 || last_pos.column() > 0 {
output.push_str(&lines[last_pos.row() - 1][last_pos.column() - 1..]);
output.push('\n');
}
for line in &lines[last_pos.row()..fix.start.row() - 1] {
output.push_str(line);
output.push('\n');
}
output.push_str(&lines[fix.start.row() - 1][..fix.start.column() - 1]);
output.push_str(&fix.content);
} else {
output.push_str(
&lines[last_pos.row() - 1][last_pos.column() - 1..fix.start.column() - 1],
);
output.push_str(&fix.content);
}
last_pos = fix.end;
fix.applied = true;
}
if last_pos.row() > 0 || last_pos.column() > 0 {
@ -58,5 +83,137 @@ pub fn apply_fixes(checks: &mut Vec<Check>, contents: &str, path: &Path) -> Resu
output.push('\n');
}
fs::write(path, output).map_err(|e| e.into())
output
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use rustpython_parser::ast::Location;
use crate::autofix::apply_fixes;
use crate::checks::Fix;
#[test]
fn empty_file() -> Result<()> {
let mut fixes = vec![];
let actual = apply_fixes(fixes.iter_mut(), "");
let expected = "";
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn apply_single_replacement() -> Result<()> {
let mut fixes = vec![Fix {
content: "Bar".to_string(),
start: Location::new(1, 9),
end: Location::new(1, 15),
applied: false,
}];
let actual = apply_fixes(
fixes.iter_mut(),
"class A(object):
...
",
);
let expected = "class A(Bar):
...
";
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn apply_single_removal() -> Result<()> {
let mut fixes = vec![Fix {
content: "".to_string(),
start: Location::new(1, 8),
end: Location::new(1, 16),
applied: false,
}];
let actual = apply_fixes(
fixes.iter_mut(),
"class A(object):
...
",
);
let expected = "class A:
...
";
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn apply_double_removal() -> Result<()> {
let mut fixes = vec![
Fix {
content: "".to_string(),
start: Location::new(1, 8),
end: Location::new(1, 17),
applied: false,
},
Fix {
content: "".to_string(),
start: Location::new(1, 17),
end: Location::new(1, 24),
applied: false,
},
];
let actual = apply_fixes(
fixes.iter_mut(),
"class A(object, object):
...
",
);
let expected = "class A:
...
";
assert_eq!(actual, expected);
Ok(())
}
#[test]
fn ignore_overlapping_fixes() -> Result<()> {
let mut fixes = vec![
Fix {
content: "".to_string(),
start: Location::new(1, 8),
end: Location::new(1, 16),
applied: false,
},
Fix {
content: "ignored".to_string(),
start: Location::new(1, 10),
end: Location::new(1, 12),
applied: false,
},
];
let actual = apply_fixes(
fixes.iter_mut(),
"class A(object):
...
",
);
let expected = "class A:
...
";
assert_eq!(actual, expected);
Ok(())
}
}
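A minimal usage sketch (not part of this commit) of how the new `Mode` and `fix_file` API above could be driven from a boolean CLI flag; the `maybe_fix` helper is hypothetical, while `Mode`, `fix_file`, and `Check` are taken from the diff.

use std::path::Path;

use anyhow::Result;

use crate::autofix::{fix_file, Mode};
use crate::checks::Check;

fn maybe_fix(checks: &mut [Check], contents: &str, path: &Path, autofix: bool) -> Result<()> {
    // `Mode::from(bool)` maps `true` to `Mode::Apply` and `false` to `Mode::None`;
    // `fix_file` returns early unless the mode is `Mode::Apply`.
    let mode: Mode = autofix.into();
    fix_file(checks, contents, path, &mode)
}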


@ -12,11 +12,12 @@ use crate::builtins::{BUILTINS, MAGIC_GLOBALS};
use crate::checks::{Check, CheckCode, CheckKind};
use crate::settings::Settings;
use crate::visitor::{walk_excepthandler, Visitor};
use crate::{fixer, visitor};
use crate::{autofix, fixer, visitor};
struct Checker<'a> {
lines: &'a [&'a str],
content: &'a str,
settings: &'a Settings,
autofix: &'a autofix::Mode,
path: &'a str,
checks: Vec<Check>,
scopes: Vec<Scope>,
@ -29,11 +30,17 @@ struct Checker<'a> {
}
impl Checker<'_> {
pub fn new<'a>(settings: &'a Settings, path: &'a str, lines: &'a [&'a str]) -> Checker<'a> {
pub fn new<'a>(
settings: &'a Settings,
autofix: &'a autofix::Mode,
path: &'a str,
content: &'a str,
) -> Checker<'a> {
Checker {
settings,
autofix,
path,
lines,
content,
checks: vec![],
scopes: vec![],
dead_scopes: vec![],
@ -138,16 +145,18 @@ impl Visitor for Checker<'_> {
CheckKind::UselessObjectInheritance(name.to_string()),
expr.location,
);
// TODO(charlie): Only bother to do this if autofix is
// enabled.
if let Some(fix) = fixer::remove_object_base(
self.lines,
&stmt.location,
expr.location,
bases,
keywords,
) {
check.amend(fix);
if matches!(self.autofix, autofix::Mode::Generate)
|| matches!(self.autofix, autofix::Mode::Apply)
{
if let Some(fix) = fixer::remove_class_def_base(
self.content,
&stmt.location,
expr.location,
bases,
keywords,
) {
check.amend(fix);
}
}
self.checks.push(check);
}
@ -344,10 +353,10 @@ impl Visitor for Checker<'_> {
for (idx, handler) in handlers.iter().enumerate() {
let ExcepthandlerKind::ExceptHandler { type_, .. } = &handler.node;
if type_.is_none() && idx < handlers.len() - 1 {
self.checks.push(Check {
kind: CheckKind::DefaultExceptNotLast,
location: handler.location,
});
self.checks.push(Check::new(
CheckKind::DefaultExceptNotLast,
handler.location,
));
}
}
}
@ -814,9 +823,14 @@ impl Checker<'_> {
}
}
pub fn check_ast(python_ast: &Suite, content: &str, settings: &Settings, path: &str) -> Vec<Check> {
let lines: Vec<&str> = content.lines().collect();
let mut checker = Checker::new(settings, path, &lines);
pub fn check_ast(
python_ast: &Suite,
content: &str,
settings: &Settings,
autofix: &autofix::Mode,
path: &str,
) -> Vec<Check> {
let mut checker = Checker::new(settings, autofix, path, content);
checker.push_scope(Scope::new(ScopeKind::Module));
checker.bind_builtins();


@ -225,19 +225,19 @@ impl CheckKind {
}
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Fix {
pub content: String,
pub start: Location,
pub end: Location,
pub applied: bool,
}
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Check {
pub kind: CheckKind,
pub location: Location,
pub fix: Option<Fix>,
pub fixed: bool,
}
static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
@ -251,7 +251,6 @@ impl Check {
kind,
location,
fix: None,
fixed: false,
}
}
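An illustrative sketch of the reshaped `Check`, assuming `Check::new` and `amend` as they are used in the check_ast diff above: a check now starts with `fix: None` and gains a `Fix` via `amend`, instead of carrying a `fixed: bool` flag.

use rustpython_parser::ast::Location;

use crate::checks::{Check, CheckKind, Fix};

fn example() {
    let mut check = Check::new(
        CheckKind::UselessObjectInheritance("A".to_string()),
        Location::new(1, 9),
    );
    // Attach an auto-fix that deletes the `(object)` base (columns 8..16 on line 1).
    check.amend(Fix {
        content: "".to_string(),
        start: Location::new(1, 8),
        end: Location::new(1, 16),
        applied: false,
    });
    assert!(check.fix.is_some());
}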


@ -4,43 +4,62 @@ use rustpython_parser::token::Tok;
use crate::checks::Fix;
fn to_absolute(location: &Location, base: &Location) -> Location {
if location.row() == 1 {
/// Convert a location within a file (relative to `base`) to an absolute position.
fn to_absolute(relative: &Location, base: &Location) -> Location {
if relative.row() == 1 {
Location::new(
location.row() + base.row() - 1,
location.column() + base.column() - 1,
relative.row() + base.row() - 1,
relative.column() + base.column() - 1,
)
} else {
Location::new(location.row() + base.row() - 1, location.column())
Location::new(relative.row() + base.row() - 1, relative.column())
}
}
pub fn remove_object_base(
lines: &[&str],
/// Generate a fix to remove a base from a ClassDef statement.
pub fn remove_class_def_base(
content: &str,
stmt_at: &Location,
expr_at: Location,
bases: &[Expr],
keywords: &[Keyword],
) -> Option<Fix> {
// TODO(charlie): Pre-compute these offsets.
let mut offset = 0;
for i in content.lines().take(stmt_at.row() - 1) {
offset += i.len();
offset += 1;
}
offset += stmt_at.column() - 1;
let output = &content[offset..];
// Case 1: `object` is the only base.
if bases.len() == 1 && keywords.is_empty() {
let lxr = lexer::make_tokenizer(&lines[stmt_at.row() - 1][stmt_at.column() - 1..]);
let mut fix_start = None;
let mut fix_end = None;
let mut count: usize = 0;
for (start, tok, end) in lxr.flatten() {
if matches!(tok, Tok::Lpar) {
if count == 0 {
fix_start = Some(to_absolute(&start, stmt_at));
}
count += 1;
}
for result in lexer::make_tokenizer(output) {
match result {
Ok((start, tok, end)) => {
if matches!(tok, Tok::Lpar) {
if count == 0 {
fix_start = Some(to_absolute(&start, stmt_at));
}
count += 1;
}
if matches!(tok, Tok::Rpar) {
count -= 1;
if count == 0 {
fix_end = Some(to_absolute(&end, stmt_at));
if matches!(tok, Tok::Rpar) {
count -= 1;
if count == 0 {
fix_end = Some(to_absolute(&end, stmt_at));
}
}
if fix_start.is_some() && fix_end.is_some() {
break;
};
}
Err(_) => break,
}
if fix_start.is_some() && fix_end.is_some() {
@ -48,72 +67,99 @@ pub fn remove_object_base(
};
}
return Some(Fix {
content: "".to_string(),
start: fix_start.unwrap(),
end: fix_end.unwrap(),
});
return match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix {
content: "".to_string(),
start,
end,
applied: false,
}),
_ => None,
};
}
// Case 2: `object` is _not_ the last node.
let mut closest_after_expr: Option<Location> = None;
for location in bases
if bases
.iter()
.map(|node| node.location)
.chain(keywords.iter().map(|node| node.location))
.any(|location| location > expr_at)
{
// If the node comes after the node we're removing...
if location.row() > expr_at.row()
|| (location.row() == expr_at.row() && location.column() > expr_at.column())
{
match closest_after_expr {
None => closest_after_expr = Some(location),
Some(existing) => {
// And before the next closest node...
if location.row() < existing.row()
|| (location.row() == existing.row()
&& location.column() < existing.column())
// Case 2: `object` is _not_ the last node.
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
let mut seen_comma = false;
for result in lexer::make_tokenizer(output) {
match result {
Ok((start, tok, _)) => {
let start = to_absolute(&start, stmt_at);
if seen_comma
&& !matches!(tok, Tok::Newline)
&& !matches!(tok, Tok::Indent)
&& !matches!(tok, Tok::Dedent)
&& !matches!(tok, Tok::StartExpression)
&& !matches!(tok, Tok::StartModule)
&& !matches!(tok, Tok::StartInteractive)
{
closest_after_expr = Some(location);
fix_end = Some(start);
break;
}
if start == expr_at {
fix_start = Some(start);
}
if fix_start.is_some() && matches!(tok, Tok::Comma) {
seen_comma = true;
}
}
Err(_) => break,
}
if fix_start.is_some() && fix_end.is_some() {
break;
};
}
}
match closest_after_expr {
Some(end) => {
return Some(Fix {
match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix {
content: "".to_string(),
start: expr_at,
start,
end,
});
applied: false,
}),
_ => None,
}
None => {}
}
} else {
// Case 3: `object` is the last node, so we have to find the last token that isn't a comma.
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
for result in lexer::make_tokenizer(output) {
match result {
Ok((start, tok, end)) => {
let start = to_absolute(&start, stmt_at);
let end = to_absolute(&end, stmt_at);
if start == expr_at {
fix_end = Some(end);
break;
}
if matches!(tok, Tok::Comma) {
fix_start = Some(start);
}
}
Err(_) => break,
}
// Case 3: `object` is the last node, so we have to find the last token that isn't a comma.
let lxr = lexer::make_tokenizer(&lines[stmt_at.row() - 1][stmt_at.column() - 1..]);
let mut fix_start: Option<Location> = None;
let mut fix_end: Option<Location> = None;
for (start, tok, end) in lxr.flatten() {
let start = to_absolute(&start, stmt_at);
let end = to_absolute(&end, stmt_at);
if start == expr_at {
fix_end = Some(end);
break;
if fix_start.is_some() && fix_end.is_some() {
break;
};
}
if matches!(tok, Tok::Comma) {
fix_start = Some(start);
}
}
match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix {
content: "".to_string(),
start,
end,
}),
_ => None,
match (fix_start, fix_end) {
(Some(start), Some(end)) => Some(Fix {
content: "".to_string(),
start,
end,
applied: false,
}),
_ => None,
}
}
}
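To make the coordinate bookkeeping concrete, a self-contained sketch of the offset rule implemented by `to_absolute` above, worked with plain (row, column) tuples; the numbers are illustrative.

fn to_absolute(relative: (usize, usize), base: (usize, usize)) -> (usize, usize) {
    let (rel_row, rel_col) = relative;
    let (base_row, base_col) = base;
    if rel_row == 1 {
        // A token on the tokenizer's first line shares the statement's physical line,
        // so its column must also be shifted by the statement's start column.
        (rel_row + base_row - 1, rel_col + base_col - 1)
    } else {
        // Tokens on later lines keep their own column; only the row is shifted.
        (rel_row + base_row - 1, rel_col)
    }
}

fn main() {
    // A statement starting at row 5, column 5; the tokenizer reports positions
    // relative to that slice, starting at (1, 1).
    assert_eq!(to_absolute((1, 4), (5, 5)), (5, 8));
    assert_eq!(to_absolute((2, 3), (5, 5)), (6, 3));
}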


@ -4,19 +4,19 @@ use anyhow::Result;
use log::debug;
use rustpython_parser::parser;
use crate::autofix::apply_fixes;
use crate::autofix::fix_file;
use crate::check_ast::check_ast;
use crate::check_lines::check_lines;
use crate::checks::{Check, LintSource};
use crate::message::Message;
use crate::settings::Settings;
use crate::{cache, fs};
use crate::{autofix, cache, fs};
pub fn check_path(
path: &Path,
settings: &Settings,
mode: &cache::Mode,
autofix: bool,
autofix: &autofix::Mode,
) -> Result<Vec<Message>> {
// Check the cache.
if let Some(messages) = cache::get(path, settings, mode) {
@ -38,23 +38,21 @@ pub fn check_path(
{
let path = path.to_string_lossy();
let python_ast = parser::parse_program(&contents, &path)?;
checks.extend(check_ast(&python_ast, &contents, settings, &path));
checks.extend(check_ast(&python_ast, &contents, settings, autofix, &path));
}
// Run the lines-based checks.
check_lines(&mut checks, &contents, settings);
// Apply autofix.
if autofix {
apply_fixes(&mut checks, &contents, path)?;
}
fix_file(&mut checks, &contents, path, autofix)?;
// Convert to messages.
let messages: Vec<Message> = checks
.into_iter()
.map(|check| Message {
kind: check.kind,
fixed: check.fixed,
fix: check.fix,
location: check.location,
filename: path.to_string_lossy().to_string(),
})
@ -72,10 +70,10 @@ mod tests {
use anyhow::Result;
use rustpython_parser::ast::Location;
use crate::checks::{CheckCode, CheckKind};
use crate::checks::{CheckCode, CheckKind, Fix};
use crate::linter::check_path;
use crate::message::Message;
use crate::{cache, settings};
use crate::{autofix, cache, settings};
#[test]
fn e402() -> Result<()> {
@ -111,13 +109,13 @@ mod tests {
select: BTreeSet::from([CheckCode::E501]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![Message {
kind: CheckKind::LineTooLong,
location: Location::new(5, 89),
filename: "./resources/test/fixtures/E501.py".to_string(),
fixed: false,
fix: None,
}];
assert_eq!(actual.len(), expected.len());
for i in 0..actual.len() {
@ -137,26 +135,26 @@ mod tests {
select: BTreeSet::from([CheckCode::F401]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::UnusedImport("logging.handlers".to_string()),
location: Location::new(12, 1),
filename: "./resources/test/fixtures/F401.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UnusedImport("functools".to_string()),
location: Location::new(3, 1),
filename: "./resources/test/fixtures/F401.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UnusedImport("collections.OrderedDict".to_string()),
location: Location::new(4, 1),
filename: "./resources/test/fixtures/F401.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -177,20 +175,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F403]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::ImportStarUsage,
location: Location::new(1, 1),
filename: "./resources/test/fixtures/F403.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::ImportStarUsage,
location: Location::new(2, 1),
filename: "./resources/test/fixtures/F403.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -210,26 +208,26 @@ mod tests {
select: BTreeSet::from([CheckCode::F541]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::FStringMissingPlaceholders,
location: Location::new(4, 7),
filename: "./resources/test/fixtures/F541.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::FStringMissingPlaceholders,
location: Location::new(5, 7),
filename: "./resources/test/fixtures/F541.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::FStringMissingPlaceholders,
location: Location::new(7, 7),
filename: "./resources/test/fixtures/F541.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -250,17 +248,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F631]),
},
&cache::Mode::None,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::AssertTuple,
location: Location::new(1, 1),
filename: "./resources/test/fixtures/F631.py".to_string(),
fix: None,
},
Message {
kind: CheckKind::AssertTuple,
location: Location::new(2, 1),
filename: "./resources/test/fixtures/F631.py".to_string(),
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -281,20 +282,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F634]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::IfTuple,
location: Location::new(1, 1),
filename: "./resources/test/fixtures/F634.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::IfTuple,
location: Location::new(7, 5),
filename: "./resources/test/fixtures/F634.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -315,26 +316,26 @@ mod tests {
select: BTreeSet::from([CheckCode::F704]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::YieldOutsideFunction,
location: Location::new(6, 5),
filename: "./resources/test/fixtures/F704.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::YieldOutsideFunction,
location: Location::new(9, 1),
filename: "./resources/test/fixtures/F704.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::YieldOutsideFunction,
location: Location::new(10, 1),
filename: "./resources/test/fixtures/F704.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -355,20 +356,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F706]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::ReturnOutsideFunction,
location: Location::new(6, 5),
filename: "./resources/test/fixtures/F706.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::ReturnOutsideFunction,
location: Location::new(9, 1),
filename: "./resources/test/fixtures/F706.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -389,22 +390,26 @@ mod tests {
select: BTreeSet::from([CheckCode::F707]),
},
&cache::Mode::None,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::DefaultExceptNotLast,
location: Location::new(3, 1),
filename: "./resources/test/fixtures/F707.py".to_string(),
fix: None,
},
Message {
kind: CheckKind::DefaultExceptNotLast,
location: Location::new(10, 1),
filename: "./resources/test/fixtures/F707.py".to_string(),
fix: None,
},
Message {
kind: CheckKind::DefaultExceptNotLast,
location: Location::new(19, 1),
filename: "./resources/test/fixtures/F707.py".to_string(),
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -425,32 +430,32 @@ mod tests {
select: BTreeSet::from([CheckCode::F821]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::UndefinedName("self".to_string()),
location: Location::new(2, 12),
filename: "./resources/test/fixtures/F821.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UndefinedName("self".to_string()),
location: Location::new(6, 13),
filename: "./resources/test/fixtures/F821.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UndefinedName("self".to_string()),
location: Location::new(10, 9),
filename: "./resources/test/fixtures/F821.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UndefinedName("numeric_string".to_string()),
location: Location::new(21, 12),
filename: "./resources/test/fixtures/F821.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -471,13 +476,13 @@ mod tests {
select: BTreeSet::from([CheckCode::F822]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![Message {
kind: CheckKind::UndefinedExport("b".to_string()),
location: Location::new(3, 1),
filename: "./resources/test/fixtures/F822.py".to_string(),
fixed: false,
fix: None,
}];
assert_eq!(actual.len(), expected.len());
for i in 0..actual.len() {
@ -497,13 +502,13 @@ mod tests {
select: BTreeSet::from([CheckCode::F823]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![Message {
kind: CheckKind::UndefinedLocal("my_var".to_string()),
location: Location::new(6, 5),
filename: "./resources/test/fixtures/F823.py".to_string(),
fixed: false,
fix: None,
}];
assert_eq!(actual.len(), expected.len());
for i in 0..actual.len() {
@ -523,26 +528,26 @@ mod tests {
select: BTreeSet::from([CheckCode::F831]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::DuplicateArgumentName,
location: Location::new(1, 25),
filename: "./resources/test/fixtures/F831.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::DuplicateArgumentName,
location: Location::new(5, 28),
filename: "./resources/test/fixtures/F831.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::DuplicateArgumentName,
location: Location::new(9, 27),
filename: "./resources/test/fixtures/F831.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -563,20 +568,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F841]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::UnusedVariable("e".to_string()),
location: Location::new(3, 1),
filename: "./resources/test/fixtures/F841.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::UnusedVariable("z".to_string()),
location: Location::new(16, 5),
filename: "./resources/test/fixtures/F841.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -597,20 +602,20 @@ mod tests {
select: BTreeSet::from([CheckCode::F901]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::RaiseNotImplemented,
location: Location::new(2, 5),
filename: "./resources/test/fixtures/F901.py".to_string(),
fixed: false,
fix: None,
},
Message {
kind: CheckKind::RaiseNotImplemented,
location: Location::new(6, 5),
filename: "./resources/test/fixtures/F901.py".to_string(),
fixed: false,
fix: None,
},
];
assert_eq!(actual.len(), expected.len());
@ -631,32 +636,74 @@ mod tests {
select: BTreeSet::from([CheckCode::R0205]),
},
&cache::Mode::None,
false,
&autofix::Mode::Generate,
)?;
let expected = vec![
Message {
kind: CheckKind::UselessObjectInheritance("B".to_string()),
location: Location::new(5, 9),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fixed: false,
fix: Some(Fix {
content: "".to_string(),
start: Location::new(5, 8),
end: Location::new(5, 16),
applied: false,
}),
},
Message {
kind: CheckKind::UselessObjectInheritance("C".to_string()),
location: Location::new(9, 12),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fixed: false,
fix: Some(Fix {
content: "".to_string(),
start: Location::new(9, 10),
end: Location::new(9, 18),
applied: false,
}),
},
Message {
kind: CheckKind::UselessObjectInheritance("D".to_string()),
location: Location::new(14, 5),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fixed: false,
fix: Some(Fix {
content: "".to_string(),
start: Location::new(14, 5),
end: Location::new(15, 5),
applied: false,
}),
},
Message {
kind: CheckKind::UselessObjectInheritance("E".to_string()),
location: Location::new(21, 13),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fixed: false,
fix: Some(Fix {
content: "".to_string(),
start: Location::new(21, 12),
end: Location::new(21, 20),
applied: false,
}),
},
Message {
kind: CheckKind::UselessObjectInheritance("F".to_string()),
location: Location::new(26, 5),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fix: Some(Fix {
content: "".to_string(),
start: Location::new(25, 8),
end: Location::new(27, 2),
applied: false,
}),
},
Message {
kind: CheckKind::UselessObjectInheritance("G".to_string()),
location: Location::new(32, 5),
filename: "./resources/test/fixtures/R0205.py".to_string(),
fix: Some(Fix {
content: "".to_string(),
start: Location::new(31, 8),
end: Location::new(33, 2),
applied: false,
}),
},
];
assert_eq!(actual.len(), expected.len());


@ -70,7 +70,7 @@ fn run_once(
let mut messages: Vec<Message> = files
.par_iter()
.map(|entry| {
check_path(entry.path(), settings, &cache.into(), autofix).unwrap_or_else(|e| {
check_path(entry.path(), settings, &cache.into(), &autofix.into()).unwrap_or_else(|e| {
error!("Failed to check {}: {e:?}", entry.path().to_string_lossy());
vec![]
})
@ -86,7 +86,13 @@ fn run_once(
fn report_once(messages: &[Message]) -> Result<()> {
let (fixed, outstanding): (Vec<&Message>, Vec<&Message>) =
messages.iter().partition(|message| message.fixed);
messages.iter().partition(|message| {
message
.fix
.as_ref()
.map(|fix| fix.applied)
.unwrap_or_default()
});
// TODO(charlie): If autofix is disabled, but some rules are fixable, tell the user.
if fixed.is_empty() {


@ -5,12 +5,12 @@ use colored::Colorize;
use rustpython_parser::ast::Location;
use serde::{Deserialize, Serialize};
use crate::checks::CheckKind;
use crate::checks::{CheckKind, Fix};
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Message {
pub kind: CheckKind,
pub fixed: bool,
pub fix: Option<Fix>,
pub location: Location,
pub filename: String,
}