mirror of https://github.com/astral-sh/ruff

commit 1d5592d937 (parent 886def13bd)

    Use take-while to terminate on parse errors (#279)
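In brief: lint_path and add_noqa_to_path previously collected the whole token stream with lexer::make_tokenizer(&contents).collect(); the new tokenize helper stops lexing at the first error while keeping the Err token so it can still be reported. check_path now receives the precomputed noqa_line_for mapping instead of deriving it internally, E999 is no longer treated as a FileSystem-sourced check (syntax errors now surface through the token stream), and a new e999 test pins the behavior with an insta snapshot.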
@@ -256,7 +256,7 @@ impl CheckCode
     pub fn lint_source(&self) -> &'static LintSource {
         match self {
             CheckCode::E501 | CheckCode::M001 => &LintSource::Lines,
-            CheckCode::E902 | CheckCode::E999 => &LintSource::FileSystem,
+            CheckCode::E902 => &LintSource::FileSystem,
             _ => &LintSource::AST,
         }
     }
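For orientation, lint_source() is the dispatch point that decides which pass handles a given check code. The sketch below is illustrative only, not part of this commit; it assumes CheckCode and LintSource are importable from crate::checks (the test imports below confirm this for CheckCode) and only shows how the three variants visible in this hunk might be branched on.

use crate::checks::{CheckCode, LintSource};

// Illustrative sketch only; the real driver lives elsewhere in the crate.
fn run(code: &CheckCode) {
    match code.lint_source() {
        // Physical-line checks (e.g. E501) are handled by check_lines.
        LintSource::Lines => { /* ... */ }
        // Checks raised before or while reading the file (e.g. E902).
        LintSource::FileSystem => { /* ... */ }
        // Everything else, now including E999, falls through to the
        // AST/token-based pass.
        LintSource::AST => { /* ... */ }
    }
}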
@@ -15,19 +15,30 @@ use crate::noqa::add_noqa;
 use crate::settings::Settings;
 use crate::{cache, fs, noqa};

+/// Collect tokens up to and including the first error.
+fn tokenize(contents: &str) -> Vec<LexResult> {
+    let mut tokens: Vec<LexResult> = vec![];
+    for tok in lexer::make_tokenizer(contents) {
+        let is_err = tok.is_err();
+        tokens.push(tok);
+        if is_err {
+            break;
+        }
+    }
+    tokens
+}
+
 fn check_path(
     path: &Path,
     contents: &str,
     tokens: Vec<LexResult>,
+    noqa_line_for: &[usize],
     settings: &Settings,
     autofix: &fixer::Mode,
 ) -> Result<Vec<Check>> {
     // Aggregate all checks.
     let mut checks: Vec<Check> = vec![];

-    // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
-
     // Run the AST-based checks.
     if settings
         .select
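A note on the commit title: a plain Iterator::take_while would stop before the first Err and drop it, but the error token itself is needed downstream (the new e999 test expects a SyntaxError check). So tokenize implements an inclusive take-while with an explicit loop. As a hedged sketch, the same cut-off can also be written with iterator adapters; tokenize_inclusive is a hypothetical name, not part of the commit.

use rustpython_parser::lexer::{self, LexResult};

// Sketch: `scan` carries a "seen an error yet?" flag so the first Err
// is included in the output, after which iteration stops.
fn tokenize_inclusive(contents: &str) -> Vec<LexResult> {
    lexer::make_tokenizer(contents)
        .scan(false, |seen_err, tok| {
            if *seen_err {
                return None; // the previous token was the first error; stop
            }
            *seen_err = tok.is_err();
            Some(tok)
        })
        .collect()
}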
@@ -50,7 +61,7 @@ fn check_path(
     }

     // Run the lines-based checks.
-    check_lines(&mut checks, contents, &noqa_line_for, settings, autofix);
+    check_lines(&mut checks, contents, noqa_line_for, settings, autofix);

     // Create path ignores.
     if !checks.is_empty() && !settings.per_file_ignores.is_empty() {
@@ -84,10 +95,13 @@ pub fn lint_path(
     let contents = fs::read_file(path)?;

     // Tokenize once.
-    let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
+    let tokens: Vec<LexResult> = tokenize(&contents);
+
+    // Determine the noqa line for every line in the source.
+    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);

     // Generate checks.
-    let mut checks = check_path(path, &contents, tokens, settings, autofix)?;
+    let mut checks = check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)?;

     // Apply autofix.
     if matches!(autofix, fixer::Mode::Apply) {
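Hoisting extract_noqa_line_for out of check_path means the mapping is computed once per file and shared by both the check pass and the noqa machinery. A condensed sketch of the resulting call sequence, as if it lived in the same module; every call matches this diff, but lint_once itself is a hypothetical wrapper.

use std::path::Path;

use anyhow::Result;
use rustpython_parser::lexer::LexResult;

use crate::autofix::fixer;
use crate::checks::Check;
use crate::settings::Settings;
use crate::{fs, noqa};

// Hypothetical wrapper condensing the sequence in lint_path above.
fn lint_once(path: &Path, settings: &Settings) -> Result<Vec<Check>> {
    let contents = fs::read_file(path)?;
    // Tokenize once, stopping at the first lexer error.
    let tokens: Vec<LexResult> = tokenize(&contents);
    // Derive the noqa mapping from the same token stream.
    let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
    // Thread both into the check pass.
    check_path(path, &contents, tokens, &noqa_line_for, settings, &fixer::Mode::None)
}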
@@ -114,13 +128,20 @@ pub fn add_noqa_to_path(path: &Path, settings: &Settings) -> Result<usize> {
     let contents = fs::read_file(path)?;

     // Tokenize once.
-    let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
+    let tokens: Vec<LexResult> = tokenize(&contents);

     // Determine the noqa line for every line in the source.
     let noqa_line_for = noqa::extract_noqa_line_for(&tokens);

     // Generate checks.
-    let checks = check_path(path, &contents, tokens, settings, &fixer::Mode::None)?;
+    let checks = check_path(
+        path,
+        &contents,
+        tokens,
+        &noqa_line_for,
+        settings,
+        &fixer::Mode::None,
+    )?;

     add_noqa(&checks, &contents, &noqa_line_for, path)
 }
@@ -131,14 +152,14 @@ mod tests {

     use anyhow::Result;
     use regex::Regex;
-    use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;

     use crate::autofix::fixer;
     use crate::checks::{Check, CheckCode};
-    use crate::fs;
     use crate::linter;
+    use crate::linter::tokenize;
     use crate::settings;
+    use crate::{fs, noqa};

     fn check_path(
         path: &Path,
@@ -146,8 +167,9 @@ mod tests {
         autofix: &fixer::Mode,
     ) -> Result<Vec<Check>> {
         let contents = fs::read_file(path)?;
-        let tokens: Vec<LexResult> = lexer::make_tokenizer(&contents).collect();
-        linter::check_path(path, &contents, tokens, settings, autofix)
+        let tokens: Vec<LexResult> = tokenize(&contents);
+        let noqa_line_for = noqa::extract_noqa_line_for(&tokens);
+        linter::check_path(path, &contents, tokens, &noqa_line_for, settings, autofix)
     }

     #[test]
@@ -692,4 +714,16 @@ mod tests {
         insta::assert_yaml_snapshot!(checks);
         Ok(())
     }
+
+    #[test]
+    fn e999() -> Result<()> {
+        let mut checks = check_path(
+            Path::new("./resources/test/fixtures/E999.py"),
+            &settings::Settings::for_rule(CheckCode::E999),
+            &fixer::Mode::Generate,
+        )?;
+        checks.sort_by_key(|check| check.location);
+        insta::assert_yaml_snapshot!(checks);
+        Ok(())
+    }
 }
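The new e999 test exercises the full pipeline through the tests' local check_path helper; its expected output is the insta YAML snapshot added as a new file at the end of this diff. With the cargo-insta tool installed, `cargo insta review` can be used to inspect and accept the recorded value.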
src/noqa.rs (11 changed lines)
@@ -1,13 +1,14 @@
 use std::cmp::{max, min};
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::Path;

-use crate::checks::{Check, CheckCode};
 use anyhow::Result;
 use once_cell::sync::Lazy;
 use regex::Regex;
 use rustpython_parser::lexer::{LexResult, Tok};
-use std::collections::{BTreeMap, BTreeSet};
-use std::fs;
-use std::path::Path;
+
+use crate::checks::{Check, CheckCode};

 static NO_QA_REGEX: Lazy<Regex> = Lazy::new(|| {
     Regex::new(r"(?i)(?P<noqa>\s*# noqa(?::\s?(?P<codes>([A-Z]+[0-9]+(?:[,\s]+)?)+))?)")
@@ -160,12 +161,12 @@ pub fn add_noqa(

 #[cfg(test)]
 mod tests {
-    use crate::checks::{Check, CheckKind};
     use anyhow::Result;
     use rustpython_parser::ast::Location;
     use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;

+    use crate::checks::{Check, CheckKind};
     use crate::noqa::{add_noqa_inner, extract_noqa_line_for};

     #[test]
New file (the insta snapshot for the e999 test; its path is not shown in this view):

@@ -0,0 +1,11 @@
+---
+source: src/linter.rs
+expression: checks
+---
+- kind:
+    SyntaxError: Got unexpected EOF
+  location:
+    row: 2
+    column: 1
+  fix: ~
+
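For readers new to insta snapshots: the header names the generating source file and expression, and the body is the YAML serialization of the emitted checks, here a single E999 check whose kind is SyntaxError: Got unexpected EOF at row 2, column 1. fix: ~ is YAML for null, i.e. no autofix is attached to a syntax error.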