mirror of https://github.com/astral-sh/ruff, synced 2026-01-22 22:10:48 -05:00
Only enforce multi-line noqa directives for strings (#258)
@@ -39,8 +39,6 @@ pub fn check_lines(
     let lines: Vec<&str> = contents.lines().collect();
     for (lineno, line) in lines.iter().enumerate() {
-        let mut did_insert: bool = false;
-
         // Grab the noqa (logical) line number for the current (physical) line.
         // If there are newlines at the end of the file, they won't be represented in
         // `noqa_line_for`, so fallback to the current line.

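Dropping `did_insert` is safe because the map insertions below all go through `HashMap::entry(...).or_insert_with(...)`, which evaluates its closure only on first access; the flag was guarding against re-parsing that the entry API already prevents. A minimal sketch of that property (standalone; the counter is hypothetical, not part of the codebase):

use std::collections::HashMap;

fn main() {
    let mut parses = 0;
    let mut directives: HashMap<usize, &str> = HashMap::new();
    for _ in 0..3 {
        // The closure runs only while the key is vacant, so the "parse"
        // happens once per line even without a did_insert flag.
        directives.entry(7).or_insert_with(|| {
            parses += 1;
            "# noqa"
        });
    }
    assert_eq!(parses, 1);
}
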
@@ -49,45 +47,19 @@ pub fn check_lines(
             .map(|lineno| lineno - 1)
             .unwrap_or(lineno);

-        if enforce_noqa && !did_insert {
-            // Try the current physical line.
+        if enforce_noqa {
             noqa_directives
-                .entry(lineno)
-                .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-            // Try the current logical line.
-            if lineno != noqa_lineno {
-                noqa_directives
-                    .entry(noqa_lineno)
-                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
-            }
-            did_insert = true;
+                .entry(noqa_lineno)
+                .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));
         }

         // Remove any ignored checks.
         // TODO(charlie): Only validate checks for the current line.
         for (index, check) in checks.iter().enumerate() {
             if check.location.row() == lineno + 1 {
-                if !did_insert {
-                    // Try the current physical line.
-                    noqa_directives
-                        .entry(lineno)
-                        .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-                    // Try the current logical line.
-                    if lineno != noqa_lineno {
-                        noqa_directives.entry(noqa_lineno).or_insert_with(|| {
-                            (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![])
-                        });
-                    }
-                    did_insert = true;
-                }
-
-                let noqa = if lineno != noqa_lineno
-                    && matches!(noqa_directives.get(&lineno).unwrap(), (Directive::None, _))
-                {
-                    noqa_directives.get_mut(&noqa_lineno).unwrap()
-                } else {
-                    noqa_directives.get_mut(&lineno).unwrap()
-                };
+                let noqa = noqa_directives
+                    .entry(noqa_lineno)
+                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));

                 match noqa {
                     (Directive::All(_), matches) => {

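With the map keyed by the logical line alone, both insertion sites above collapse into the same shape: resolve `noqa_lineno`, then parse-once-and-reuse via the entry API. A self-contained sketch of that pattern, with a stub standing in for `noqa::extract_noqa_directive` and a hand-written `Directive` (the real enum lives in src/noqa.rs and has more variants than shown in this diff):

use std::collections::HashMap;

// Stand-in for the Directive enum in src/noqa.rs; only the variants
// visible in this diff are sketched here.
#[derive(Debug)]
enum Directive {
    None,
    All(usize),
}

// Stub for noqa::extract_noqa_directive: the real parser recognizes
// `# noqa` and `# noqa: E501`-style comments.
fn extract_noqa_directive(line: &str) -> Directive {
    match line.find("# noqa") {
        Some(offset) => Directive::All(offset),
        None => Directive::None,
    }
}

fn main() {
    let lines = vec!["x = '''abc", "def", "'''  # noqa"];
    let mut noqa_directives: HashMap<usize, (Directive, Vec<usize>)> = HashMap::new();

    // Line 0 carries no directive; both lookups that resolve to line 2
    // share a single parsed Directive::All.
    for noqa_lineno in [0usize, 2, 2] {
        noqa_directives
            .entry(noqa_lineno)
            .or_insert_with(|| (extract_noqa_directive(lines[noqa_lineno]), vec![]));
    }
    println!("{:?}", noqa_directives);
}
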
@@ -109,26 +81,9 @@ pub fn check_lines(
         if enforce_line_too_long {
             let line_length = line.chars().count();
             if should_enforce_line_length(line, line_length, settings.line_length) {
-                if !did_insert {
-                    // Try the current physical line.
-                    noqa_directives
-                        .entry(lineno)
-                        .or_insert_with(|| (noqa::extract_noqa_directive(lines[lineno]), vec![]));
-                    // Try the current logical line.
-                    if lineno != noqa_lineno {
-                        noqa_directives.entry(noqa_lineno).or_insert_with(|| {
-                            (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![])
-                        });
-                    }
-                }
-
-                let noqa = if lineno != noqa_lineno
-                    && matches!(noqa_directives.get(&lineno).unwrap(), (Directive::None, _))
-                {
-                    noqa_directives.get_mut(&noqa_lineno).unwrap()
-                } else {
-                    noqa_directives.get_mut(&lineno).unwrap()
-                };
+                let noqa = noqa_directives
+                    .entry(noqa_lineno)
+                    .or_insert_with(|| (noqa::extract_noqa_directive(lines[noqa_lineno]), vec![]));

                 let check = Check::new(
                     CheckKind::LineTooLong(line_length, settings.line_length),

@@ -27,7 +27,7 @@ fn check_path(
     let lxr: Vec<LexResult> = lexer::make_tokenizer(contents).collect();

     // Determine the noqa line for every line in the source.
-    let noqa_line_for = noqa::extract_line_map(&lxr);
+    let noqa_line_for = noqa::extract_noqa_line_for(&lxr);

     // Run the AST-based checks.
     if settings

src/noqa.rs (57 changed lines)
@@ -37,9 +37,10 @@ pub fn extract_noqa_directive(line: &str) -> Directive {
     }
 }

-pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
+pub fn extract_noqa_line_for(lxr: &[LexResult]) -> Vec<usize> {
     let mut line_map: Vec<usize> = vec![];

+    let mut last_is_string = false;
     let mut last_seen = usize::MIN;
     let mut min_line = usize::MAX;
     let mut max_line = usize::MIN;

@@ -53,7 +54,14 @@ pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
                 min_line = min(min_line, start.row());
                 max_line = max(max_line, start.row());

-                line_map.extend(vec![max_line; (max_line + 1) - min_line]);
+                // For now, we only care about preserving noqa directives across multi-line strings.
+                if last_is_string {
+                    line_map.extend(vec![max_line; (max_line + 1) - min_line]);
+                } else {
+                    for i in (min_line - 1)..(max_line) {
+                        line_map.push(i + 1);
+                    }
+                }

                 min_line = usize::MAX;
                 max_line = usize::MIN;

@@ -69,6 +77,7 @@ pub fn extract_line_map(lxr: &[LexResult]) -> Vec<usize> {
             max_line = max(max_line, end.row());
         }
         last_seen = start.row();
+        last_is_string = matches!(tok, Tok::String { .. });
     }

     line_map

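Taken together, the two hunks above implement a per-logical-line flush: while scanning tokens, the function tracks the minimum and maximum physical row of the current logical line plus whether its last token was a string, and at each flush emits either a run of `max_line` entries (string case) or an identity run. A simplified re-implementation, under the assumption that each logical line has already been summarized as a `(first_row, last_row, ends_with_string)` tuple (the real function derives these from the lexer's token stream and `last_seen` bookkeeping):

// Rows are 1-based, matching the original.
fn noqa_line_for(logical_lines: &[(usize, usize, bool)]) -> Vec<usize> {
    let mut line_map: Vec<usize> = vec![];
    for &(min_line, max_line, ends_with_string) in logical_lines {
        if ends_with_string {
            // Every physical line of a multi-line string points at the
            // string's last line, where a trailing `# noqa` would sit.
            line_map.extend(vec![max_line; (max_line + 1) - min_line]);
        } else {
            // All other lines map to themselves.
            for i in (min_line - 1)..max_line {
                line_map.push(i + 1);
            }
        }
    }
    line_map
}

fn main() {
    // x = '''abc\ndef\nghi\n''' spans rows 1-4; y = 2 and z = x + 1 follow.
    let summary = [(1, 4, true), (5, 5, false), (6, 6, false)];
    assert_eq!(noqa_line_for(&summary), vec![4, 4, 4, 4, 5, 6]);
}
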
@@ -80,7 +89,7 @@ mod tests {
     use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;

-    use crate::noqa::extract_line_map;
+    use crate::noqa::extract_noqa_line_for;

     #[test]
     fn line_map() -> Result<()> {

@@ -90,50 +99,50 @@ y = 2
 z = x + 1",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "
 x = 1
 y = 2
 z = x + 1",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3, 4]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1
 y = 2
 z = x + 1
 ",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = 1

 y = 2
 z = x + 1
 ",
         )
         .collect();
-        println!("{:?}", extract_line_map(&lxr));
-        assert_eq!(extract_line_map(&lxr), vec![1, 2, 3, 4]);
+        println!("{:?}", extract_noqa_line_for(&lxr));
+        assert_eq!(extract_noqa_line_for(&lxr), vec![1, 2, 3, 4]);

         let lxr: Vec<LexResult> = lexer::make_tokenizer(
             "x = '''abc
 def
 ghi
 '''
 y = 2
 z = x + 1",
         )
         .collect();
-        assert_eq!(extract_line_map(&lxr), vec![4, 4, 4, 4, 5, 6]);
+        assert_eq!(extract_noqa_line_for(&lxr), vec![4, 4, 4, 4, 5, 6]);

         Ok(())
     }

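Tying the tests back to `check_lines`: a check reported on any physical row of the string resolves through the map to the row that can actually carry the directive, via the `.map(|lineno| lineno - 1).unwrap_or(lineno)` step from the first hunk. A standalone sketch of that resolution, reusing the mapping from the last test above:

fn main() {
    // Mapping for: x = '''abc / def / ghi / ''' / y = 2 / z = x + 1
    let noqa_line_for: Vec<usize> = vec![4, 4, 4, 4, 5, 6];

    // A check on physical line 2 (0-based lineno = 1), e.g. E501 inside
    // the string, resolves to 0-based index 3: the closing ''' line.
    let lineno = 1;
    let noqa_lineno = noqa_line_for
        .get(lineno)
        .map(|lineno| lineno - 1)
        .unwrap_or(lineno);
    assert_eq!(noqa_lineno, 3);

    // Trailing newlines aren't represented in the map, so lines past its
    // end fall back to themselves.
    let lineno = 6;
    let noqa_lineno = noqa_line_for
        .get(lineno)
        .map(|lineno| lineno - 1)
        .unwrap_or(lineno);
    assert_eq!(noqa_lineno, 6);
}
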
@@ -26,10 +26,4 @@ expression: checks
     row: 31
     column: 6
   fix: ~
-- kind:
-    UnusedNOQA: E501
-  location:
-    row: 38
-    column: 6
-  fix: ~