[`pygrep_hooks`] Fix `blanket-noqa` panic when last line has noqa with no newline (`PGH004`) (#11108)

## Summary

Resolves #11102

The error stems from these lines:

f5c7a62aa6/crates/ruff_linter/src/noqa.rs (L697-L702)

I don't really understand the purpose of incrementing the last index, but it makes the resulting range invalid for indexing into `contents`.

For now, I just detect whether the index is too high in `blanket_noqa` and adjust it if necessary.
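
To illustrate the failure mode, here is a minimal standalone sketch (not the actual ruff code) of what goes wrong when the last line is a `#noqa` comment with no trailing newline: the extended range ends one byte past the end of the source, so slicing with it panics.

```rust
fn main() {
    // A file whose only line is `#noqa`, with no trailing newline.
    let contents = "#noqa";

    // The directive's line range covers the whole file: 0..5.
    let line_start = 0;
    let line_end = contents.len();

    // The old code bumped the range one byte past EOF so that it would also
    // cover the EOF token, producing 0..6 here.
    let extended_end = line_end + 1;

    // Indexing `contents` with that range panics: byte index 6 is out of
    // bounds for a 5-byte string.
    let _line = &contents[line_start..extended_end];
}
```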

## Test Plan

Created a fixture from the issue example.
Auguste Lalande 2024-04-25 14:39:38 -04:00 committed by GitHub
parent 77c93fd63c
commit 3364ef957d
5 changed files with 24 additions and 14 deletions


@@ -0,0 +1 @@
+#noqa


@@ -652,6 +652,8 @@ pub(crate) struct NoqaDirectiveLine<'a> {
     pub(crate) directive: Directive<'a>,
     /// The codes that are ignored by the directive.
     pub(crate) matches: Vec<NoqaCode>,
+    // Whether the directive applies to range.end
+    pub(crate) includes_end: bool,
 }
 impl Ranged for NoqaDirectiveLine<'_> {
@@ -684,23 +686,18 @@ impl<'a> NoqaDirectives<'a> {
                 }
                 Ok(Some(directive)) => {
                     // noqa comments are guaranteed to be single line.
+                    let range = locator.line_range(range.start());
                     directives.push(NoqaDirectiveLine {
-                        range: locator.line_range(range.start()),
+                        range,
                         directive,
                         matches: Vec::new(),
+                        includes_end: range.end() == locator.contents().text_len(),
                     });
                 }
                 Ok(None) => {}
             }
         }
-        // Extend a mapping at the end of the file to also include the EOF token.
-        if let Some(last) = directives.last_mut() {
-            if last.range.end() == locator.contents().text_len() {
-                last.range = last.range.add_end(TextSize::from(1));
-            }
-        }
         Self { inner: directives }
     }
@@ -724,11 +721,15 @@ impl<'a> NoqaDirectives<'a> {
             .binary_search_by(|directive| {
                 if directive.range.end() < offset {
                     std::cmp::Ordering::Less
-                } else if directive.range.contains(offset) {
-                    std::cmp::Ordering::Equal
-                } else {
+                } else if directive.range.start() > offset {
                     std::cmp::Ordering::Greater
                 }
+                // At this point, end >= offset, start <= offset
+                else if !directive.includes_end && directive.range.end() == offset {
+                    std::cmp::Ordering::Less
+                } else {
+                    std::cmp::Ordering::Equal
+                }
             })
             .ok()
     }
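
As a rough model of the lookup above (simplified to `Range<usize>` and a plain struct instead of ruff's `TextRange` and locator types, so treat it as a sketch of the idea rather than the real implementation): an offset equal to a directive's exclusive end only maps to that directive when `includes_end` is set, i.e. when the directive sits on a final line with no trailing newline.

```rust
use std::ops::Range;

struct DirectiveLine {
    range: Range<usize>,
    includes_end: bool,
}

/// Simplified lookup: find the directive whose line contains `offset`,
/// treating the last line of a file without a trailing newline as also
/// containing its end offset.
fn find_line_index(lines: &[DirectiveLine], offset: usize) -> Option<usize> {
    lines
        .binary_search_by(|directive| {
            if directive.range.end < offset {
                std::cmp::Ordering::Less
            } else if directive.range.start > offset {
                std::cmp::Ordering::Greater
            }
            // At this point, start <= offset <= end. An offset equal to the
            // exclusive end belongs to the next line, unless this directive
            // is on the final, newline-less line.
            else if !directive.includes_end && directive.range.end == offset {
                std::cmp::Ordering::Less
            } else {
                std::cmp::Ordering::Equal
            }
        })
        .ok()
}

fn main() {
    // `#noqa` as the last line with no trailing newline: range 0..5 covers
    // the whole file, and offset 5 (EOF) should still map to this directive.
    let eof_line = vec![DirectiveLine { range: 0..5, includes_end: true }];
    assert_eq!(find_line_index(&eof_line, 5), Some(0));

    // With a trailing newline, offset 5 belongs to the next line instead.
    let mid_file = vec![DirectiveLine { range: 0..5, includes_end: false }];
    assert_eq!(find_line_index(&mid_file, 5), None);
}
```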


@@ -16,6 +16,7 @@ mod tests {
     #[test_case(Rule::BlanketTypeIgnore, Path::new("PGH003_0.py"))]
     #[test_case(Rule::BlanketTypeIgnore, Path::new("PGH003_1.py"))]
     #[test_case(Rule::BlanketNOQA, Path::new("PGH004_0.py"))]
+    #[test_case(Rule::BlanketNOQA, Path::new("PGH004_1.py"))]
     #[test_case(Rule::InvalidMockAccess, Path::new("PGH005_0.py"))]
     fn rules(rule_code: Rule, path: &Path) -> Result<()> {
         let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());


@@ -88,9 +88,8 @@ pub(crate) fn blanket_noqa(
 ) {
     for directive_line in noqa_directives.lines() {
         if let Directive::All(all) = &directive_line.directive {
-            let line = locator.slice(directive_line.range);
-            let offset = directive_line.range.start();
-            let noqa_end = all.end() - offset;
+            let line = locator.slice(directive_line);
+            let noqa_end = all.end() - directive_line.start();
             // Skip the `# noqa`, plus any trailing whitespace.
             let mut cursor = Cursor::new(&line[noqa_end.to_usize()..]);


@@ -0,0 +1,8 @@
+---
+source: crates/ruff_linter/src/rules/pygrep_hooks/mod.rs
+---
+PGH004_1.py:1:1: PGH004 Use specific rule codes when using `noqa`
+  |
+1 | #noqa
+  | ^^^^^ PGH004
+  |