diff --git a/crates/ruff/src/commands/check.rs b/crates/ruff/src/commands/check.rs index 672485b869..249654f9e8 100644 --- a/crates/ruff/src/commands/check.rs +++ b/crates/ruff/src/commands/check.rs @@ -131,8 +131,7 @@ pub(crate) fn check( Diagnostics::new( vec![Message::from_diagnostic( - OldDiagnostic::new(IOError { message }, TextRange::default()), - dummy, + OldDiagnostic::new(IOError { message }, TextRange::default(), &dummy), None, )], FxHashMap::default(), diff --git a/crates/ruff/src/diagnostics.rs b/crates/ruff/src/diagnostics.rs index 7e2de003d3..d562c009b1 100644 --- a/crates/ruff/src/diagnostics.rs +++ b/crates/ruff/src/diagnostics.rs @@ -69,8 +69,8 @@ impl Diagnostics { message: err.to_string(), }, TextRange::default(), + &source_file, ), - source_file, None, )], FxHashMap::default(), @@ -235,7 +235,7 @@ pub(crate) fn lint_path( }; let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish(); - lint_pyproject_toml(source_file, settings) + lint_pyproject_toml(&source_file, settings) } else { vec![] }; @@ -396,7 +396,7 @@ pub(crate) fn lint_stdin( } return Ok(Diagnostics { - messages: lint_pyproject_toml(source_file, &settings.linter), + messages: lint_pyproject_toml(&source_file, &settings.linter), fixed: FixMap::from_iter([(fs::relativize_path(path), FixTable::default())]), notebook_indexes: FxHashMap::default(), }); diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs index 869fa3a724..db57ecc87f 100644 --- a/crates/ruff_linter/src/checkers/ast/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/mod.rs @@ -57,7 +57,7 @@ use ruff_python_semantic::{ }; use ruff_python_stdlib::builtins::{MAGIC_GLOBALS, python_builtins}; use ruff_python_trivia::CommentRanges; -use ruff_source_file::{OneIndexed, SourceRow}; +use ruff_source_file::{OneIndexed, SourceFile, SourceRow}; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::ast::annotation::AnnotationContext; @@ -224,8 +224,6 @@ pub(crate) struct Checker<'a> { visit: deferred::Visit<'a>, /// A set of deferred nodes to be analyzed after the AST traversal (e.g., `for` loops). analyze: deferred::Analyze, - /// The cumulative set of diagnostics computed across all lint rules. - diagnostics: RefCell<Vec<OldDiagnostic>>, /// The list of names already seen by flake8-bugbear diagnostics, to avoid duplicate violations. flake8_bugbear_seen: RefCell<Vec<TextRange>>, /// The end offset of the last visited statement. @@ -239,6 +237,7 @@ pub(crate) struct Checker<'a> { semantic_checker: SemanticSyntaxChecker, /// Errors collected by the `semantic_checker`.
semantic_errors: RefCell<Vec<SemanticSyntaxError>>, + context: &'a LintContext<'a>, } impl<'a> Checker<'a> { @@ -259,6 +258,7 @@ impl<'a> Checker<'a> { cell_offsets: Option<&'a CellOffsets>, notebook_index: Option<&'a NotebookIndex>, target_version: TargetVersion, + context: &'a LintContext<'a>, ) -> Checker<'a> { let semantic = SemanticModel::new(&settings.typing_modules, path, module); Self { @@ -279,7 +279,6 @@ impl<'a> Checker<'a> { semantic, visit: deferred::Visit::default(), analyze: deferred::Analyze::default(), - diagnostics: RefCell::default(), flake8_bugbear_seen: RefCell::default(), cell_offsets, notebook_index, @@ -288,6 +287,7 @@ impl<'a> Checker<'a> { target_version, semantic_checker: SemanticSyntaxChecker::new(), semantic_errors: RefCell::default(), + context, } } } @@ -389,10 +389,7 @@ impl<'a> Checker<'a> { kind: T, range: TextRange, ) -> DiagnosticGuard<'chk, 'a> { - DiagnosticGuard { - checker: self, - diagnostic: Some(OldDiagnostic::new(kind, range)), - } + self.context.report_diagnostic(kind, range) } /// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is @@ -405,15 +402,8 @@ impl<'a> Checker<'a> { kind: T, range: TextRange, ) -> Option<DiagnosticGuard<'chk, 'a>> { - let diagnostic = OldDiagnostic::new(kind, range); - if self.enabled(diagnostic.rule()) { - Some(DiagnosticGuard { - checker: self, - diagnostic: Some(diagnostic), - }) - } else { - None - } + self.context .report_diagnostic_if_enabled(kind, range, self.settings) } /// Adds a [`TextRange`] to the set of ranges of variable names @@ -2891,30 +2881,26 @@ impl<'a> Checker<'a> { } else { if self.semantic.global_scope().uses_star_imports() { if self.enabled(Rule::UndefinedLocalWithImportStarUsage) { - self.diagnostics.get_mut().push( - OldDiagnostic::new( - pyflakes::rules::UndefinedLocalWithImportStarUsage { - name: name.to_string(), - }, - range, - ) - .with_parent(definition.start()), - ); + self.report_diagnostic( + pyflakes::rules::UndefinedLocalWithImportStarUsage { + name: name.to_string(), + }, + range, + ) + .set_parent(definition.start()); } } else { if self.enabled(Rule::UndefinedExport) { if is_undefined_export_in_dunder_init_enabled(self.settings) || !self.path.ends_with("__init__.py") { - self.diagnostics.get_mut().push( - OldDiagnostic::new( - pyflakes::rules::UndefinedExport { - name: name.to_string(), - }, - range, - ) - .with_parent(definition.start()), - ); + self.report_diagnostic( + pyflakes::rules::UndefinedExport { + name: name.to_string(), + }, + range, + ) + .set_parent(definition.start()); } } } @@ -2975,7 +2961,8 @@ pub(crate) fn check_ast( cell_offsets: Option<&CellOffsets>, notebook_index: Option<&NotebookIndex>, target_version: TargetVersion, -) -> (Vec<OldDiagnostic>, Vec<SemanticSyntaxError>) { + context: &LintContext, +) -> Vec<SemanticSyntaxError> { let module_path = package .map(PackageRoot::path) .and_then(|package| to_module_path(package, path)); @@ -3015,6 +3002,7 @@ pub(crate) fn check_ast( cell_offsets, notebook_index, target_version, + context, ); checker.bind_builtins(); @@ -3041,12 +3029,83 @@ pub(crate) fn check_ast( analyze::deferred_scopes(&checker); let Checker { - diagnostics, - semantic_errors, - .. + semantic_errors, .. } = checker; - (diagnostics.into_inner(), semantic_errors.into_inner()) + semantic_errors.into_inner() +} + +/// A type for collecting diagnostics in a given file. +/// +/// [`LintContext::report_diagnostic`] can be used to obtain a [`DiagnosticGuard`], which will push +/// a [`Violation`] to the contained [`OldDiagnostic`] collection on `Drop`.
+pub(crate) struct LintContext<'a> { + diagnostics: RefCell<Vec<OldDiagnostic>>, + source_file: &'a SourceFile, +} + +impl<'a> LintContext<'a> { + /// Create a new collector with the given `source_file` and an empty collection of + /// `OldDiagnostic`s. + pub(crate) fn new(source_file: &'a SourceFile) -> Self { + Self { + diagnostics: RefCell::default(), + source_file, + } + } + + /// Return a [`DiagnosticGuard`] for reporting a diagnostic. + /// + /// The guard derefs to an [`OldDiagnostic`], so it can be used to further modify the diagnostic + /// before it is added to the collection in the collector on `Drop`. + pub(crate) fn report_diagnostic<'chk, T: Violation>( + &'chk self, + kind: T, + range: TextRange, + ) -> DiagnosticGuard<'chk, 'a> { + DiagnosticGuard { + context: self, + diagnostic: Some(OldDiagnostic::new(kind, range, self.source_file)), + } + } + + /// Return a [`DiagnosticGuard`] for reporting a diagnostic if the corresponding rule is + /// enabled. + /// + /// Prefer [`LintContext::report_diagnostic`] in general because the conversion from an + /// `OldDiagnostic` to a `Rule` is somewhat expensive. + pub(crate) fn report_diagnostic_if_enabled<'chk, T: Violation>( + &'chk self, + kind: T, + range: TextRange, + settings: &LinterSettings, + ) -> Option<DiagnosticGuard<'chk, 'a>> { + let diagnostic = OldDiagnostic::new(kind, range, self.source_file); + if settings.rules.enabled(diagnostic.rule()) { + Some(DiagnosticGuard { + context: self, + diagnostic: Some(diagnostic), + }) + } else { + None + } + } + + pub(crate) fn into_diagnostics(self) -> Vec<OldDiagnostic> { + self.diagnostics.into_inner() + } + + pub(crate) fn is_empty(&self) -> bool { + self.diagnostics.borrow().is_empty() + } + + pub(crate) fn as_mut_vec(&mut self) -> &mut Vec<OldDiagnostic> { + self.diagnostics.get_mut() + } + + pub(crate) fn iter(&mut self) -> impl Iterator<Item = &OldDiagnostic> { + self.diagnostics.get_mut().iter() + } } /// An abstraction for mutating a diagnostic. @@ -3058,7 +3117,7 @@ pub(crate) fn check_ast( /// adding fixes or parent ranges. pub(crate) struct DiagnosticGuard<'a, 'b> { /// The parent checker that will receive the diagnostic on `Drop`. - checker: &'a Checker<'b>, + context: &'a LintContext<'b>, /// The diagnostic that we want to report. /// /// This is always `Some` until the `Drop` (or `defuse`) call.
@@ -3100,7 +3159,7 @@ impl Drop for DiagnosticGuard<'_, '_> { } if let Some(diagnostic) = self.diagnostic.take() { - self.checker.diagnostics.borrow_mut().push(diagnostic); + self.context.diagnostics.borrow_mut().push(diagnostic); } } } diff --git a/crates/ruff_linter/src/checkers/filesystem.rs b/crates/ruff_linter/src/checkers/filesystem.rs index be09e345da..69c95a1dec 100644 --- a/crates/ruff_linter/src/checkers/filesystem.rs +++ b/crates/ruff_linter/src/checkers/filesystem.rs @@ -4,7 +4,7 @@ use ruff_python_ast::PythonVersion; use ruff_python_trivia::CommentRanges; use crate::Locator; -use crate::OldDiagnostic; +use crate::checkers::ast::LintContext; use crate::package::PackageRoot; use crate::preview::is_allow_nested_roots_enabled; use crate::registry::Rule; @@ -20,13 +20,12 @@ pub(crate) fn check_file_path( comment_ranges: &CommentRanges, settings: &LinterSettings, target_version: PythonVersion, -) -> Vec { - let mut diagnostics: Vec = vec![]; - + context: &LintContext, +) { // flake8-no-pep420 if settings.rules.enabled(Rule::ImplicitNamespacePackage) { let allow_nested_roots = is_allow_nested_roots_enabled(settings); - if let Some(diagnostic) = implicit_namespace_package( + implicit_namespace_package( path, package, locator, @@ -34,26 +33,17 @@ pub(crate) fn check_file_path( &settings.project_root, &settings.src, allow_nested_roots, - ) { - diagnostics.push(diagnostic); - } + context, + ); } // pep8-naming if settings.rules.enabled(Rule::InvalidModuleName) { - if let Some(diagnostic) = - invalid_module_name(path, package, &settings.pep8_naming.ignore_names) - { - diagnostics.push(diagnostic); - } + invalid_module_name(path, package, &settings.pep8_naming.ignore_names, context); } // flake8-builtins if settings.rules.enabled(Rule::StdlibModuleShadowing) { - if let Some(diagnostic) = stdlib_module_shadowing(path, settings, target_version) { - diagnostics.push(diagnostic); - } + stdlib_module_shadowing(path, settings, target_version, context); } - - diagnostics } diff --git a/crates/ruff_linter/src/checkers/imports.rs b/crates/ruff_linter/src/checkers/imports.rs index 8600612142..d01249bcd4 100644 --- a/crates/ruff_linter/src/checkers/imports.rs +++ b/crates/ruff_linter/src/checkers/imports.rs @@ -8,7 +8,6 @@ use ruff_python_index::Indexer; use ruff_python_parser::Parsed; use crate::Locator; -use crate::OldDiagnostic; use crate::directives::IsortDirectives; use crate::package::PackageRoot; use crate::registry::Rule; @@ -16,6 +15,8 @@ use crate::rules::isort; use crate::rules::isort::block::{Block, BlockBuilder}; use crate::settings::LinterSettings; +use super::ast::LintContext; + #[expect(clippy::too_many_arguments)] pub(crate) fn check_imports( parsed: &Parsed, @@ -28,7 +29,8 @@ pub(crate) fn check_imports( source_type: PySourceType, cell_offsets: Option<&CellOffsets>, target_version: PythonVersion, -) -> Vec { + context: &LintContext, +) { // Extract all import blocks from the AST. let tracker = { let mut tracker = @@ -40,11 +42,10 @@ pub(crate) fn check_imports( let blocks: Vec<&Block> = tracker.iter().collect(); // Enforce import rules. 
- let mut diagnostics = vec![]; if settings.rules.enabled(Rule::UnsortedImports) { for block in &blocks { if !block.imports.is_empty() { - if let Some(diagnostic) = isort::rules::organize_imports( + isort::rules::organize_imports( block, locator, stylist, @@ -54,21 +55,19 @@ pub(crate) fn check_imports( source_type, parsed.tokens(), target_version, - ) { - diagnostics.push(diagnostic); - } + context, + ); } } } if settings.rules.enabled(Rule::MissingRequiredImport) { - diagnostics.extend(isort::rules::add_required_imports( + isort::rules::add_required_imports( parsed, locator, stylist, settings, source_type, - )); + context, + ); } - - diagnostics } diff --git a/crates/ruff_linter/src/checkers/logical_lines.rs b/crates/ruff_linter/src/checkers/logical_lines.rs index cabbb1ad8b..f6c31e2f44 100644 --- a/crates/ruff_linter/src/checkers/logical_lines.rs +++ b/crates/ruff_linter/src/checkers/logical_lines.rs @@ -4,10 +4,8 @@ use ruff_python_parser::{TokenKind, Tokens}; use ruff_source_file::LineRanges; use ruff_text_size::{Ranged, TextRange}; -use crate::Locator; -use crate::OldDiagnostic; use crate::line_width::IndentWidth; -use crate::registry::{AsRule, Rule}; +use crate::registry::Rule; use crate::rules::pycodestyle::rules::logical_lines::{ LogicalLines, TokenFlags, extraneous_whitespace, indentation, missing_whitespace, missing_whitespace_after_keyword, missing_whitespace_around_operator, redundant_backslash, @@ -16,6 +14,9 @@ use crate::rules::pycodestyle::rules::logical_lines::{ whitespace_before_parameters, }; use crate::settings::LinterSettings; +use crate::{Locator, Violation}; + +use super::ast::{DiagnosticGuard, LintContext}; /// Return the amount of indentation, expanding tabs to the next multiple of the settings' tab size. pub(crate) fn expand_indent(line: &str, indent_width: IndentWidth) -> usize { @@ -40,8 +41,9 @@ pub(crate) fn check_logical_lines( indexer: &Indexer, stylist: &Stylist, settings: &LinterSettings, -) -> Vec { - let mut context = LogicalLinesContext::new(settings); + lint_context: &LintContext, +) { + let mut context = LogicalLinesContext::new(settings, lint_context); let mut prev_line = None; let mut prev_indent_level = None; @@ -170,7 +172,7 @@ pub(crate) fn check_logical_lines( let indent_size = 4; if enforce_indentation { - for diagnostic in indentation( + indentation( &line, prev_line.as_ref(), indent_char, @@ -178,11 +180,9 @@ pub(crate) fn check_logical_lines( prev_indent_level, indent_size, range, - ) { - if settings.rules.enabled(diagnostic.rule()) { - context.push_diagnostic(diagnostic); - } - } + lint_context, + settings, + ); } if !line.is_comment_only() { @@ -190,26 +190,24 @@ pub(crate) fn check_logical_lines( prev_indent_level = Some(indent_level); } } - context.diagnostics } -#[derive(Debug, Clone)] -pub(crate) struct LogicalLinesContext<'a> { +pub(crate) struct LogicalLinesContext<'a, 'b> { settings: &'a LinterSettings, - diagnostics: Vec, + context: &'a LintContext<'b>, } -impl<'a> LogicalLinesContext<'a> { - fn new(settings: &'a LinterSettings) -> Self { - Self { - settings, - diagnostics: Vec::new(), - } +impl<'a, 'b> LogicalLinesContext<'a, 'b> { + fn new(settings: &'a LinterSettings, context: &'a LintContext<'b>) -> Self { + Self { settings, context } } - pub(crate) fn push_diagnostic(&mut self, diagnostic: OldDiagnostic) { - if self.settings.rules.enabled(diagnostic.rule()) { - self.diagnostics.push(diagnostic); - } + pub(crate) fn report_diagnostic<'chk, T: Violation>( + &'chk self, + kind: T, + range: TextRange, + ) -> Option> { + 
self.context + .report_diagnostic_if_enabled(kind, range, self.settings) } } diff --git a/crates/ruff_linter/src/checkers/noqa.rs b/crates/ruff_linter/src/checkers/noqa.rs index 09d34b25c5..d87e4343a9 100644 --- a/crates/ruff_linter/src/checkers/noqa.rs +++ b/crates/ruff_linter/src/checkers/noqa.rs @@ -8,7 +8,6 @@ use rustc_hash::FxHashSet; use ruff_python_trivia::CommentRanges; use ruff_text_size::Ranged; -use crate::Locator; use crate::fix::edits::delete_comment; use crate::noqa::{ Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping, @@ -20,11 +19,13 @@ use crate::rules::pygrep_hooks; use crate::rules::ruff; use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA}; use crate::settings::LinterSettings; -use crate::{Edit, Fix, OldDiagnostic}; +use crate::{Edit, Fix, Locator}; + +use super::ast::LintContext; #[expect(clippy::too_many_arguments)] pub(crate) fn check_noqa( - diagnostics: &mut Vec, + context: &mut LintContext, path: &Path, locator: &Locator, comment_ranges: &CommentRanges, @@ -46,7 +47,7 @@ pub(crate) fn check_noqa( let mut ignored_diagnostics = vec![]; // Remove any ignored diagnostics. - 'outer: for (index, diagnostic) in diagnostics.iter().enumerate() { + 'outer: for (index, diagnostic) in context.iter().enumerate() { let rule = diagnostic.rule(); if matches!(rule, Rule::BlanketNOQA) { @@ -135,11 +136,9 @@ pub(crate) fn check_noqa( Directive::All(directive) => { if matches.is_empty() { let edit = delete_comment(directive.range(), locator); - let mut diagnostic = - OldDiagnostic::new(UnusedNOQA { codes: None }, directive.range()); + let mut diagnostic = context + .report_diagnostic(UnusedNOQA { codes: None }, directive.range()); diagnostic.set_fix(Fix::safe_edit(edit)); - - diagnostics.push(diagnostic); } } Directive::Codes(directive) => { @@ -159,9 +158,7 @@ pub(crate) fn check_noqa( if seen_codes.insert(original_code) { let is_code_used = if is_file_level { - diagnostics - .iter() - .any(|diag| diag.rule().noqa_code() == code) + context.iter().any(|diag| diag.rule().noqa_code() == code) } else { matches.iter().any(|match_| *match_ == code) } || settings @@ -212,7 +209,7 @@ pub(crate) fn check_noqa( directive.range(), ) }; - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( UnusedNOQA { codes: Some(UnusedCodes { disabled: disabled_codes @@ -236,7 +233,6 @@ pub(crate) fn check_noqa( directive.range(), ); diagnostic.set_fix(Fix::safe_edit(edit)); - diagnostics.push(diagnostic); } } } @@ -247,8 +243,8 @@ pub(crate) fn check_noqa( && !per_file_ignores.contains(Rule::RedirectedNOQA) && !exemption.includes(Rule::RedirectedNOQA) { - ruff::rules::redirected_noqa(diagnostics, &noqa_directives); - ruff::rules::redirected_file_noqa(diagnostics, &file_noqa_directives); + ruff::rules::redirected_noqa(context, &noqa_directives); + ruff::rules::redirected_file_noqa(context, &file_noqa_directives); } if settings.rules.enabled(Rule::BlanketNOQA) @@ -256,7 +252,7 @@ pub(crate) fn check_noqa( && !exemption.enumerates(Rule::BlanketNOQA) { pygrep_hooks::rules::blanket_noqa( - diagnostics, + context, &noqa_directives, locator, &file_noqa_directives, @@ -267,7 +263,7 @@ pub(crate) fn check_noqa( && !per_file_ignores.contains(Rule::InvalidRuleCode) && !exemption.enumerates(Rule::InvalidRuleCode) { - ruff::rules::invalid_noqa_code(diagnostics, &noqa_directives, locator, &settings.external); + ruff::rules::invalid_noqa_code(context, &noqa_directives, locator, &settings.external); } ignored_diagnostics.sort_unstable(); diff --git 
a/crates/ruff_linter/src/checkers/physical_lines.rs b/crates/ruff_linter/src/checkers/physical_lines.rs index 1edfed656a..6bdf97e34b 100644 --- a/crates/ruff_linter/src/checkers/physical_lines.rs +++ b/crates/ruff_linter/src/checkers/physical_lines.rs @@ -6,7 +6,6 @@ use ruff_source_file::UniversalNewlines; use ruff_text_size::TextSize; use crate::Locator; -use crate::OldDiagnostic; use crate::registry::Rule; use crate::rules::flake8_copyright::rules::missing_copyright_notice; use crate::rules::pycodestyle::rules::{ @@ -17,15 +16,16 @@ use crate::rules::pylint; use crate::rules::ruff::rules::indented_form_feed; use crate::settings::LinterSettings; +use super::ast::LintContext; + pub(crate) fn check_physical_lines( locator: &Locator, stylist: &Stylist, indexer: &Indexer, doc_lines: &[TextSize], settings: &LinterSettings, -) -> Vec { - let mut diagnostics: Vec = vec![]; - + context: &LintContext, +) { let enforce_doc_line_too_long = settings.rules.enabled(Rule::DocLineTooLong); let enforce_line_too_long = settings.rules.enabled(Rule::LineTooLong); let enforce_no_newline_at_end_of_file = settings.rules.enabled(Rule::MissingNewlineAtEndOfFile); @@ -45,54 +45,38 @@ pub(crate) fn check_physical_lines( .is_some() { if enforce_doc_line_too_long { - if let Some(diagnostic) = doc_line_too_long(&line, comment_ranges, settings) { - diagnostics.push(diagnostic); - } + doc_line_too_long(&line, comment_ranges, settings, context); } } if enforce_mixed_spaces_and_tabs { - if let Some(diagnostic) = mixed_spaces_and_tabs(&line) { - diagnostics.push(diagnostic); - } + mixed_spaces_and_tabs(&line, context); } if enforce_line_too_long { - if let Some(diagnostic) = line_too_long(&line, comment_ranges, settings) { - diagnostics.push(diagnostic); - } + line_too_long(&line, comment_ranges, settings, context); } if enforce_bidirectional_unicode { - diagnostics.extend(pylint::rules::bidirectional_unicode(&line)); + pylint::rules::bidirectional_unicode(&line, context); } if enforce_trailing_whitespace || enforce_blank_line_contains_whitespace { - if let Some(diagnostic) = trailing_whitespace(&line, locator, indexer, settings) { - diagnostics.push(diagnostic); - } + trailing_whitespace(&line, locator, indexer, settings, context); } if settings.rules.enabled(Rule::IndentedFormFeed) { - if let Some(diagnostic) = indented_form_feed(&line) { - diagnostics.push(diagnostic); - } + indented_form_feed(&line, context); } } if enforce_no_newline_at_end_of_file { - if let Some(diagnostic) = no_newline_at_end_of_file(locator, stylist) { - diagnostics.push(diagnostic); - } + no_newline_at_end_of_file(locator, stylist, context); } if enforce_copyright_notice { - if let Some(diagnostic) = missing_copyright_notice(locator, settings) { - diagnostics.push(diagnostic); - } + missing_copyright_notice(locator, settings, context); } - - diagnostics } #[cfg(test)] @@ -100,8 +84,10 @@ mod tests { use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; use ruff_python_parser::parse_module; + use ruff_source_file::SourceFileBuilder; use crate::Locator; + use crate::checkers::ast::LintContext; use crate::line_width::LineLength; use crate::registry::Rule; use crate::rules::pycodestyle; @@ -118,6 +104,8 @@ mod tests { let stylist = Stylist::from_tokens(parsed.tokens(), locator.contents()); let check_with_max_line_length = |line_length: LineLength| { + let source_file = SourceFileBuilder::new("", line).finish(); + let diagnostics = LintContext::new(&source_file); check_physical_lines( &locator, &stylist, @@ -130,7 +118,9 @@ mod tests { }, 
..LinterSettings::for_rule(Rule::LineTooLong) }, - ) + &diagnostics, + ); + diagnostics.into_diagnostics() }; let line_length = LineLength::try_from(8).unwrap(); assert_eq!(check_with_max_line_length(line_length), vec![]); diff --git a/crates/ruff_linter/src/checkers/tokens.rs b/crates/ruff_linter/src/checkers/tokens.rs index 0ea5973175..63eaa882ab 100644 --- a/crates/ruff_linter/src/checkers/tokens.rs +++ b/crates/ruff_linter/src/checkers/tokens.rs @@ -9,7 +9,6 @@ use ruff_python_index::Indexer; use ruff_python_parser::Tokens; use crate::Locator; -use crate::OldDiagnostic; use crate::directives::TodoComment; use crate::registry::{AsRule, Rule}; use crate::rules::pycodestyle::rules::BlankLinesChecker; @@ -19,6 +18,8 @@ use crate::rules::{ }; use crate::settings::LinterSettings; +use super::ast::LintContext; + #[expect(clippy::too_many_arguments)] pub(crate) fn check_tokens( tokens: &Tokens, @@ -29,8 +30,8 @@ pub(crate) fn check_tokens( settings: &LinterSettings, source_type: PySourceType, cell_offsets: Option<&CellOffsets>, -) -> Vec { - let mut diagnostics: Vec = vec![]; + context: &mut LintContext, +) { let comment_ranges = indexer.comment_ranges(); if settings.rules.any_enabled(&[ @@ -41,16 +42,23 @@ pub(crate) fn check_tokens( Rule::BlankLinesAfterFunctionOrClass, Rule::BlankLinesBeforeNestedDefinition, ]) { - BlankLinesChecker::new(locator, stylist, settings, source_type, cell_offsets) - .check_lines(tokens, &mut diagnostics); + BlankLinesChecker::new( + locator, + stylist, + settings, + source_type, + cell_offsets, + context, + ) + .check_lines(tokens); } if settings.rules.enabled(Rule::BlanketTypeIgnore) { - pygrep_hooks::rules::blanket_type_ignore(&mut diagnostics, comment_ranges, locator); + pygrep_hooks::rules::blanket_type_ignore(context, comment_ranges, locator); } if settings.rules.enabled(Rule::EmptyComment) { - pylint::rules::empty_comments(&mut diagnostics, comment_ranges, locator); + pylint::rules::empty_comments(context, comment_ranges, locator); } if settings @@ -58,25 +66,20 @@ pub(crate) fn check_tokens( .enabled(Rule::AmbiguousUnicodeCharacterComment) { for range in comment_ranges { - ruff::rules::ambiguous_unicode_character_comment( - &mut diagnostics, - locator, - range, - settings, - ); + ruff::rules::ambiguous_unicode_character_comment(context, locator, range, settings); } } if settings.rules.enabled(Rule::CommentedOutCode) { - eradicate::rules::commented_out_code(&mut diagnostics, locator, comment_ranges, settings); + eradicate::rules::commented_out_code(context, locator, comment_ranges, settings); } if settings.rules.enabled(Rule::UTF8EncodingDeclaration) { - pyupgrade::rules::unnecessary_coding_comment(&mut diagnostics, locator, comment_ranges); + pyupgrade::rules::unnecessary_coding_comment(context, locator, comment_ranges); } if settings.rules.enabled(Rule::TabIndentation) { - pycodestyle::rules::tab_indentation(&mut diagnostics, locator, indexer); + pycodestyle::rules::tab_indentation(context, locator, indexer); } if settings.rules.any_enabled(&[ @@ -87,7 +90,7 @@ pub(crate) fn check_tokens( Rule::InvalidCharacterZeroWidthSpace, ]) { for token in tokens { - pylint::rules::invalid_string_characters(&mut diagnostics, token, locator); + pylint::rules::invalid_string_characters(context, token, locator); } } @@ -97,7 +100,7 @@ pub(crate) fn check_tokens( Rule::UselessSemicolon, ]) { pycodestyle::rules::compound_statements( - &mut diagnostics, + context, tokens, locator, indexer, @@ -110,13 +113,7 @@ pub(crate) fn check_tokens( 
Rule::SingleLineImplicitStringConcatenation, Rule::MultiLineImplicitStringConcatenation, ]) { - flake8_implicit_str_concat::rules::implicit( - &mut diagnostics, - tokens, - locator, - indexer, - settings, - ); + flake8_implicit_str_concat::rules::implicit(context, tokens, locator, indexer, settings); } if settings.rules.any_enabled(&[ @@ -124,15 +121,15 @@ pub(crate) fn check_tokens( Rule::TrailingCommaOnBareTuple, Rule::ProhibitedTrailingComma, ]) { - flake8_commas::rules::trailing_commas(&mut diagnostics, tokens, locator, indexer); + flake8_commas::rules::trailing_commas(context, tokens, locator, indexer); } if settings.rules.enabled(Rule::ExtraneousParentheses) { - pyupgrade::rules::extraneous_parentheses(&mut diagnostics, tokens, locator); + pyupgrade::rules::extraneous_parentheses(context, tokens, locator); } if source_type.is_stub() && settings.rules.enabled(Rule::TypeCommentInStub) { - flake8_pyi::rules::type_comment_in_stub(&mut diagnostics, locator, comment_ranges); + flake8_pyi::rules::type_comment_in_stub(context, locator, comment_ranges); } if settings.rules.any_enabled(&[ @@ -142,13 +139,7 @@ pub(crate) fn check_tokens( Rule::ShebangNotFirstLine, Rule::ShebangMissingPython, ]) { - flake8_executable::rules::from_tokens( - &mut diagnostics, - path, - locator, - comment_ranges, - settings, - ); + flake8_executable::rules::from_tokens(context, path, locator, comment_ranges, settings); } if settings.rules.any_enabled(&[ @@ -172,19 +163,15 @@ pub(crate) fn check_tokens( TodoComment::from_comment(comment, *comment_range, i) }) .collect(); - flake8_todos::rules::todos(&mut diagnostics, &todo_comments, locator, comment_ranges); - flake8_fixme::rules::todos(&mut diagnostics, &todo_comments); + flake8_todos::rules::todos(context, &todo_comments, locator, comment_ranges); + flake8_fixme::rules::todos(context, &todo_comments); } if settings.rules.enabled(Rule::TooManyNewlinesAtEndOfFile) { - pycodestyle::rules::too_many_newlines_at_end_of_file( - &mut diagnostics, - tokens, - cell_offsets, - ); + pycodestyle::rules::too_many_newlines_at_end_of_file(context, tokens, cell_offsets); } - diagnostics.retain(|diagnostic| settings.rules.enabled(diagnostic.rule())); - - diagnostics + context + .as_mut_vec() + .retain(|diagnostic| settings.rules.enabled(diagnostic.rule())); } diff --git a/crates/ruff_linter/src/diagnostic.rs b/crates/ruff_linter/src/diagnostic.rs index 68dc05734b..8ae3f7c18c 100644 --- a/crates/ruff_linter/src/diagnostic.rs +++ b/crates/ruff_linter/src/diagnostic.rs @@ -1,6 +1,7 @@ use anyhow::Result; use log::debug; +use ruff_source_file::SourceFile; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::registry::AsRule; @@ -18,6 +19,8 @@ pub struct OldDiagnostic { pub parent: Option, pub(crate) rule: Rule, + + pub(crate) file: SourceFile, } impl OldDiagnostic { @@ -26,7 +29,7 @@ impl OldDiagnostic { // diagnostic refactor, but if it still exists in this form at the end of the refactor, we // should just update the call sites. 
#[expect(clippy::needless_pass_by_value)] - pub fn new<T: Violation>(kind: T, range: TextRange) -> Self { + pub fn new<T: Violation>(kind: T, range: TextRange, file: &SourceFile) -> Self { Self { body: Violation::message(&kind), suggestion: Violation::fix_title(&kind), @@ -34,6 +37,7 @@ impl OldDiagnostic { fix: None, parent: None, rule: T::rule(), + file: file.clone(), } } diff --git a/crates/ruff_linter/src/fix/edits.rs b/crates/ruff_linter/src/fix/edits.rs index fc51bdcebb..7d32438022 100644 --- a/crates/ruff_linter/src/fix/edits.rs +++ b/crates/ruff_linter/src/fix/edits.rs @@ -600,14 +600,12 @@ mod tests { use ruff_python_parser::{parse_expression, parse_module}; use ruff_text_size::{Ranged, TextRange, TextSize}; - use crate::Locator; - - use crate::codes::Rule; use crate::fix::apply_fixes; use crate::fix::edits::{ add_to_dunder_all, make_redundant_alias, next_stmt_break, trailing_semicolon, }; use crate::message::Message; - use crate::{Edit, Fix, OldDiagnostic}; + use crate::{Edit, Fix, Locator, OldDiagnostic}; /// Parse the given source using [`Mode::Module`] and return the first statement. fn parse_first_stmt(source: &str) -> Result<Stmt> { @@ -741,21 +739,13 @@ x = 1 \ let diag = OldDiagnostic::new( MissingNewlineAtEndOfFile, // The choice of rule here is arbitrary. TextRange::default(), + &SourceFileBuilder::new("", "").finish(), ) .with_fix(Fix::safe_edits( iter.next().ok_or(anyhow!("expected edits nonempty"))?, iter, )); - Message::diagnostic( - diag.body, - diag.suggestion, - diag.range, - diag.fix, - diag.parent, - SourceFileBuilder::new("", "").finish(), - None, - Rule::MissingNewlineAtEndOfFile, - ) + Message::from_diagnostic(diag, None) }; assert_eq!(apply_fixes([diag].iter(), &locator).code, expect); Ok(()) diff --git a/crates/ruff_linter/src/fix/mod.rs b/crates/ruff_linter/src/fix/mod.rs index 261893e440..ae77973ed7 100644 --- a/crates/ruff_linter/src/fix/mod.rs +++ b/crates/ruff_linter/src/fix/mod.rs @@ -177,12 +177,12 @@ mod tests { edit.into_iter() .map(|edit| { // The choice of rule here is arbitrary.
- let diagnostic = OldDiagnostic::new(MissingNewlineAtEndOfFile, edit.range()); - Message::from_diagnostic( - diagnostic.with_fix(Fix::safe_edit(edit)), - SourceFileBuilder::new(filename, source).finish(), - None, - ) + let diagnostic = OldDiagnostic::new( + MissingNewlineAtEndOfFile, + edit.range(), + &SourceFileBuilder::new(filename, source).finish(), + ); + Message::from_diagnostic(diagnostic.with_fix(Fix::safe_edit(edit)), None) }) .collect() } diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index cab0b25cbc..99e35681ec 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -1,6 +1,4 @@ use std::borrow::Cow; -use std::cell::LazyCell; -use std::ops::Deref; use std::path::Path; use anyhow::{Result, anyhow}; @@ -14,11 +12,11 @@ use ruff_python_ast::{ModModule, PySourceType, PythonVersion}; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; use ruff_python_parser::{ParseError, ParseOptions, Parsed, UnsupportedSyntaxError}; -use ruff_source_file::SourceFileBuilder; +use ruff_source_file::{SourceFile, SourceFileBuilder}; use ruff_text_size::Ranged; use crate::OldDiagnostic; -use crate::checkers::ast::check_ast; +use crate::checkers::ast::{LintContext, check_ast}; use crate::checkers::filesystem::check_file_path; use crate::checkers::imports::check_imports; use crate::checkers::noqa::check_noqa; @@ -113,8 +111,11 @@ pub fn check_path( parsed: &Parsed, target_version: TargetVersion, ) -> Vec { + let source_file = + SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents()).finish(); + // Aggregate all diagnostics. - let mut diagnostics = vec![]; + let mut diagnostics = LintContext::new(&source_file); // Aggregate all semantic syntax errors. let mut semantic_syntax_errors = vec![]; @@ -136,7 +137,7 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_tokens()) { - diagnostics.extend(check_tokens( + check_tokens( tokens, path, locator, @@ -145,7 +146,8 @@ pub fn check_path( settings, source_type, source_kind.as_ipy_notebook().map(Notebook::cell_offsets), - )); + &mut diagnostics, + ); } // Run the filesystem-based rules. @@ -154,14 +156,15 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_filesystem()) { - diagnostics.extend(check_file_path( + check_file_path( path, package, locator, comment_ranges, settings, target_version.linter_version(), - )); + &diagnostics, + ); } // Run the logical line-based rules. @@ -170,9 +173,14 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_logical_lines()) { - diagnostics.extend(crate::checkers::logical_lines::check_logical_lines( - tokens, locator, indexer, stylist, settings, - )); + crate::checkers::logical_lines::check_logical_lines( + tokens, + locator, + indexer, + stylist, + settings, + &diagnostics, + ); } // Run the AST-based rules only if there are no syntax errors. 
@@ -180,7 +188,7 @@ pub fn check_path( let cell_offsets = source_kind.as_ipy_notebook().map(Notebook::cell_offsets); let notebook_index = source_kind.as_ipy_notebook().map(Notebook::index); - let (new_diagnostics, new_semantic_syntax_errors) = check_ast( + semantic_syntax_errors.extend(check_ast( parsed, locator, stylist, @@ -194,9 +202,8 @@ pub fn check_path( cell_offsets, notebook_index, target_version, - ); - diagnostics.extend(new_diagnostics); - semantic_syntax_errors.extend(new_semantic_syntax_errors); + &diagnostics, + )); let use_imports = !directives.isort.skip_file && settings @@ -205,7 +212,7 @@ pub fn check_path( .any(|rule_code| rule_code.lint_source().is_imports()); if use_imports || use_doc_lines { if use_imports { - let import_diagnostics = check_imports( + check_imports( parsed, locator, indexer, @@ -216,9 +223,8 @@ pub fn check_path( source_type, cell_offsets, target_version.linter_version(), + &diagnostics, ); - - diagnostics.extend(import_diagnostics); } if use_doc_lines { doc_lines.extend(doc_lines_from_ast(parsed.suite(), locator)); @@ -238,9 +244,14 @@ pub fn check_path( .iter_enabled() .any(|rule_code| rule_code.lint_source().is_physical_lines()) { - diagnostics.extend(check_physical_lines( - locator, stylist, indexer, &doc_lines, settings, - )); + check_physical_lines( + locator, + stylist, + indexer, + &doc_lines, + settings, + &diagnostics, + ); } // Raise violations for internal test rules @@ -250,47 +261,70 @@ pub fn check_path( if !settings.rules.enabled(*test_rule) { continue; } - let diagnostic = match test_rule { + match test_rule { Rule::StableTestRule => { - test_rules::StableTestRule::diagnostic(locator, comment_ranges) - } - Rule::StableTestRuleSafeFix => { - test_rules::StableTestRuleSafeFix::diagnostic(locator, comment_ranges) - } - Rule::StableTestRuleUnsafeFix => { - test_rules::StableTestRuleUnsafeFix::diagnostic(locator, comment_ranges) + test_rules::StableTestRule::diagnostic(locator, comment_ranges, &diagnostics); } + Rule::StableTestRuleSafeFix => test_rules::StableTestRuleSafeFix::diagnostic( + locator, + comment_ranges, + &diagnostics, + ), + Rule::StableTestRuleUnsafeFix => test_rules::StableTestRuleUnsafeFix::diagnostic( + locator, + comment_ranges, + &diagnostics, + ), Rule::StableTestRuleDisplayOnlyFix => { - test_rules::StableTestRuleDisplayOnlyFix::diagnostic(locator, comment_ranges) + test_rules::StableTestRuleDisplayOnlyFix::diagnostic( + locator, + comment_ranges, + &diagnostics, + ); } Rule::PreviewTestRule => { - test_rules::PreviewTestRule::diagnostic(locator, comment_ranges) + test_rules::PreviewTestRule::diagnostic(locator, comment_ranges, &diagnostics); } Rule::DeprecatedTestRule => { - test_rules::DeprecatedTestRule::diagnostic(locator, comment_ranges) + test_rules::DeprecatedTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ); } Rule::AnotherDeprecatedTestRule => { - test_rules::AnotherDeprecatedTestRule::diagnostic(locator, comment_ranges) + test_rules::AnotherDeprecatedTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ); } Rule::RemovedTestRule => { - test_rules::RemovedTestRule::diagnostic(locator, comment_ranges) - } - Rule::AnotherRemovedTestRule => { - test_rules::AnotherRemovedTestRule::diagnostic(locator, comment_ranges) - } - Rule::RedirectedToTestRule => { - test_rules::RedirectedToTestRule::diagnostic(locator, comment_ranges) - } - Rule::RedirectedFromTestRule => { - test_rules::RedirectedFromTestRule::diagnostic(locator, comment_ranges) + 
test_rules::RemovedTestRule::diagnostic(locator, comment_ranges, &diagnostics); } + Rule::AnotherRemovedTestRule => test_rules::AnotherRemovedTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ), + Rule::RedirectedToTestRule => test_rules::RedirectedToTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ), + Rule::RedirectedFromTestRule => test_rules::RedirectedFromTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ), Rule::RedirectedFromPrefixTestRule => { - test_rules::RedirectedFromPrefixTestRule::diagnostic(locator, comment_ranges) + test_rules::RedirectedFromPrefixTestRule::diagnostic( + locator, + comment_ranges, + &diagnostics, + ); } _ => unreachable!("All test rules must have an implementation"), - }; - if let Some(diagnostic) = diagnostic { - diagnostics.push(diagnostic); } } } @@ -308,7 +342,9 @@ pub fn check_path( RuleSet::empty() }; if !per_file_ignores.is_empty() { - diagnostics.retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule())); + diagnostics + .as_mut_vec() + .retain(|diagnostic| !per_file_ignores.contains(diagnostic.rule())); } // Enforce `noqa` directives. @@ -330,11 +366,13 @@ pub fn check_path( ); if noqa.is_enabled() { for index in ignored.iter().rev() { - diagnostics.swap_remove(*index); + diagnostics.as_mut_vec().swap_remove(*index); } } } + let mut diagnostics = diagnostics.into_diagnostics(); + if parsed.has_valid_syntax() { // Remove fixes for any rules marked as unfixable. for diagnostic in &mut diagnostics { @@ -372,9 +410,9 @@ pub fn check_path( parsed.errors(), syntax_errors, &semantic_syntax_errors, - path, locator, directives, + &source_file, ) } @@ -507,35 +545,24 @@ fn diagnostics_to_messages( parse_errors: &[ParseError], unsupported_syntax_errors: &[UnsupportedSyntaxError], semantic_syntax_errors: &[SemanticSyntaxError], - path: &Path, locator: &Locator, directives: &Directives, + source_file: &SourceFile, ) -> Vec { - let file = LazyCell::new(|| { - let mut builder = - SourceFileBuilder::new(path.to_string_lossy().as_ref(), locator.contents()); - - if let Some(line_index) = locator.line_index() { - builder.set_line_index(line_index.clone()); - } - - builder.finish() - }); - parse_errors .iter() - .map(|parse_error| Message::from_parse_error(parse_error, locator, file.deref().clone())) + .map(|parse_error| Message::from_parse_error(parse_error, locator, source_file.clone())) .chain(unsupported_syntax_errors.iter().map(|syntax_error| { - Message::from_unsupported_syntax_error(syntax_error, file.deref().clone()) + Message::from_unsupported_syntax_error(syntax_error, source_file.clone()) })) .chain( semantic_syntax_errors .iter() - .map(|error| Message::from_semantic_syntax_error(error, file.deref().clone())), + .map(|error| Message::from_semantic_syntax_error(error, source_file.clone())), ) .chain(diagnostics.into_iter().map(|diagnostic| { let noqa_offset = directives.noqa_line_for.resolve(diagnostic.start()); - Message::from_diagnostic(diagnostic, file.deref().clone(), Some(noqa_offset)) + Message::from_diagnostic(diagnostic, Some(noqa_offset)) })) .collect() } diff --git a/crates/ruff_linter/src/message/mod.rs b/crates/ruff_linter/src/message/mod.rs index 74eef57f24..49bd23cd1e 100644 --- a/crates/ruff_linter/src/message/mod.rs +++ b/crates/ruff_linter/src/message/mod.rs @@ -114,11 +114,7 @@ impl Message { } /// Create a [`Message`] from the given [`OldDiagnostic`] corresponding to a rule violation. 
- pub fn from_diagnostic( - diagnostic: OldDiagnostic, - file: SourceFile, - noqa_offset: Option, - ) -> Message { + pub fn from_diagnostic(diagnostic: OldDiagnostic, noqa_offset: Option) -> Message { let OldDiagnostic { body, suggestion, @@ -126,6 +122,7 @@ impl Message { fix, parent, rule, + file, } = diagnostic; Self::diagnostic( body, diff --git a/crates/ruff_linter/src/noqa.rs b/crates/ruff_linter/src/noqa.rs index d09aad4fc2..a7ff0c0c45 100644 --- a/crates/ruff_linter/src/noqa.rs +++ b/crates/ruff_linter/src/noqa.rs @@ -1252,14 +1252,9 @@ mod tests { } /// Create a [`Message`] with a placeholder filename and rule code from `diagnostic`. - fn message_from_diagnostic( - diagnostic: OldDiagnostic, - path: impl AsRef, - source: &str, - ) -> Message { + fn message_from_diagnostic(diagnostic: OldDiagnostic) -> Message { let noqa_offset = diagnostic.start(); - let file = SourceFileBuilder::new(path.as_ref().to_string_lossy(), source).finish(); - Message::from_diagnostic(diagnostic, file, Some(noqa_offset)) + Message::from_diagnostic(diagnostic, Some(noqa_offset)) } #[test] @@ -2842,13 +2837,15 @@ mod tests { assert_eq!(count, 0); assert_eq!(output, format!("{contents}")); + let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish(); let messages = [OldDiagnostic::new( UnusedVariable { name: "x".to_string(), }, TextRange::new(TextSize::from(0), TextSize::from(0)), + &source_file, )] - .map(|d| message_from_diagnostic(d, path, contents)); + .map(message_from_diagnostic); let contents = "x = 1"; let noqa_line_for = NoqaMapping::default(); @@ -2864,19 +2861,22 @@ mod tests { assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: F841\n"); + let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish(); let messages = [ OldDiagnostic::new( AmbiguousVariableName("x".to_string()), TextRange::new(TextSize::from(0), TextSize::from(0)), + &source_file, ), OldDiagnostic::new( UnusedVariable { name: "x".to_string(), }, TextRange::new(TextSize::from(0), TextSize::from(0)), + &source_file, ), ] - .map(|d| message_from_diagnostic(d, path, contents)); + .map(message_from_diagnostic); let contents = "x = 1 # noqa: E741\n"; let noqa_line_for = NoqaMapping::default(); let comment_ranges = @@ -2893,19 +2893,22 @@ mod tests { assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: E741, F841\n"); + let source_file = SourceFileBuilder::new(path.to_string_lossy(), contents).finish(); let messages = [ OldDiagnostic::new( AmbiguousVariableName("x".to_string()), TextRange::new(TextSize::from(0), TextSize::from(0)), + &source_file, ), OldDiagnostic::new( UnusedVariable { name: "x".to_string(), }, TextRange::new(TextSize::from(0), TextSize::from(0)), + &source_file, ), ] - .map(|d| message_from_diagnostic(d, path, contents)); + .map(message_from_diagnostic); let contents = "x = 1 # noqa"; let noqa_line_for = NoqaMapping::default(); let comment_ranges = @@ -2936,11 +2939,13 @@ print( ) "#; let noqa_line_for = [TextRange::new(8.into(), 68.into())].into_iter().collect(); + let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish(); let messages = [OldDiagnostic::new( PrintfStringFormatting, TextRange::new(12.into(), 79.into()), + &source_file, )] - .map(|d| message_from_diagnostic(d, path, source)); + .map(message_from_diagnostic); let comment_ranges = CommentRanges::default(); let edits = generate_noqa_edits( path, @@ -2968,11 +2973,13 @@ print( foo; bar = "; + let source_file = SourceFileBuilder::new(path.to_string_lossy(), source).finish(); let 
messages = [OldDiagnostic::new( UselessSemicolon, TextRange::new(4.into(), 5.into()), + &source_file, )] - .map(|d| message_from_diagnostic(d, path, source)); + .map(message_from_diagnostic); let noqa_line_for = NoqaMapping::default(); let comment_ranges = CommentRanges::default(); let edits = generate_noqa_edits( diff --git a/crates/ruff_linter/src/pyproject_toml.rs b/crates/ruff_linter/src/pyproject_toml.rs index d702161535..b9e3c51b15 100644 --- a/crates/ruff_linter/src/pyproject_toml.rs +++ b/crates/ruff_linter/src/pyproject_toml.rs @@ -12,7 +12,7 @@ use crate::registry::Rule; use crate::rules::ruff::rules::InvalidPyprojectToml; use crate::settings::LinterSettings; -pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) -> Vec { +pub fn lint_pyproject_toml(source_file: &SourceFile, settings: &LinterSettings) -> Vec { let Some(err) = toml::from_str::(source_file.source_text()).err() else { return Vec::default(); }; @@ -29,8 +29,9 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) - source_file.name(), ); if settings.rules.enabled(Rule::IOError) { - let diagnostic = OldDiagnostic::new(IOError { message }, TextRange::default()); - messages.push(Message::from_diagnostic(diagnostic, source_file, None)); + let diagnostic = + OldDiagnostic::new(IOError { message }, TextRange::default(), source_file); + messages.push(Message::from_diagnostic(diagnostic, None)); } else { warn!( "{}{}{} {message}", @@ -51,8 +52,12 @@ pub fn lint_pyproject_toml(source_file: SourceFile, settings: &LinterSettings) - if settings.rules.enabled(Rule::InvalidPyprojectToml) { let toml_err = err.message().to_string(); - let diagnostic = OldDiagnostic::new(InvalidPyprojectToml { message: toml_err }, range); - messages.push(Message::from_diagnostic(diagnostic, source_file, None)); + let diagnostic = OldDiagnostic::new( + InvalidPyprojectToml { message: toml_err }, + range, + source_file, + ); + messages.push(Message::from_diagnostic(diagnostic, None)); } messages diff --git a/crates/ruff_linter/src/rules/eradicate/rules/commented_out_code.rs b/crates/ruff_linter/src/rules/eradicate/rules/commented_out_code.rs index 118b28207b..2165b6c704 100644 --- a/crates/ruff_linter/src/rules/eradicate/rules/commented_out_code.rs +++ b/crates/ruff_linter/src/rules/eradicate/rules/commented_out_code.rs @@ -4,8 +4,9 @@ use ruff_source_file::{LineRanges, UniversalNewlineIterator}; use ruff_text_size::TextRange; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::settings::LinterSettings; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::{Edit, Fix, FixAvailability, Violation}; use super::super::detection::comment_contains_code; @@ -47,7 +48,7 @@ impl Violation for CommentedOutCode { /// ERA001 pub(crate) fn commented_out_code( - diagnostics: &mut Vec, + context: &LintContext, locator: &Locator, comment_ranges: &CommentRanges, settings: &LinterSettings, @@ -65,11 +66,11 @@ pub(crate) fn commented_out_code( // Verify that the comment is on its own line, and that it contains code. 
if is_own_line_comment(line) && comment_contains_code(line, &settings.task_tags[..]) { - let mut diagnostic = OldDiagnostic::new(CommentedOutCode, range); - diagnostic.set_fix(Fix::display_only_edit(Edit::range_deletion( - locator.full_lines_range(range), - ))); - diagnostics.push(diagnostic); + context + .report_diagnostic(CommentedOutCode, range) + .set_fix(Fix::display_only_edit(Edit::range_deletion( + locator.full_lines_range(range), + ))); } } } diff --git a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs index cdb8d31f78..a83206cdc1 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs +++ b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs @@ -533,7 +533,7 @@ fn check_dynamically_typed<'a, 'b, F>( checker: &'a Checker<'b>, annotation: &Expr, func: F, - diagnostics: &mut Vec>, + context: &mut Vec>, ) where F: FnOnce() -> String, { @@ -545,14 +545,13 @@ fn check_dynamically_typed<'a, 'b, F>( checker, checker.target_version(), ) { - diagnostics + context .push(checker.report_diagnostic(AnyType { name: func() }, annotation.range())); } } } else { if type_hint_resolves_to_any(annotation, checker, checker.target_version()) { - diagnostics - .push(checker.report_diagnostic(AnyType { name: func() }, annotation.range())); + context.push(checker.report_diagnostic(AnyType { name: func() }, annotation.range())); } } } diff --git a/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs b/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs index 4246e4edc4..66f9e6f657 100644 --- a/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs +++ b/crates/ruff_linter/src/rules/flake8_builtins/rules/stdlib_module_shadowing.rs @@ -7,8 +7,9 @@ use ruff_python_stdlib::path::is_module_file; use ruff_python_stdlib::sys::is_known_standard_library; use ruff_text_size::TextRange; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::settings::LinterSettings; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for modules that use the same names as Python standard-library @@ -69,9 +70,10 @@ pub(crate) fn stdlib_module_shadowing( mut path: &Path, settings: &LinterSettings, target_version: PythonVersion, -) -> Option { + context: &LintContext, +) { if !PySourceType::try_from_path(path).is_some_and(PySourceType::is_py_file) { - return None; + return; } // strip src and root prefixes before converting to a fully-qualified module path @@ -83,7 +85,8 @@ pub(crate) fn stdlib_module_shadowing( // for modules like `modname/__init__.py`, use the parent directory name, otherwise just trim // the `.py` extension let path = if is_module_file(path) { - Cow::from(path.parent()?) 
+ let Some(parent) = path.parent() else { return }; + Cow::from(parent) } else { Cow::from(path.with_extension("")) }; @@ -96,23 +99,25 @@ pub(crate) fn stdlib_module_shadowing( .map(|c| c.as_os_str().to_string_lossy()) .rev(); - let module_name = components.next()?; + let Some(module_name) = components.next() else { + return; + }; if is_allowed_module(settings, target_version, &module_name) { - return None; + return; } // not allowed generally, but check for a parent in non-strict mode if !settings.flake8_builtins.strict_checking && components.next().is_some() { - return None; + return; } - Some(OldDiagnostic::new( + context.report_diagnostic( StdlibModuleShadowing { name: module_name.to_string(), }, TextRange::default(), - )) + ); } /// Return the longest prefix of `path` between `settings.src` and `settings.project_root`. diff --git a/crates/ruff_linter/src/rules/flake8_commas/rules/trailing_commas.rs b/crates/ruff_linter/src/rules/flake8_commas/rules/trailing_commas.rs index e176bdc67b..6b2dc0fb51 100644 --- a/crates/ruff_linter/src/rules/flake8_commas/rules/trailing_commas.rs +++ b/crates/ruff_linter/src/rules/flake8_commas/rules/trailing_commas.rs @@ -4,8 +4,9 @@ use ruff_python_parser::{TokenKind, Tokens}; use ruff_text_size::{Ranged, TextRange}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::{AlwaysFixableViolation, Violation}; -use crate::{Edit, Fix, OldDiagnostic}; +use crate::{Edit, Fix}; /// Simplified token type. #[derive(Copy, Clone, PartialEq, Eq)] @@ -238,7 +239,7 @@ impl AlwaysFixableViolation for ProhibitedTrailingComma { /// COM812, COM818, COM819 pub(crate) fn trailing_commas( - diagnostics: &mut Vec, + lint_context: &LintContext, tokens: &Tokens, locator: &Locator, indexer: &Indexer, @@ -291,9 +292,7 @@ pub(crate) fn trailing_commas( // Update the comma context stack. let context = update_context(token, prev, prev_prev, &mut stack); - if let Some(diagnostic) = check_token(token, prev, prev_prev, context, locator) { - diagnostics.push(diagnostic); - } + check_token(token, prev, prev_prev, context, locator, lint_context); // Pop the current context if the current token ended it. // The top context is never popped (if unbalanced closing brackets). @@ -319,7 +318,8 @@ fn check_token( prev_prev: SimpleToken, context: Context, locator: &Locator, -) -> Option { + lint_context: &LintContext, +) { // Is it allowed to have a trailing comma before this token? let comma_allowed = token.ty == TokenType::ClosingBracket && match context.ty { @@ -352,20 +352,22 @@ fn check_token( }; if comma_prohibited { - let mut diagnostic = OldDiagnostic::new(ProhibitedTrailingComma, prev.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(diagnostic.range()))); - return Some(diagnostic); + let mut diagnostic = lint_context.report_diagnostic(ProhibitedTrailingComma, prev.range()); + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range))); + return; } // Is prev a prohibited trailing comma on a bare tuple? // Approximation: any comma followed by a statement-ending newline. 
let bare_comma_prohibited = prev.ty == TokenType::Comma && token.ty == TokenType::Newline; if bare_comma_prohibited { - return Some(OldDiagnostic::new(TrailingCommaOnBareTuple, prev.range())); + lint_context.report_diagnostic(TrailingCommaOnBareTuple, prev.range()); + return; } if !comma_allowed { - return None; + return; } // Comma is required if: @@ -383,7 +385,7 @@ fn check_token( ); if comma_required { let mut diagnostic = - OldDiagnostic::new(MissingTrailingComma, TextRange::empty(prev_prev.end())); + lint_context.report_diagnostic(MissingTrailingComma, TextRange::empty(prev_prev.end())); // Create a replacement that includes the final bracket (or other token), // rather than just inserting a comma at the end. This prevents the UP034 fix // removing any brackets in the same linter pass - doing both at the same time could @@ -393,9 +395,6 @@ fn check_token( format!("{contents},"), prev_prev.range(), ))); - Some(diagnostic) - } else { - None } } diff --git a/crates/ruff_linter/src/rules/flake8_copyright/rules/missing_copyright_notice.rs b/crates/ruff_linter/src/rules/flake8_copyright/rules/missing_copyright_notice.rs index c6612e9ca4..f2c578036c 100644 --- a/crates/ruff_linter/src/rules/flake8_copyright/rules/missing_copyright_notice.rs +++ b/crates/ruff_linter/src/rules/flake8_copyright/rules/missing_copyright_notice.rs @@ -2,8 +2,9 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::settings::LinterSettings; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for the absence of copyright notices within Python files. @@ -32,10 +33,11 @@ impl Violation for MissingCopyrightNotice { pub(crate) fn missing_copyright_notice( locator: &Locator, settings: &LinterSettings, -) -> Option { + context: &LintContext, +) { // Ignore files that are too small to contain a copyright notice. if locator.len() < settings.flake8_copyright.min_file_size { - return None; + return; } // Only search the first 4096 bytes in the file. @@ -47,15 +49,12 @@ pub(crate) fn missing_copyright_notice( Some(ref author) => { // Ensure that it's immediately followed by the author. 
if contents[match_.end()..].trim_start().starts_with(author) { - return None; + return; } } - None => return None, + None => return, } } - Some(OldDiagnostic::new( - MissingCopyrightNotice, - TextRange::default(), - )) + context.report_diagnostic(MissingCopyrightNotice, TextRange::default()); } diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/mod.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/mod.rs index bde95037ff..82065b8af0 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/mod.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/mod.rs @@ -8,7 +8,7 @@ pub(crate) use shebang_not_executable::*; pub(crate) use shebang_not_first_line::*; use crate::Locator; -use crate::OldDiagnostic; +use crate::checkers::ast::LintContext; use crate::codes::Rule; use crate::comments::shebang::ShebangDirective; use crate::settings::LinterSettings; @@ -20,7 +20,7 @@ mod shebang_not_executable; mod shebang_not_first_line; pub(crate) fn from_tokens( - diagnostics: &mut Vec, + context: &LintContext, path: &Path, locator: &Locator, comment_ranges: &CommentRanges, @@ -32,31 +32,21 @@ pub(crate) fn from_tokens( if let Some(shebang) = ShebangDirective::try_extract(comment) { has_any_shebang = true; - if let Some(diagnostic) = shebang_missing_python(range, &shebang) { - diagnostics.push(diagnostic); - } + shebang_missing_python(range, &shebang, context); if settings.rules.enabled(Rule::ShebangNotExecutable) { - if let Some(diagnostic) = shebang_not_executable(path, range) { - diagnostics.push(diagnostic); - } + shebang_not_executable(path, range, context); } - if let Some(diagnostic) = shebang_leading_whitespace(range, locator) { - diagnostics.push(diagnostic); - } + shebang_leading_whitespace(context, range, locator); - if let Some(diagnostic) = shebang_not_first_line(range, locator) { - diagnostics.push(diagnostic); - } + shebang_not_first_line(range, locator, context); } } if !has_any_shebang { if settings.rules.enabled(Rule::ShebangMissingExecutableFile) { - if let Some(diagnostic) = shebang_missing_executable_file(path) { - diagnostics.push(diagnostic); - } + shebang_missing_executable_file(path, context); } } } diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_leading_whitespace.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_leading_whitespace.rs index bf75f3a94d..f7ea698b50 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_leading_whitespace.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_leading_whitespace.rs @@ -3,7 +3,8 @@ use ruff_python_trivia::is_python_whitespace; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::checkers::ast::LintContext; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for whitespace before a shebang directive. @@ -45,12 +46,13 @@ impl AlwaysFixableViolation for ShebangLeadingWhitespace { /// EXE004 pub(crate) fn shebang_leading_whitespace( + context: &LintContext, range: TextRange, locator: &Locator, -) -> Option { +) { // If the shebang is at the beginning of the file, abort. if range.start() == TextSize::from(0) { - return None; + return; } // If the entire prefix _isn't_ whitespace, abort (this is handled by EXE005). 
@@ -59,11 +61,11 @@ pub(crate) fn shebang_leading_whitespace( .chars() .all(|c| is_python_whitespace(c) || matches!(c, '\r' | '\n')) { - return None; + return; } let prefix = TextRange::up_to(range.start()); - let mut diagnostic = OldDiagnostic::new(ShebangLeadingWhitespace, prefix); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(prefix))); - Some(diagnostic) + context + .report_diagnostic(ShebangLeadingWhitespace, prefix) + .set_fix(Fix::safe_edit(Edit::range_deletion(prefix))); } diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_executable_file.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_executable_file.rs index d36428e16d..8057afbeef 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_executable_file.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_executable_file.rs @@ -1,15 +1,11 @@ -#![allow(unused_imports)] - use std::path::Path; -use ruff_text_size::{Ranged, TextRange}; - use ruff_macros::{ViolationMetadata, derive_message_formats}; -use crate::registry::AsRule; +use crate::Violation; +use crate::checkers::ast::LintContext; #[cfg(target_family = "unix")] use crate::rules::flake8_executable::helpers::is_executable; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for executable `.py` files that do not have a shebang. @@ -49,22 +45,20 @@ impl Violation for ShebangMissingExecutableFile { /// EXE002 #[cfg(target_family = "unix")] -pub(crate) fn shebang_missing_executable_file(filepath: &Path) -> Option { +pub(crate) fn shebang_missing_executable_file(filepath: &Path, context: &LintContext) { // WSL supports Windows file systems, which do not have executable bits. // Instead, everything is executable. Therefore, we skip this rule on WSL. 
+ if is_wsl::is_wsl() { - return None; + return; } if let Ok(true) = is_executable(filepath) { - return Some(OldDiagnostic::new( + context.report_diagnostic( ShebangMissingExecutableFile, - TextRange::default(), - )); + ruff_text_size::TextRange::default(), + ); } - None } #[cfg(not(target_family = "unix"))] -pub(crate) fn shebang_missing_executable_file(_filepath: &Path) -> Option { - None -} +pub(crate) fn shebang_missing_executable_file(_filepath: &Path, _diagnostics: &LintContext) {} diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_python.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_python.rs index d3ab16a5be..f1beb8eb8a 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_python.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_missing_python.rs @@ -2,8 +2,9 @@ use ruff_text_size::TextRange; use ruff_macros::{ViolationMetadata, derive_message_formats}; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::comments::shebang::ShebangDirective; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for a shebang directive in `.py` files that does not contain `python`, @@ -44,10 +45,11 @@ impl Violation for ShebangMissingPython { pub(crate) fn shebang_missing_python( range: TextRange, shebang: &ShebangDirective, -) -> Option { + context: &LintContext, +) { if shebang.contains("python") || shebang.contains("pytest") || shebang.contains("uv run") { - return None; + return; } - Some(OldDiagnostic::new(ShebangMissingPython, range)) + context.report_diagnostic(ShebangMissingPython, range); } diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_executable.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_executable.rs index 0b2093f360..80cdb9859a 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_executable.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_executable.rs @@ -3,9 +3,10 @@ use std::path::Path; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_text_size::TextRange; +use crate::Violation; +use crate::checkers::ast::LintContext; #[cfg(target_family = "unix")] use crate::rules::flake8_executable::helpers::is_executable; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for a shebang directive in a file that is not executable. @@ -48,21 +49,23 @@ impl Violation for ShebangNotExecutable { /// EXE001 #[cfg(target_family = "unix")] -pub(crate) fn shebang_not_executable(filepath: &Path, range: TextRange) -> Option { +pub(crate) fn shebang_not_executable(filepath: &Path, range: TextRange, context: &LintContext) { // WSL supports Windows file systems, which do not have executable bits. // Instead, everything is executable. Therefore, we skip this rule on WSL. 
+ if is_wsl::is_wsl() { - return None; + return; } if let Ok(false) = is_executable(filepath) { - return Some(OldDiagnostic::new(ShebangNotExecutable, range)); + context.report_diagnostic(ShebangNotExecutable, range); } - - None } #[cfg(not(target_family = "unix"))] -pub(crate) fn shebang_not_executable(_filepath: &Path, _range: TextRange) -> Option { - None +pub(crate) fn shebang_not_executable( + _filepath: &Path, + _range: TextRange, + _diagnostics: &LintContext, +) { } diff --git a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_first_line.rs b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_first_line.rs index 891ff6aebb..31145048ef 100644 --- a/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_first_line.rs +++ b/crates/ruff_linter/src/rules/flake8_executable/rules/shebang_not_first_line.rs @@ -3,7 +3,8 @@ use ruff_python_trivia::is_python_whitespace; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; -use crate::{OldDiagnostic, Violation}; +use crate::Violation; +use crate::checkers::ast::LintContext; /// ## What it does /// Checks for a shebang directive that is not at the beginning of the file. @@ -42,10 +43,10 @@ impl Violation for ShebangNotFirstLine { } /// EXE005 -pub(crate) fn shebang_not_first_line(range: TextRange, locator: &Locator) -> Option { +pub(crate) fn shebang_not_first_line(range: TextRange, locator: &Locator, context: &LintContext) { // If the shebang is at the beginning of the file, abort. if range.start() == TextSize::from(0) { - return None; + return; } // If the entire prefix is whitespace, abort (this is handled by EXE004). @@ -54,8 +55,8 @@ pub(crate) fn shebang_not_first_line(range: TextRange, locator: &Locator) -> Opt .chars() .all(|c| is_python_whitespace(c) || matches!(c, '\r' | '\n')) { - return None; + return; } - Some(OldDiagnostic::new(ShebangNotFirstLine, range)) + context.report_diagnostic(ShebangNotFirstLine, range); } diff --git a/crates/ruff_linter/src/rules/flake8_fixme/rules/todos.rs b/crates/ruff_linter/src/rules/flake8_fixme/rules/todos.rs index 84f2b0fe83..06b4efdb3e 100644 --- a/crates/ruff_linter/src/rules/flake8_fixme/rules/todos.rs +++ b/crates/ruff_linter/src/rules/flake8_fixme/rules/todos.rs @@ -1,7 +1,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::directives::{TodoComment, TodoDirectiveKind}; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for "TODO" comments. @@ -114,19 +115,25 @@ impl Violation for LineContainsHack { } } -pub(crate) fn todos(diagnostics: &mut Vec, directive_ranges: &[TodoComment]) { - diagnostics.extend( - directive_ranges - .iter() - .map(|TodoComment { directive, .. }| match directive.kind { - // FIX001 - TodoDirectiveKind::Fixme => OldDiagnostic::new(LineContainsFixme, directive.range), - // FIX002 - TodoDirectiveKind::Hack => OldDiagnostic::new(LineContainsHack, directive.range), - // FIX003 - TodoDirectiveKind::Todo => OldDiagnostic::new(LineContainsTodo, directive.range), - // FIX004 - TodoDirectiveKind::Xxx => OldDiagnostic::new(LineContainsXxx, directive.range), - }), - ); +pub(crate) fn todos(context: &LintContext, directive_ranges: &[TodoComment]) { + for TodoComment { directive, .. 
} in directive_ranges { + match directive.kind { + // FIX001 + TodoDirectiveKind::Fixme => { + context.report_diagnostic(LineContainsFixme, directive.range); + } + // FIX002 + TodoDirectiveKind::Hack => { + context.report_diagnostic(LineContainsHack, directive.range); + } + // FIX003 + TodoDirectiveKind::Todo => { + context.report_diagnostic(LineContainsTodo, directive.range); + } + // FIX004 + TodoDirectiveKind::Xxx => { + context.report_diagnostic(LineContainsXxx, directive.range); + } + } + } } diff --git a/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/implicit.rs b/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/implicit.rs index db3f798f24..6b946933e2 100644 --- a/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/implicit.rs +++ b/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/implicit.rs @@ -10,8 +10,9 @@ use ruff_source_file::LineRanges; use ruff_text_size::{Ranged, TextRange}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::settings::LinterSettings; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::{Edit, Fix, FixAvailability, Violation}; /// ## What it does /// Checks for implicitly concatenated strings on a single line. @@ -103,7 +104,7 @@ impl Violation for MultiLineImplicitStringConcatenation { /// ISC001, ISC002 pub(crate) fn implicit( - diagnostics: &mut Vec, + context: &LintContext, tokens: &Tokens, locator: &Locator, indexer: &Indexer, @@ -145,12 +146,12 @@ pub(crate) fn implicit( }; if locator.contains_line_break(TextRange::new(a_range.end(), b_range.start())) { - diagnostics.push(OldDiagnostic::new( + context.report_diagnostic( MultiLineImplicitStringConcatenation, TextRange::new(a_range.start(), b_range.end()), - )); + ); } else { - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( SingleLineImplicitStringConcatenation, TextRange::new(a_range.start(), b_range.end()), ); @@ -158,8 +159,6 @@ pub(crate) fn implicit( if let Some(fix) = concatenate_strings(a_range, b_range, locator) { diagnostic.set_fix(fix); } - - diagnostics.push(diagnostic); } } } diff --git a/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs b/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs index f5b6a4366e..0e78057e95 100644 --- a/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs +++ b/crates/ruff_linter/src/rules/flake8_no_pep420/rules/implicit_namespace_package.rs @@ -7,10 +7,11 @@ use ruff_python_trivia::CommentRanges; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::comments::shebang::ShebangDirective; use crate::fs; use crate::package::PackageRoot; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for packages that are missing an `__init__.py` file. @@ -56,6 +57,7 @@ impl Violation for ImplicitNamespacePackage { } /// INP001 +#[expect(clippy::too_many_arguments)] pub(crate) fn implicit_namespace_package( path: &Path, package: Option>, @@ -64,7 +66,8 @@ pub(crate) fn implicit_namespace_package( project_root: &Path, src: &[PathBuf], allow_nested_roots: bool, -) -> Option { + context: &LintContext, +) { if package.is_none() // Ignore non-`.py` files, which don't require an `__init__.py`. && PySourceType::try_from_path(path).is_some_and(PySourceType::is_py_file) @@ -83,16 +86,14 @@ pub(crate) fn implicit_namespace_package( // Ignore PEP 723 scripts. 
&& ScriptTag::parse(locator.contents().as_bytes()).is_none() { - return Some(OldDiagnostic::new( + context.report_diagnostic( ImplicitNamespacePackage { filename: fs::relativize_path(path), parent: None, }, TextRange::default(), - )); - } - - if allow_nested_roots { + ); + } else if allow_nested_roots { if let Some(PackageRoot::Nested { path: root }) = package.as_ref() { if path.ends_with("__init__.py") { // Identify the intermediary package that's missing the `__init__.py` file. @@ -100,17 +101,15 @@ pub(crate) fn implicit_namespace_package( .ancestors() .find(|parent| !parent.join("__init__.py").exists()) { - return Some(OldDiagnostic::new( + context.report_diagnostic( ImplicitNamespacePackage { filename: fs::relativize_path(path), parent: Some(fs::relativize_path(parent)), }, TextRange::default(), - )); + ); } } } } - - None } diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/type_comment_in_stub.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/type_comment_in_stub.rs index 62177e18a6..25f5e39ccb 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/type_comment_in_stub.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/type_comment_in_stub.rs @@ -6,7 +6,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_trivia::CommentRanges; use crate::Locator; -use crate::{OldDiagnostic, Violation}; +use crate::Violation; +use crate::checkers::ast::LintContext; /// ## What it does /// Checks for the use of type comments (e.g., `x = 1 # type: int`) in stub @@ -38,7 +39,7 @@ impl Violation for TypeCommentInStub { /// PYI033 pub(crate) fn type_comment_in_stub( - diagnostics: &mut Vec, + context: &LintContext, locator: &Locator, comment_ranges: &CommentRanges, ) { @@ -46,7 +47,7 @@ pub(crate) fn type_comment_in_stub( let comment = locator.slice(range); if TYPE_COMMENT_REGEX.is_match(comment) && !TYPE_IGNORE_REGEX.is_match(comment) { - diagnostics.push(OldDiagnostic::new(TypeCommentInStub, range)); + context.report_diagnostic(TypeCommentInStub, range); } } } diff --git a/crates/ruff_linter/src/rules/flake8_todos/rules/todos.rs b/crates/ruff_linter/src/rules/flake8_todos/rules/todos.rs index 9ee015251c..0c36eb3a46 100644 --- a/crates/ruff_linter/src/rules/flake8_todos/rules/todos.rs +++ b/crates/ruff_linter/src/rules/flake8_todos/rules/todos.rs @@ -7,8 +7,9 @@ use ruff_python_trivia::CommentRanges; use ruff_text_size::{TextLen, TextRange, TextSize}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::directives::{TodoComment, TodoDirective, TodoDirectiveKind}; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic, Violation}; +use crate::{AlwaysFixableViolation, Edit, Fix, Violation}; /// ## What it does /// Checks that a TODO comment is labelled with "TODO". 
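Rules that previously pushed into a shared `&mut Vec<OldDiagnostic>` (PYI033 above, the FIX00x group, and the TD rules that follow) become plain loops: when no fix is attached, the guard returned by `report_diagnostic` can simply be dropped at the end of the statement. A rough sketch of that shape; `example_comment_rule` and `ExampleComment` are hypothetical, the latter declared like `ExampleViolation` in the earlier sketch but implementing `Violation`, since no fix is offered:

    use ruff_python_trivia::CommentRanges;

    use crate::Locator;
    use crate::checkers::ast::LintContext;

    pub(crate) fn example_comment_rule(
        context: &LintContext,
        locator: &Locator,
        comment_ranges: &CommentRanges,
    ) {
        for range in comment_ranges {
            let comment = locator.slice(range);
            if comment.contains("XXX") {
                // Dropping the returned guard at the `;` records the diagnostic.
                context.report_diagnostic(ExampleComment, range);
            }
        }
    }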
@@ -248,7 +249,7 @@ static ISSUE_LINK_TODO_LINE_REGEX_SET: LazyLock<RegexSet> = LazyLock::new(|| { }); pub(crate) fn todos( - diagnostics: &mut Vec<OldDiagnostic>, + context: &LintContext, todo_comments: &[TodoComment], locator: &Locator, comment_ranges: &CommentRanges, @@ -267,8 +268,8 @@ pub(crate) fn todos( continue; } - directive_errors(diagnostics, directive); - static_errors(diagnostics, content, range, directive); + directive_errors(context, directive); + static_errors(context, content, range, directive); let mut has_issue_link = false; // VSCode recommended links on same line are ok: @@ -307,20 +308,20 @@ pub(crate) fn todos( if !has_issue_link { // TD003 - diagnostics.push(OldDiagnostic::new(MissingTodoLink, directive.range)); + context.report_diagnostic(MissingTodoLink, directive.range); } } } /// Check that the directive itself is valid. This function modifies `diagnostics` in-place. -fn directive_errors(diagnostics: &mut Vec<OldDiagnostic>, directive: &TodoDirective) { +fn directive_errors(context: &LintContext, directive: &TodoDirective) { if directive.content == "TODO" { return; } if directive.content.to_uppercase() == "TODO" { // TD006 - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( InvalidTodoCapitalization { tag: directive.content.to_string(), }, @@ -331,22 +332,20 @@ fn directive_errors(diagnostics: &mut Vec<OldDiagnostic>, directive: &TodoDirect "TODO".to_string(), directive.range, ))); - - diagnostics.push(diagnostic); } else { // TD001 - diagnostics.push(OldDiagnostic::new( + context.report_diagnostic( InvalidTodoTag { tag: directive.content.to_string(), }, directive.range, - )); + ); } } /// Checks for "static" errors in the comment: missing colon, missing author, etc. -fn static_errors( - diagnostics: &mut Vec<OldDiagnostic>, +pub(crate) fn static_errors( + context: &LintContext, comment: &str, comment_range: TextRange, directive: &TodoDirective, @@ -367,13 +366,13 @@ fn static_errors( TextSize::try_from(end_index).unwrap() } else { // TD002 - diagnostics.push(OldDiagnostic::new(MissingTodoAuthor, directive.range)); + context.report_diagnostic(MissingTodoAuthor, directive.range); TextSize::new(0) } } else { // TD002 - diagnostics.push(OldDiagnostic::new(MissingTodoAuthor, directive.range)); + context.report_diagnostic(MissingTodoAuthor, directive.range); TextSize::new(0) }; @@ -382,21 +381,18 @@ fn static_errors( if let Some(after_colon) = after_author.strip_prefix(':') { if after_colon.is_empty() { // TD005 - diagnostics.push(OldDiagnostic::new(MissingTodoDescription, directive.range)); + context.report_diagnostic(MissingTodoDescription, directive.range); } else if !after_colon.starts_with(char::is_whitespace) { // TD007 - diagnostics.push(OldDiagnostic::new( - MissingSpaceAfterTodoColon, - directive.range, - )); + context.report_diagnostic(MissingSpaceAfterTodoColon, directive.range); } } else { // TD004 - diagnostics.push(OldDiagnostic::new(MissingTodoColon, directive.range)); + context.report_diagnostic(MissingTodoColon, directive.range); if after_author.is_empty() { // TD005 - diagnostics.push(OldDiagnostic::new(MissingTodoDescription, directive.range)); + context.report_diagnostic(MissingTodoDescription, directive.range); } } } diff --git a/crates/ruff_linter/src/rules/isort/rules/add_required_imports.rs b/crates/ruff_linter/src/rules/isort/rules/add_required_imports.rs index 0f263ba83c..3948202883 100644 --- a/crates/ruff_linter/src/rules/isort/rules/add_required_imports.rs +++ b/crates/ruff_linter/src/rules/isort/rules/add_required_imports.rs @@ -7,9 +7,10 @@ use 
ruff_python_semantic::{FutureImport, NameImport}; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::importer::Importer; use crate::settings::LinterSettings; -use crate::{AlwaysFixableViolation, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Fix}; /// ## What it does /// Adds any required imports, as specified by the user, to the top of the @@ -91,15 +92,16 @@ fn add_required_import( locator: &Locator, stylist: &Stylist, source_type: PySourceType, -) -> Option { + context: &LintContext, +) { // Don't add imports to semantically-empty files. if parsed.suite().iter().all(is_docstring_stmt) { - return None; + return; } // We don't need to add `__future__` imports to stubs. if source_type.is_stub() && required_import.is_future_import() { - return None; + return; } // If the import is already present in a top-level block, don't add it. @@ -108,18 +110,17 @@ fn add_required_import( .iter() .any(|stmt| includes_import(stmt, required_import)) { - return None; + return; } // Always insert the diagnostic at top-of-file. - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( MissingRequiredImport(required_import.to_string()), TextRange::default(), ); diagnostic.set_fix(Fix::safe_edit( Importer::new(parsed, locator, stylist).add_import(required_import, TextSize::default()), )); - Some(diagnostic) } /// I002 @@ -129,13 +130,16 @@ pub(crate) fn add_required_imports( stylist: &Stylist, settings: &LinterSettings, source_type: PySourceType, -) -> Vec { - settings - .isort - .required_imports - .iter() - .filter_map(|required_import| { - add_required_import(required_import, parsed, locator, stylist, source_type) - }) - .collect() + context: &LintContext, +) { + for required_import in &settings.isort.required_imports { + add_required_import( + required_import, + parsed, + locator, + stylist, + source_type, + context, + ); + } } diff --git a/crates/ruff_linter/src/rules/isort/rules/organize_imports.rs b/crates/ruff_linter/src/rules/isort/rules/organize_imports.rs index 6fdd6e0794..1fe0659864 100644 --- a/crates/ruff_linter/src/rules/isort/rules/organize_imports.rs +++ b/crates/ruff_linter/src/rules/isort/rules/organize_imports.rs @@ -13,12 +13,13 @@ use ruff_text_size::{Ranged, TextRange}; use super::super::block::Block; use super::super::{comments, format_imports}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::line_width::LineWidthBuilder; use crate::package::PackageRoot; use crate::preview::is_full_path_match_source_strategy_enabled; use crate::rules::isort::categorize::MatchSourceStrategy; use crate::settings::LinterSettings; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::{Edit, Fix, FixAvailability, Violation}; /// ## What it does /// De-duplicates, groups, and sorts imports based on the provided `isort` settings. @@ -98,7 +99,8 @@ pub(crate) fn organize_imports( source_type: PySourceType, tokens: &Tokens, target_version: PythonVersion, -) -> Option { + context: &LintContext, +) { let indentation = locator.slice(extract_indentation_range(&block.imports, locator)); let indentation = leading_indentation(indentation); @@ -110,7 +112,8 @@ pub(crate) fn organize_imports( || indexer .followed_by_multi_statement_line(block.imports.last().unwrap(), locator.contents()) { - return Some(OldDiagnostic::new(UnsortedImports, range)); + context.report_diagnostic(UnsortedImports, range); + return; } // Extract comments. 
Take care to grab any inline comments from the last line. @@ -153,12 +156,11 @@ pub(crate) fn organize_imports( let fix_range = TextRange::new(locator.line_start(range.start()), trailing_line_end); let actual = locator.slice(fix_range); if matches_ignoring_indentation(actual, &expected) { - return None; + return; } - let mut diagnostic = OldDiagnostic::new(UnsortedImports, range); + let mut diagnostic = context.report_diagnostic(UnsortedImports, range); diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( indent(&expected, indentation).to_string(), fix_range, ))); - Some(diagnostic) } diff --git a/crates/ruff_linter/src/rules/pep8_naming/rules/invalid_module_name.rs b/crates/ruff_linter/src/rules/pep8_naming/rules/invalid_module_name.rs index d2d60c6a4b..6e7e699282 100644 --- a/crates/ruff_linter/src/rules/pep8_naming/rules/invalid_module_name.rs +++ b/crates/ruff_linter/src/rules/pep8_naming/rules/invalid_module_name.rs @@ -7,9 +7,10 @@ use ruff_python_stdlib::identifiers::{is_migration_name, is_module_name}; use ruff_python_stdlib::path::is_module_file; use ruff_text_size::TextRange; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::package::PackageRoot; use crate::rules::pep8_naming::settings::IgnoreNames; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for module names that do not follow the `snake_case` naming @@ -54,9 +55,10 @@ pub(crate) fn invalid_module_name( path: &Path, package: Option>, ignore_names: &IgnoreNames, -) -> Option { + context: &LintContext, +) { if !PySourceType::try_from_path(path).is_some_and(PySourceType::is_py_file_or_stub) { - return None; + return; } if let Some(package) = package { @@ -78,18 +80,16 @@ pub(crate) fn invalid_module_name( if !is_valid_module_name { // Ignore any explicitly-allowed names. if ignore_names.matches(&module_name) { - return None; + return; } - return Some(OldDiagnostic::new( + context.report_diagnostic( InvalidModuleName { name: module_name.to_string(), }, TextRange::default(), - )); + ); } } - - None } /// Return `true` if a [`Path`] refers to a migration file. diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/blank_lines.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/blank_lines.rs index 723ccbf49d..4075a245c3 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/blank_lines.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/blank_lines.rs @@ -21,7 +21,7 @@ use crate::AlwaysFixableViolation; use crate::Edit; use crate::Fix; use crate::Locator; -use crate::OldDiagnostic; +use crate::checkers::ast::LintContext; use crate::checkers::logical_lines::expand_indent; use crate::line_width::IndentWidth; use crate::rules::pycodestyle::helpers::is_non_logical_token; @@ -690,8 +690,7 @@ impl Status { } /// Contains variables used for the linting of blank lines. 
-#[derive(Debug)] -pub(crate) struct BlankLinesChecker<'a> { +pub(crate) struct BlankLinesChecker<'a, 'b> { stylist: &'a Stylist<'a>, locator: &'a Locator<'a>, indent_width: IndentWidth, @@ -699,16 +698,18 @@ pub(crate) struct BlankLinesChecker<'a> { lines_between_types: usize, source_type: PySourceType, cell_offsets: Option<&'a CellOffsets>, + context: &'a LintContext<'b>, } -impl<'a> BlankLinesChecker<'a> { +impl<'a, 'b> BlankLinesChecker<'a, 'b> { pub(crate) fn new( locator: &'a Locator<'a>, stylist: &'a Stylist<'a>, settings: &crate::settings::LinterSettings, source_type: PySourceType, cell_offsets: Option<&'a CellOffsets>, - ) -> BlankLinesChecker<'a> { + context: &'a LintContext<'b>, + ) -> BlankLinesChecker<'a, 'b> { BlankLinesChecker { stylist, locator, @@ -717,11 +718,12 @@ impl<'a> BlankLinesChecker<'a> { lines_between_types: settings.isort.lines_between_types, source_type, cell_offsets, + context, } } /// E301, E302, E303, E304, E305, E306 - pub(crate) fn check_lines(&self, tokens: &Tokens, diagnostics: &mut Vec) { + pub(crate) fn check_lines(&self, tokens: &Tokens) { let mut prev_indent_length: Option = None; let mut prev_logical_line: Option = None; let mut state = BlankLinesState::default(); @@ -762,7 +764,7 @@ impl<'a> BlankLinesChecker<'a> { state.class_status.update(&logical_line); state.fn_status.update(&logical_line); - self.check_line(&logical_line, &state, prev_indent_length, diagnostics); + self.check_line(&logical_line, &state, prev_indent_length); match logical_line.kind { LogicalLineKind::Class => { @@ -824,7 +826,6 @@ impl<'a> BlankLinesChecker<'a> { line: &LogicalLineInfo, state: &BlankLinesState, prev_indent_length: Option, - diagnostics: &mut Vec, ) { if line.preceding_blank_lines == 0 // Only applies to methods. @@ -842,14 +843,13 @@ impl<'a> BlankLinesChecker<'a> { && !self.source_type.is_stub() { // E301 - let mut diagnostic = - OldDiagnostic::new(BlankLineBetweenMethods, line.first_token_range); + let mut diagnostic = self + .context + .report_diagnostic(BlankLineBetweenMethods, line.first_token_range); diagnostic.set_fix(Fix::safe_edit(Edit::insertion( self.stylist.line_ending().to_string(), self.locator.line_start(state.last_non_comment_line_end), ))); - - diagnostics.push(diagnostic); } // Blank lines in stub files are used to group definitions. Don't enforce blank lines. @@ -897,7 +897,7 @@ impl<'a> BlankLinesChecker<'a> { && !line.is_beginning_of_cell { // E302 - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = self.context.report_diagnostic( BlankLinesTopLevel { actual_blank_lines: line.preceding_blank_lines.count(), expected_blank_lines: expected_blank_lines_before_definition, @@ -921,8 +921,6 @@ impl<'a> BlankLinesChecker<'a> { self.locator.line_start(state.last_non_comment_line_end), ))); } - - diagnostics.push(diagnostic); } // If between `import` and `from .. 
import ..` or the other way round, @@ -941,7 +939,7 @@ impl<'a> BlankLinesChecker<'a> { if line.blank_lines > max_blank_lines { // E303 - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = self.context.report_diagnostic( TooManyBlankLines { actual_blank_lines: line.blank_lines.count(), }, @@ -958,8 +956,6 @@ impl<'a> BlankLinesChecker<'a> { ))); } } - - diagnostics.push(diagnostic); } if matches!(state.follows, Follows::Decorator) @@ -967,7 +963,7 @@ impl<'a> BlankLinesChecker<'a> { && line.preceding_blank_lines > 0 { // E304 - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = self.context.report_diagnostic( BlankLineAfterDecorator { actual_blank_lines: line.preceding_blank_lines.count(), }, @@ -997,8 +993,6 @@ impl<'a> BlankLinesChecker<'a> { }; diagnostic.set_fix(fix); - - diagnostics.push(diagnostic); } if line.preceding_blank_lines < BLANK_LINES_TOP_LEVEL @@ -1014,7 +1008,7 @@ impl<'a> BlankLinesChecker<'a> { && !line.is_beginning_of_cell { // E305 - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = self.context.report_diagnostic( BlankLinesAfterFunctionOrClass { actual_blank_lines: line.preceding_blank_lines.count(), }, @@ -1036,8 +1030,6 @@ impl<'a> BlankLinesChecker<'a> { self.locator.line_start(state.last_non_comment_line_end), ))); } - - diagnostics.push(diagnostic); } if line.preceding_blank_lines == 0 @@ -1057,15 +1049,14 @@ impl<'a> BlankLinesChecker<'a> { && !self.source_type.is_stub() { // E306 - let mut diagnostic = - OldDiagnostic::new(BlankLinesBeforeNestedDefinition, line.first_token_range); + let mut diagnostic = self + .context + .report_diagnostic(BlankLinesBeforeNestedDefinition, line.first_token_range); diagnostic.set_fix(Fix::safe_edit(Edit::insertion( self.stylist.line_ending().to_string(), self.locator.line_start(line.first_token_range.start()), ))); - - diagnostics.push(diagnostic); } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/compound_statements.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/compound_statements.rs index 4a5e166fce..d6762adee5 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/compound_statements.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/compound_statements.rs @@ -6,8 +6,9 @@ use ruff_python_parser::{TokenIterWithContext, TokenKind, Tokens}; use ruff_text_size::{Ranged, TextSize}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::{AlwaysFixableViolation, Violation}; -use crate::{Edit, Fix, OldDiagnostic}; +use crate::{Edit, Fix}; /// ## What it does /// Checks for compound statements (multiple statements on the same line). 
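For checkers implemented as structs rather than free functions, the `BlankLinesChecker` changes above show the shape: the struct gains a second lifetime parameter, stores the `&LintContext`, and its methods report through `self.context`, so the `diagnostics: &mut Vec<OldDiagnostic>` parameter disappears from every method. A condensed, hypothetical version of that shape, reusing the hypothetical `ExampleComment` violation from the previous sketch:

    use ruff_text_size::TextRange;

    use crate::checkers::ast::LintContext;

    /// Hypothetical struct-based checker mirroring `BlankLinesChecker`.
    pub(crate) struct ExampleChecker<'a, 'b> {
        context: &'a LintContext<'b>,
    }

    impl<'a, 'b> ExampleChecker<'a, 'b> {
        pub(crate) fn new(context: &'a LintContext<'b>) -> ExampleChecker<'a, 'b> {
            ExampleChecker { context }
        }

        fn check_line(&self, range: TextRange) {
            // Reporting goes through the stored context; nothing is returned.
            self.context.report_diagnostic(ExampleComment, range);
        }
    }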
@@ -98,7 +99,7 @@ impl AlwaysFixableViolation for UselessSemicolon { /// E701, E702, E703 pub(crate) fn compound_statements( - diagnostics: &mut Vec, + context: &LintContext, tokens: &Tokens, locator: &Locator, indexer: &Indexer, @@ -167,14 +168,14 @@ pub(crate) fn compound_statements( !has_non_trivia_tokens_till(token_iter.clone(), cell_range.end()) })) { - let mut diagnostic = OldDiagnostic::new(UselessSemicolon, range); - diagnostic.set_fix(Fix::safe_edit(Edit::deletion( - indexer - .preceded_by_continuations(range.start(), locator.contents()) - .unwrap_or(range.start()), - range.end(), - ))); - diagnostics.push(diagnostic); + context + .report_diagnostic(UselessSemicolon, range) + .set_fix(Fix::safe_edit(Edit::deletion( + indexer + .preceded_by_continuations(range.start(), locator.contents()) + .unwrap_or(range.start()), + range.end(), + ))); } } @@ -224,10 +225,7 @@ pub(crate) fn compound_statements( | TokenKind::NonLogicalNewline => {} _ => { if let Some(range) = semi { - diagnostics.push(OldDiagnostic::new( - MultipleStatementsOnOneLineSemicolon, - range, - )); + context.report_diagnostic(MultipleStatementsOnOneLineSemicolon, range); // Reset. semi = None; @@ -235,7 +233,7 @@ pub(crate) fn compound_statements( } if let Some(range) = colon { - diagnostics.push(OldDiagnostic::new(MultipleStatementsOnOneLineColon, range)); + context.report_diagnostic(MultipleStatementsOnOneLineColon, range); // Reset. colon = None; diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs index c669a9c7e2..c9f0ae0753 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/doc_line_too_long.rs @@ -2,9 +2,10 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_trivia::CommentRanges; use ruff_source_file::Line; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::rules::pycodestyle::overlong::Overlong; use crate::settings::LinterSettings; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for doc lines that exceed the specified maximum character length. 
@@ -86,9 +87,13 @@ pub(crate) fn doc_line_too_long( line: &Line, comment_ranges: &CommentRanges, settings: &LinterSettings, -) -> Option { - let limit = settings.pycodestyle.max_doc_length?; - Overlong::try_from_line( + context: &LintContext, +) { + let Some(limit) = settings.pycodestyle.max_doc_length else { + return; + }; + + if let Some(overlong) = Overlong::try_from_line( line, comment_ranges, limit, @@ -98,11 +103,10 @@ pub(crate) fn doc_line_too_long( &[] }, settings.tab_size, - ) - .map(|overlong| { - OldDiagnostic::new( + ) { + context.report_diagnostic( DocLineTooLong(overlong.width(), limit.value() as usize), overlong.range(), - ) - }) + ); + } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs index dd93733790..9a03e0f98a 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/line_too_long.rs @@ -2,9 +2,10 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_trivia::CommentRanges; use ruff_source_file::Line; +use crate::Violation; +use crate::checkers::ast::LintContext; use crate::rules::pycodestyle::overlong::Overlong; use crate::settings::LinterSettings; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for lines that exceed the specified maximum character length. @@ -84,10 +85,11 @@ pub(crate) fn line_too_long( line: &Line, comment_ranges: &CommentRanges, settings: &LinterSettings, -) -> Option { + context: &LintContext, +) { let limit = settings.pycodestyle.max_line_length; - Overlong::try_from_line( + if let Some(overlong) = Overlong::try_from_line( line, comment_ranges, limit, @@ -97,11 +99,10 @@ pub(crate) fn line_too_long( &[] }, settings.tab_size, - ) - .map(|overlong| { - OldDiagnostic::new( + ) { + context.report_diagnostic( LineTooLong(overlong.width(), limit.value() as usize), overlong.range(), - ) - }) + ); + } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/extraneous_whitespace.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/extraneous_whitespace.rs index d05d154e17..cf259832bb 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/extraneous_whitespace.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/extraneous_whitespace.rs @@ -5,7 +5,6 @@ use ruff_text_size::{Ranged, TextRange}; use crate::AlwaysFixableViolation; use crate::Edit; use crate::Fix; -use crate::OldDiagnostic; use crate::checkers::logical_lines::LogicalLinesContext; use super::{LogicalLine, Whitespace}; @@ -165,13 +164,13 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin BracketOrPunctuation::OpenBracket(symbol) if symbol != '{' || fstrings == 0 => { let (trailing, trailing_len) = line.trailing_whitespace(token); if !matches!(trailing, Whitespace::None) { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceAfterOpenBracket { symbol }, TextRange::at(token.end(), trailing_len), - ); - diagnostic - .set_fix(Fix::safe_edit(Edit::range_deletion(diagnostic.range()))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range))); + } } } BracketOrPunctuation::CloseBracket(symbol) if symbol != '}' || fstrings == 0 => { @@ -179,13 +178,13 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin if let 
(Whitespace::Single | Whitespace::Many | Whitespace::Tab, offset) = line.leading_whitespace(token) { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforeCloseBracket { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic - .set_fix(Fix::safe_edit(Edit::range_deletion(diagnostic.range()))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range))); + } } } } @@ -205,14 +204,14 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin // If we're in the second half of a double colon, disallow // any whitespace (e.g., `foo[1: :2]` or `foo[1 : : 2]`). if matches!(prev_token, Some(TokenKind::Colon)) { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforePunctuation { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion( - diagnostic.range(), - ))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic + .set_fix(Fix::safe_edit(Edit::range_deletion(range))); + } } else if iter.peek().is_some_and(|token| { matches!(token.kind(), TokenKind::Rsqb | TokenKind::Comma) }) { @@ -220,14 +219,15 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin // Or `foo[index :, 2]`, but not `foo[index :, 2]`. if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforePunctuation { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion( - diagnostic.range(), - ))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit( + Edit::range_deletion(range), + )); + } } } else if iter.peek().is_some_and(|token| { matches!( @@ -245,15 +245,19 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin // whitespace before the colon and so should the fix if let (Whitespace::Many | Whitespace::Tab, offset) = whitespace { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforePunctuation { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edits( - Edit::range_deletion(diagnostic.range()), - [Edit::insertion(" ".into(), token.start() - offset)], - )); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edits( + Edit::range_deletion(range), + [Edit::insertion( + " ".into(), + token.start() - offset, + )], + )); + } } } else { // Allow, e.g., `foo[1:2]` or `foo[1 : 2]` or `foo[1 :: 2]`. 
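The logical-lines rules (the E2xx whitespace checks above and below) go through `LogicalLinesContext::report_diagnostic`, which hands back an `Option` of the guard because that context applies the enabled-rule check itself; note that the converted rules capture `diagnostic.range()` in a local before attaching the fix. In sketch form, with a hypothetical rule function and the `ExampleViolation` type from the first sketch:

    use ruff_text_size::{Ranged, TextRange};

    use crate::checkers::logical_lines::LogicalLinesContext;
    use crate::{Edit, Fix};

    pub(crate) fn example_logical_line_rule(context: &mut LogicalLinesContext, range: TextRange) {
        // `None` means the rule is disabled; otherwise the guard records the
        // diagnostic on drop, after the fix has been attached.
        if let Some(mut diagnostic) = context.report_diagnostic(ExampleViolation, range) {
            let range = diagnostic.range();
            diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range)));
        }
    }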
@@ -262,14 +266,15 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin .filter(|next| matches!(next.kind(), TokenKind::Colon)) .unwrap_or(&token); if line.trailing_whitespace(token) != whitespace { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforePunctuation { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion( - diagnostic.range(), - ))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit( + Edit::range_deletion(range), + )); + } } } } else { @@ -280,14 +285,13 @@ pub(crate) fn extraneous_whitespace(line: &LogicalLine, context: &mut LogicalLin // Avoid removing any whitespace for f-string debug expressions. continue; } - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( WhitespaceBeforePunctuation { symbol }, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion( - diagnostic.range(), - ))); - context.push_diagnostic(diagnostic); + ) { + let range = diagnostic.range(); + diagnostic.set_fix(Fix::safe_edit(Edit::range_deletion(range))); + } } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/indentation.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/indentation.rs index de7349d059..a4e07cee1f 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/indentation.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/indentation.rs @@ -2,8 +2,9 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_parser::TokenKind; use ruff_text_size::TextRange; -use crate::OldDiagnostic; use crate::Violation; +use crate::checkers::ast::LintContext; +use crate::settings::LinterSettings; use super::LogicalLine; @@ -256,6 +257,7 @@ impl Violation for OverIndented { } /// E111, E112, E113, E114, E115, E116, E117 +#[expect(clippy::too_many_arguments)] pub(crate) fn indentation( logical_line: &LogicalLine, prev_logical_line: Option<&LogicalLine>, @@ -264,57 +266,58 @@ pub(crate) fn indentation( prev_indent_level: Option, indent_size: usize, range: TextRange, -) -> Vec { - let mut diagnostics = vec![]; - + context: &LintContext, + settings: &LinterSettings, +) { if indent_level % indent_size != 0 { - diagnostics.push(if logical_line.is_comment_only() { - OldDiagnostic::new( + if logical_line.is_comment_only() { + context.report_diagnostic_if_enabled( IndentationWithInvalidMultipleComment { indent_width: indent_size, }, range, - ) + settings, + ); } else { - OldDiagnostic::new( + context.report_diagnostic_if_enabled( IndentationWithInvalidMultiple { indent_width: indent_size, }, range, - ) - }); + settings, + ); + } } let indent_expect = prev_logical_line .and_then(|prev_logical_line| prev_logical_line.tokens_trimmed().last()) .is_some_and(|t| t.kind() == TokenKind::Colon); if indent_expect && indent_level <= prev_indent_level.unwrap_or(0) { - diagnostics.push(if logical_line.is_comment_only() { - OldDiagnostic::new(NoIndentedBlockComment, range) + if logical_line.is_comment_only() { + context.report_diagnostic_if_enabled(NoIndentedBlockComment, range, settings); } else { - OldDiagnostic::new(NoIndentedBlock, range) - }); + context.report_diagnostic_if_enabled(NoIndentedBlock, range, settings); + } } else if !indent_expect && prev_indent_level.is_some_and(|prev_indent_level| 
indent_level > prev_indent_level) { - diagnostics.push(if logical_line.is_comment_only() { - OldDiagnostic::new(UnexpectedIndentationComment, range) + if logical_line.is_comment_only() { + context.report_diagnostic_if_enabled(UnexpectedIndentationComment, range, settings); } else { - OldDiagnostic::new(UnexpectedIndentation, range) - }); + context.report_diagnostic_if_enabled(UnexpectedIndentation, range, settings); + } } if indent_expect { let expected_indent_amount = if indent_char == '\t' { 8 } else { 4 }; let expected_indent_level = prev_indent_level.unwrap_or(0) + expected_indent_amount; if indent_level > expected_indent_level { - diagnostics.push(OldDiagnostic::new( + context.report_diagnostic_if_enabled( OverIndented { is_comment: logical_line.is_comment_only(), }, range, - )); + settings, + ); } } - - diagnostics } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace.rs index e572022bd7..165bd07c9c 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace.rs @@ -4,7 +4,7 @@ use ruff_text_size::Ranged; use crate::Edit; use crate::checkers::logical_lines::LogicalLinesContext; -use crate::{AlwaysFixableViolation, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Fix}; use super::{DefinitionState, LogicalLine}; @@ -103,10 +103,14 @@ pub(crate) fn missing_whitespace(line: &LogicalLine, context: &mut LogicalLinesC } } - let diagnostic = - OldDiagnostic::new(MissingWhitespace { token: kind }, token.range()); - let fix = Fix::safe_edit(Edit::insertion(" ".to_string(), token.end())); - context.push_diagnostic(diagnostic.with_fix(fix)); + if let Some(mut diagnostic) = + context.report_diagnostic(MissingWhitespace { token: kind }, token.range()) + { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( + " ".to_string(), + token.end(), + ))); + } } } _ => {} diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_after_keyword.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_after_keyword.rs index 9883e34e7a..214b509f65 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_after_keyword.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_after_keyword.rs @@ -4,7 +4,7 @@ use ruff_text_size::Ranged; use crate::checkers::logical_lines::LogicalLinesContext; use crate::rules::pycodestyle::rules::logical_lines::LogicalLine; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for missing whitespace after keywords. 
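E111 through E117 above use the other entry point visible in this patch, `report_diagnostic_if_enabled`, which additionally takes the `LinterSettings` and records nothing when the corresponding rule is disabled. Sketched with the same hypothetical `ExampleComment` violation:

    use ruff_text_size::TextRange;

    use crate::checkers::ast::LintContext;
    use crate::settings::LinterSettings;

    pub(crate) fn example_gated_rule(
        range: TextRange,
        context: &LintContext,
        settings: &LinterSettings,
    ) {
        // If the rule is disabled in `settings`, nothing is recorded; otherwise
        // the guard is dropped here and the diagnostic is kept.
        context.report_diagnostic_if_enabled(ExampleComment, range, settings);
    }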
@@ -71,9 +71,11 @@ pub(crate) fn missing_whitespace_after_keyword( )) && tok0.end() == tok1.start() { - let mut diagnostic = OldDiagnostic::new(MissingWhitespaceAfterKeyword, tok0.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::insertion(" ".to_string(), tok0.end()))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(MissingWhitespaceAfterKeyword, tok0.range()) + { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion(" ".to_string(), tok0.end()))); + } } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs index 1eaf0d7cb1..8433aec216 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/missing_whitespace_around_operator.rs @@ -2,10 +2,11 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_parser::TokenKind; use ruff_text_size::{Ranged, TextRange}; +use crate::checkers::ast::DiagnosticGuard; use crate::checkers::logical_lines::LogicalLinesContext; use crate::rules::pycodestyle::helpers::is_non_logical_token; use crate::rules::pycodestyle::rules::logical_lines::{DefinitionState, LogicalLine}; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for missing whitespace around all operators. @@ -252,31 +253,37 @@ pub(crate) fn missing_whitespace_around_operator( match (has_leading_trivia, has_trailing_trivia) { // Operator with trailing but no leading space, enforce consistent spacing. (false, true) => { - context.push_diagnostic( - diagnostic_kind_for_operator(kind, token.range()).with_fix(Fix::safe_edit( - Edit::insertion(" ".to_string(), token.start()), - )), - ); + if let Some(mut diagnostic) = + diagnostic_kind_for_operator(kind, token.range(), context) + { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( + " ".to_string(), + token.start(), + ))); + } } // Operator with leading but no trailing space, enforce consistent spacing. (true, false) => { - context.push_diagnostic( - diagnostic_kind_for_operator(kind, token.range()).with_fix(Fix::safe_edit( - Edit::insertion(" ".to_string(), token.end()), - )), - ); + if let Some(mut diagnostic) = + diagnostic_kind_for_operator(kind, token.range(), context) + { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( + " ".to_string(), + token.end(), + ))); + } } // Operator with no space, require spaces if it is required by the operator. 
(false, false) => { if needs_space == NeedsSpace::Yes { - context.push_diagnostic( - diagnostic_kind_for_operator(kind, token.range()).with_fix( - Fix::safe_edits( - Edit::insertion(" ".to_string(), token.start()), - [Edit::insertion(" ".to_string(), token.end())], - ), - ), - ); + if let Some(mut diagnostic) = + diagnostic_kind_for_operator(kind, token.range(), context) + { + diagnostic.set_fix(Fix::safe_edits( + Edit::insertion(" ".to_string(), token.start()), + [Edit::insertion(" ".to_string(), token.end())], + )); + } } } (true, true) => { @@ -314,15 +321,19 @@ impl From for NeedsSpace { } } -fn diagnostic_kind_for_operator(operator: TokenKind, range: TextRange) -> OldDiagnostic { +fn diagnostic_kind_for_operator<'a, 'b>( + operator: TokenKind, + range: TextRange, + context: &'a mut LogicalLinesContext<'b, '_>, +) -> Option> { if operator == TokenKind::Percent { - OldDiagnostic::new(MissingWhitespaceAroundModuloOperator, range) + context.report_diagnostic(MissingWhitespaceAroundModuloOperator, range) } else if operator.is_bitwise_or_shift() { - OldDiagnostic::new(MissingWhitespaceAroundBitwiseOrShiftOperator, range) + context.report_diagnostic(MissingWhitespaceAroundBitwiseOrShiftOperator, range) } else if operator.is_arithmetic() { - OldDiagnostic::new(MissingWhitespaceAroundArithmeticOperator, range) + context.report_diagnostic(MissingWhitespaceAroundArithmeticOperator, range) } else { - OldDiagnostic::new(MissingWhitespaceAroundOperator, range) + context.report_diagnostic(MissingWhitespaceAroundOperator, range) } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/redundant_backslash.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/redundant_backslash.rs index 4726c28fdf..66353bdb93 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/redundant_backslash.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/redundant_backslash.rs @@ -6,7 +6,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::Locator; use crate::checkers::logical_lines::LogicalLinesContext; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; use super::LogicalLine; @@ -75,15 +75,15 @@ pub(crate) fn redundant_backslash( for continuation_line in &continuation_lines[start_index..end_index] { let backslash_end = locator.line_end(*continuation_line); let backslash_start = backslash_end - TextSize::new(1); - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( RedundantBackslash, TextRange::new(backslash_start, backslash_end), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::deletion( - backslash_start, - backslash_end, - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::deletion( + backslash_start, + backslash_end, + ))); + } } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/space_around_operator.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/space_around_operator.rs index 7194333e17..8381792d7e 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/space_around_operator.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/space_around_operator.rs @@ -3,7 +3,7 @@ use ruff_python_parser::TokenKind; use ruff_text_size::{Ranged, TextRange}; use crate::checkers::logical_lines::LogicalLinesContext; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, 
Fix}; use super::{LogicalLine, Whitespace}; @@ -206,26 +206,26 @@ pub(crate) fn space_around_operator(line: &LogicalLine, context: &mut LogicalLin if !after_operator { match line.leading_whitespace(token) { (Whitespace::Tab, offset) => { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( TabBeforeOperator, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.start() - offset, offset), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.start() - offset, offset), + ))); + } } (Whitespace::Many, offset) => { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MultipleSpacesBeforeOperator, TextRange::at(token.start() - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.start() - offset, offset), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.start() - offset, offset), + ))); + } } _ => {} } @@ -233,24 +233,25 @@ pub(crate) fn space_around_operator(line: &LogicalLine, context: &mut LogicalLin match line.trailing_whitespace(token) { (Whitespace::Tab, len) => { - let mut diagnostic = - OldDiagnostic::new(TabAfterOperator, TextRange::at(token.end(), len)); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(TabAfterOperator, TextRange::at(token.end(), len)) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } (Whitespace::Many, len) => { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MultipleSpacesAfterOperator, TextRange::at(token.end(), len), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } _ => {} } @@ -266,24 +267,25 @@ pub(crate) fn space_after_comma(line: &LogicalLine, context: &mut LogicalLinesCo if matches!(token.kind(), TokenKind::Comma) { match line.trailing_whitespace(token) { (Whitespace::Tab, len) => { - let mut diagnostic = - OldDiagnostic::new(TabAfterComma, TextRange::at(token.end(), len)); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(TabAfterComma, TextRange::at(token.end(), len)) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } (Whitespace::Many, len) => { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MultipleSpacesAfterComma, TextRange::at(token.end(), len), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + ) { + 
diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } _ => {} } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_keywords.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_keywords.rs index 2a1d5055f7..2d3ca6e072 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_keywords.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_keywords.rs @@ -2,7 +2,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_text_size::{Ranged, TextRange}; use crate::checkers::logical_lines::LogicalLinesContext; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; use super::{LogicalLine, Whitespace}; @@ -133,27 +133,27 @@ pub(crate) fn whitespace_around_keywords(line: &LogicalLine, context: &mut Logic match line.leading_whitespace(token) { (Whitespace::Tab, offset) => { let start = token.start(); - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( TabBeforeKeyword, TextRange::at(start - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(start - offset, offset), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(start - offset, offset), + ))); + } } (Whitespace::Many, offset) => { let start = token.start(); - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MultipleSpacesBeforeKeyword, TextRange::at(start - offset, offset), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(start - offset, offset), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(start - offset, offset), + ))); + } } _ => {} } @@ -161,24 +161,25 @@ pub(crate) fn whitespace_around_keywords(line: &LogicalLine, context: &mut Logic match line.trailing_whitespace(token) { (Whitespace::Tab, len) => { - let mut diagnostic = - OldDiagnostic::new(TabAfterKeyword, TextRange::at(token.end(), len)); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(TabAfterKeyword, TextRange::at(token.end(), len)) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } (Whitespace::Many, len) => { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MultipleSpacesAfterKeyword, TextRange::at(token.end(), len), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::at(token.end(), len), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::at(token.end(), len), + ))); + } } _ => {} } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs index 5aa5b03327..f714e60dae 100644 --- 
a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_around_named_parameter_equals.rs @@ -4,7 +4,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::logical_lines::LogicalLinesContext; use crate::rules::pycodestyle::rules::logical_lines::{DefinitionState, LogicalLine}; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for missing whitespace around the equals sign in an unannotated @@ -125,13 +125,14 @@ pub(crate) fn whitespace_around_named_parameter_equals( if definition_state.in_type_params() || (annotated_func_arg && parens == 1) { let start = token.start(); if start == prev_end && prev_end != TextSize::new(0) { - let mut diagnostic = - OldDiagnostic::new(MissingWhitespaceAroundParameterEquals, token.range); - diagnostic.set_fix(Fix::safe_edit(Edit::insertion( - " ".to_string(), - token.start(), - ))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = context + .report_diagnostic(MissingWhitespaceAroundParameterEquals, token.range) + { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( + " ".to_string(), + token.start(), + ))); + } } while let Some(next) = iter.peek() { @@ -141,15 +142,15 @@ pub(crate) fn whitespace_around_named_parameter_equals( let next_start = next.start(); if next_start == token.end() { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( MissingWhitespaceAroundParameterEquals, token.range, - ); - diagnostic.set_fix(Fix::safe_edit(Edit::insertion( - " ".to_string(), - token.end(), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::insertion( + " ".to_string(), + token.end(), + ))); + } } break; } @@ -157,12 +158,13 @@ pub(crate) fn whitespace_around_named_parameter_equals( } else { // If there's space between the preceding token and the equals sign, report it. if token.start() != prev_end { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( UnexpectedSpacesAroundKeywordParameterEquals, TextRange::new(prev_end, token.start()), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::deletion(prev_end, token.start()))); - context.push_diagnostic(diagnostic); + ) { + diagnostic + .set_fix(Fix::safe_edit(Edit::deletion(prev_end, token.start()))); + } } // If there's space between the equals sign and the following token, report it. 
@@ -171,15 +173,15 @@ pub(crate) fn whitespace_around_named_parameter_equals( iter.next(); } else { if next.start() != token.end() { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( UnexpectedSpacesAroundKeywordParameterEquals, TextRange::new(token.end(), next.start()), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::deletion( - token.end(), - next.start(), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::deletion( + token.end(), + next.start(), + ))); + } } break; } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs index abd6522eb0..73f4dd5899 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_comment.rs @@ -7,7 +7,7 @@ use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::Locator; use crate::checkers::logical_lines::LogicalLinesContext; use crate::rules::pycodestyle::rules::logical_lines::LogicalLine; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks if inline comments are separated by at least two spaces. @@ -185,15 +185,15 @@ pub(crate) fn whitespace_before_comment( let is_inline_comment = !line_text.trim_whitespace().is_empty(); if is_inline_comment { if range.start() - prev_end < " ".text_len() { - let mut diagnostic = OldDiagnostic::new( + if let Some(mut diagnostic) = context.report_diagnostic( TooFewSpacesBeforeInlineComment, TextRange::new(prev_end, range.start()), - ); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - " ".to_string(), - TextRange::new(prev_end, range.start()), - ))); - context.push_diagnostic(diagnostic); + ) { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + " ".to_string(), + TextRange::new(prev_end, range.start()), + ))); + } } } @@ -210,30 +210,35 @@ pub(crate) fn whitespace_before_comment( if is_inline_comment { if bad_prefix.is_some() || comment.chars().next().is_some_and(char::is_whitespace) { - let mut diagnostic = OldDiagnostic::new(NoSpaceAfterInlineComment, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - format_leading_space(token_text), - range, - ))); - context.push_diagnostic(diagnostic); - } - } else if let Some(bad_prefix) = bad_prefix { - if bad_prefix != '!' || !line.is_start_of_file() { - if bad_prefix != '#' { - let mut diagnostic = OldDiagnostic::new(NoSpaceAfterBlockComment, range); + if let Some(mut diagnostic) = + context.report_diagnostic(NoSpaceAfterInlineComment, range) + { diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( format_leading_space(token_text), range, ))); - context.push_diagnostic(diagnostic); + } + } + } else if let Some(bad_prefix) = bad_prefix { + if bad_prefix != '!' 
|| !line.is_start_of_file() { + if bad_prefix != '#' { + if let Some(mut diagnostic) = + context.report_diagnostic(NoSpaceAfterBlockComment, range) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + format_leading_space(token_text), + range, + ))); + } } else if !comment.is_empty() { - let mut diagnostic = - OldDiagnostic::new(MultipleLeadingHashesForBlockComment, range); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( - format_leading_hashes(token_text), - range, - ))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(MultipleLeadingHashesForBlockComment, range) + { + diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + format_leading_hashes(token_text), + range, + ))); + } } } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_parameters.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_parameters.rs index 17806ef42d..baf0d4b1f3 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_parameters.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/logical_lines/whitespace_before_parameters.rs @@ -4,7 +4,7 @@ use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::logical_lines::LogicalLinesContext; use crate::rules::pycodestyle::rules::logical_lines::LogicalLine; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for extraneous whitespace immediately preceding an open parenthesis @@ -76,9 +76,11 @@ pub(crate) fn whitespace_before_parameters(line: &LogicalLine, context: &mut Log let end = token.end() - TextSize::from(1); let kind: WhitespaceBeforeParameters = WhitespaceBeforeParameters { bracket: kind }; - let mut diagnostic = OldDiagnostic::new(kind, TextRange::new(start, end)); - diagnostic.set_fix(Fix::safe_edit(Edit::deletion(start, end))); - context.push_diagnostic(diagnostic); + if let Some(mut diagnostic) = + context.report_diagnostic(kind, TextRange::new(start, end)) + { + diagnostic.set_fix(Fix::safe_edit(Edit::deletion(start, end))); + } } pre_pre_kind = Some(prev_token); prev_token = kind; diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/missing_newline_at_end_of_file.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/missing_newline_at_end_of_file.rs index f256c58b4d..f8301cbc94 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/missing_newline_at_end_of_file.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/missing_newline_at_end_of_file.rs @@ -3,7 +3,8 @@ use ruff_python_codegen::Stylist; use ruff_text_size::{TextLen, TextRange}; use crate::Locator; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::checkers::ast::LintContext; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for files missing a new line at the end of the file. @@ -40,24 +41,22 @@ impl AlwaysFixableViolation for MissingNewlineAtEndOfFile { pub(crate) fn no_newline_at_end_of_file( locator: &Locator, stylist: &Stylist, -) -> Option { + context: &LintContext, +) { let source = locator.contents(); // Ignore empty and BOM only files. 
if source.is_empty() || source == "\u{feff}" { - return None; + return; } if !source.ends_with(['\n', '\r']) { let range = TextRange::empty(locator.contents().text_len()); - let mut diagnostic = OldDiagnostic::new(MissingNewlineAtEndOfFile, range); + let mut diagnostic = context.report_diagnostic(MissingNewlineAtEndOfFile, range); diagnostic.set_fix(Fix::safe_edit(Edit::insertion( stylist.line_ending().to_string(), range.start(), ))); - return Some(diagnostic); } - - None } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/mixed_spaces_and_tabs.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/mixed_spaces_and_tabs.rs index 128844f98b..649fc972fc 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/mixed_spaces_and_tabs.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/mixed_spaces_and_tabs.rs @@ -4,7 +4,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_trivia::leading_indentation; use ruff_source_file::Line; -use crate::{OldDiagnostic, Violation}; +use crate::{Violation, checkers::ast::LintContext}; /// ## What it does /// Checks for mixed tabs and spaces in indentation. @@ -37,15 +37,13 @@ impl Violation for MixedSpacesAndTabs { } /// E101 -pub(crate) fn mixed_spaces_and_tabs(line: &Line) -> Option { +pub(crate) fn mixed_spaces_and_tabs(line: &Line, context: &LintContext) { let indent = leading_indentation(line.as_str()); if indent.contains(' ') && indent.contains('\t') { - Some(OldDiagnostic::new( + context.report_diagnostic( MixedSpacesAndTabs, TextRange::at(line.start(), indent.text_len()), - )) - } else { - None + ); } } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/tab_indentation.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/tab_indentation.rs index 10fb88ac8d..4e75efb2b8 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/tab_indentation.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/tab_indentation.rs @@ -4,7 +4,8 @@ use ruff_source_file::LineRanges; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; -use crate::{OldDiagnostic, Violation}; +use crate::Violation; +use crate::checkers::ast::LintContext; /// ## What it does /// Checks for indentation that uses tabs. @@ -33,11 +34,7 @@ impl Violation for TabIndentation { } /// W191 -pub(crate) fn tab_indentation( - diagnostics: &mut Vec, - locator: &Locator, - indexer: &Indexer, -) { +pub(crate) fn tab_indentation(context: &LintContext, locator: &Locator, indexer: &Indexer) { let contents = locator.contents().as_bytes(); let mut offset = 0; while let Some(index) = memchr::memchr(b'\t', &contents[offset..]) { @@ -46,7 +43,7 @@ pub(crate) fn tab_indentation( // Determine whether the tab is part of the line's indentation. if let Some(indent) = tab_indentation_at_line_start(range.start(), locator, indexer) { - diagnostics.push(OldDiagnostic::new(TabIndentation, indent)); + context.report_diagnostic(TabIndentation, indent); } // Advance to the next line. 
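// Note: the call sites above (W191, W292, E101, ...) no longer build an `OldDiagnostic` and push it
// by hand; they ask the context for a guard and, at most, attach a fix to it before the guard drops.
// The following is a minimal, self-contained sketch (std only) of that guard-on-Drop pattern.
// `DemoContext`, `DemoGuard`, and `DemoDiagnostic` are hypothetical stand-ins for illustration,
// not the real `LintContext`/`DiagnosticGuard` types touched by this diff.
use std::cell::RefCell;

#[derive(Debug)]
struct DemoDiagnostic {
    code: &'static str,
    fix: Option<String>,
}

struct DemoContext {
    diagnostics: RefCell<Vec<DemoDiagnostic>>,
}

struct DemoGuard<'a> {
    context: &'a DemoContext,
    diagnostic: Option<DemoDiagnostic>,
}

impl DemoGuard<'_> {
    // Mirrors `set_fix`: mutate the pending diagnostic before it is recorded.
    fn set_fix(&mut self, fix: String) {
        if let Some(diagnostic) = self.diagnostic.as_mut() {
            diagnostic.fix = Some(fix);
        }
    }
}

impl Drop for DemoGuard<'_> {
    // The diagnostic is pushed when the guard goes out of scope, which is why call sites
    // can discard the guard immediately or call `set_fix` first.
    fn drop(&mut self) {
        if let Some(diagnostic) = self.diagnostic.take() {
            self.context.diagnostics.borrow_mut().push(diagnostic);
        }
    }
}

impl DemoContext {
    fn report_diagnostic(&self, code: &'static str) -> DemoGuard<'_> {
        DemoGuard {
            context: self,
            diagnostic: Some(DemoDiagnostic { code, fix: None }),
        }
    }
}

fn main() {
    let context = DemoContext {
        diagnostics: RefCell::new(Vec::new()),
    };

    // Fire-and-forget, like `context.report_diagnostic(TabIndentation, indent);` above.
    context.report_diagnostic("W191");

    // Attach a fix before the guard drops, like the W292 call site above.
    let mut guard = context.report_diagnostic("W292");
    guard.set_fix("insert trailing newline".to_string());
    drop(guard);

    let diagnostics = context.diagnostics.borrow();
    assert_eq!(diagnostics[0].code, "W191");
    assert!(diagnostics[1].fix.is_some());
}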
diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/too_many_newlines_at_end_of_file.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/too_many_newlines_at_end_of_file.rs index 4b41734f7f..d7facdf9e1 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/too_many_newlines_at_end_of_file.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/too_many_newlines_at_end_of_file.rs @@ -6,7 +6,7 @@ use ruff_notebook::CellOffsets; use ruff_python_parser::{Token, TokenKind, Tokens}; use ruff_text_size::{Ranged, TextRange, TextSize}; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix, checkers::ast::LintContext}; /// ## What it does /// Checks for files with multiple trailing blank lines. @@ -59,16 +59,16 @@ impl AlwaysFixableViolation for TooManyNewlinesAtEndOfFile { /// W391 pub(crate) fn too_many_newlines_at_end_of_file( - diagnostics: &mut Vec<OldDiagnostic>, + context: &LintContext, tokens: &Tokens, cell_offsets: Option<&CellOffsets>, ) { let mut tokens_iter = tokens.iter().rev().peekable(); if let Some(cell_offsets) = cell_offsets { - diagnostics.extend(notebook_newline_diagnostics(tokens_iter, cell_offsets)); - } else if let Some(diagnostic) = newline_diagnostic(&mut tokens_iter, false) { - diagnostics.push(diagnostic); + notebook_newline_diagnostics(tokens_iter, cell_offsets, context); + } else { + newline_diagnostic(&mut tokens_iter, false, context); } } @@ -76,8 +76,8 @@ pub(crate) fn too_many_newlines_at_end_of_file( fn notebook_newline_diagnostics<'a>( mut tokens_iter: Peekable<Rev<Iter<'a, Token>>>, cell_offsets: &CellOffsets, -) -> Vec<OldDiagnostic> { - let mut results = Vec::new(); + context: &LintContext, +) { let offset_iter = cell_offsets.iter().rev(); // NB: When interpreting the below, recall that the iterators @@ -88,20 +88,16 @@ .peeking_take_while(|tok| tok.end() >= offset) .for_each(drop); - let Some(diagnostic) = newline_diagnostic(&mut tokens_iter, true) else { - continue; - }; - - results.push(diagnostic); + newline_diagnostic(&mut tokens_iter, true, context); } - results } /// Possible diagnostic, with fix, for too many newlines in cell or source file fn newline_diagnostic<'a>( tokens_iter: &mut Peekable<Rev<Iter<'a, Token>>>, in_notebook: bool, -) -> Option<OldDiagnostic> { + context: &LintContext, +) { let mut num_trailing_newlines: u32 = 0; let mut newline_range_start: Option<TextSize> = None; let mut newline_range_end: Option<TextSize> = None; @@ -127,23 +123,24 @@ fn newline_diagnostic<'a>( } if num_trailing_newlines == 0 || num_trailing_newlines == 1 { - return None; + return; } - let (start, end) = (match (newline_range_start, newline_range_end) { + let Some((start, end)) = (match (newline_range_start, newline_range_end) { (Some(s), Some(e)) => Some((s, e)), _ => None, - })?; + }) else { + return; + }; let diagnostic_range = TextRange::new(start, end); - Some( - OldDiagnostic::new( + context + .report_diagnostic( TooManyNewlinesAtEndOfFile { num_trailing_newlines, in_notebook, }, diagnostic_range, ) - .with_fix(Fix::safe_edit(Edit::range_deletion(diagnostic_range))), - ) + .set_fix(Fix::safe_edit(Edit::range_deletion(diagnostic_range))); } diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/trailing_whitespace.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/trailing_whitespace.rs index 7db1304722..1da5f5ba17 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/trailing_whitespace.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/trailing_whitespace.rs @@ -4,9 +4,10 @@ use ruff_source_file::Line; use ruff_text_size::{TextLen,
TextRange, TextSize}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::registry::Rule; use crate::settings::LinterSettings; -use crate::{AlwaysFixableViolation, Applicability, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Applicability, Edit, Fix}; /// ## What it does /// Checks for superfluous trailing whitespace. @@ -78,7 +79,8 @@ pub(crate) fn trailing_whitespace( locator: &Locator, indexer: &Indexer, settings: &LinterSettings, -) -> Option { + context: &LintContext, +) { let whitespace_len: TextSize = line .chars() .rev() @@ -95,7 +97,7 @@ pub(crate) fn trailing_whitespace( }; if range == line.range() { if settings.rules.enabled(Rule::BlankLineWithWhitespace) { - let mut diagnostic = OldDiagnostic::new(BlankLineWithWhitespace, range); + let mut diagnostic = context.report_diagnostic(BlankLineWithWhitespace, range); // Remove any preceding continuations, to avoid introducing a potential // syntax error. diagnostic.set_fix(Fix::applicable_edit( @@ -107,16 +109,13 @@ pub(crate) fn trailing_whitespace( )), applicability, )); - return Some(diagnostic); } } else if settings.rules.enabled(Rule::TrailingWhitespace) { - let mut diagnostic = OldDiagnostic::new(TrailingWhitespace, range); + let mut diagnostic = context.report_diagnostic(TrailingWhitespace, range); diagnostic.set_fix(Fix::applicable_edit( Edit::range_deletion(range), applicability, )); - return Some(diagnostic); } } - None } diff --git a/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_noqa.rs b/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_noqa.rs index b3f63e584b..e60fd42072 100644 --- a/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_noqa.rs +++ b/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_noqa.rs @@ -3,8 +3,9 @@ use ruff_python_trivia::Cursor; use ruff_text_size::{Ranged, TextRange}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::noqa::{self, Directive, FileNoqaDirectives, NoqaDirectives}; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::{Edit, Fix, FixAvailability, Violation}; /// ## What it does /// Check for `noqa` annotations that suppress all diagnostics, as opposed to @@ -74,20 +75,20 @@ impl Violation for BlanketNOQA { /// PGH004 pub(crate) fn blanket_noqa( - diagnostics: &mut Vec, + context: &LintContext, noqa_directives: &NoqaDirectives, locator: &Locator, file_noqa_directives: &FileNoqaDirectives, ) { for line in file_noqa_directives.lines() { if let Directive::All(_) = line.parsed_file_exemption { - diagnostics.push(OldDiagnostic::new( + context.report_diagnostic( BlanketNOQA { missing_colon: false, file_exemption: true, }, line.range(), - )); + ); } } @@ -105,7 +106,7 @@ pub(crate) fn blanket_noqa( // Ex) `# noqa F401` let start = all.end(); let end = start + cursor.token_len(); - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( BlanketNOQA { missing_colon: true, file_exemption: false, @@ -113,16 +114,15 @@ pub(crate) fn blanket_noqa( TextRange::new(all.start(), end), ); diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion(':'.to_string(), start))); - diagnostics.push(diagnostic); } else { // Otherwise, it looks like an intentional blanket `noqa` annotation. 
- diagnostics.push(OldDiagnostic::new( + context.report_diagnostic( BlanketNOQA { missing_colon: false, file_exemption: false, }, all.range(), - )); + ); } } } diff --git a/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_type_ignore.rs b/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_type_ignore.rs index 8388acd8e6..bf0d5346c1 100644 --- a/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_type_ignore.rs +++ b/crates/ruff_linter/src/rules/pygrep_hooks/rules/blanket_type_ignore.rs @@ -9,7 +9,8 @@ use ruff_python_trivia::CommentRanges; use ruff_text_size::TextSize; use crate::Locator; -use crate::{OldDiagnostic, Violation}; +use crate::Violation; +use crate::checkers::ast::LintContext; /// ## What it does /// Check for `type: ignore` annotations that suppress all type warnings, as @@ -52,7 +53,7 @@ impl Violation for BlanketTypeIgnore { /// PGH003 pub(crate) fn blanket_type_ignore( - diagnostics: &mut Vec, + context: &LintContext, comment_ranges: &CommentRanges, locator: &Locator, ) { @@ -92,10 +93,10 @@ pub(crate) fn blanket_type_ignore( // Match the optional `[...]` tag. if let Ok(codes) = parse_type_ignore_tag(comment) { if codes.is_empty() { - diagnostics.push(OldDiagnostic::new( + context.report_diagnostic( BlanketTypeIgnore, range.add_start(TextSize::try_from(start).unwrap()), - )); + ); } } } diff --git a/crates/ruff_linter/src/rules/pylint/rules/bidirectional_unicode.rs b/crates/ruff_linter/src/rules/pylint/rules/bidirectional_unicode.rs index 4f3566850c..e5ed24bc18 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/bidirectional_unicode.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/bidirectional_unicode.rs @@ -1,7 +1,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_source_file::Line; -use crate::{OldDiagnostic, Violation}; +use crate::{Violation, checkers::ast::LintContext}; const BIDI_UNICODE: [char; 10] = [ '\u{202A}', //{LEFT-TO-RIGHT EMBEDDING} @@ -53,10 +53,8 @@ impl Violation for BidirectionalUnicode { } /// PLE2502 -pub(crate) fn bidirectional_unicode(line: &Line) -> Vec { - let mut diagnostics = Vec::new(); +pub(crate) fn bidirectional_unicode(line: &Line, context: &LintContext) { if line.contains(BIDI_UNICODE) { - diagnostics.push(OldDiagnostic::new(BidirectionalUnicode, line.full_range())); + context.report_diagnostic(BidirectionalUnicode, line.full_range()); } - diagnostics } diff --git a/crates/ruff_linter/src/rules/pylint/rules/empty_comment.rs b/crates/ruff_linter/src/rules/pylint/rules/empty_comment.rs index 2dd0ac29b7..deb3543f21 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/empty_comment.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/empty_comment.rs @@ -4,7 +4,8 @@ use ruff_source_file::LineRanges; use ruff_text_size::{TextRange, TextSize}; use crate::Locator; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::checkers::ast::LintContext; +use crate::{Edit, Fix, FixAvailability, Violation}; /// ## What it does /// Checks for a # symbol appearing on a line not followed by an actual comment. @@ -45,7 +46,7 @@ impl Violation for EmptyComment { /// PLR2044 pub(crate) fn empty_comments( - diagnostics: &mut Vec, + context: &LintContext, comment_ranges: &CommentRanges, locator: &Locator, ) { @@ -58,14 +59,12 @@ pub(crate) fn empty_comments( } // If the line contains an empty comment, add a diagnostic. 
- if let Some(diagnostic) = empty_comment(range, locator) { - diagnostics.push(diagnostic); - } + empty_comment(context, range, locator); } } /// Return a [`Diagnostic`] if the comment at the given [`TextRange`] is empty. -fn empty_comment(range: TextRange, locator: &Locator) -> Option { +fn empty_comment(context: &LintContext, range: TextRange, locator: &Locator) { // Check: is the comment empty? if !locator .slice(range) @@ -73,7 +72,7 @@ fn empty_comment(range: TextRange, locator: &Locator) -> Option { .skip(1) .all(is_python_whitespace) { - return None; + return; } // Find the location of the `#`. @@ -96,13 +95,13 @@ fn empty_comment(range: TextRange, locator: &Locator) -> Option { } }); - Some( - OldDiagnostic::new(EmptyComment, TextRange::new(first_hash_col, line.end())).with_fix( - Fix::safe_edit(if let Some(deletion_start_col) = deletion_start_col { + context + .report_diagnostic(EmptyComment, TextRange::new(first_hash_col, line.end())) + .set_fix(Fix::safe_edit( + if let Some(deletion_start_col) = deletion_start_col { Edit::deletion(line.start() + deletion_start_col, line.end()) } else { Edit::range_deletion(locator.full_line_range(first_hash_col)) - }), - ), - ) + }, + )); } diff --git a/crates/ruff_linter/src/rules/pylint/rules/invalid_string_characters.rs b/crates/ruff_linter/src/rules/pylint/rules/invalid_string_characters.rs index 8bbc5541a7..1e13bb733d 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/invalid_string_characters.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/invalid_string_characters.rs @@ -3,7 +3,8 @@ use ruff_python_parser::{Token, TokenKind}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::Locator; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::checkers::ast::LintContext; +use crate::{Edit, Fix, FixAvailability, Violation}; /// ## What it does /// Checks for strings that contain the control character `BS`. @@ -180,11 +181,7 @@ impl Violation for InvalidCharacterZeroWidthSpace { } /// PLE2510, PLE2512, PLE2513, PLE2514, PLE2515 -pub(crate) fn invalid_string_characters( - diagnostics: &mut Vec, - token: &Token, - locator: &Locator, -) { +pub(crate) fn invalid_string_characters(context: &LintContext, token: &Token, locator: &Locator) { let text = match token.kind() { // We can't use the `value` field since it's decoded and e.g. for f-strings removed a curly // brace that escaped another curly brace, which would gives us wrong column information. 
@@ -197,13 +194,22 @@ pub(crate) fn invalid_string_characters( let c = match_.chars().next().unwrap(); let range = TextRange::at(location, c.text_len()); let (replacement, mut diagnostic) = match c { - '\x08' => ("\\b", OldDiagnostic::new(InvalidCharacterBackspace, range)), - '\x1A' => ("\\x1A", OldDiagnostic::new(InvalidCharacterSub, range)), - '\x1B' => ("\\x1B", OldDiagnostic::new(InvalidCharacterEsc, range)), - '\0' => ("\\0", OldDiagnostic::new(InvalidCharacterNul, range)), + '\x08' => ( + "\\b", + context.report_diagnostic(InvalidCharacterBackspace, range), + ), + '\x1A' => ( + "\\x1A", + context.report_diagnostic(InvalidCharacterSub, range), + ), + '\x1B' => ( + "\\x1B", + context.report_diagnostic(InvalidCharacterEsc, range), + ), + '\0' => ("\\0", context.report_diagnostic(InvalidCharacterNul, range)), '\u{200b}' => ( "\\u200b", - OldDiagnostic::new(InvalidCharacterZeroWidthSpace, range), + context.report_diagnostic(InvalidCharacterZeroWidthSpace, range), ), _ => { continue; @@ -214,7 +220,5 @@ pub(crate) fn invalid_string_characters( let edit = Edit::range_replacement(replacement.to_string(), range); diagnostic.set_fix(Fix::safe_edit(edit)); } - - diagnostics.push(diagnostic); } } diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/extraneous_parentheses.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/extraneous_parentheses.rs index 94dde654dd..5ee814f9f8 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/extraneous_parentheses.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/extraneous_parentheses.rs @@ -5,7 +5,8 @@ use ruff_python_parser::{Token, TokenKind, Tokens}; use ruff_text_size::{Ranged, TextRange}; use crate::Locator; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::checkers::ast::LintContext; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for extraneous parentheses. @@ -114,11 +115,7 @@ fn match_extraneous_parentheses(tokens: &mut Iter<'_, Token>) -> Option<(TextRan } /// UP034 -pub(crate) fn extraneous_parentheses( - diagnostics: &mut Vec, - tokens: &Tokens, - locator: &Locator, -) { +pub(crate) fn extraneous_parentheses(context: &LintContext, tokens: &Tokens, locator: &Locator) { let mut token_iter = tokens.iter(); while let Some(token) = token_iter.next() { if !matches!(token.kind(), TokenKind::Lpar) { @@ -129,7 +126,7 @@ pub(crate) fn extraneous_parentheses( continue; }; - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( ExtraneousParentheses, TextRange::new(start_range.start(), end_range.end()), ); @@ -139,6 +136,5 @@ pub(crate) fn extraneous_parentheses( start_range.start(), end_range.end(), ))); - diagnostics.push(diagnostic); } } diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_coding_comment.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_coding_comment.rs index cadf274f08..3ee47fc94c 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_coding_comment.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_coding_comment.rs @@ -9,7 +9,8 @@ use ruff_source_file::LineRanges; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::Locator; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::checkers::ast::LintContext; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for unnecessary UTF-8 encoding declarations. 
@@ -66,7 +67,7 @@ struct CodingCommentRange { /// UP009 pub(crate) fn unnecessary_coding_comment( - diagnostics: &mut Vec, + context: &LintContext, locator: &Locator, comment_ranges: &CommentRanges, ) { @@ -106,9 +107,9 @@ pub(crate) fn unnecessary_coding_comment( } let fix = Fix::safe_edit(Edit::range_deletion(range.line)); - let diagnostic = OldDiagnostic::new(UTF8EncodingDeclaration, range.comment); - - diagnostics.push(diagnostic.with_fix(fix)); + context + .report_diagnostic(UTF8EncodingDeclaration, range.comment) + .set_fix(fix); } struct CodingCommentIterator<'a> { diff --git a/crates/ruff_linter/src/rules/ruff/mod.rs b/crates/ruff_linter/src/rules/ruff/mod.rs index 242a6f2d0e..39cab60819 100644 --- a/crates/ruff_linter/src/rules/ruff/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/mod.rs @@ -463,7 +463,7 @@ mod tests { let contents = fs::read_to_string(path)?; let source_file = SourceFileBuilder::new("pyproject.toml", contents).finish(); let messages = lint_pyproject_toml( - source_file, + &source_file, &settings::LinterSettings::for_rule(Rule::InvalidPyprojectToml), ); assert_messages!(snapshot, messages); diff --git a/crates/ruff_linter/src/rules/ruff/rules/ambiguous_unicode_character.rs b/crates/ruff_linter/src/rules/ruff/rules/ambiguous_unicode_character.rs index a8c1d89fcd..d1b1ef67ed 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/ambiguous_unicode_character.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/ambiguous_unicode_character.rs @@ -7,13 +7,12 @@ use ruff_python_ast::{self as ast, StringLike}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::Locator; -use crate::checkers::ast::Checker; +use crate::Violation; +use crate::checkers::ast::{Checker, LintContext}; use crate::preview::is_unicode_to_unicode_confusables_enabled; -use crate::registry::AsRule; use crate::rules::ruff::rules::Context; use crate::rules::ruff::rules::confusables::confusable; use crate::settings::LinterSettings; -use crate::{OldDiagnostic, Violation}; /// ## What it does /// Checks for ambiguous Unicode characters in strings. 
@@ -176,14 +175,14 @@ impl Violation for AmbiguousUnicodeCharacterComment { /// RUF003 pub(crate) fn ambiguous_unicode_character_comment( - diagnostics: &mut Vec, + context: &LintContext, locator: &Locator, range: TextRange, settings: &LinterSettings, ) { let text = locator.slice(range); for candidate in ambiguous_unicode_character(text, range, settings) { - diagnostics.extend(candidate.into_diagnostic(Context::Comment, settings)); + candidate.into_diagnostic(Context::Comment, settings, context); } } @@ -342,37 +341,41 @@ impl Candidate { } } - fn into_diagnostic(self, context: Context, settings: &LinterSettings) -> Option { + fn into_diagnostic( + self, + context: Context, + settings: &LinterSettings, + lint_context: &LintContext, + ) { if !settings.allowed_confusables.contains(&self.confusable) { let char_range = TextRange::at(self.offset, self.confusable.text_len()); - let diagnostic = match context { - Context::String => OldDiagnostic::new( + match context { + Context::String => lint_context.report_diagnostic_if_enabled( AmbiguousUnicodeCharacterString { confusable: self.confusable, representant: self.representant, }, char_range, + settings, ), - Context::Docstring => OldDiagnostic::new( + Context::Docstring => lint_context.report_diagnostic_if_enabled( AmbiguousUnicodeCharacterDocstring { confusable: self.confusable, representant: self.representant, }, char_range, + settings, ), - Context::Comment => OldDiagnostic::new( + Context::Comment => lint_context.report_diagnostic_if_enabled( AmbiguousUnicodeCharacterComment { confusable: self.confusable, representant: self.representant, }, char_range, + settings, ), }; - if settings.rules.enabled(diagnostic.rule()) { - return Some(diagnostic); - } } - None } fn report_diagnostic(self, checker: &Checker, context: Context) { diff --git a/crates/ruff_linter/src/rules/ruff/rules/indented_form_feed.rs b/crates/ruff_linter/src/rules/ruff/rules/indented_form_feed.rs index 1c95430ea7..b943c65536 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/indented_form_feed.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/indented_form_feed.rs @@ -4,7 +4,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_source_file::Line; use ruff_text_size::{TextRange, TextSize}; -use crate::{OldDiagnostic, Violation}; +use crate::{Violation, checkers::ast::LintContext}; /// ## What it does /// Checks for form feed characters preceded by either a space or a tab. 
@@ -49,11 +49,13 @@ const SPACE: u8 = b' '; const TAB: u8 = b'\t'; /// RUF054 -pub(crate) fn indented_form_feed(line: &Line) -> Option { - let index_relative_to_line = memchr(FORM_FEED, line.as_bytes())?; +pub(crate) fn indented_form_feed(line: &Line, context: &LintContext) { + let Some(index_relative_to_line) = memchr(FORM_FEED, line.as_bytes()) else { + return; + }; if index_relative_to_line == 0 { - return None; + return; } if line[..index_relative_to_line] @@ -61,12 +63,14 @@ pub(crate) fn indented_form_feed(line: &Line) -> Option { .iter() .any(|byte| *byte != SPACE && *byte != TAB) { - return None; + return; } - let relative_index = u32::try_from(index_relative_to_line).ok()?; + let Ok(relative_index) = u32::try_from(index_relative_to_line) else { + return; + }; let absolute_index = line.start() + TextSize::new(relative_index); let range = TextRange::at(absolute_index, 1.into()); - Some(OldDiagnostic::new(IndentedFormFeed, range)) + context.report_diagnostic(IndentedFormFeed, range); } diff --git a/crates/ruff_linter/src/rules/ruff/rules/invalid_rule_code.rs b/crates/ruff_linter/src/rules/ruff/rules/invalid_rule_code.rs index 14f0a4e84d..734a544b72 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/invalid_rule_code.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/invalid_rule_code.rs @@ -2,10 +2,11 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::noqa::{Code, Directive}; use crate::noqa::{Codes, NoqaDirectives}; use crate::registry::Rule; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for `noqa` codes that are invalid. 
@@ -48,7 +49,7 @@ impl AlwaysFixableViolation for InvalidRuleCode { /// RUF102 for invalid noqa codes pub(crate) fn invalid_noqa_code( - diagnostics: &mut Vec<OldDiagnostic>, + context: &LintContext, noqa_directives: &NoqaDirectives, locator: &Locator, external: &[String], @@ -69,11 +70,11 @@ pub(crate) fn invalid_noqa_code( .partition(|&code| code_is_valid(code, external)); if valid_codes.is_empty() { - diagnostics.push(all_codes_invalid_diagnostic(directive, invalid_codes)); + all_codes_invalid_diagnostic(directive, invalid_codes, context); } else { - diagnostics.extend(invalid_codes.into_iter().map(|invalid_code| { - some_codes_are_invalid_diagnostic(directive, invalid_code, locator) - })); + for invalid_code in invalid_codes { + some_codes_are_invalid_diagnostic(directive, invalid_code, locator, context); + } } } } @@ -86,36 +87,40 @@ fn code_is_valid(code: &Code, external: &[String]) -> bool { fn all_codes_invalid_diagnostic( directive: &Codes<'_>, invalid_codes: Vec<&Code<'_>>, -) -> OldDiagnostic { - OldDiagnostic::new( - InvalidRuleCode { - rule_code: invalid_codes - .into_iter() - .map(Code::as_str) - .collect::<Vec<_>>() - .join(", "), - }, - directive.range(), - ) - .with_fix(Fix::safe_edit(Edit::range_deletion(directive.range()))) + context: &LintContext, +) { + context + .report_diagnostic( + InvalidRuleCode { + rule_code: invalid_codes + .into_iter() + .map(Code::as_str) + .collect::<Vec<_>>() + .join(", "), + }, + directive.range(), + ) + .set_fix(Fix::safe_edit(Edit::range_deletion(directive.range()))); } fn some_codes_are_invalid_diagnostic( codes: &Codes, invalid_code: &Code, locator: &Locator, -) -> OldDiagnostic { - let diagnostic = OldDiagnostic::new( - InvalidRuleCode { - rule_code: invalid_code.to_string(), - }, - invalid_code.range(), - ); - diagnostic.with_fix(Fix::safe_edit(remove_invalid_noqa( - codes, - invalid_code, - locator, - ))) + context: &LintContext, +) { + context + .report_diagnostic( + InvalidRuleCode { + rule_code: invalid_code.to_string(), + }, + invalid_code.range(), + ) + .set_fix(Fix::safe_edit(remove_invalid_noqa( + codes, + invalid_code, + locator, + ))); } fn remove_invalid_noqa(codes: &Codes, invalid_code: &Code, locator: &Locator) -> Edit { diff --git a/crates/ruff_linter/src/rules/ruff/rules/redirected_noqa.rs b/crates/ruff_linter/src/rules/ruff/rules/redirected_noqa.rs index 5093883f29..e11a9a7ad1 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/redirected_noqa.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/redirected_noqa.rs @@ -1,9 +1,10 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_text_size::Ranged; +use crate::checkers::ast::LintContext; use crate::noqa::{Codes, Directive, FileNoqaDirectives, NoqaDirectives}; use crate::rule_redirects::get_redirect_target; -use crate::{AlwaysFixableViolation, Edit, Fix, OldDiagnostic}; +use crate::{AlwaysFixableViolation, Edit, Fix}; /// ## What it does /// Checks for `noqa` directives that use redirected rule codes.
@@ -43,38 +44,32 @@ impl AlwaysFixableViolation for RedirectedNOQA { } /// RUF101 for in-line noqa directives -pub(crate) fn redirected_noqa( - diagnostics: &mut Vec, - noqa_directives: &NoqaDirectives, -) { +pub(crate) fn redirected_noqa(context: &LintContext, noqa_directives: &NoqaDirectives) { for line in noqa_directives.lines() { let Directive::Codes(directive) = &line.directive else { continue; }; - build_diagnostics(diagnostics, directive); + build_diagnostics(context, directive); } } /// RUF101 for file noqa directives -pub(crate) fn redirected_file_noqa( - diagnostics: &mut Vec, - noqa_directives: &FileNoqaDirectives, -) { +pub(crate) fn redirected_file_noqa(context: &LintContext, noqa_directives: &FileNoqaDirectives) { for line in noqa_directives.lines() { let Directive::Codes(codes) = &line.parsed_file_exemption else { continue; }; - build_diagnostics(diagnostics, codes); + build_diagnostics(context, codes); } } /// Convert a sequence of [Codes] into [Diagnostic]s and append them to `diagnostics`. -fn build_diagnostics(diagnostics: &mut Vec, codes: &Codes<'_>) { +pub(crate) fn build_diagnostics(context: &LintContext, codes: &Codes<'_>) { for code in codes.iter() { if let Some(redirected) = get_redirect_target(code.as_str()) { - let mut diagnostic = OldDiagnostic::new( + let mut diagnostic = context.report_diagnostic( RedirectedNOQA { original: code.to_string(), target: redirected.to_string(), @@ -85,7 +80,6 @@ fn build_diagnostics(diagnostics: &mut Vec, codes: &Codes<'_>) { redirected.to_string(), code.range(), ))); - diagnostics.push(diagnostic); } } } diff --git a/crates/ruff_linter/src/rules/ruff/rules/test_rules.rs b/crates/ruff_linter/src/rules/ruff/rules/test_rules.rs index d3a43f21f5..3fc282ad3a 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/test_rules.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/test_rules.rs @@ -18,8 +18,9 @@ use ruff_python_trivia::CommentRanges; use ruff_text_size::TextSize; use crate::Locator; +use crate::checkers::ast::LintContext; use crate::registry::Rule; -use crate::{Edit, Fix, FixAvailability, OldDiagnostic, Violation}; +use crate::{Edit, Fix, FixAvailability, Violation}; /// Check if a comment exists anywhere in a given file fn comment_exists(text: &str, locator: &Locator, comment_ranges: &CommentRanges) -> bool { @@ -48,7 +49,7 @@ pub(crate) const TEST_RULES: &[Rule] = &[ ]; pub(crate) trait TestRule { - fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges) -> Option; + fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges, context: &LintContext); } /// ## What it does @@ -79,11 +80,8 @@ impl Violation for StableTestRule { } impl TestRule for StableTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - StableTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(StableTestRule, ruff_text_size::TextRange::default()); } } @@ -115,15 +113,12 @@ impl Violation for StableTestRuleSafeFix { } impl TestRule for StableTestRuleSafeFix { - fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges) -> Option { + fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges, context: &LintContext) { let comment = "# fix from stable-test-rule-safe-fix\n".to_string(); - if comment_exists(&comment, locator, comment_ranges) { - None - } else { - Some( - OldDiagnostic::new(StableTestRuleSafeFix, ruff_text_size::TextRange::default()) - 
.with_fix(Fix::safe_edit(Edit::insertion(comment, TextSize::new(0)))), - ) + if !comment_exists(&comment, locator, comment_ranges) { + context + .report_diagnostic(StableTestRuleSafeFix, ruff_text_size::TextRange::default()) + .set_fix(Fix::safe_edit(Edit::insertion(comment, TextSize::new(0)))); } } } @@ -156,18 +151,15 @@ impl Violation for StableTestRuleUnsafeFix { } impl TestRule for StableTestRuleUnsafeFix { - fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges) -> Option { + fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges, context: &LintContext) { let comment = "# fix from stable-test-rule-unsafe-fix\n".to_string(); - if comment_exists(&comment, locator, comment_ranges) { - None - } else { - Some( - OldDiagnostic::new( + if !comment_exists(&comment, locator, comment_ranges) { + context + .report_diagnostic( StableTestRuleUnsafeFix, ruff_text_size::TextRange::default(), ) - .with_fix(Fix::unsafe_edit(Edit::insertion(comment, TextSize::new(0)))), - ) + .set_fix(Fix::unsafe_edit(Edit::insertion(comment, TextSize::new(0)))); } } } @@ -200,21 +192,18 @@ impl Violation for StableTestRuleDisplayOnlyFix { } impl TestRule for StableTestRuleDisplayOnlyFix { - fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges) -> Option { + fn diagnostic(locator: &Locator, comment_ranges: &CommentRanges, context: &LintContext) { let comment = "# fix from stable-test-rule-display-only-fix\n".to_string(); - if comment_exists(&comment, locator, comment_ranges) { - None - } else { - Some( - OldDiagnostic::new( + if !comment_exists(&comment, locator, comment_ranges) { + context + .report_diagnostic( StableTestRuleDisplayOnlyFix, ruff_text_size::TextRange::default(), ) - .with_fix(Fix::display_only_edit(Edit::insertion( + .set_fix(Fix::display_only_edit(Edit::insertion( comment, TextSize::new(0), - ))), - ) + ))); } } } @@ -247,11 +236,8 @@ impl Violation for PreviewTestRule { } impl TestRule for PreviewTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - PreviewTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(PreviewTestRule, ruff_text_size::TextRange::default()); } } @@ -283,11 +269,8 @@ impl Violation for DeprecatedTestRule { } impl TestRule for DeprecatedTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - DeprecatedTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(DeprecatedTestRule, ruff_text_size::TextRange::default()); } } @@ -319,11 +302,11 @@ impl Violation for AnotherDeprecatedTestRule { } impl TestRule for AnotherDeprecatedTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic( AnotherDeprecatedTestRule, ruff_text_size::TextRange::default(), - )) + ); } } @@ -355,11 +338,8 @@ impl Violation for RemovedTestRule { } impl TestRule for RemovedTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - RemovedTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: 
&LintContext) { + context.report_diagnostic(RemovedTestRule, ruff_text_size::TextRange::default()); } } @@ -391,11 +371,8 @@ impl Violation for AnotherRemovedTestRule { } impl TestRule for AnotherRemovedTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - AnotherRemovedTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(AnotherRemovedTestRule, ruff_text_size::TextRange::default()); } } @@ -427,11 +404,8 @@ impl Violation for RedirectedFromTestRule { } impl TestRule for RedirectedFromTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - RedirectedFromTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(RedirectedFromTestRule, ruff_text_size::TextRange::default()); } } @@ -463,11 +437,8 @@ impl Violation for RedirectedToTestRule { } impl TestRule for RedirectedToTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( - RedirectedToTestRule, - ruff_text_size::TextRange::default(), - )) + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic(RedirectedToTestRule, ruff_text_size::TextRange::default()); } } @@ -499,10 +470,10 @@ impl Violation for RedirectedFromPrefixTestRule { } impl TestRule for RedirectedFromPrefixTestRule { - fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges) -> Option { - Some(OldDiagnostic::new( + fn diagnostic(_locator: &Locator, _comment_ranges: &CommentRanges, context: &LintContext) { + context.report_diagnostic( RedirectedFromPrefixTestRule, ruff_text_size::TextRange::default(), - )) + ); } }
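// Note: several call sites above gate reporting on whether the rule is enabled: the
// logical-lines rules use `if let Some(mut diagnostic) = context.report_diagnostic(...)`, and
// `Candidate::into_diagnostic` now routes through `report_diagnostic_if_enabled(kind, range, settings)`
// instead of checking rule enablement after constructing the diagnostic. The following is a
// self-contained, simplified sketch (std only) of that gating; `DemoSettings`, `DemoContext`,
// and `DemoGuard` are hypothetical names, not the crate's real types.
use std::cell::RefCell;
use std::collections::HashSet;

struct DemoSettings {
    enabled: HashSet<&'static str>,
}

struct DemoContext {
    reported: RefCell<Vec<&'static str>>,
}

struct DemoGuard<'a> {
    context: &'a DemoContext,
    code: &'static str,
}

impl Drop for DemoGuard<'_> {
    // As in the earlier sketch, the report is recorded when the guard drops.
    fn drop(&mut self) {
        self.context.reported.borrow_mut().push(self.code);
    }
}

impl DemoContext {
    // Disabled rules short-circuit before any diagnostic (or fix) is built.
    fn report_diagnostic_if_enabled(
        &self,
        code: &'static str,
        settings: &DemoSettings,
    ) -> Option<DemoGuard<'_>> {
        settings
            .enabled
            .contains(code)
            .then(|| DemoGuard { context: self, code })
    }
}

fn main() {
    let settings = DemoSettings {
        enabled: HashSet::from(["enabled-rule"]),
    };
    let context = DemoContext {
        reported: RefCell::new(Vec::new()),
    };

    // Enabled: a guard comes back, and the report lands when it drops.
    if let Some(_diagnostic) = context.report_diagnostic_if_enabled("enabled-rule", &settings) {
        // a fix would be attached here
    }

    // Disabled: `None`, so nothing is built or recorded.
    assert!(context
        .report_diagnostic_if_enabled("disabled-rule", &settings)
        .is_none());

    assert_eq!(*context.reported.borrow(), vec!["enabled-rule"]);
}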