[ty] Prototype of --add-ignore CLI option

Micha Reiser 2025-11-29 18:37:03 +01:00
parent 69ace00210
commit d464344f74
No known key found for this signature in database
11 changed files with 438 additions and 92 deletions
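In short, the check subcommand gains an --add-ignore flag, so the prototype is presumably invoked as ty check --add-ignore: the normal analysis still runs, but every suppressible lint diagnostic is written back into the affected file as a # ty:ignore[...] comment, and only the diagnostics that could not be suppressed (non-lint diagnostics and I/O errors from writing the files) are reported and determine the exit status.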

Cargo.lock (generated)

@@ -4332,8 +4332,10 @@ dependencies = [
 "rayon",
 "regex",
 "ruff_db",
+"ruff_diagnostics",
 "ruff_python_ast",
 "ruff_python_trivia",
+"ruff_text_size",
 "salsa",
 "tempfile",
 "toml",
@@ -4428,6 +4430,7 @@ dependencies = [
 "regex-automata",
 "ruff_cache",
 "ruff_db",
+"ruff_diagnostics",
 "ruff_macros",
 "ruff_memory_usage",
 "ruff_options_metadata",


@@ -1,6 +1,7 @@
 use std::ops::Deref;
 use std::sync::Arc;

+use ruff_diagnostics::SourceMap;
 use ruff_notebook::Notebook;
 use ruff_python_ast::PySourceType;
 use ruff_source_file::LineIndex;
@@ -90,6 +91,34 @@ impl SourceText {
     pub fn read_error(&self) -> Option<&SourceTextError> {
         self.inner.read_error.as_ref()
     }
+
+    pub fn updated(&mut self, new_source: String, source_map: &SourceMap) {
+        let inner = Arc::make_mut(&mut self.inner);
+
+        match &mut inner.kind {
+            SourceTextKind::Text(text) => *text = new_source,
+            SourceTextKind::Notebook { notebook } => {
+                notebook.update(&source_map, new_source);
+            }
+        };
+    }
+
+    pub fn to_raw_content(&self) -> std::borrow::Cow<'_, str> {
+        match &self.inner.kind {
+            SourceTextKind::Text(text) => text.as_str().into(),
+            SourceTextKind::Notebook { notebook } => {
+                let mut output = Vec::new();
+                notebook
+                    .write(&mut output)
+                    .expect("Writing to a `Vec` should not fail");
+
+                String::from_utf8(output)
+                    .expect(
+                        "Notebook should serialize to valid UTF-8 if the source was valid UTF-8",
+                    )
+                    .into()
+            }
+        }
+    }
 }

 impl Deref for SourceText {
@@ -117,13 +146,13 @@ impl std::fmt::Debug for SourceText {
     }
 }

-#[derive(Eq, PartialEq, get_size2::GetSize)]
+#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
 struct SourceTextInner {
     kind: SourceTextKind,
     read_error: Option<SourceTextError>,
 }

-#[derive(Eq, PartialEq, get_size2::GetSize)]
+#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
 enum SourceTextKind {
     Text(String),
     Notebook {
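The new SourceText::updated method relies on copy-on-write through Arc::make_mut, which is also why Clone is added to the derives in this file. A minimal standalone sketch of that pattern (Inner is a stand-in type, not the real SourceTextInner):

use std::sync::Arc;

#[derive(Clone)]
struct Inner {
    text: String,
}

fn update(this: &mut Arc<Inner>, new_text: String) {
    // Clones the inner value only if another `Arc` handle still points at it.
    let inner = Arc::make_mut(this);
    inner.text = new_text;
}

fn main() {
    let mut a = Arc::new(Inner { text: "old".to_string() });
    let b = Arc::clone(&a);

    update(&mut a, "new".to_string());

    assert_eq!(a.text, "new");
    assert_eq!(b.text, "old"); // the other handle keeps the original contents
}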


@@ -16,6 +16,8 @@ license.workspace = true
 [dependencies]
 ruff_db = { workspace = true, features = ["os", "cache"] }
 ruff_python_ast = { workspace = true }
+ruff_diagnostics = { workspace = true }
+ruff_text_size = { workspace = true }
 ty_combine = { workspace = true }
 ty_python_semantic = { workspace = true }
 ty_project = { workspace = true, features = ["zstd"] }


@@ -53,6 +53,9 @@ pub(crate) struct CheckCommand {
     )]
     pub paths: Vec<SystemPathBuf>,

+    #[arg(long)]
+    pub(crate) add_ignore: bool,
+
     /// Run the command within the given project directory.
     ///
     /// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
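The flag is a plain boolean switch with no extra configuration. A small standalone sketch of how such a field behaves under clap's derive API (the Check struct and its help text here are stand-ins, not the real CheckCommand):

use clap::Parser;

#[derive(Parser)]
struct Check {
    /// Add suppression comments instead of reporting diagnostics.
    #[arg(long)]
    add_ignore: bool,
}

fn main() {
    // A bare boolean with `#[arg(long)]` defaults to `false` and flips to
    // `true` when `--add-ignore` is passed.
    let args = Check::parse_from(["check", "--add-ignore"]);
    assert!(args.add_ignore);
}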


@@ -5,9 +5,14 @@ mod python_version;
 mod version;

 pub use args::Cli;
+use ruff_db::source::source_text;
+use ruff_diagnostics::{Fix, SourceMap};
+use ruff_text_size::{Ranged as _, TextLen, TextRange, TextSize};
 use ty_project::metadata::settings::TerminalSettings;
+use ty_python_semantic::suppress_all;
 use ty_static::EnvVars;

+use std::collections::BTreeMap;
 use std::fmt::Write;
 use std::process::{ExitCode, Termination};
@@ -22,16 +27,17 @@ use clap::{CommandFactory, Parser};
 use colored::Colorize;
 use crossbeam::channel as crossbeam_channel;
 use rayon::ThreadPoolBuilder;
+use ruff_db::Db as _;
 use ruff_db::diagnostic::{
     Diagnostic, DiagnosticId, DisplayDiagnosticConfig, DisplayDiagnostics, Severity,
 };
-use ruff_db::files::File;
+use ruff_db::files::{File, FilePath};
 use ruff_db::max_parallelism;
 use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
 use salsa::Database;
 use ty_project::metadata::options::ProjectOptionsOverrides;
 use ty_project::watch::ProjectWatcher;
-use ty_project::{CollectReporter, Db, watch};
+use ty_project::{CollectReporter, Db, suppress_all_diagnostics, watch};
 use ty_project::{ProjectDatabase, ProjectMetadata};
 use ty_server::run_server;
@@ -111,6 +117,12 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
         .map(|path| SystemPath::absolute(path, &cwd))
         .collect();

+    let mode = if args.add_ignore {
+        MainLoopMode::AddIgnore
+    } else {
+        MainLoopMode::Check
+    };
+
     let system = OsSystem::new(&cwd);
     let watch = args.watch;
     let exit_zero = args.exit_zero;
@@ -138,7 +150,7 @@
     }

     let (main_loop, main_loop_cancellation_token) =
-        MainLoop::new(project_options_overrides, printer);
+        MainLoop::new(mode, project_options_overrides, printer);

     // Listen to Ctrl+C and abort the watch mode.
     let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
@@ -209,6 +221,8 @@ impl Termination for ExitStatus {
 }

 struct MainLoop {
+    mode: MainLoopMode,
+
     /// Sender that can be used to send messages to the main loop.
     sender: crossbeam_channel::Sender<MainLoopMessage>,
@@ -226,6 +240,7 @@ struct MainLoop {
 impl MainLoop {
     fn new(
+        mode: MainLoopMode,
         project_options_overrides: ProjectOptionsOverrides,
         printer: Printer,
     ) -> (Self, MainLoopCancellationToken) {
@@ -233,6 +248,7 @@ impl MainLoop {
         (
             Self {
+                mode,
                 sender: sender.clone(),
                 receiver,
                 watcher: None,
@@ -310,13 +326,13 @@
                     result,
                     revision: check_revision,
                 } => {
-                    let terminal_settings = db.project().settings(db).terminal();
-                    let display_config = DisplayDiagnosticConfig::default()
-                        .format(terminal_settings.output_format.into())
-                        .color(colored::control::SHOULD_COLORIZE.should_colorize())
-                        .show_fix_diff(true);
-
-                    if check_revision == revision {
+                    if check_revision != revision {
+                        tracing::debug!(
+                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
+                        );
+                        continue;
+                    }
+
                     if db.project().files(db).is_empty() {
                         tracing::warn!("No python files found under the given path(s)");
                     }
@@ -326,8 +342,16 @@
                         return Ok(ExitStatus::Success);
                     }

+                    let terminal_settings = db.project().settings(db).terminal();
                     let is_human_readable = terminal_settings.output_format.is_human_readable();
+
+                    let diagnostics = match self.mode {
+                        MainLoopMode::Check => {
+                            let display_config = DisplayDiagnosticConfig::default()
+                                .format(terminal_settings.output_format.into())
+                                .color(colored::control::SHOULD_COLORIZE.should_colorize())
+                                .show_fix_diff(true);
+
                             if result.is_empty() {
                                 if is_human_readable {
                                     writeln!(
@@ -336,16 +360,10 @@
                                         "All checks passed!".green().bold()
                                     )?;
                                 }
-
-                                if self.watcher.is_none() {
-                                    return Ok(ExitStatus::Success);
-                                }
                             } else {
                                 let diagnostics_count = result.len();
                                 let mut stdout = self.printer.stream_for_details().lock();
-                                let exit_status =
-                                    exit_status_from_diagnostics(&result, terminal_settings);

                                 // Only render diagnostics if they're going to be displayed, since doing
                                 // so is expensive.
@@ -365,6 +383,38 @@
                                         if diagnostics_count > 1 { "s" } else { "" }
                                     )?;
                                 }
+                            }
+
+                            result
+                        }
+                        MainLoopMode::AddIgnore => {
+                            let result = suppress_all_diagnostics(db, result);
+
+                            if is_human_readable {
+                                writeln!(
+                                    self.printer.stream_for_failure_summary(),
+                                    "Ignored {} diagnostic{}",
+                                    result.count,
+                                    if result.count > 1 { "s" } else { "" }
+                                )?;
+                            }
+
+                            result.diagnostics
+                        }
+                    };
+
+                    if self.watcher.is_some() {
+                        continue;
+                    }
+
+                    let exit_status = if diagnostics.is_empty() {
+                        ExitStatus::Success
+                    } else {
+                        let exit_status =
+                            exit_status_from_diagnostics(&diagnostics, terminal_settings);
+                        exit_status
+                    };

                     if exit_status.is_internal_error() {
                         tracing::warn!(
@@ -372,16 +422,8 @@
                         );
                     }

-                    if self.watcher.is_none() {
                     return Ok(exit_status);
                 }
-                    }
-                    } else {
-                        tracing::debug!(
-                            "Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
-                        );
-                    }
-                }
                 MainLoopMessage::ApplyChanges(changes) => {
                     revision += 1;
@@ -406,6 +448,12 @@
     }
 }

+#[derive(Copy, Clone, Debug)]
+enum MainLoopMode {
+    Check,
+    AddIgnore,
+}
+
 fn exit_status_from_diagnostics(
     diagnostics: &[Diagnostic],
     terminal_settings: &TerminalSettings,
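Distilled, the reworked CheckCompleted handler discards outdated revisions, dispatches on the new mode, and derives the exit status only from the diagnostics that remain after suppression. A simplified sketch with stand-in types (the real code also renders diagnostics, respects terminal settings, and keeps looping in watch mode):

enum MainLoopMode {
    Check,
    AddIgnore,
}

struct SuppressAllResult {
    diagnostics: Vec<String>, // stand-in for the non-suppressible diagnostics
    count: usize,
}

// Stand-in for `ty_project::suppress_all_diagnostics`, which writes the
// suppression comments and returns whatever it could not suppress.
fn suppress_all_diagnostics(result: Vec<String>) -> SuppressAllResult {
    SuppressAllResult {
        diagnostics: Vec::new(),
        count: result.len(),
    }
}

fn handle_check_completed(mode: MainLoopMode, result: Vec<String>, watching: bool) -> Option<bool> {
    let remaining = match mode {
        MainLoopMode::Check => result,
        MainLoopMode::AddIgnore => suppress_all_diagnostics(result).diagnostics,
    };

    if watching {
        return None; // watch mode never exits from here
    }

    // `true` mirrors `ExitStatus::Success`: only unsuppressed diagnostics count.
    Some(remaining.is_empty())
}

fn main() {
    assert_eq!(
        handle_check_completed(MainLoopMode::AddIgnore, vec!["lint".to_string()], false),
        Some(true)
    );
}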


@@ -4,7 +4,7 @@ use ruff_db::{files::File, parsed::parsed_module};
 use ruff_diagnostics::Edit;
 use ruff_text_size::TextRange;
 use ty_project::Db;
-use ty_python_semantic::create_suppression_fix;
+use ty_python_semantic::suppress_single;
 use ty_python_semantic::types::UNRESOLVED_REFERENCE;

 /// A `QuickFix` Code Action
@@ -36,7 +36,7 @@ pub fn code_actions(
         actions.push(QuickFix {
             title: format!("Ignore '{}' for this line", lint_id.name()),
-            edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
+            edits: suppress_single(db, file, lint_id, diagnostic_range).into_edits(),
             preferred: false,
         });


@@ -14,6 +14,7 @@ license.workspace = true
 [dependencies]
 ruff_cache = { workspace = true }
 ruff_db = { workspace = true, features = ["cache", "serde"] }
+ruff_diagnostics = { workspace = true }
 ruff_macros = { workspace = true }
 ruff_memory_usage = { workspace = true }
 ruff_options_metadata = { workspace = true }


@@ -0,0 +1,142 @@
+use std::collections::BTreeMap;
+
+use ruff_db::{
+    diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span},
+    files::{File, FilePath},
+    source::source_text,
+};
+use ruff_diagnostics::{Fix, SourceMap};
+use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
+use ty_python_semantic::suppress_all;
+
+use crate::Db;
+
+pub struct SuppressAllResult {
+    /// The non-lint diagnostics that can't be suppressed.
+    pub diagnostics: Vec<Diagnostic>,
+
+    /// The number of diagnostics that were suppressed.
+    pub count: usize,
+}
+
+/// Suppresses all suppressible lint diagnostics by writing `ty:ignore[...]` comments
+/// into the affected files.
+pub fn suppress_all_diagnostics(db: &dyn Db, diagnostics: Vec<Diagnostic>) -> SuppressAllResult {
+    let system = db
+        .system()
+        .as_writable()
+        .expect("System should be writable");
+
+    let mut non_lint_diagnostics = diagnostics;
+    let mut by_file: BTreeMap<File, Vec<_>> = BTreeMap::new();
+
+    non_lint_diagnostics.retain(|diagnostic| {
+        let DiagnosticId::Lint(lint_id) = diagnostic.id() else {
+            return true;
+        };
+
+        let Some(span) = diagnostic.primary_span() else {
+            return true;
+        };
+
+        let Some(range) = span.range() else {
+            return true;
+        };
+
+        by_file
+            .entry(span.expect_ty_file())
+            .or_default()
+            .push((lint_id, range));
+
+        false
+    });
+
+    let mut count = 0usize;
+
+    for (file, to_suppress) in by_file {
+        let FilePath::System(path) = file.path(db) else {
+            tracing::debug!(
+                "Skipping file `{}` with non-system path because vendored and system virtual file paths are read-only",
+                file.path(db)
+            );
+            continue;
+        };
+
+        let mut source = source_text(db, file);
+        let count_current_file = to_suppress.len();
+        let fixes = suppress_all(db, file, to_suppress);
+        let (new_source, source_map) = apply_fixes(db, file, fixes);
+        source.updated(new_source, &source_map);
+
+        // Create new source from applying fixes
+        if let Err(err) = system.write_file(path, &*source.to_raw_content()) {
+            let mut diag = Diagnostic::new(
+                DiagnosticId::Io,
+                Severity::Error,
+                format_args!("Failed to write fixes: {err}"),
+            );
+            diag.annotate(Annotation::primary(Span::from(file)));
+            non_lint_diagnostics.push(diag);
+            continue;
+        }
+
+        count += count_current_file;
+    }
+
+    SuppressAllResult {
+        diagnostics: non_lint_diagnostics,
+        count,
+    }
+}
+
+/// Applies a series of fixes to `file` and returns the updated source code along with the source map.
+fn apply_fixes(db: &dyn Db, file: File, mut fixes: Vec<Fix>) -> (String, SourceMap) {
+    let source = source_text(db, file);
+    let source = source.as_str();
+
+    let mut output = String::with_capacity(source.len());
+    let mut last_pos: Option<TextSize> = None;
+    let mut source_map = SourceMap::default();
+
+    fixes.sort_unstable_by_key(|fix| fix.min_start());
+
+    for fix in fixes {
+        let mut edits = fix.edits().iter().peekable();
+
+        // If the fix contains at least one new edit, enforce isolation and positional requirements.
+        if let Some(first) = edits.peek() {
+            // If this fix overlaps with a fix we've already applied, skip it.
+            if last_pos.is_some_and(|last_pos| last_pos >= first.start()) {
+                continue;
+            }
+        }
+
+        let mut applied_edits = Vec::with_capacity(fix.edits().len());
+        for edit in edits {
+            // Add all contents from `last_pos` to `fix.location`.
+            let slice = &source[TextRange::new(last_pos.unwrap_or_default(), edit.start())];
+            output.push_str(slice);
+
+            // Add the start source marker for the patch.
+            source_map.push_start_marker(edit, output.text_len());
+
+            // Add the patch itself.
+            output.push_str(edit.content().unwrap_or_default());
+
+            // Add the end source marker for the added patch.
+            source_map.push_end_marker(edit, output.text_len());
+
+            // Track that the edit was applied.
+            last_pos = Some(edit.end());
+            applied_edits.push(edit);
+        }
+    }
+
+    // Add the remaining content.
+    let slice = &source[last_pos.unwrap_or_default().to_usize()..];
+    output.push_str(slice);
+
+    (output, source_map)
+}


@@ -9,6 +9,7 @@ use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
 pub use db::tests::TestDb;
 pub use db::{ChangeResult, CheckMode, Db, ProjectDatabase, SalsaMemoryDump};
 use files::{Index, Indexed, IndexedFiles};
+pub use fixes::suppress_all_diagnostics;
 use metadata::settings::Settings;
 pub use metadata::{ProjectMetadata, ProjectMetadataError};
 use ruff_db::diagnostic::{
@@ -34,6 +35,7 @@ use ty_python_semantic::types::check_types;
 mod db;
 mod files;
+mod fixes;
 mod glob;
 pub mod metadata;
 mod walk;


@@ -25,7 +25,7 @@ pub use semantic_model::{
     Completion, HasDefinition, HasType, MemberDefinition, NameKind, SemanticModel,
 };
 pub use site_packages::{PythonEnvironment, SitePackagesPaths, SysPrefixPathOrigin};
-pub use suppression::create_suppression_fix;
+pub use suppression::{suppress_all, suppress_single};
 pub use types::DisplaySettings;
 pub use types::ide_support::{
     ImportAliasResolution, ResolvedDefinition, definitions_for_attribute, definitions_for_bin_op,


@@ -1,4 +1,8 @@
+use ruff_db::diagnostic::LintName;
 use smallvec::{SmallVec, smallvec};
+use std::cmp::Ordering;
+use std::collections::BTreeMap;
+use std::collections::BTreeSet;
 use std::error::Error;
 use std::fmt;
 use std::fmt::Formatter;
@@ -375,15 +379,142 @@ fn check_unused_suppressions(context: &mut CheckSuppressionsContext) {
     }
 }

+pub fn suppress_all<I>(db: &dyn Db, file: File, ids_with_range: I) -> Vec<Fix>
+where
+    I: IntoIterator<Item = (LintName, TextRange)>,
+{
+    let grouped = group_by_suppression_range(db, file, ids_with_range);
+    create_all_fixes(db, file, grouped)
+}
+
+/// Creates a fix to suppress a single lint.
+pub fn suppress_single(db: &dyn Db, file: File, id: LintId, range: TextRange) -> Fix {
+    let suppression_range = suppression_range(db, file, range);
+    create_suppression_fix(db, file, id.name(), suppression_range)
+}
+
+fn create_all_fixes(
+    db: &dyn Db,
+    file: File,
+    grouped: BTreeMap<SuppressionRange, BTreeSet<LintName>>,
+) -> Vec<Fix> {
+    let mut fixes = Vec::new();
+
+    for (range, lints) in grouped {
+        for lint in lints.into_iter().rev() {
+            let fix = create_suppression_fix(db, file, lint, range);
+            fixes.push(fix);
+        }
+    }
+
+    fixes
+}
+
+fn group_by_suppression_range<I>(
+    db: &dyn Db,
+    file: File,
+    ids_with_range: I,
+) -> BTreeMap<SuppressionRange, BTreeSet<LintName>>
+where
+    I: IntoIterator<Item = (LintName, TextRange)>,
+{
+    let mut map: BTreeMap<SuppressionRange, BTreeSet<LintName>> = BTreeMap::new();
+
+    for (id, range) in ids_with_range {
+        let full_range = suppression_range(db, file, range);
+        map.entry(full_range).or_default().insert(id);
+    }
+
+    map
+}
+
+/// Returns the suppression range for the given `range`.
+///
+/// The suppression range is defined as:
+///
+/// * `start`: The `end` of the preceding `Newline` or `NonLogicalNewline` token.
+/// * `end`: The `start` of the first `NonLogicalNewline` or `Newline` token coming after the range.
+///
+/// For most ranges, this means the suppression range starts at the beginning of the physical line
+/// and ends at the end of the physical line containing `range`. The exceptions to this are:
+///
+/// * If `range` is within a single-line interpolated expression, then the start and end are extended to the start and end of the enclosing interpolated string.
+/// * If there's a line continuation, then the suppression range is extended to include the following line too.
+/// * If there's a multiline string, then the suppression range is extended to cover the starting and ending line of the multiline string.
+fn suppression_range(db: &dyn Db, file: File, range: TextRange) -> SuppressionRange {
+    let parsed = parsed_module(db, file).load(db);
+
+    let before_tokens = parsed.tokens().before(range.start());
+    let line_start = before_tokens
+        .iter()
+        .rfind(|token| {
+            matches!(
+                token.kind(),
+                TokenKind::Newline | TokenKind::NonLogicalNewline
+            )
+        })
+        .map(Ranged::end)
+        .unwrap_or(TextSize::default());
+
+    let after_tokens = parsed.tokens().after(range.end());
+    let line_end = after_tokens
+        .iter()
+        .find(|token| {
+            matches!(
+                token.kind(),
+                TokenKind::Newline | TokenKind::NonLogicalNewline
+            )
+        })
+        .map(Ranged::start)
+        .unwrap_or(range.end());
+
+    SuppressionRange(TextRange::new(line_start, line_end))
+}
+
+/// The range of the suppression.
+///
+/// Guaranteed to start at the start of a line and
+/// end at the end of a line (right before the `\n`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+struct SuppressionRange(TextRange);
+
+impl SuppressionRange {
+    fn text_range(&self) -> TextRange {
+        self.0
+    }
+
+    fn line_end(&self) -> TextSize {
+        self.0.end()
+    }
+}
+
+impl PartialOrd for SuppressionRange {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for SuppressionRange {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.0.ordering(other.0)
+    }
+}
+
 /// Creates a fix for adding a suppression comment to suppress `lint` for `range`.
 ///
 /// The fix prefers adding the code to an existing `ty: ignore[]` comment over
 /// adding a new suppression comment.
-pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRange) -> Fix {
+fn create_suppression_fix(
+    db: &dyn Db,
+    file: File,
+    name: LintName,
+    suppression_range: SuppressionRange,
+) -> Fix {
     let suppressions = suppressions(db, file);
     let source = source_text(db, file);

-    let mut existing_suppressions = suppressions.line_suppressions(range).filter(|suppression| {
+    let mut existing_suppressions = suppressions
+        .line_suppressions(suppression_range.text_range())
+        .filter(|suppression| {
             matches!(
                 suppression.target,
                 SuppressionTarget::Lint(_) | SuppressionTarget::Empty,
@@ -398,9 +529,9 @@ pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRange) -> Fix {
     let up_to_last_code = before_closing_paren.trim_end();

     let insertion = if up_to_last_code.ends_with(',') {
-        format!(" {id}", id = id.name())
+        format!(" {name}")
     } else {
-        format!(", {id}", id = id.name())
+        format!(", {name}")
     };

     let relative_offset_from_end = comment_text.text_len() - up_to_last_code.text_len();
@@ -414,28 +545,13 @@
     // Always insert a new suppression at the end of the range to avoid having to deal with multiline strings
     // etc.
-    let parsed = parsed_module(db, file).load(db);
-    let tokens_after = parsed.tokens().after(range.end());
-
-    // Same as for `line_end` when building up the `suppressions`: Ignore newlines
-    // in multiline-strings, inside f-strings, or after a line continuation because we can't
-    // place a comment on those lines.
-    let line_end = tokens_after
-        .iter()
-        .find(|token| {
-            matches!(
-                token.kind(),
-                TokenKind::Newline | TokenKind::NonLogicalNewline
-            )
-        })
-        .map(Ranged::start)
-        .unwrap_or(source.text_len());
+    let line_end = suppression_range.line_end();

     let up_to_line_end = &source[..line_end.to_usize()];
     let up_to_first_content = up_to_line_end.trim_end();
     let trailing_whitespace_len = up_to_line_end.text_len() - up_to_first_content.text_len();

-    let insertion = format!(" # ty:ignore[{id}]", id = id.name());
+    let insertion = format!(" # ty:ignore[{name}]");

     Fix::safe_edit(if trailing_whitespace_len == TextSize::ZERO {
         Edit::insertion(insertion, line_end)
@@ -613,7 +729,7 @@ impl Suppressions {
             // Don't use intersect to avoid that suppressions on inner-expression
             // ignore errors for outer expressions
             suppression.suppressed_range.contains(range.start())
-                || suppression.suppressed_range.contains(range.end())
+                || suppression.suppressed_range.contains_inclusive(range.end())
         })
     }
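Taken together with suppression_range above, the net effect on an ordinary single-line statement is that the comment is appended at the end of the physical line. A plain-text sketch of that end result (no tokenizer, so the multiline-string, f-string, and line-continuation cases handled by the real code are ignored; unresolved-reference is just an illustrative lint name):

fn suppress_line(source: &str, diagnostic_offset: usize, lint: &str) -> String {
    // End of the physical line containing the diagnostic; `suppression_range`
    // determines this from token information instead of a raw newline search.
    let line_end = source[diagnostic_offset..]
        .find('\n')
        .map(|i| diagnostic_offset + i)
        .unwrap_or(source.len());

    let mut output = String::with_capacity(source.len() + lint.len() + 16);
    output.push_str(&source[..line_end]);
    output.push_str(&format!(" # ty:ignore[{lint}]"));
    output.push_str(&source[line_end..]);
    output
}

fn main() {
    let source = "x = undefined_symbol\nprint(x)\n";
    assert_eq!(
        suppress_line(source, 4, "unresolved-reference"),
        "x = undefined_symbol # ty:ignore[unresolved-reference]\nprint(x)\n"
    );
}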