Mirror of https://github.com/astral-sh/ruff, synced 2026-01-21 05:20:49 -05:00

[ty] Add --add-ignore CLI option (#21696)

Changed: Cargo.lock (generated, 2 lines)
@@ -4511,11 +4511,13 @@ dependencies = [
 "regex-automata",
 "ruff_cache",
 "ruff_db",
 "ruff_diagnostics",
 "ruff_macros",
 "ruff_memory_usage",
 "ruff_options_metadata",
 "ruff_python_ast",
 "ruff_python_formatter",
 "ruff_python_trivia",
 "ruff_text_size",
 "rustc-hash",
 "salsa",
@@ -221,7 +221,7 @@ fn setup_micro_case(code: &str) -> Case {
    let file_path = "src/test.py";

    fs.write_file_all(
        SystemPathBuf::from(file_path),
-       ruff_python_trivia::textwrap::dedent(code),
+       &*ruff_python_trivia::textwrap::dedent(code),
    )
    .unwrap();
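Side note (not part of the diff): the `&*` is needed because `write_file_all` now takes `impl AsRef<[u8]>` instead of `impl ToString`, and `textwrap::dedent` appears to return a `Cow`-like wrapper that only coerces to `&str` after a deref. A minimal sketch of the pattern, with a hypothetical `write_all` helper standing in for the real API:

```rust
use std::borrow::Cow;

// Hypothetical stand-in for `MemoryFileSystem::write_file_all`: any byte-like
// content is accepted via `AsRef<[u8]>`.
fn write_all(content: impl AsRef<[u8]>) -> usize {
    content.as_ref().len()
}

fn main() {
    let dedented: Cow<'_, str> = Cow::Owned("x = 1\n".to_string());
    // `&*dedented` derefs the `Cow` to `&str`, which implements `AsRef<[u8]>`.
    assert_eq!(write_all(&*dedented), 6);
    // Plain strings and byte slices work too.
    assert_eq!(write_all("abc"), 3);
    assert_eq!(write_all(b"abc".as_slice()), 3);
}
```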
@@ -1,3 +1,4 @@
use std::fmt::Formatter;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

@@ -49,3 +50,15 @@ impl CancellationToken {
        self.cancelled.load(std::sync::atomic::Ordering::Relaxed)
    }
}

/// The operation was canceled by the provided [`CancellationToken`].
#[derive(Debug)]
pub struct Canceled;

impl std::error::Error for Canceled {}

impl std::fmt::Display for Canceled {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str("operation was canceled")
    }
}
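A minimal sketch (not from the diff) of how the new `Canceled` error pairs with a cancellation token: a long-running loop polls the token and bails out with `Err(Canceled)`, which is the shape `suppress_all_diagnostics` uses later in this commit. It assumes the `ruff_db` crate from this repository is available and that `CancellationTokenSource::new()` / `.token()` behave as they do elsewhere in the diff.

```rust
use ruff_db::cancellation::{Canceled, CancellationToken, CancellationTokenSource};

/// Process a batch of work items, stopping early if the token is cancelled.
fn process_all(items: &[u32], token: &CancellationToken) -> Result<u32, Canceled> {
    let mut sum = 0;
    for item in items {
        if token.is_cancelled() {
            // Propagate cancellation as the new typed error instead of a bool.
            return Err(Canceled);
        }
        sum += item;
    }
    Ok(sum)
}

fn main() {
    let source = CancellationTokenSource::new();
    let result = process_all(&[1, 2, 3], &source.token());
    assert_eq!(result.unwrap(), 6);
}
```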
@@ -98,6 +98,44 @@ impl Diagnostic {
        diag
    }

    /// Adds sub diagnostics that tell the user that this is a bug in ty
    /// and asks them to open an issue on GitHub.
    pub fn add_bug_sub_diagnostics(&mut self, url_encoded_title: &str) {
        self.sub(SubDiagnostic::new(
            SubDiagnosticSeverity::Info,
            "This indicates a bug in ty.",
        ));

        self.sub(SubDiagnostic::new(
            SubDiagnosticSeverity::Info,
            format_args!(
                "If you could open an issue at https://github.com/astral-sh/ty/issues/new?title={url_encoded_title}, we'd be very appreciative!"
            ),
        ));
        self.sub(SubDiagnostic::new(
            SubDiagnosticSeverity::Info,
            format!(
                "Platform: {os} {arch}",
                os = std::env::consts::OS,
                arch = std::env::consts::ARCH
            ),
        ));
        if let Some(version) = crate::program_version() {
            self.sub(SubDiagnostic::new(
                SubDiagnosticSeverity::Info,
                format!("Version: {version}"),
            ));
        }

        self.sub(SubDiagnostic::new(
            SubDiagnosticSeverity::Info,
            format!(
                "Args: {args:?}",
                args = std::env::args().collect::<Vec<_>>()
            ),
        ));
    }

    /// Add an annotation to this diagnostic.
    ///
    /// Annotations for a diagnostic are optional, but if any are added,

@@ -1019,6 +1057,13 @@ impl DiagnosticId {
        matches!(self, DiagnosticId::Lint(_))
    }

    pub const fn as_lint(&self) -> Option<LintName> {
        match self {
            DiagnosticId::Lint(name) => Some(*name),
            _ => None,
        }
    }

    /// Returns `true` if this `DiagnosticId` represents a lint with the given name.
    pub fn is_lint_named(&self, name: &str) -> bool {
        matches!(self, DiagnosticId::Lint(self_name) if self_name == name)
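A hedged sketch of how the two new helpers might be used together: `as_lint()` to filter lint diagnostics, and `add_bug_sub_diagnostics` to attach the standard "this is a bug in ty" footer with a pre-encoded issue title. The constructor arguments are assumed from the surrounding code (`Diagnostic::new`, `DiagnosticId::InternalError`, `Severity::Fatal`); exact signatures may differ.

```rust
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, Severity};

/// Count how many diagnostics are lint diagnostics (as opposed to IO,
/// syntax, or internal errors), using the new `as_lint` accessor.
fn lint_count(diagnostics: &[Diagnostic]) -> usize {
    diagnostics
        .iter()
        .filter(|diagnostic| diagnostic.id().as_lint().is_some())
        .count()
}

/// Build a fatal internal-error diagnostic that asks the user to file an
/// issue via the new `add_bug_sub_diagnostics` helper. The title must
/// already be URL-encoded (e.g. `%5BFix%20error%5D` for "[Fix error]").
fn internal_error(message: &str) -> Diagnostic {
    let mut diagnostic = Diagnostic::new(DiagnosticId::InternalError, Severity::Fatal, message);
    diagnostic.add_bug_sub_diagnostics("%5BFix%20error%5D");
    diagnostic
}
```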
@@ -14,6 +14,7 @@ use crate::diagnostic::{Span, UnifiedFile};
use crate::file_revision::FileRevision;
use crate::files::file_root::FileRoots;
use crate::files::private::FileStatus;
use crate::source::SourceText;
use crate::system::{SystemPath, SystemPathBuf, SystemVirtualPath, SystemVirtualPathBuf};
use crate::vendored::{VendoredPath, VendoredPathBuf};
use crate::{Db, FxDashMap, vendored};

@@ -323,6 +324,17 @@ pub struct File {
    /// the file has been deleted is to change the status to `Deleted`.
    #[default]
    status: FileStatus,

    /// Overrides the result of [`source_text`](crate::source::source_text).
    ///
    /// This is useful when running queries after modifying a file's content but
    /// before the content is written to disk. For example, to verify that the applied fixes
    /// didn't introduce any new errors.
    ///
    /// The override gets automatically removed the next time the file changes.
    #[default]
    #[returns(ref)]
    pub source_text_override: Option<SourceText>,
}

// The Salsa heap is tracked separately.

@@ -444,20 +456,28 @@ impl File {
            _ => (FileStatus::NotFound, FileRevision::zero(), None),
        };

        let mut clear_override = false;

        if file.status(db) != status {
            tracing::debug!("Updating the status of `{}`", file.path(db));
            file.set_status(db).to(status);
            clear_override = true;
        }

        if file.revision(db) != revision {
            tracing::debug!("Updating the revision of `{}`", file.path(db));
            file.set_revision(db).to(revision);
            clear_override = true;
        }

        if file.permissions(db) != permission {
            tracing::debug!("Updating the permissions of `{}`", file.path(db));
            file.set_permissions(db).to(permission);
        }

        if clear_override && file.source_text_override(db).is_some() {
            file.set_source_text_override(db).to(None);
        }
    }

    /// Returns `true` if the file exists.
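Roughly how the new override is intended to be used, as a sketch only: install the candidate text, run queries against the in-memory content, and let `File::sync_path` clear the override once the on-disk revision actually changes. This assumes the Salsa-generated setter accepts a `&mut dyn ruff_db::Db`, as the surrounding `sync` code suggests; the real flow in this commit wraps the same calls in `WithUpdatedSourceGuard`.

```rust
use ruff_db::Db;
use ruff_db::files::File;
use ruff_db::source::{SourceText, source_text};

/// Temporarily treat `candidate` as the content of `file`, without touching
/// the disk. `source_text` returns the override until the next
/// `File::sync_path` observes a changed revision and clears it.
fn check_candidate_source(db: &mut dyn Db, file: File, candidate: SourceText) -> SourceText {
    // Install the override; this invalidates the `source_text` query.
    file.set_source_text_override(db).to(Some(candidate));

    // Any query that goes through `source_text` now sees the candidate text.
    source_text(db, file)
}
```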
@@ -1,6 +1,8 @@
use std::borrow::Cow;
use std::ops::Deref;
use std::sync::Arc;

use ruff_diagnostics::SourceMap;
use ruff_notebook::Notebook;
use ruff_python_ast::PySourceType;
use ruff_source_file::LineIndex;

@@ -16,6 +18,10 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
    let _span = tracing::trace_span!("source_text", file = %path).entered();
    let mut read_error = None;

    if let Some(source) = file.source_text_override(db) {
        return source.clone();
    }

    let kind = if is_notebook(db.system(), path) {
        file.read_to_notebook(db)
            .unwrap_or_else(|error| {

@@ -90,6 +96,45 @@ impl SourceText {
    pub fn read_error(&self) -> Option<&SourceTextError> {
        self.inner.read_error.as_ref()
    }

    /// Returns a new instance for this file with the updated source text (Python code).
    ///
    /// Uses the `source_map` to preserve the cell-boundaries.
    #[must_use]
    pub fn with_text(&self, new_text: String, source_map: &SourceMap) -> Self {
        let new_kind = match &self.inner.kind {
            SourceTextKind::Text(_) => SourceTextKind::Text(new_text),

            SourceTextKind::Notebook { notebook } => {
                let mut new_notebook = notebook.as_ref().clone();
                new_notebook.update(source_map, new_text);
                SourceTextKind::Notebook {
                    notebook: new_notebook.into(),
                }
            }
        };

        Self {
            inner: Arc::new(SourceTextInner {
                kind: new_kind,
                read_error: self.inner.read_error.clone(),
            }),
        }
    }

    pub fn to_bytes(&self) -> Cow<'_, [u8]> {
        match &self.inner.kind {
            SourceTextKind::Text(source) => Cow::Borrowed(source.as_bytes()),
            SourceTextKind::Notebook { notebook } => {
                let mut output: Vec<u8> = Vec::new();
                notebook
                    .write(&mut output)
                    .expect("writing to a Vec should never fail");

                Cow::Owned(output)
            }
        }
    }
}

impl Deref for SourceText {

@@ -117,13 +162,13 @@ impl std::fmt::Debug for SourceText {
    }
}

-#[derive(Eq, PartialEq, get_size2::GetSize)]
+#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
struct SourceTextInner {
    kind: SourceTextKind,
    read_error: Option<SourceTextError>,
}

-#[derive(Eq, PartialEq, get_size2::GetSize)]
+#[derive(Eq, PartialEq, get_size2::GetSize, Clone)]
enum SourceTextKind {
    Text(String),
    Notebook {
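Together, `with_text` and `to_bytes` are what let the `--add-ignore` flow treat notebooks and plain Python files uniformly: fixes are applied to the concatenated source, `with_text` maps the result back into cells via the `SourceMap`, and `to_bytes` serializes whichever representation is in use for writing back to disk. A hedged sketch of that round trip (signatures assumed from the diff; the `Result` alias is assumed to be `std::io::Result`):

```rust
use ruff_db::source::SourceText;
use ruff_db::system::{SystemPath, WritableSystem};
use ruff_diagnostics::SourceMap;

/// Replace the text of `original` with `fixed_text` and persist the result,
/// preserving notebook cell boundaries through the `source_map`.
fn persist_fixed_source(
    system: &dyn WritableSystem,
    path: &SystemPath,
    original: &SourceText,
    fixed_text: String,
    source_map: &SourceMap,
) -> std::io::Result<()> {
    // For notebooks this re-distributes the text into cells; for plain files
    // it simply swaps the string.
    let updated = original.with_text(fixed_text, source_map);

    // `to_bytes` borrows the text for plain files and serializes the notebook
    // (JSON) for notebook files.
    system.write_file_bytes(path, &updated.to_bytes())
}
```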
@@ -271,7 +271,12 @@ pub trait WritableSystem: System {
    fn create_new_file(&self, path: &SystemPath) -> Result<()>;

    /// Writes the given content to the file at the given path.
-   fn write_file(&self, path: &SystemPath, content: &str) -> Result<()>;
+   fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
+       self.write_file_bytes(path, content.as_bytes())
+   }

    /// Writes the given content to the file at the given path.
    fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()>;

    /// Creates a directory at `path` as well as any intermediate directories.
    fn create_directory_all(&self, path: &SystemPath) -> Result<()>;

@@ -311,6 +316,8 @@ pub trait WritableSystem: System {

        Ok(Some(cache_path))
    }

    fn dyn_clone(&self) -> Box<dyn WritableSystem>;
}

#[derive(Clone, Debug, Eq, PartialEq)]
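The interesting design choice here is that `write_file` turned from a required method into a provided one, so implementors now only supply `write_file_bytes`. A simplified, self-contained illustration of that pattern (the trait below is a toy stand-in, not the real `WritableSystem`):

```rust
use std::cell::RefCell;

// Toy stand-in for `WritableSystem`: only the byte-oriented method is
// required; the string variant is provided on top of it.
trait ToyWritable {
    fn write_file_bytes(&self, content: &[u8]);

    fn write_file(&self, content: &str) {
        self.write_file_bytes(content.as_bytes());
    }
}

struct Recorder(RefCell<Vec<u8>>);

impl ToyWritable for Recorder {
    fn write_file_bytes(&self, content: &[u8]) {
        self.0.borrow_mut().extend_from_slice(content);
    }
}

fn main() {
    let recorder = Recorder(RefCell::new(Vec::new()));
    // Callers keep using the string API; it funnels into the bytes impl.
    recorder.write_file("print('hi')\n");
    assert_eq!(recorder.0.borrow().as_slice(), b"print('hi')\n");
}
```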
@@ -122,7 +122,9 @@ impl MemoryFileSystem {
|
||||
let entry = by_path.get(&normalized).ok_or_else(not_found)?;
|
||||
|
||||
match entry {
|
||||
Entry::File(file) => Ok(file.content.clone()),
|
||||
Entry::File(file) => {
|
||||
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
|
||||
}
|
||||
Entry::Directory(_) => Err(is_a_directory()),
|
||||
}
|
||||
}
|
||||
@@ -139,7 +141,7 @@ impl MemoryFileSystem {
|
||||
.get(&path.as_ref().to_path_buf())
|
||||
.ok_or_else(not_found)?;
|
||||
|
||||
Ok(file.content.clone())
|
||||
String::from_utf8(file.content.to_vec()).map_err(|_| invalid_utf8())
|
||||
}
|
||||
|
||||
pub fn exists(&self, path: &SystemPath) -> bool {
|
||||
@@ -161,7 +163,7 @@ impl MemoryFileSystem {
|
||||
match by_path.entry(normalized) {
|
||||
btree_map::Entry::Vacant(entry) => {
|
||||
entry.insert(Entry::File(File {
|
||||
content: String::new(),
|
||||
content: Box::default(),
|
||||
last_modified: file_time_now(),
|
||||
}));
|
||||
|
||||
@@ -177,13 +179,17 @@ impl MemoryFileSystem {
|
||||
/// Stores a new file in the file system.
|
||||
///
|
||||
/// The operation overrides the content for an existing file with the same normalized `path`.
|
||||
pub fn write_file(&self, path: impl AsRef<SystemPath>, content: impl ToString) -> Result<()> {
|
||||
pub fn write_file(
|
||||
&self,
|
||||
path: impl AsRef<SystemPath>,
|
||||
content: impl AsRef<[u8]>,
|
||||
) -> Result<()> {
|
||||
let mut by_path = self.inner.by_path.write().unwrap();
|
||||
|
||||
let normalized = self.normalize_path(path.as_ref());
|
||||
|
||||
let file = get_or_create_file(&mut by_path, &normalized)?;
|
||||
file.content = content.to_string();
|
||||
file.content = content.as_ref().to_vec().into_boxed_slice();
|
||||
file.last_modified = file_time_now();
|
||||
|
||||
Ok(())
|
||||
@@ -214,7 +220,7 @@ impl MemoryFileSystem {
|
||||
pub fn write_file_all(
|
||||
&self,
|
||||
path: impl AsRef<SystemPath>,
|
||||
content: impl ToString,
|
||||
content: impl AsRef<[u8]>,
|
||||
) -> Result<()> {
|
||||
let path = path.as_ref();
|
||||
|
||||
@@ -228,19 +234,24 @@ impl MemoryFileSystem {
|
||||
/// Stores a new virtual file in the file system.
|
||||
///
|
||||
/// The operation overrides the content for an existing virtual file with the same `path`.
|
||||
pub fn write_virtual_file(&self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
|
||||
pub fn write_virtual_file(
|
||||
&self,
|
||||
path: impl AsRef<SystemVirtualPath>,
|
||||
content: impl AsRef<[u8]>,
|
||||
) {
|
||||
let path = path.as_ref();
|
||||
let mut virtual_files = self.inner.virtual_files.write().unwrap();
|
||||
let content = content.as_ref().to_vec().into_boxed_slice();
|
||||
|
||||
match virtual_files.entry(path.to_path_buf()) {
|
||||
std::collections::hash_map::Entry::Vacant(entry) => {
|
||||
entry.insert(File {
|
||||
content: content.to_string(),
|
||||
content,
|
||||
last_modified: file_time_now(),
|
||||
});
|
||||
}
|
||||
std::collections::hash_map::Entry::Occupied(mut entry) => {
|
||||
entry.get_mut().content = content.to_string();
|
||||
entry.get_mut().content = content;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -468,7 +479,7 @@ impl Entry {
|
||||
|
||||
#[derive(Debug)]
|
||||
struct File {
|
||||
content: String,
|
||||
content: Box<[u8]>,
|
||||
last_modified: FileTime,
|
||||
}
|
||||
|
||||
@@ -497,6 +508,13 @@ fn directory_not_empty() -> std::io::Error {
|
||||
std::io::Error::other("directory not empty")
|
||||
}
|
||||
|
||||
fn invalid_utf8() -> std::io::Error {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidData,
|
||||
"stream did not contain valid UTF-8",
|
||||
)
|
||||
}
|
||||
|
||||
fn create_dir_all(
|
||||
paths: &mut RwLockWriteGuard<BTreeMap<Utf8PathBuf, Entry>>,
|
||||
normalized: &Utf8Path,
|
||||
@@ -533,7 +551,7 @@ fn get_or_create_file<'a>(
|
||||
|
||||
let entry = paths.entry(normalized.to_path_buf()).or_insert_with(|| {
|
||||
Entry::File(File {
|
||||
content: String::new(),
|
||||
content: Box::default(),
|
||||
last_modified: file_time_now(),
|
||||
})
|
||||
});
|
||||
@@ -844,7 +862,7 @@ mod tests {
|
||||
let fs = with_files(["c.py"]);
|
||||
|
||||
let error = fs
|
||||
.write_file(SystemPath::new("a/b.py"), "content".to_string())
|
||||
.write_file(SystemPath::new("a/b.py"), "content")
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(error.kind(), ErrorKind::NotFound);
|
||||
@@ -855,7 +873,7 @@ mod tests {
|
||||
let fs = with_files(["a/b.py"]);
|
||||
|
||||
let error = fs
|
||||
.write_file_all(SystemPath::new("a/b.py/c"), "content".to_string())
|
||||
.write_file_all(SystemPath::new("a/b.py/c"), "content")
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(error.kind(), ErrorKind::Other);
|
||||
@@ -878,7 +896,7 @@ mod tests {
|
||||
let fs = MemoryFileSystem::new();
|
||||
let path = SystemPath::new("a.py");
|
||||
|
||||
fs.write_file_all(path, "Test content".to_string())?;
|
||||
fs.write_file_all(path, "Test content")?;
|
||||
|
||||
assert_eq!(fs.read_to_string(path)?, "Test content");
|
||||
|
||||
@@ -915,9 +933,7 @@ mod tests {
|
||||
|
||||
fs.create_directory_all("a")?;
|
||||
|
||||
let error = fs
|
||||
.write_file(SystemPath::new("a"), "content".to_string())
|
||||
.unwrap_err();
|
||||
let error = fs.write_file(SystemPath::new("a"), "content").unwrap_err();
|
||||
|
||||
assert_eq!(error.kind(), ErrorKind::Other);
|
||||
|
||||
|
||||
@@ -361,13 +361,17 @@ impl WritableSystem for OsSystem {
        std::fs::File::create_new(path).map(drop)
    }

-   fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
+   fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
        std::fs::write(path.as_std_path(), content)
    }

    fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
        std::fs::create_dir_all(path.as_std_path())
    }

    fn dyn_clone(&self) -> Box<dyn WritableSystem> {
        Box::new(self.clone())
    }
}

impl Default for OsSystem {
@@ -205,13 +205,17 @@ impl WritableSystem for TestSystem {
|
||||
self.system().create_new_file(path)
|
||||
}
|
||||
|
||||
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
|
||||
self.system().write_file(path, content)
|
||||
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
|
||||
self.system().write_file_bytes(path, content)
|
||||
}
|
||||
|
||||
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
|
||||
self.system().create_directory_all(path)
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension trait for databases that use a [`WritableSystem`].
|
||||
@@ -283,7 +287,11 @@ pub trait DbWithTestSystem: Db + Sized {
|
||||
///
|
||||
/// ## Panics
|
||||
/// If the db isn't using the [`InMemorySystem`].
|
||||
fn write_virtual_file(&mut self, path: impl AsRef<SystemVirtualPath>, content: impl ToString) {
|
||||
fn write_virtual_file(
|
||||
&mut self,
|
||||
path: impl AsRef<SystemVirtualPath>,
|
||||
content: impl AsRef<[u8]>,
|
||||
) {
|
||||
let path = path.as_ref();
|
||||
self.test_system()
|
||||
.memory_file_system()
|
||||
@@ -322,23 +330,23 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub struct InMemorySystem {
|
||||
user_config_directory: Mutex<Option<SystemPathBuf>>,
|
||||
user_config_directory: Arc<Mutex<Option<SystemPathBuf>>>,
|
||||
memory_fs: MemoryFileSystem,
|
||||
}
|
||||
|
||||
impl InMemorySystem {
|
||||
pub fn new(cwd: SystemPathBuf) -> Self {
|
||||
Self {
|
||||
user_config_directory: Mutex::new(None),
|
||||
user_config_directory: Mutex::new(None).into(),
|
||||
memory_fs: MemoryFileSystem::with_current_directory(cwd),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_memory_fs(memory_fs: MemoryFileSystem) -> Self {
|
||||
Self {
|
||||
user_config_directory: Mutex::new(None),
|
||||
user_config_directory: Mutex::new(None).into(),
|
||||
memory_fs,
|
||||
}
|
||||
}
|
||||
@@ -440,10 +448,7 @@ impl System for InMemorySystem {
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn System> {
|
||||
Box::new(Self {
|
||||
user_config_directory: Mutex::new(self.user_config_directory.lock().unwrap().clone()),
|
||||
memory_fs: self.memory_fs.clone(),
|
||||
})
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -452,11 +457,15 @@ impl WritableSystem for InMemorySystem {
|
||||
self.memory_fs.create_new_file(path)
|
||||
}
|
||||
|
||||
fn write_file(&self, path: &SystemPath, content: &str) -> Result<()> {
|
||||
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> Result<()> {
|
||||
self.memory_fs.write_file(path, content)
|
||||
}
|
||||
|
||||
fn create_directory_all(&self, path: &SystemPath) -> Result<()> {
|
||||
self.memory_fs.create_directory_all(path)
|
||||
}
|
||||
|
||||
fn dyn_clone(&self) -> Box<dyn WritableSystem> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
Changed: crates/ty/docs/cli.md (generated, 3 lines)
@@ -37,7 +37,8 @@ ty check [OPTIONS] [PATH]...

<h3 class="cli-reference">Options</h3>

-<dl class="cli-reference"><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
+<dl class="cli-reference"><dt id="ty-check--add-ignore"><a href="#ty-check--add-ignore"><code>--add-ignore</code></a></dt><dd><p>Adds <code>ty: ignore</code> comments to suppress all rule diagnostics</p>
+</dd><dt id="ty-check--color"><a href="#ty-check--color"><code>--color</code></a> <i>when</i></dt><dd><p>Control when colored output is used</p>
<p>Possible values:</p>
<ul>
<li><code>auto</code>: Display colors if the output goes to an interactive terminal</li>
@@ -54,6 +54,10 @@ pub(crate) struct CheckCommand {
    )]
    pub paths: Vec<SystemPathBuf>,

    /// Adds `ty: ignore` comments to suppress all rule diagnostics.
    #[arg(long)]
    pub(crate) add_ignore: bool,

    /// Run the command within the given project directory.
    ///
    /// All `pyproject.toml` files will be discovered by walking up the directory tree from the given project directory,
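For reference, the new option is a plain boolean switch; a minimal, hypothetical clap sketch equivalent to the field above (clap 4 derive API assumed, standalone rather than ty's real `CheckCommand`):

```rust
use clap::Parser;

/// Minimal stand-in for ty's `CheckCommand` showing only the new flag.
#[derive(Parser, Debug)]
struct Check {
    /// Adds `ty: ignore` comments to suppress all rule diagnostics.
    #[arg(long)]
    add_ignore: bool,
}

fn main() {
    // `--add-ignore` flips the flag; omitting it leaves it `false`.
    let args = Check::parse_from(["ty-check", "--add-ignore"]);
    assert!(args.add_ignore);
}
```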
@@ -4,25 +4,17 @@ mod printer;
|
||||
mod python_version;
|
||||
mod version;
|
||||
|
||||
pub use args::Cli;
|
||||
use ty_project::metadata::settings::TerminalSettings;
|
||||
use ty_static::EnvVars;
|
||||
|
||||
use std::fmt::Write;
|
||||
use std::process::{ExitCode, Termination};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::args::{CheckCommand, Command, TerminalColor};
|
||||
use crate::logging::{VerbosityLevel, setup_tracing};
|
||||
use crate::printer::Printer;
|
||||
use anyhow::{Context, anyhow};
|
||||
use clap::{CommandFactory, Parser};
|
||||
use colored::Colorize;
|
||||
use crossbeam::channel as crossbeam_channel;
|
||||
use rayon::ThreadPoolBuilder;
|
||||
use ruff_db::cancellation::{CancellationToken, CancellationTokenSource};
|
||||
use ruff_db::cancellation::{Canceled, CancellationToken, CancellationTokenSource};
|
||||
use ruff_db::diagnostic::{
|
||||
Diagnostic, DiagnosticId, DisplayDiagnosticConfig, DisplayDiagnostics, Severity,
|
||||
};
|
||||
@@ -31,10 +23,17 @@ use ruff_db::max_parallelism;
|
||||
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
|
||||
use salsa::Database;
|
||||
use ty_project::metadata::options::ProjectOptionsOverrides;
|
||||
use ty_project::metadata::settings::TerminalSettings;
|
||||
use ty_project::watch::ProjectWatcher;
|
||||
use ty_project::{CollectReporter, Db, watch};
|
||||
use ty_project::{CollectReporter, Db, suppress_all_diagnostics, watch};
|
||||
use ty_project::{ProjectDatabase, ProjectMetadata};
|
||||
use ty_server::run_server;
|
||||
use ty_static::EnvVars;
|
||||
|
||||
use crate::args::{CheckCommand, Command, TerminalColor};
|
||||
use crate::logging::{VerbosityLevel, setup_tracing};
|
||||
use crate::printer::Printer;
|
||||
pub use args::Cli;
|
||||
|
||||
pub fn run() -> anyhow::Result<ExitStatus> {
|
||||
setup_rayon();
|
||||
@@ -112,6 +111,12 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
.map(|path| SystemPath::absolute(path, &cwd))
|
||||
.collect();
|
||||
|
||||
let mode = if args.add_ignore {
|
||||
MainLoopMode::AddIgnore
|
||||
} else {
|
||||
MainLoopMode::Check
|
||||
};
|
||||
|
||||
let system = OsSystem::new(&cwd);
|
||||
let watch = args.watch;
|
||||
let exit_zero = args.exit_zero;
|
||||
@@ -144,7 +149,7 @@ fn run_check(args: CheckCommand) -> anyhow::Result<ExitStatus> {
|
||||
}
|
||||
|
||||
let (main_loop, main_loop_cancellation_token) =
|
||||
MainLoop::new(project_options_overrides, printer);
|
||||
MainLoop::new(mode, project_options_overrides, printer);
|
||||
|
||||
// Listen to Ctrl+C and abort the watch mode.
|
||||
let main_loop_cancellation_token = Mutex::new(Some(main_loop_cancellation_token));
|
||||
@@ -215,6 +220,8 @@ impl Termination for ExitStatus {
|
||||
}
|
||||
|
||||
struct MainLoop {
|
||||
mode: MainLoopMode,
|
||||
|
||||
/// Sender that can be used to send messages to the main loop.
|
||||
sender: crossbeam_channel::Sender<MainLoopMessage>,
|
||||
|
||||
@@ -237,6 +244,7 @@ struct MainLoop {
|
||||
|
||||
impl MainLoop {
|
||||
fn new(
|
||||
mode: MainLoopMode,
|
||||
project_options_overrides: ProjectOptionsOverrides,
|
||||
printer: Printer,
|
||||
) -> (Self, MainLoopCancellationToken) {
|
||||
@@ -247,6 +255,7 @@ impl MainLoop {
|
||||
|
||||
(
|
||||
Self {
|
||||
mode,
|
||||
sender: sender.clone(),
|
||||
receiver,
|
||||
watcher: None,
|
||||
@@ -325,80 +334,78 @@ impl MainLoop {
|
||||
result,
|
||||
revision: check_revision,
|
||||
} => {
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
let display_config = DisplayDiagnosticConfig::default()
|
||||
.format(terminal_settings.output_format.into())
|
||||
.color(colored::control::SHOULD_COLORIZE.should_colorize())
|
||||
.with_cancellation_token(Some(self.cancellation_token.clone()))
|
||||
.show_fix_diff(true);
|
||||
|
||||
if check_revision == revision {
|
||||
if db.project().files(db).is_empty() {
|
||||
tracing::warn!("No python files found under the given path(s)");
|
||||
}
|
||||
|
||||
// TODO: We should have an official flag to silence workspace diagnostics.
|
||||
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
let is_human_readable = terminal_settings.output_format.is_human_readable();
|
||||
|
||||
if result.is_empty() {
|
||||
if is_human_readable {
|
||||
writeln!(
|
||||
self.printer.stream_for_success_summary(),
|
||||
"{}",
|
||||
"All checks passed!".green().bold()
|
||||
)?;
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
} else {
|
||||
let diagnostics_count = result.len();
|
||||
|
||||
let mut stdout = self.printer.stream_for_details().lock();
|
||||
let exit_status =
|
||||
exit_status_from_diagnostics(&result, terminal_settings);
|
||||
|
||||
// Only render diagnostics if they're going to be displayed, since doing
|
||||
// so is expensive.
|
||||
if stdout.is_enabled() {
|
||||
write!(
|
||||
stdout,
|
||||
"{}",
|
||||
DisplayDiagnostics::new(db, &display_config, &result)
|
||||
)?;
|
||||
}
|
||||
|
||||
if !self.cancellation_token.is_cancelled() {
|
||||
if is_human_readable {
|
||||
writeln!(
|
||||
self.printer.stream_for_failure_summary(),
|
||||
"Found {} diagnostic{}",
|
||||
diagnostics_count,
|
||||
if diagnostics_count > 1 { "s" } else { "" }
|
||||
)?;
|
||||
}
|
||||
|
||||
if exit_status.is_internal_error() {
|
||||
tracing::warn!(
|
||||
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if self.watcher.is_none() {
|
||||
return Ok(exit_status);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if check_revision != revision {
|
||||
tracing::debug!(
|
||||
"Discarding check result for outdated revision: current: {revision}, result revision: {check_revision}"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
if db.project().files(db).is_empty() {
|
||||
tracing::warn!("No python files found under the given path(s)");
|
||||
}
|
||||
|
||||
let result = match self.mode {
|
||||
MainLoopMode::Check => {
|
||||
// TODO: We should have an official flag to silence workspace diagnostics.
|
||||
if std::env::var("TY_MEMORY_REPORT").as_deref() == Ok("mypy_primer") {
|
||||
return Ok(ExitStatus::Success);
|
||||
}
|
||||
|
||||
self.write_diagnostics(db, &result)?;
|
||||
|
||||
if self.cancellation_token.is_cancelled() {
|
||||
Err(Canceled)
|
||||
} else {
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
MainLoopMode::AddIgnore => {
|
||||
if let Ok(result) =
|
||||
suppress_all_diagnostics(db, result, &self.cancellation_token)
|
||||
{
|
||||
self.write_diagnostics(db, &result.diagnostics)?;
|
||||
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
let is_human_readable =
|
||||
terminal_settings.output_format.is_human_readable();
|
||||
|
||||
if is_human_readable {
|
||||
writeln!(
|
||||
self.printer.stream_for_failure_summary(),
|
||||
"Added {} ignore comment{}",
|
||||
result.count,
|
||||
if result.count > 1 { "s" } else { "" }
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(result.diagnostics)
|
||||
} else {
|
||||
Err(Canceled)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let exit_status = match result.as_deref() {
|
||||
Ok([]) => ExitStatus::Success,
|
||||
Ok(diagnostics) => {
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
exit_status_from_diagnostics(diagnostics, terminal_settings)
|
||||
}
|
||||
Err(Canceled) => ExitStatus::Success,
|
||||
};
|
||||
|
||||
if exit_status.is_internal_error() {
|
||||
tracing::warn!(
|
||||
"A fatal error occurred while checking some files. Not all project files were analyzed. See the diagnostics list above for details."
|
||||
);
|
||||
}
|
||||
|
||||
if self.watcher.is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
return Ok(exit_status);
|
||||
}
|
||||
|
||||
MainLoopMessage::ApplyChanges(changes) => {
|
||||
@@ -425,6 +432,65 @@ impl MainLoop {
|
||||
|
||||
Ok(ExitStatus::Success)
|
||||
}
|
||||
|
||||
fn write_diagnostics(
|
||||
&self,
|
||||
db: &ProjectDatabase,
|
||||
diagnostics: &[Diagnostic],
|
||||
) -> anyhow::Result<()> {
|
||||
let terminal_settings = db.project().settings(db).terminal();
|
||||
let is_human_readable = terminal_settings.output_format.is_human_readable();
|
||||
|
||||
match diagnostics {
|
||||
[] => {
|
||||
if is_human_readable {
|
||||
writeln!(
|
||||
self.printer.stream_for_success_summary(),
|
||||
"{}",
|
||||
"All checks passed!".green().bold()
|
||||
)?;
|
||||
}
|
||||
}
|
||||
diagnostics => {
|
||||
let diagnostics_count = diagnostics.len();
|
||||
|
||||
let mut stdout = self.printer.stream_for_details().lock();
|
||||
|
||||
// Only render diagnostics if they're going to be displayed, since doing
|
||||
// so is expensive.
|
||||
if stdout.is_enabled() {
|
||||
let display_config = DisplayDiagnosticConfig::default()
|
||||
.format(terminal_settings.output_format.into())
|
||||
.color(colored::control::SHOULD_COLORIZE.should_colorize())
|
||||
.with_cancellation_token(Some(self.cancellation_token.clone()))
|
||||
.show_fix_diff(true);
|
||||
|
||||
write!(
|
||||
stdout,
|
||||
"{}",
|
||||
DisplayDiagnostics::new(db, &display_config, diagnostics)
|
||||
)?;
|
||||
}
|
||||
|
||||
if !self.cancellation_token.is_cancelled() && is_human_readable {
|
||||
writeln!(
|
||||
self.printer.stream_for_failure_summary(),
|
||||
"Found {} diagnostic{}",
|
||||
diagnostics_count,
|
||||
if diagnostics_count > 1 { "s" } else { "" }
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum MainLoopMode {
|
||||
Check,
|
||||
AddIgnore,
|
||||
}
|
||||
|
||||
fn exit_status_from_diagnostics(
|
||||
|
||||
New file: crates/ty/tests/cli/fixes.rs (114 lines)
@@ -0,0 +1,114 @@
|
||||
use insta_cmd::assert_cmd_snapshot;
|
||||
|
||||
use crate::CliTest;
|
||||
|
||||
#[test]
|
||||
fn add_ignore() -> anyhow::Result<()> {
|
||||
let case = CliTest::with_file(
|
||||
"different_violations.py",
|
||||
r#"
|
||||
import sys
|
||||
|
||||
x = 1 + a
|
||||
|
||||
if sys.does_not_exist:
|
||||
...
|
||||
|
||||
def test(a, b): ...
|
||||
|
||||
test(x = 10, b = 12)
|
||||
"#,
|
||||
)?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--add-ignore"), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
Added 4 ignore comments
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
// There should be no diagnostics when running ty again
|
||||
assert_cmd_snapshot!(case.command(), @r"
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
All checks passed!
|
||||
|
||||
----- stderr -----
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_ignore_unfixable() -> anyhow::Result<()> {
|
||||
let case = CliTest::with_files([
|
||||
("has_syntax_error.py", r"print(x # [unresolved-reference]"),
|
||||
(
|
||||
"different_violations.py",
|
||||
r#"
|
||||
import sys
|
||||
|
||||
x = 1 + a
|
||||
|
||||
reveal_type(x)
|
||||
|
||||
if sys.does_not_exist:
|
||||
...
|
||||
"#,
|
||||
),
|
||||
(
|
||||
"repeated_violations.py",
|
||||
r#"
|
||||
x = (
|
||||
1 +
|
||||
a * b
|
||||
)
|
||||
|
||||
y = y # ty: ignore[unresolved-reference]
|
||||
"#,
|
||||
),
|
||||
])?;
|
||||
|
||||
assert_cmd_snapshot!(case.command().arg("--add-ignore").env("RUST_BACKTRACE", "1"), @r"
|
||||
success: false
|
||||
exit_code: 1
|
||||
----- stdout -----
|
||||
info[revealed-type]: Revealed type
|
||||
--> different_violations.py:6:13
|
||||
|
|
||||
4 | x = 1 + a # ty:ignore[unresolved-reference]
|
||||
5 |
|
||||
6 | reveal_type(x) # ty:ignore[undefined-reveal]
|
||||
| ^ `Unknown`
|
||||
7 |
|
||||
8 | if sys.does_not_exist: # ty:ignore[unresolved-attribute]
|
||||
|
|
||||
|
||||
error[unresolved-reference]: Name `x` used when not defined
|
||||
--> has_syntax_error.py:1:7
|
||||
|
|
||||
1 | print(x # [unresolved-reference]
|
||||
| ^
|
||||
|
|
||||
info: rule `unresolved-reference` is enabled by default
|
||||
|
||||
error[invalid-syntax]: unexpected EOF while parsing
|
||||
--> has_syntax_error.py:1:34
|
||||
|
|
||||
1 | print(x # [unresolved-reference]
|
||||
| ^
|
||||
|
|
||||
|
||||
Found 3 diagnostics
|
||||
Added 5 ignore comments
|
||||
|
||||
----- stderr -----
|
||||
WARN Skipping file `<temp_dir>/has_syntax_error.py` with syntax errors
|
||||
");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -2,6 +2,7 @@ mod analysis_options;
mod config_option;
mod exit_code;
mod file_selection;
mod fixes;
mod python_environment;
mod rule_selection;
@@ -5,8 +5,8 @@ use ruff_diagnostics::Edit;
|
||||
use ruff_python_ast::find_node::covering_node;
|
||||
use ruff_text_size::TextRange;
|
||||
use ty_project::Db;
|
||||
use ty_python_semantic::create_suppression_fix;
|
||||
use ty_python_semantic::lint::LintId;
|
||||
use ty_python_semantic::suppress_single;
|
||||
use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
|
||||
|
||||
/// A `QuickFix` Code Action
|
||||
@@ -42,7 +42,7 @@ pub fn code_actions(
|
||||
// Suggest just suppressing the lint (always a valid option, but never ideal)
|
||||
actions.push(QuickFix {
|
||||
title: format!("Ignore '{}' for this line", lint_id.name()),
|
||||
edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
|
||||
edits: suppress_single(db, file, lint_id, diagnostic_range).into_edits(),
|
||||
preferred: false,
|
||||
});
|
||||
|
||||
@@ -437,6 +437,38 @@ mod tests {
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_ignore_line_continuation_empty_lines() {
|
||||
let test = CodeActionTest::with_source(
|
||||
r#"b = bbbbb \
|
||||
[ ccc # test
|
||||
|
||||
+ <START>ddd<END> \
|
||||
|
||||
] # test
|
||||
"#,
|
||||
);
|
||||
|
||||
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
|
||||
info[code-action]: Ignore 'unresolved-reference' for this line
|
||||
--> main.py:4:11
|
||||
|
|
||||
2 | [ ccc # test
|
||||
3 |
|
||||
4 | + ddd \
|
||||
| ^^^
|
||||
5 |
|
||||
6 | ] # test
|
||||
|
|
||||
2 | [ ccc # test
|
||||
3 |
|
||||
4 | + ddd \
|
||||
-
|
||||
5 + # ty:ignore[unresolved-reference]
|
||||
6 | ] # test
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn undefined_reveal_type() {
|
||||
let test = CodeActionTest::with_source(
|
||||
|
||||
@@ -14,6 +14,7 @@ license.workspace = true
[dependencies]
ruff_cache = { workspace = true }
ruff_db = { workspace = true, features = ["cache", "serde"] }
+ruff_diagnostics = { workspace = true }
ruff_macros = { workspace = true }
ruff_memory_usage = { workspace = true }
ruff_options_metadata = { workspace = true }

@@ -30,7 +31,7 @@ anyhow = { workspace = true }
camino = { workspace = true }
colored = { workspace = true }
crossbeam = { workspace = true }
-get-size2 = { workspace = true }
+get-size2 = { workspace = true, features = ["ordermap"] }
globset = { workspace = true }
notify = { workspace = true }
ordermap = { workspace = true, features = ["serde"] }

@@ -48,8 +49,10 @@ toml = { workspace = true }
tracing = { workspace = true }

[dev-dependencies]
-insta = { workspace = true, features = ["redactions", "ron"] }
ruff_db = { workspace = true, features = ["testing"] }
+ruff_python_trivia = { workspace = true }
+
+insta = { workspace = true, features = ["redactions", "ron"] }

[features]
default = ["zstd"]
New file: crates/ty_project/src/fixes.rs (794 lines)
@@ -0,0 +1,794 @@
|
||||
use ruff_db::cancellation::{Canceled, CancellationToken};
|
||||
use ruff_db::diagnostic::{DisplayDiagnosticConfig, DisplayDiagnostics};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::source::SourceText;
|
||||
use ruff_db::system::{SystemPath, WritableSystem};
|
||||
use ruff_db::{
|
||||
diagnostic::{Annotation, Diagnostic, DiagnosticId, Severity, Span},
|
||||
files::File,
|
||||
source::source_text,
|
||||
};
|
||||
use ruff_diagnostics::{Fix, IsolationLevel, SourceMap};
|
||||
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
|
||||
use rustc_hash::FxHashSet;
|
||||
use salsa::Setter as _;
|
||||
use std::collections::BTreeMap;
|
||||
use thiserror::Error;
|
||||
use ty_python_semantic::{UNUSED_IGNORE_COMMENT, suppress_all};
|
||||
|
||||
use crate::Db;
|
||||
|
||||
pub struct SuppressAllResult {
|
||||
/// The non-lint diagnostics that can't be suppressed or the diagnostics of files
|
||||
/// that couldn't be suppressed (because ty failed to write the result back to disk,
|
||||
/// or the file contains syntax errors).
|
||||
pub diagnostics: Vec<Diagnostic>,
|
||||
|
||||
/// The number of diagnostics that were suppressed.
|
||||
pub count: usize,
|
||||
}
|
||||
|
||||
/// Adds suppressions to all lint diagnostics and writes the changed files back to disk.
|
||||
///
|
||||
/// Returns how many diagnostics were suppressed along the remaining, non-suppressed diagnostics.
|
||||
///
|
||||
/// ## Panics
|
||||
/// If the `db`'s system isn't [writable](WritableSystem).
|
||||
pub fn suppress_all_diagnostics(
|
||||
db: &mut dyn Db,
|
||||
mut diagnostics: Vec<Diagnostic>,
|
||||
cancellation_token: &CancellationToken,
|
||||
) -> Result<SuppressAllResult, Canceled> {
|
||||
let system = WritableSystem::dyn_clone(
|
||||
db.system()
|
||||
.as_writable()
|
||||
.expect("System should be writable"),
|
||||
);
|
||||
|
||||
let has_fixable = diagnostics.iter().any(|diagnostic| {
|
||||
diagnostic
|
||||
.primary_span()
|
||||
.and_then(|span| span.range())
|
||||
.is_some()
|
||||
&& diagnostic.id().is_lint()
|
||||
&& diagnostic.id() != DiagnosticId::Lint(UNUSED_IGNORE_COMMENT.name())
|
||||
});
|
||||
|
||||
// Early return if there are no diagnostics that can be suppressed to avoid all the heavy work below.
|
||||
if !has_fixable {
|
||||
return Ok(SuppressAllResult {
|
||||
diagnostics,
|
||||
count: 0,
|
||||
});
|
||||
}
|
||||
|
||||
let mut by_file: BTreeMap<File, Vec<_>> = BTreeMap::new();
|
||||
|
||||
// Group the diagnostics by file, leave the file-agnostic diagnostics in `diagnostics`.
|
||||
for diagnostic in diagnostics.extract_if(.., |diagnostic| diagnostic.primary_span().is_some()) {
|
||||
let span = diagnostic
|
||||
.primary_span()
|
||||
.expect("should be set because `extract_if` only yields elements with a primary_span");
|
||||
|
||||
by_file
|
||||
.entry(span.expect_ty_file())
|
||||
.or_default()
|
||||
.push(diagnostic);
|
||||
}
|
||||
|
||||
let mut fixed_count = 0usize;
|
||||
let project = db.project();
|
||||
|
||||
// Try to suppress all lint-diagnostics in the given file.
|
||||
for (&file, file_diagnostics) in &mut by_file {
|
||||
if cancellation_token.is_cancelled() {
|
||||
return Err(Canceled);
|
||||
}
|
||||
|
||||
let Some(path) = file.path(db).as_system_path() else {
|
||||
tracing::debug!(
|
||||
"Skipping file `{}` with non-system path because vendored and system virtual file paths are read-only",
|
||||
file.path(db)
|
||||
);
|
||||
|
||||
continue;
|
||||
};
|
||||
|
||||
let parsed = parsed_module(db, file);
|
||||
if parsed.load(db).has_syntax_errors() {
|
||||
tracing::warn!("Skipping file `{path}` with syntax errors",);
|
||||
continue;
|
||||
}
|
||||
|
||||
let fixable_diagnostics: Vec<_> = file_diagnostics
|
||||
.iter()
|
||||
.filter_map(|diagnostic| {
|
||||
let lint_id = diagnostic.id().as_lint()?;
|
||||
|
||||
// Don't suppress unused ignore comments.
|
||||
if lint_id == UNUSED_IGNORE_COMMENT.name() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// We can't suppress diagnostics without a corresponding file or range.
|
||||
let span = diagnostic.primary_span()?;
|
||||
let range = span.range()?;
|
||||
|
||||
Some((lint_id, range))
|
||||
})
|
||||
.collect();
|
||||
|
||||
if fixable_diagnostics.is_empty() {
|
||||
tracing::debug!(
|
||||
"Skipping file `{path}` because it contains no suppressable diagnostics"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
tracing::debug!(
|
||||
"Suppressing {} diagnostics in `{path}`.",
|
||||
fixable_diagnostics.len()
|
||||
);
|
||||
|
||||
// Required to work around borrow checker issues.
|
||||
let path = path.to_path_buf();
|
||||
let fixes = suppress_all(db, file, &fixable_diagnostics);
|
||||
let source = source_text(db, file);
|
||||
|
||||
// TODO: Handle overlapping fixes when adding support for `--fix` by iterating until all fixes
|
||||
// were successfully applied. We don't need to do that for suppressions because suppression fixes
|
||||
// should never overlap (and, if they were, the worst outcome is that some suppressions are missing).
|
||||
let FixedCode {
|
||||
source: new_source,
|
||||
source_map,
|
||||
} = apply_fixes(&source, fixes).unwrap_or_else(|fixed| fixed);
|
||||
|
||||
let new_source = source.with_text(new_source, &source_map);
|
||||
|
||||
// Verify that the fix didn't introduce any syntax errors by overriding
|
||||
// the source text for `file`.
|
||||
let mut source_guard = WithUpdatedSourceGuard::new(db, file, &source, new_source.clone());
|
||||
let db = source_guard.db();
|
||||
let new_parsed = parsed_module(db, file);
|
||||
let new_parsed = new_parsed.load(db);
|
||||
|
||||
if new_parsed.has_syntax_errors() {
|
||||
let mut diag = Diagnostic::new(
|
||||
DiagnosticId::InternalError,
|
||||
Severity::Fatal,
|
||||
format_args!(
|
||||
"Adding suppressions introduced a syntax error. Reverting all changes."
|
||||
),
|
||||
);
|
||||
|
||||
let mut file_annotation = Annotation::primary(Span::from(file));
|
||||
file_annotation.hide_snippet(true);
|
||||
diag.annotate(file_annotation);
|
||||
|
||||
let parse_diagnostics: Vec<_> = new_parsed
|
||||
.errors()
|
||||
.iter()
|
||||
.map(|error| {
|
||||
Diagnostic::invalid_syntax(Span::from(file), &error.error, error.location)
|
||||
})
|
||||
.collect();
|
||||
|
||||
diag.add_bug_sub_diagnostics("%5BFix%20error%5D");
|
||||
|
||||
let file_db: &dyn ruff_db::Db = db;
|
||||
|
||||
diag.info(format_args!(
|
||||
"Introduced syntax errors:\n\n{}",
|
||||
DisplayDiagnostics::new(
|
||||
&file_db,
|
||||
&DisplayDiagnosticConfig::default(),
|
||||
&parse_diagnostics
|
||||
)
|
||||
));
|
||||
|
||||
file_diagnostics.push(diag);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Write the changes back to disk.
|
||||
if let Err(err) = write_changes(db, &*system, file, &path, &new_source) {
|
||||
let mut diag = Diagnostic::new(
|
||||
DiagnosticId::Io,
|
||||
Severity::Error,
|
||||
format_args!("Failed to write fixes to file: {err}"),
|
||||
);
|
||||
|
||||
diag.annotate(Annotation::primary(Span::from(file)));
|
||||
diagnostics.push(diag);
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we got here then we've been successful. Re-check to get the diagnostics with the
|
||||
// update source, update the fix count.
|
||||
|
||||
if fixable_diagnostics.len() == file_diagnostics.len() {
|
||||
file_diagnostics.clear();
|
||||
} else {
|
||||
// If there are any other file level diagnostics, call `check_file` to re-compute them
|
||||
// with updated ranges.
|
||||
let diagnostics = project.check_file(db, file);
|
||||
*file_diagnostics = diagnostics;
|
||||
}
|
||||
|
||||
fixed_count += fixable_diagnostics.len();
|
||||
// Don't restore the source text or we risk a panic when rendering the diagnostics
|
||||
// if reading any of the fixed files fails (for whatever reason).
|
||||
// The override will get removed on the next `File::sync_path` call.
|
||||
source_guard.defuse();
|
||||
}
|
||||
|
||||
// Stitch the remaining diagnostics back together.
|
||||
diagnostics.extend(by_file.into_values().flatten());
|
||||
diagnostics.sort_by(|left, right| {
|
||||
left.rendering_sort_key(db)
|
||||
.cmp(&right.rendering_sort_key(db))
|
||||
});
|
||||
|
||||
Ok(SuppressAllResult {
|
||||
diagnostics,
|
||||
count: fixed_count,
|
||||
})
|
||||
}
|
||||
|
||||
fn write_changes(
|
||||
db: &dyn Db,
|
||||
system: &dyn WritableSystem,
|
||||
file: File,
|
||||
path: &SystemPath,
|
||||
new_source: &SourceText,
|
||||
) -> Result<(), WriteChangesError> {
|
||||
let metadata = system.path_metadata(path)?;
|
||||
|
||||
if metadata.revision() != file.revision(db) {
|
||||
return Err(WriteChangesError::FileWasModified);
|
||||
}
|
||||
|
||||
system.write_file_bytes(path, &new_source.to_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
enum WriteChangesError {
|
||||
#[error("failed to write changes to disk: {0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
|
||||
#[error("the file has been modified")]
|
||||
FileWasModified,
|
||||
}
|
||||
|
||||
/// Apply a series of fixes to `File` and returns the updated source code along with the source map.
|
||||
///
|
||||
/// Returns an error if not all fixes were applied because some fixes are overlapping.
|
||||
fn apply_fixes(source: &str, mut fixes: Vec<Fix>) -> Result<FixedCode, FixedCode> {
|
||||
let mut output = String::with_capacity(source.len());
|
||||
let mut last_pos: Option<TextSize> = None;
|
||||
let mut has_overlapping_fixes = false;
|
||||
let mut isolated: FxHashSet<u32> = FxHashSet::default();
|
||||
|
||||
let mut source_map = SourceMap::default();
|
||||
|
||||
fixes.sort_unstable_by_key(Fix::min_start);
|
||||
|
||||
for fix in fixes {
|
||||
let mut edits = fix.edits().iter().peekable();
|
||||
|
||||
// If the fix contains at least one new edit, enforce isolation and positional requirements.
|
||||
if let Some(first) = edits.peek() {
|
||||
// If this fix requires isolation, and we've already applied another fix in the
|
||||
// same isolation group, skip it.
|
||||
if let IsolationLevel::Group(id) = fix.isolation() {
|
||||
if !isolated.insert(id) {
|
||||
has_overlapping_fixes = true;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If this fix overlaps with a fix we've already applied, skip it.
|
||||
if last_pos.is_some_and(|last_pos| last_pos >= first.start()) {
|
||||
has_overlapping_fixes = true;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let mut applied_edits = Vec::with_capacity(fix.edits().len());
|
||||
for edit in edits {
|
||||
// Add all contents from `last_pos` to `fix.location`.
|
||||
let slice = &source[TextRange::new(last_pos.unwrap_or_default(), edit.start())];
|
||||
output.push_str(slice);
|
||||
|
||||
// Add the start source marker for the patch.
|
||||
source_map.push_start_marker(edit, output.text_len());
|
||||
|
||||
// Add the patch itself.
|
||||
output.push_str(edit.content().unwrap_or_default());
|
||||
|
||||
// Add the end source marker for the added patch.
|
||||
source_map.push_end_marker(edit, output.text_len());
|
||||
|
||||
// Track that the edit was applied.
|
||||
last_pos = Some(edit.end());
|
||||
applied_edits.push(edit);
|
||||
}
|
||||
}
|
||||
|
||||
// Add the remaining content.
|
||||
let slice = &source[last_pos.unwrap_or_default().to_usize()..];
|
||||
output.push_str(slice);
|
||||
|
||||
let fixed = FixedCode {
|
||||
source: output,
|
||||
source_map,
|
||||
};
|
||||
|
||||
if has_overlapping_fixes {
|
||||
Err(fixed)
|
||||
} else {
|
||||
Ok(fixed)
|
||||
}
|
||||
}
|
||||
|
||||
struct FixedCode {
|
||||
/// Source map that allows mapping positions in the fixed code back to positions in the original
|
||||
/// source code (useful for mapping fixed lines back to their original notebook cells).
|
||||
source_map: SourceMap,
|
||||
|
||||
/// The fixed source code
|
||||
source: String,
|
||||
}
|
||||
|
||||
/// Guard that sets [`File::set_source_text_override`] and guarantees to restore the original source
|
||||
/// text unless the guard is explicitly defused.
|
||||
struct WithUpdatedSourceGuard<'db> {
|
||||
db: &'db mut dyn Db,
|
||||
file: File,
|
||||
old_source: Option<SourceText>,
|
||||
}
|
||||
|
||||
impl<'db> WithUpdatedSourceGuard<'db> {
|
||||
fn new(
|
||||
db: &'db mut dyn Db,
|
||||
file: File,
|
||||
old_source: &SourceText,
|
||||
new_source: SourceText,
|
||||
) -> Self {
|
||||
file.set_source_text_override(db).to(Some(new_source));
|
||||
Self {
|
||||
db,
|
||||
file,
|
||||
old_source: Some(old_source.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
fn defuse(&mut self) {
|
||||
self.old_source = None;
|
||||
}
|
||||
|
||||
fn db(&mut self) -> &mut dyn Db {
|
||||
self.db
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for WithUpdatedSourceGuard<'_> {
|
||||
fn drop(&mut self) {
|
||||
if let Some(old_source) = self.old_source.take() {
|
||||
// We don't set `source_text_override` to `None` here because setting the value
|
||||
// invalidates the `source_text` query and there's the chance that reading the file's content
|
||||
// will fail this time (e.g. because the file was deleted), resulting in ty panicking
|
||||
// when trying to render any diagnostic for that file (because all offsets now point nowhere).
|
||||
// The override will be cleared by `File::sync_path`, the next time the revision changes.
|
||||
self.file
|
||||
.set_source_text_override(self.db)
|
||||
.to(Some(old_source));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::hash::{DefaultHasher, Hash, Hasher};
|
||||
|
||||
use insta::assert_snapshot;
|
||||
use ruff_db::cancellation::CancellationTokenSource;
|
||||
use ruff_db::diagnostic::{Diagnostic, DisplayDiagnosticConfig, DisplayDiagnostics};
|
||||
use ruff_db::files::{File, system_path_to_file};
|
||||
use ruff_db::parsed::parsed_module;
|
||||
use ruff_db::source::source_text;
|
||||
use ruff_db::system::{DbWithWritableSystem, SystemPath, SystemPathBuf};
|
||||
use ruff_python_ast::name::Name;
|
||||
use rustc_hash::FxHashMap;
|
||||
use ty_python_semantic::UNUSED_IGNORE_COMMENT;
|
||||
use ty_python_semantic::lint::Level;
|
||||
|
||||
use crate::db::tests::TestDb;
|
||||
use crate::metadata::options::Rules;
|
||||
use crate::metadata::value::RangedValue;
|
||||
use crate::{Db, ProjectMetadata, suppress_all_diagnostics};
|
||||
|
||||
#[test]
|
||||
fn simple_suppression() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
a = b + 10"#
|
||||
),
|
||||
@r"
|
||||
Added 1 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
a = b + 10 # ty:ignore[unresolved-reference]
|
||||
```
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_suppressions_same_code() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
a = b + 10 + c"#
|
||||
),
|
||||
@r"
|
||||
Added 2 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
a = b + 10 + c # ty:ignore[unresolved-reference]
|
||||
```
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn multiple_suppressions_different_codes() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
import sys
|
||||
a = b + 10 + sys.veeersion"#
|
||||
),
|
||||
@r"
|
||||
Added 2 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
import sys
|
||||
a = b + 10 + sys.veeersion # ty:ignore[unresolved-attribute, unresolved-reference]
|
||||
```
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_fix_unused_ignore() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
import sys
|
||||
a = 5 + 10 # ty: ignore[unresolved-reference]"#
|
||||
),
|
||||
@r"
|
||||
Added 0 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
import sys
|
||||
a = 5 + 10 # ty: ignore[unresolved-reference]
|
||||
```
|
||||
|
||||
## Diagnostics after applying fixes
|
||||
|
||||
warning[unused-ignore-comment]: Unused `ty: ignore` directive
|
||||
--> test.py:2:13
|
||||
|
|
||||
1 | import sys
|
||||
2 | a = 5 + 10 # ty: ignore[unresolved-reference]
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
help: Remove the unused suppression comment
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dont_fix_files_containing_syntax_errors() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
import sys
|
||||
a = x +
|
||||
"#
|
||||
),
|
||||
@r"
|
||||
Added 0 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
import sys
|
||||
a = x +
|
||||
```
|
||||
|
||||
## Diagnostics after applying fixes
|
||||
|
||||
error[unresolved-reference]: Name `x` used when not defined
|
||||
--> test.py:2:5
|
||||
|
|
||||
1 | import sys
|
||||
2 | a = x +
|
||||
| ^
|
||||
|
|
||||
info: rule `unresolved-reference` is enabled by default
|
||||
|
||||
error[invalid-syntax]: Expected an expression
|
||||
--> test.py:2:8
|
||||
|
|
||||
1 | import sys
|
||||
2 | a = x +
|
||||
| ^
|
||||
|
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn arguments() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"
|
||||
def test(a, b):
|
||||
pass
|
||||
|
||||
|
||||
test(
|
||||
a = 10,
|
||||
c = "unknown"
|
||||
)
|
||||
"#
|
||||
),
|
||||
@r#"
|
||||
Added 2 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
def test(a, b):
|
||||
pass
|
||||
|
||||
|
||||
test(
|
||||
a = 10,
|
||||
c = "unknown" # ty:ignore[unknown-argument]
|
||||
) # ty:ignore[missing-argument]
|
||||
```
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_type() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"class A:
|
||||
def test(self, b: int) -> str:
|
||||
return "test"
|
||||
|
||||
|
||||
class B(A):
|
||||
def test(
|
||||
self,
|
||||
b: str
|
||||
) -> A.b:
|
||||
pass"#
|
||||
),
|
||||
@r#"
|
||||
Added 2 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
class A:
|
||||
def test(self, b: int) -> str:
|
||||
return "test"
|
||||
|
||||
|
||||
class B(A):
|
||||
def test(
|
||||
self,
|
||||
b: str
|
||||
) -> A.b: # ty:ignore[invalid-method-override, unresolved-attribute]
|
||||
pass
|
||||
```
|
||||
"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn existing_ty_ignore() {
|
||||
assert_snapshot!(
|
||||
suppress_all_in(r#"class A:
|
||||
def test(self, b: int) -> str:
|
||||
return "test"
|
||||
|
||||
|
||||
class B(A):
|
||||
def test( # ty:ignore[unresolved-reference]
|
||||
self,
|
||||
b: str
|
||||
) -> A.b:
|
||||
pass"#
|
||||
),
|
||||
@r#"
|
||||
Added 2 suppressions
|
||||
|
||||
## Fixed source
|
||||
|
||||
```py
|
||||
class A:
|
||||
def test(self, b: int) -> str:
|
||||
return "test"
|
||||
|
||||
|
||||
class B(A):
|
||||
def test( # ty:ignore[unresolved-reference, invalid-method-override]
|
||||
self,
|
||||
b: str
|
||||
) -> A.b: # ty:ignore[unresolved-attribute]
|
||||
pass
|
||||
```
|
||||
|
||||
## Diagnostics after applying fixes
|
||||
|
||||
warning[unused-ignore-comment]: Unused `ty: ignore` directive: 'unresolved-reference'
|
||||
--> test.py:7:28
|
||||
|
|
||||
6 | class B(A):
|
||||
7 | def test( # ty:ignore[unresolved-reference, invalid-method-override]
|
||||
| ^^^^^^^^^^^^^^^^^^^^
|
||||
8 | self,
|
||||
9 | b: str
|
||||
|
|
||||
help: Remove the unused suppression code
|
||||
"#);
|
||||
}
|

#[track_caller]
fn suppress_all_in(source: &str) -> String {
use std::fmt::Write as _;

let mut metadata = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("."));
metadata.options.rules = Some(Rules::from_iter([(
RangedValue::cli(UNUSED_IGNORE_COMMENT.name.to_string()),
RangedValue::cli(Level::Warn),
)]));

let mut db = TestDb::new(metadata);
db.init_program().unwrap();

db.write_file(
"test.py",
ruff_python_trivia::textwrap::dedent(source).trim(),
)
.unwrap();

let file = system_path_to_file(&db, "test.py").unwrap();

let parsed_before = parsed_module(&db, file);
let had_syntax_errors = parsed_before.load(&db).has_syntax_errors();

let diagnostics = db.project().check_file(&db, file);
let total_diagnostics = diagnostics.len();
let cancellation_token_source = CancellationTokenSource::new();
let fixes =
suppress_all_diagnostics(&mut db, diagnostics, &cancellation_token_source.token())
.expect("operation never gets cancelled");

assert_eq!(fixes.count, total_diagnostics - fixes.diagnostics.len());

File::sync_path(&mut db, SystemPath::new("test.py"));

let fixed = source_text(&db, file);

let parsed = parsed_module(&db, file);
let parsed = parsed.load(&db);

let diagnostics_after_applying_fixes = db.project().check_file(&db, file);

let mut output = String::new();

writeln!(
output,
"Added {} suppressions\n\n## Fixed source\n\n```py\n{}\n```\n",
fixes.count,
fixed.as_str()
)
.unwrap();

if !fixes.diagnostics.is_empty() {
writeln!(
output,
"## Diagnostics after applying fixes\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&fixes.diagnostics
)
)
.unwrap();
}

assert!(
!parsed.has_syntax_errors() || had_syntax_errors,
"Fixed introduced syntax errors\n\n{output}"
);

let new_diagnostics =
diff_diagnostics(&fixes.diagnostics, &diagnostics_after_applying_fixes);

if !new_diagnostics.is_empty() {
writeln!(
&mut output,
"## New diagnostics after re-checking file\n\n{diagnostics}\n",
diagnostics = DisplayDiagnostics::new(
&db,
&DisplayDiagnosticConfig::default(),
&new_diagnostics
)
)
.unwrap();
}

output
}

fn diff_diagnostics<'a>(before: &'a [Diagnostic], after: &'a [Diagnostic]) -> Vec<Diagnostic> {
let before = DiagnosticFingerprint::group_diagnostics(before);
let after = DiagnosticFingerprint::group_diagnostics(after);

after
.into_iter()
.filter(|(key, _)| !before.contains_key(key))
.map(|(_, diagnostic)| diagnostic.clone())
.collect()
}

#[derive(Copy, Clone, Eq, PartialEq, Hash)]
struct DiagnosticFingerprint(u64);

impl DiagnosticFingerprint {
fn group_diagnostics(diagnostics: &[Diagnostic]) -> FxHashMap<Self, &Diagnostic> {
let mut result = FxHashMap::default();

for diagnostic in diagnostics {
Self::from_diagnostic(diagnostic, &mut result);
}

result
}

fn from_diagnostic<'a>(
diagnostic: &'a Diagnostic,
seen: &mut FxHashMap<DiagnosticFingerprint, &'a Diagnostic>,
) -> DiagnosticFingerprint {
let mut disambiguator = 0u64;

loop {
let mut h = DefaultHasher::default();
disambiguator.hash(&mut h);

diagnostic.id().hash(&mut h);

let key = DiagnosticFingerprint(h.finish());
match seen.entry(key) {
Entry::Occupied(_) => {
disambiguator += 1;
}
Entry::Vacant(entry) => {
entry.insert(diagnostic);
return key;
}
}
}
}
}
}
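The `DiagnosticFingerprint` helper above keys each diagnostic by a hash of its ID plus a disambiguator counter, so repeated occurrences of the same lint get distinct keys and `diff_diagnostics` only reports diagnostics whose count grew after applying the fixes. A standalone sketch of that disambiguation idea (plain std types only, not part of this commit):

```rust
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn main() {
    // Two diagnostics with the same lint ID still get two distinct keys.
    let ids = ["unresolved-attribute", "unresolved-attribute"];
    let mut seen: HashMap<u64, &str> = HashMap::new();

    for id in ids {
        let mut disambiguator = 0u64;
        loop {
            let mut hasher = DefaultHasher::default();
            disambiguator.hash(&mut hasher);
            id.hash(&mut hasher);
            let key = hasher.finish();

            if seen.contains_key(&key) {
                // Collision with an identical diagnostic: bump and retry.
                disambiguator += 1;
            } else {
                seen.insert(key, id);
                break;
            }
        }
    }

    assert_eq!(seen.len(), 2);
}
```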
@@ -9,6 +9,7 @@ use crate::walk::{ProjectFilesFilter, ProjectFilesWalker};
pub use db::tests::TestDb;
pub use db::{ChangeResult, CheckMode, Db, ProjectDatabase, SalsaMemoryDump};
use files::{Index, Indexed, IndexedFiles};
pub use fixes::suppress_all_diagnostics;
use metadata::settings::Settings;
pub use metadata::{ProjectMetadata, ProjectMetadataError};
use ruff_db::diagnostic::{
@@ -33,6 +34,7 @@ use ty_python_semantic::types::check_types;

mod db;
mod files;
mod fixes;
mod glob;
pub mod metadata;
mod walk;
@@ -694,38 +696,7 @@ where
Err(error) => {
let message = error.to_diagnostic_message(Some(file.path(db)));
let mut diagnostic = Diagnostic::new(DiagnosticId::Panic, Severity::Fatal, message);
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
"This indicates a bug in ty.",
));

let report_message = "If you could open an issue at https://github.com/astral-sh/ty/issues/new?title=%5Bpanic%5D, we'd be very appreciative!";
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
report_message,
));
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Platform: {os} {arch}",
os = std::env::consts::OS,
arch = std::env::consts::ARCH
),
));
if let Some(version) = ruff_db::program_version() {
diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!("Version: {version}"),
));
}

diagnostic.sub(SubDiagnostic::new(
SubDiagnosticSeverity::Info,
format!(
"Args: {args:?}",
args = std::env::args().collect::<Vec<_>>()
),
));
diagnostic.add_bug_sub_diagnostics("%5Bpanic%5D");

if let Some(backtrace) = error.backtrace {
match backtrace.status() {

@@ -850,7 +850,6 @@ impl SrcOptions {
)]
#[serde(rename_all = "kebab-case", transparent)]
pub struct Rules {
#[get_size(ignore)] // TODO: Add `GetSize` support for `OrderMap`.
inner: OrderMap<RangedValue<String>, RangedValue<Level>, BuildHasherDefault<FxHasher>>,
}


@@ -5,9 +5,7 @@
use std::hash::BuildHasherDefault;

use crate::lint::{LintRegistry, LintRegistryBuilder};
use crate::suppression::{
IGNORE_COMMENT_UNKNOWN_RULE, INVALID_IGNORE_COMMENT, UNUSED_IGNORE_COMMENT,
};
use crate::suppression::{IGNORE_COMMENT_UNKNOWN_RULE, INVALID_IGNORE_COMMENT};
pub use db::Db;
pub use diagnostic::add_inferred_python_version_hint_to_diagnostic;
pub use program::{
@@ -19,7 +17,7 @@ pub use semantic_model::{
Completion, HasDefinition, HasType, MemberDefinition, NameKind, SemanticModel,
};
pub use site_packages::{PythonEnvironment, SitePackagesPaths, SysPrefixPathOrigin};
pub use suppression::create_suppression_fix;
pub use suppression::{UNUSED_IGNORE_COMMENT, suppress_all, suppress_single};
pub use ty_module_resolver::MisconfigurationMode;
pub use types::DisplaySettings;
pub use types::ide_support::{

@@ -33,7 +33,7 @@ pub struct LintMetadata {
pub line: u32,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, get_size2::GetSize)]
#[cfg_attr(
feature = "serde",
derive(serde::Serialize, serde::Deserialize),

@@ -14,7 +14,7 @@ use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

use crate::diagnostic::DiagnosticGuard;
use crate::lint::{GetLintError, Level, LintMetadata, LintRegistry, LintStatus};
pub use crate::suppression::add_ignore::create_suppression_fix;
pub use crate::suppression::add_ignore::{suppress_all, suppress_single};
use crate::suppression::parser::{
ParseError, ParseErrorKind, SuppressionComment, SuppressionParser,
};
@@ -40,7 +40,7 @@ declare_lint! {
/// ```py
/// a = 20 / 2
/// ```
pub(crate) static UNUSED_IGNORE_COMMENT = {
pub static UNUSED_IGNORE_COMMENT = {
summary: "detects unused `type: ignore` comments",
status: LintStatus::stable("0.0.1-alpha.1"),
default_level: Level::Ignore,
@@ -362,7 +362,7 @@ impl Suppressions {
// Don't use intersect to avoid that suppressions on inner-expression
// ignore errors for outer expressions
suppression.suppressed_range.contains(range.start())
|| suppression.suppressed_range.contains(range.end())
|| suppression.suppressed_range.contains_inclusive(range.end())
})
}


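The `Suppressions` hunk above swaps `contains` for `contains_inclusive` on the range end. A small standalone illustration of why that matters, assuming only the `ruff_text_size` crate (this example is not part of the commit): `TextRange::contains` treats the end offset as exclusive, so a diagnostic ending exactly at the suppressed range's end offset would otherwise slip through.

```rust
use ruff_text_size::{TextRange, TextSize};

fn main() {
    let suppressed = TextRange::new(TextSize::from(0), TextSize::from(10));
    let diagnostic_end = TextSize::from(10);

    // `contains` is half-open: the end offset itself is *not* inside.
    assert!(!suppressed.contains(diagnostic_end));
    // `contains_inclusive` also accepts the end offset.
    assert!(suppressed.contains_inclusive(diagnostic_end));
}
```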
@@ -1,66 +1,158 @@
use std::collections::{BTreeMap, BTreeSet};
use std::fmt::Formatter;

use ruff_db::diagnostic::LintName;
use ruff_db::display::FormatterJoinExtension;
use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_db::source::source_text;
use ruff_diagnostics::{Edit, Fix};
use ruff_python_ast::token::TokenKind;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;

use crate::Db;
use crate::lint::LintId;
use crate::suppression::{SuppressionTarget, suppressions};
use crate::suppression::{SuppressionTarget, Suppressions, suppressions};

/// Creates a fix for adding a suppression comment to suppress `lint` for `range`.
/// Creates fixes to suppress all violations in `ids_with_range`.
///
/// The fix prefers adding the code to an existing `ty: ignore[]` comment over
/// adding a new suppression comment.
pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRange) -> Fix {
/// This is different from calling `suppress_single` for every item in `ids_with_range`
/// in that errors on the same line are grouped together and ty will only insert a single
/// suppression with possibly multiple codes instead of adding multiple suppression comments.
pub fn suppress_all(db: &dyn Db, file: File, ids_with_range: &[(LintName, TextRange)]) -> Vec<Fix> {
let suppressions = suppressions(db, file);
let source = source_text(db, file);

let mut existing_suppressions = suppressions.line_suppressions(range).filter(|suppression| {
matches!(
suppression.target,
SuppressionTarget::Lint(_) | SuppressionTarget::Empty,
)
});
// Compute the full suppression ranges for each diagnostic.
let ids_full_range: Vec<_> = ids_with_range
.iter()
.map(|&(id, range)| (id, suppression_range(db, file, range)))
.collect();

// If there's an existing `ty: ignore[]` comment, append the code to it instead of creating a new suppression comment.
if let Some(existing) = existing_suppressions.next() {
let comment_text = &source[existing.comment_range];
// Only add to the existing ignore comment if it has no reason.
if let Some(before_closing_paren) = comment_text.trim_end().strip_suffix(']') {
let up_to_last_code = before_closing_paren.trim_end();
// 1. Group the diagnostics by their line-start position and try to add
// the suppression to an existing `ty: ignore` comment on that line.
let mut by_start: BTreeMap<_, (BTreeSet<LintName>, SmallVec<[usize; 2]>)> = BTreeMap::new();

let insertion = if up_to_last_code.ends_with(',') {
format!(" {id}", id = id.name())
} else {
format!(", {id}", id = id.name())
};
for (i, &(id, range)) in ids_full_range.iter().enumerate() {
let (lints, indices) = by_start.entry(range.start()).or_default();
lints.insert(id);
indices.push(i);
}

let relative_offset_from_end = comment_text.text_len() - up_to_last_code.text_len();
let mut fixes = Vec::with_capacity(ids_full_range.len());

return Fix::safe_edit(Edit::insertion(
insertion,
existing.comment_range.end() - relative_offset_from_end,
));
// Tracks the indices in `ids_with_range` for which we pushed a
// fix to `fixes`
let mut fixed = FxHashSet::default();

for (start_offset, (lints, original_indices)) in by_start {
let codes: SmallVec<[LintName; 2]> = lints.into_iter().collect();
if let Some(add_to_start) =
add_to_existing_suppression(suppressions, &source, &codes, start_offset)
{
// Mark the diagnostics as fixed, so that we don't generate a fix at the end of the line.
fixed.extend(original_indices);
fixes.push(add_to_start);
}
}

// 2. Group the diagnostics by their end position and try to add the code to an
// existing `ty: ignore` comment or insert a new `ty: ignore` comment. But only do this
// for diagnostics for which we haven't pushed a start-line fix.
let mut by_end: BTreeMap<TextSize, BTreeSet<LintName>> = BTreeMap::new();

for (i, (id, range)) in ids_full_range.into_iter().enumerate() {
if fixed.contains(&i) {
// We already pushed a fix that appends the suppression to an existing suppression on the
// start line.
continue;
}

by_end.entry(range.end()).or_default().insert(id);
}

for (end_offset, lints) in by_end {
let codes: SmallVec<[LintName; 2]> = lints.into_iter().collect();

fixes.push(append_to_existing_or_add_end_of_line_suppression(
suppressions,
&source,
&codes,
end_offset,
));
}

fixes.sort_by_key(ruff_diagnostics::Fix::min_start);

fixes
}

/// Creates a fix to suppress a single lint.
pub fn suppress_single(db: &dyn Db, file: File, id: LintId, range: TextRange) -> Fix {
let suppression_range = suppression_range(db, file, range);

let suppressions = suppressions(db, file);
let source = source_text(db, file);
let codes = &[id.name()];

if let Some(add_fix) =
add_to_existing_suppression(suppressions, &source, codes, suppression_range.start())
{
return add_fix;
}

append_to_existing_or_add_end_of_line_suppression(
suppressions,
&source,
codes,
suppression_range.end(),
)
}

/// Returns the suppression range for the given `range`.
///
/// The suppression range is defined as:
///
/// * `start`: The `end` of the preceding `Newline` or `NonLogicalLine` token.
/// * `end`: The `start` of the first `NonLogicalLine` or `Newline` token coming after the range.
///
/// For most ranges, this means the suppression range starts at the beginning of the physical line
/// and ends at the end of the physical line containing `range`. The exceptions to this are:
///
/// * If `range` is within a single-line interpolated expression, then the start and end are extended to the start and end of the enclosing interpolated string.
/// * If there's a line continuation, then the suppression range is extended to include the following line too.
/// * If there's a multiline string, then the suppression range is extended to cover the starting and ending line of the multiline string.
fn suppression_range(db: &dyn Db, file: File, range: TextRange) -> TextRange {
// Always insert a new suppression at the end of the range to avoid having to deal with multiline strings
// etc. Also make sure to not pass a sub-token range to `Tokens::after`.
let parsed = parsed_module(db, file).load(db);
let tokens = parsed.tokens().at_offset(range.end());
let token_range = match tokens {
let before_token_range = match parsed.tokens().at_offset(range.start()) {
ruff_python_ast::token::TokenAt::None => range,
ruff_python_ast::token::TokenAt::Single(token) => token.range(),
ruff_python_ast::token::TokenAt::Between(..) => range,
};
let tokens_after = parsed.tokens().after(token_range.end());
let before_tokens = parsed.tokens().before(before_token_range.start());

// Same as for `line_end` when building up the `suppressions`: Ignore newlines
// in multiline-strings, inside f-strings, or after a line continuation because we can't
// place a comment on those lines.
let line_end = tokens_after
let line_start = before_tokens
.iter()
.rfind(|token| {
matches!(
token.kind(),
TokenKind::Newline | TokenKind::NonLogicalNewline
)
})
.map(Ranged::end)
.unwrap_or(TextSize::default());

let after_token_range = match parsed.tokens().at_offset(range.end()) {
ruff_python_ast::token::TokenAt::None => range,
ruff_python_ast::token::TokenAt::Single(token) => token.range(),
ruff_python_ast::token::TokenAt::Between(..) => range,
};
let after_tokens = parsed.tokens().after(after_token_range.end());
let line_end = after_tokens
.iter()
.find(|token| {
matches!(
@@ -69,13 +161,29 @@ pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRa
)
})
.map(Ranged::start)
.unwrap_or(source.text_len());
.unwrap_or(range.end());

TextRange::new(line_start, line_end)
}

fn append_to_existing_or_add_end_of_line_suppression(
suppressions: &Suppressions,
source: &str,
codes: &[LintName],
line_end: TextSize,
) -> Fix {
if let Some(add_fix) = add_to_existing_suppression(suppressions, source, codes, line_end) {
return add_fix;
}

let up_to_line_end = &source[..line_end.to_usize()];
let up_to_first_content = up_to_line_end.trim_end();
// Don't use `trim_end` in case the previous line ends with a `\` followed by a newline. We don't want to eat
// into that newline!
let up_to_first_content =
up_to_line_end.trim_end_matches(|c| !matches!(c, '\n' | '\r') && c.is_whitespace());
let trailing_whitespace_len = up_to_line_end.text_len() - up_to_first_content.text_len();

let insertion = format!(" # ty:ignore[{id}]", id = id.name());
let insertion = format!(" # ty:ignore[{codes}]", codes = Codes(codes));

Fix::safe_edit(if trailing_whitespace_len == TextSize::ZERO {
Edit::insertion(insertion, line_end)
@@ -85,3 +193,48 @@ pub fn create_suppression_fix(db: &dyn Db, file: File, id: LintId, range: TextRa
Edit::replacement(insertion, line_end - trailing_whitespace_len, line_end)
})
}

fn add_to_existing_suppression(
suppressions: &Suppressions,
source: &str,
codes: &[LintName],
offset: TextSize,
) -> Option<Fix> {
let mut existing_suppressions = suppressions
.line_suppressions(TextRange::empty(offset))
.filter(|suppression| {
matches!(
suppression.target,
SuppressionTarget::Lint(_) | SuppressionTarget::Empty,
)
});

// If there's an existing `ty: ignore[]` comment, append the code to it instead of creating a new suppression comment.
let existing = existing_suppressions.next()?;
let comment_text = &source[existing.comment_range];

// Only add to the existing ignore comment if it has no reason.
let before_closing_paren = comment_text.trim_end().strip_suffix(']')?;
let up_to_last_code = before_closing_paren.trim_end();

let insertion = if up_to_last_code.ends_with(',') {
format!(" {codes}", codes = Codes(codes))
} else {
format!(", {codes}", codes = Codes(codes))
};

let relative_offset_from_end = comment_text.text_len() - up_to_last_code.text_len();

Some(Fix::safe_edit(Edit::insertion(
insertion,
existing.comment_range.end() - relative_offset_from_end,
)))
}

struct Codes<'a>(&'a [LintName]);

impl std::fmt::Display for Codes<'_> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.join(", ").entries(self.0).finish()
}
}

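The doc comment on `suppress_all` above says diagnostics on the same line are grouped so a single `# ty:ignore[...]` comment can carry several codes. A standalone sketch of that bucketing step, using only std collections (placeholder codes and offsets, not part of this commit):

```rust
use std::collections::{BTreeMap, BTreeSet};

fn main() {
    // (lint code, start offset of its suppression range)
    let diagnostics = [
        ("invalid-method-override", 120u32),
        ("unresolved-attribute", 120),
        ("unused-ignore-comment", 480),
    ];

    // Bucket by line start so codes that share a line share one comment.
    let mut by_start: BTreeMap<u32, BTreeSet<&str>> = BTreeMap::new();
    for (code, start) in diagnostics {
        by_start.entry(start).or_default().insert(code);
    }

    // The first line gets one `ty: ignore` comment with two codes.
    assert_eq!(by_start[&120].len(), 2);
    assert_eq!(by_start[&480].len(), 1);
}
```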
@@ -335,13 +335,17 @@ impl WritableSystem for MdtestSystem {
self.as_system().create_new_file(&self.normalize_path(path))
}

fn write_file(&self, path: &SystemPath, content: &str) -> ruff_db::system::Result<()> {
fn write_file_bytes(&self, path: &SystemPath, content: &[u8]) -> ruff_db::system::Result<()> {
self.as_system()
.write_file(&self.normalize_path(path), content)
.write_file_bytes(&self.normalize_path(path), content)
}

fn create_directory_all(&self, path: &SystemPath) -> ruff_db::system::Result<()> {
self.as_system()
.create_directory_all(&self.normalize_path(path))
}

fn dyn_clone(&self) -> Box<dyn WritableSystem> {
Box::new(self.clone())
}
}
|
||||
Reference in New Issue
Block a user