mirror of https://github.com/astral-sh/ruff
Flatten `red_knot_project` import paths (#15616)
This commit is contained in:
parent
f82ef32e53
commit
73798327c6
|
|
@ -6,11 +6,10 @@ use clap::Parser;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use crossbeam::channel as crossbeam_channel;
|
use crossbeam::channel as crossbeam_channel;
|
||||||
use python_version::PythonVersion;
|
use python_version::PythonVersion;
|
||||||
use red_knot_project::db::ProjectDatabase;
|
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
|
||||||
use red_knot_project::project::options::{EnvironmentOptions, Options};
|
|
||||||
use red_knot_project::project::ProjectMetadata;
|
|
||||||
use red_knot_project::watch;
|
use red_knot_project::watch;
|
||||||
use red_knot_project::watch::ProjectWatcher;
|
use red_knot_project::watch::ProjectWatcher;
|
||||||
|
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_python_semantic::SitePackages;
|
use red_knot_python_semantic::SitePackages;
|
||||||
use red_knot_server::run_server;
|
use red_knot_server::run_server;
|
||||||
use ruff_db::diagnostic::Diagnostic;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
|
|
|
||||||
|
|
@ -4,11 +4,10 @@ use std::io::Write;
|
||||||
use std::time::{Duration, Instant};
|
use std::time::{Duration, Instant};
|
||||||
|
|
||||||
use anyhow::{anyhow, Context};
|
use anyhow::{anyhow, Context};
|
||||||
use red_knot_project::db::{Db, ProjectDatabase};
|
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
|
||||||
use red_knot_project::project::options::{EnvironmentOptions, Options};
|
use red_knot_project::metadata::pyproject::{PyProject, Tool};
|
||||||
use red_knot_project::project::pyproject::{PyProject, Tool};
|
|
||||||
use red_knot_project::project::ProjectMetadata;
|
|
||||||
use red_knot_project::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
|
use red_knot_project::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
|
||||||
|
use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_python_semantic::{
|
use red_knot_python_semantic::{
|
||||||
resolve_module, ModuleName, PythonPlatform, PythonVersion, SitePackages,
|
resolve_module, ModuleName, PythonPlatform, PythonVersion, SitePackages,
|
||||||
};
|
};
|
||||||
|
|
|
||||||
|
|
@ -148,7 +148,7 @@ impl_noop_combine!(String);
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use crate::project::combine::Combine;
|
use crate::combine::Combine;
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
use std::panic::RefUnwindSafe;
|
use std::panic::RefUnwindSafe;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use crate::project::{check_file, Project, ProjectMetadata};
|
|
||||||
use crate::DEFAULT_LINT_REGISTRY;
|
use crate::DEFAULT_LINT_REGISTRY;
|
||||||
|
use crate::{check_file, Project, ProjectMetadata};
|
||||||
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
|
||||||
use red_knot_python_semantic::{Db as SemanticDb, Program};
|
use red_knot_python_semantic::{Db as SemanticDb, Program};
|
||||||
use ruff_db::diagnostic::Diagnostic;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
|
|
@ -179,8 +179,8 @@ pub(crate) mod tests {
|
||||||
use ruff_db::{Db as SourceDb, Upcast};
|
use ruff_db::{Db as SourceDb, Upcast};
|
||||||
|
|
||||||
use crate::db::Db;
|
use crate::db::Db;
|
||||||
use crate::project::{Project, ProjectMetadata};
|
|
||||||
use crate::DEFAULT_LINT_REGISTRY;
|
use crate::DEFAULT_LINT_REGISTRY;
|
||||||
|
use crate::{Project, ProjectMetadata};
|
||||||
|
|
||||||
#[salsa::db]
|
#[salsa::db]
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
use crate::db::{Db, ProjectDatabase};
|
use crate::db::{Db, ProjectDatabase};
|
||||||
use crate::project::options::Options;
|
use crate::metadata::options::Options;
|
||||||
use crate::project::{Project, ProjectMetadata};
|
|
||||||
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
|
||||||
|
use crate::{Project, ProjectMetadata};
|
||||||
|
|
||||||
use red_knot_python_semantic::Program;
|
use red_knot_python_semantic::Program;
|
||||||
use ruff_db::files::{system_path_to_file, File, Files};
|
use ruff_db::files::{system_path_to_file, File, Files};
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,7 @@ use salsa::Setter;
|
||||||
use ruff_db::files::File;
|
use ruff_db::files::File;
|
||||||
|
|
||||||
use crate::db::Db;
|
use crate::db::Db;
|
||||||
use crate::project::Project;
|
use crate::Project;
|
||||||
|
|
||||||
/// Cheap cloneable hash set of files.
|
/// Cheap cloneable hash set of files.
|
||||||
type FileSet = Arc<FxHashSet<File>>;
|
type FileSet = Arc<FxHashSet<File>>;
|
||||||
|
|
@ -234,8 +234,8 @@ mod tests {
|
||||||
|
|
||||||
use crate::db::tests::TestDb;
|
use crate::db::tests::TestDb;
|
||||||
use crate::db::Db;
|
use crate::db::Db;
|
||||||
use crate::project::files::Index;
|
use crate::files::Index;
|
||||||
use crate::project::ProjectMetadata;
|
use crate::ProjectMetadata;
|
||||||
use ruff_db::files::system_path_to_file;
|
use ruff_db::files::system_path_to_file;
|
||||||
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::Name;
|
||||||
|
|
@ -1,8 +1,31 @@
|
||||||
|
#![allow(clippy::ref_option)]
|
||||||
|
|
||||||
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
|
use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
|
||||||
use red_knot_python_semantic::register_lints;
|
use red_knot_python_semantic::register_lints;
|
||||||
|
use red_knot_python_semantic::types::check_types;
|
||||||
|
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
|
||||||
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
|
use ruff_db::parsed::parsed_module;
|
||||||
|
use ruff_db::source::{source_text, SourceTextError};
|
||||||
|
use ruff_db::system::walk_directory::WalkState;
|
||||||
|
use ruff_db::system::{FileType, SystemPath};
|
||||||
|
use ruff_python_ast::PySourceType;
|
||||||
|
use ruff_text_size::TextRange;
|
||||||
|
use rustc_hash::{FxBuildHasher, FxHashSet};
|
||||||
|
use salsa::Durability;
|
||||||
|
use salsa::Setter;
|
||||||
|
use std::borrow::Cow;
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
pub mod db;
|
pub use db::{Db, ProjectDatabase};
|
||||||
pub mod project;
|
use files::{Index, Indexed, IndexedFiles};
|
||||||
|
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
|
||||||
|
|
||||||
|
pub mod combine;
|
||||||
|
|
||||||
|
mod db;
|
||||||
|
mod files;
|
||||||
|
pub mod metadata;
|
||||||
pub mod watch;
|
pub mod watch;
|
||||||
|
|
||||||
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
|
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
|
||||||
|
|
@ -13,3 +36,435 @@ pub fn default_lints_registry() -> LintRegistry {
|
||||||
register_lints(&mut builder);
|
register_lints(&mut builder);
|
||||||
builder.build()
|
builder.build()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The project as a Salsa ingredient.
|
||||||
|
///
|
||||||
|
/// ## How is a project different from a program?
|
||||||
|
/// There are two (related) motivations:
|
||||||
|
///
|
||||||
|
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
|
||||||
|
/// without introducing a cyclic dependency. The project is defined in a higher level crate
|
||||||
|
/// where it can reference these setting types.
|
||||||
|
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
|
||||||
|
/// it remains the same project. That's why program is a narrowed view of the project only
|
||||||
|
/// holding on to the most fundamental settings required for checking.
|
||||||
|
#[salsa::input]
|
||||||
|
pub struct Project {
|
||||||
|
/// The files that are open in the project.
|
||||||
|
///
|
||||||
|
/// Setting the open files to a non-`None` value changes `check` to only check the
|
||||||
|
/// open files rather than all files in the project.
|
||||||
|
#[return_ref]
|
||||||
|
#[default]
|
||||||
|
open_fileset: Option<Arc<FxHashSet<File>>>,
|
||||||
|
|
||||||
|
/// The first-party files of this project.
|
||||||
|
#[default]
|
||||||
|
#[return_ref]
|
||||||
|
file_set: IndexedFiles,
|
||||||
|
|
||||||
|
/// The metadata describing the project, including the unresolved options.
|
||||||
|
#[return_ref]
|
||||||
|
pub metadata: ProjectMetadata,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Project {
|
||||||
|
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
|
||||||
|
Project::builder(metadata)
|
||||||
|
.durability(Durability::MEDIUM)
|
||||||
|
.open_fileset_durability(Durability::LOW)
|
||||||
|
.file_set_durability(Durability::LOW)
|
||||||
|
.new(db)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn root(self, db: &dyn Db) -> &SystemPath {
|
||||||
|
self.metadata(db).root()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn name(self, db: &dyn Db) -> &str {
|
||||||
|
self.metadata(db).name()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
|
||||||
|
tracing::debug!("Reloading project");
|
||||||
|
assert_eq!(self.root(db), metadata.root());
|
||||||
|
|
||||||
|
if &metadata != self.metadata(db) {
|
||||||
|
self.set_metadata(db).to(metadata);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.reload_files(db);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Checks all open files in the project and its dependencies.
|
||||||
|
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
|
||||||
|
let project_span = tracing::debug_span!("Project::check");
|
||||||
|
let _span = project_span.enter();
|
||||||
|
|
||||||
|
tracing::debug!("Checking project '{name}'", name = self.name(db));
|
||||||
|
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
|
||||||
|
let inner_result = Arc::clone(&result);
|
||||||
|
|
||||||
|
let db = db.clone();
|
||||||
|
let project_span = project_span.clone();
|
||||||
|
|
||||||
|
rayon::scope(move |scope| {
|
||||||
|
let files = ProjectFiles::new(&db, self);
|
||||||
|
for file in &files {
|
||||||
|
let result = inner_result.clone();
|
||||||
|
let db = db.clone();
|
||||||
|
let project_span = project_span.clone();
|
||||||
|
|
||||||
|
scope.spawn(move |_| {
|
||||||
|
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
|
||||||
|
let _entered = check_file_span.entered();
|
||||||
|
|
||||||
|
let file_diagnostics = check_file(&db, file);
|
||||||
|
result.lock().unwrap().extend(file_diagnostics);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Arc::into_inner(result).unwrap().into_inner().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Opens a file in the project.
|
||||||
|
///
|
||||||
|
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
|
||||||
|
pub fn open_file(self, db: &mut dyn Db, file: File) {
|
||||||
|
tracing::debug!("Opening file `{}`", file.path(db));
|
||||||
|
|
||||||
|
let mut open_files = self.take_open_files(db);
|
||||||
|
open_files.insert(file);
|
||||||
|
self.set_open_files(db, open_files);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Closes a file in the project.
|
||||||
|
pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
|
||||||
|
tracing::debug!("Closing file `{}`", file.path(db));
|
||||||
|
|
||||||
|
let mut open_files = self.take_open_files(db);
|
||||||
|
let removed = open_files.remove(&file);
|
||||||
|
|
||||||
|
if removed {
|
||||||
|
self.set_open_files(db, open_files);
|
||||||
|
}
|
||||||
|
|
||||||
|
removed
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the open files in the project or `None` if the entire project should be checked.
|
||||||
|
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
|
||||||
|
self.open_fileset(db).as_deref()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Sets the open files in the project.
|
||||||
|
///
|
||||||
|
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
|
||||||
|
#[tracing::instrument(level = "debug", skip(self, db))]
|
||||||
|
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
|
||||||
|
tracing::debug!("Set open project files (count: {})", open_files.len());
|
||||||
|
|
||||||
|
self.set_open_fileset(db).to(Some(Arc::new(open_files)));
|
||||||
|
}
|
||||||
|
|
||||||
|
/// This takes the open files from the project and returns them.
|
||||||
|
///
|
||||||
|
/// This changes the behavior of `check` to check all files in the project instead of just the open files.
|
||||||
|
fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
|
||||||
|
tracing::debug!("Take open project files");
|
||||||
|
|
||||||
|
// Salsa will cancel any pending queries and remove its own reference to `open_files`
|
||||||
|
// so that the reference counter to `open_files` now drops to 1.
|
||||||
|
let open_files = self.set_open_fileset(db).to(None);
|
||||||
|
|
||||||
|
if let Some(open_files) = open_files {
|
||||||
|
Arc::try_unwrap(open_files).unwrap()
|
||||||
|
} else {
|
||||||
|
FxHashSet::default()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `true` if the file is open in the project.
|
||||||
|
///
|
||||||
|
/// A file is considered open when:
|
||||||
|
/// * explicitly set as an open file using [`open_file`](Self::open_file)
|
||||||
|
/// * It has a [`SystemPath`] and belongs to a package's `src` files
|
||||||
|
/// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
|
||||||
|
pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
|
||||||
|
if let Some(open_files) = self.open_files(db) {
|
||||||
|
open_files.contains(&file)
|
||||||
|
} else if file.path(db).is_system_path() {
|
||||||
|
self.contains_file(db, file)
|
||||||
|
} else {
|
||||||
|
file.path(db).is_system_virtual_path()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `true` if `file` is a first-party file part of this package.
|
||||||
|
pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
|
||||||
|
self.files(db).contains(&file)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument(level = "debug", skip(db))]
|
||||||
|
pub fn remove_file(self, db: &mut dyn Db, file: File) {
|
||||||
|
tracing::debug!(
|
||||||
|
"Removing file `{}` from project `{}`",
|
||||||
|
file.path(db),
|
||||||
|
self.name(db)
|
||||||
|
);
|
||||||
|
|
||||||
|
let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
index.remove(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_file(self, db: &mut dyn Db, file: File) {
|
||||||
|
tracing::debug!(
|
||||||
|
"Adding file `{}` to project `{}`",
|
||||||
|
file.path(db),
|
||||||
|
self.name(db)
|
||||||
|
);
|
||||||
|
|
||||||
|
let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
|
||||||
|
return;
|
||||||
|
};
|
||||||
|
|
||||||
|
index.insert(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the files belonging to this project.
|
||||||
|
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
|
||||||
|
let files = self.file_set(db);
|
||||||
|
|
||||||
|
let indexed = match files.get() {
|
||||||
|
Index::Lazy(vacant) => {
|
||||||
|
let _entered =
|
||||||
|
tracing::debug_span!("Project::index_files", package = %self.name(db))
|
||||||
|
.entered();
|
||||||
|
|
||||||
|
let files = discover_project_files(db, self);
|
||||||
|
tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
|
||||||
|
vacant.set(files)
|
||||||
|
}
|
||||||
|
Index::Indexed(indexed) => indexed,
|
||||||
|
};
|
||||||
|
|
||||||
|
indexed
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn reload_files(self, db: &mut dyn Db) {
|
||||||
|
tracing::debug!("Reloading files for project `{}`", self.name(db));
|
||||||
|
|
||||||
|
if !self.file_set(db).is_lazy() {
|
||||||
|
// Force a re-index of the files in the next revision.
|
||||||
|
self.set_file_set(db).to(IndexedFiles::lazy());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
||||||
|
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
|
||||||
|
// Abort checking if there are IO errors.
|
||||||
|
let source = source_text(db.upcast(), file);
|
||||||
|
|
||||||
|
if let Some(read_error) = source.read_error() {
|
||||||
|
diagnostics.push(Box::new(IOErrorDiagnostic {
|
||||||
|
file,
|
||||||
|
error: read_error.clone(),
|
||||||
|
}));
|
||||||
|
return diagnostics;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parsed = parsed_module(db.upcast(), file);
|
||||||
|
diagnostics.extend(parsed.errors().iter().map(|error| {
|
||||||
|
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
|
||||||
|
diagnostic
|
||||||
|
}));
|
||||||
|
|
||||||
|
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
|
||||||
|
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
||||||
|
boxed
|
||||||
|
}));
|
||||||
|
|
||||||
|
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
|
||||||
|
|
||||||
|
diagnostics
|
||||||
|
}
|
||||||
|
|
||||||
|
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
|
||||||
|
let paths = std::sync::Mutex::new(Vec::new());
|
||||||
|
|
||||||
|
db.system().walk_directory(project.root(db)).run(|| {
|
||||||
|
Box::new(|entry| {
|
||||||
|
match entry {
|
||||||
|
Ok(entry) => {
|
||||||
|
// Skip over any non python files to avoid creating too many entries in `Files`.
|
||||||
|
match entry.file_type() {
|
||||||
|
FileType::File => {
|
||||||
|
if entry
|
||||||
|
.path()
|
||||||
|
.extension()
|
||||||
|
.and_then(PySourceType::try_from_extension)
|
||||||
|
.is_some()
|
||||||
|
{
|
||||||
|
let mut paths = paths.lock().unwrap();
|
||||||
|
paths.push(entry.into_path());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
FileType::Directory | FileType::Symlink => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(error) => {
|
||||||
|
// TODO Handle error
|
||||||
|
tracing::error!("Failed to walk path: {error}");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
WalkState::Continue
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
let paths = paths.into_inner().unwrap();
|
||||||
|
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
||||||
|
|
||||||
|
for path in paths {
|
||||||
|
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
||||||
|
// We can ignore this.
|
||||||
|
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
||||||
|
files.insert(file);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
files
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum ProjectFiles<'a> {
|
||||||
|
OpenFiles(&'a FxHashSet<File>),
|
||||||
|
Indexed(files::Indexed<'a>),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> ProjectFiles<'a> {
|
||||||
|
fn new(db: &'a dyn Db, project: Project) -> Self {
|
||||||
|
if let Some(open_files) = project.open_files(db) {
|
||||||
|
ProjectFiles::OpenFiles(open_files)
|
||||||
|
} else {
|
||||||
|
ProjectFiles::Indexed(project.files(db))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
|
||||||
|
type Item = File;
|
||||||
|
type IntoIter = ProjectFilesIter<'a>;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
match self {
|
||||||
|
ProjectFiles::OpenFiles(files) => ProjectFilesIter::OpenFiles(files.iter()),
|
||||||
|
ProjectFiles::Indexed(indexed) => ProjectFilesIter::Indexed {
|
||||||
|
files: indexed.into_iter(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ProjectFilesIter<'db> {
|
||||||
|
OpenFiles(std::collections::hash_set::Iter<'db, File>),
|
||||||
|
Indexed { files: files::IndexedIter<'db> },
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for ProjectFilesIter<'_> {
|
||||||
|
type Item = File;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
match self {
|
||||||
|
ProjectFilesIter::OpenFiles(files) => files.next().copied(),
|
||||||
|
ProjectFilesIter::Indexed { files } => files.next(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct IOErrorDiagnostic {
|
||||||
|
file: File,
|
||||||
|
error: SourceTextError,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Diagnostic for IOErrorDiagnostic {
|
||||||
|
fn id(&self) -> DiagnosticId {
|
||||||
|
DiagnosticId::Io
|
||||||
|
}
|
||||||
|
|
||||||
|
fn message(&self) -> Cow<str> {
|
||||||
|
self.error.to_string().into()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn file(&self) -> File {
|
||||||
|
self.file
|
||||||
|
}
|
||||||
|
|
||||||
|
fn range(&self) -> Option<TextRange> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn severity(&self) -> Severity {
|
||||||
|
Severity::Error
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod tests {
|
||||||
|
use crate::db::tests::TestDb;
|
||||||
|
use crate::{check_file, ProjectMetadata};
|
||||||
|
use red_knot_python_semantic::types::check_types;
|
||||||
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
|
use ruff_db::files::system_path_to_file;
|
||||||
|
use ruff_db::source::source_text;
|
||||||
|
use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
|
||||||
|
use ruff_db::testing::assert_function_query_was_not_run;
|
||||||
|
use ruff_python_ast::name::Name;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
|
||||||
|
let project = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/"));
|
||||||
|
let mut db = TestDb::new(project);
|
||||||
|
let path = SystemPath::new("test.py");
|
||||||
|
|
||||||
|
db.write_file(path, "x = 10")?;
|
||||||
|
let file = system_path_to_file(&db, path).unwrap();
|
||||||
|
|
||||||
|
// Now the file gets deleted before we had a chance to read its source text.
|
||||||
|
db.memory_file_system().remove_file(path)?;
|
||||||
|
file.sync(&mut db);
|
||||||
|
|
||||||
|
assert_eq!(source_text(&db, file).as_str(), "");
|
||||||
|
assert_eq!(
|
||||||
|
check_file(&db, file)
|
||||||
|
.into_iter()
|
||||||
|
.map(|diagnostic| diagnostic.message().into_owned())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec!["Failed to read file: No such file or directory".to_string()]
|
||||||
|
);
|
||||||
|
|
||||||
|
let events = db.take_salsa_events();
|
||||||
|
assert_function_query_was_not_run(&db, check_types, file, &events);
|
||||||
|
|
||||||
|
// The user now creates a new file with an empty text. The source text
|
||||||
|
// content returned by `source_text` remains unchanged, but the diagnostics should get updated.
|
||||||
|
db.write_file(path, "").unwrap();
|
||||||
|
|
||||||
|
assert_eq!(source_text(&db, file).as_str(), "");
|
||||||
|
assert_eq!(
|
||||||
|
check_file(&db, file)
|
||||||
|
.into_iter()
|
||||||
|
.map(|diagnostic| diagnostic.message().into_owned())
|
||||||
|
.collect::<Vec<_>>(),
|
||||||
|
vec![] as Vec<String>
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,13 +1,15 @@
|
||||||
|
use red_knot_python_semantic::ProgramSettings;
|
||||||
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
use ruff_db::system::{System, SystemPath, SystemPathBuf};
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::Name;
|
||||||
|
|
||||||
use crate::project::combine::Combine;
|
|
||||||
use crate::project::options::Options;
|
|
||||||
use crate::project::pyproject::{Project, PyProject, PyProjectError};
|
|
||||||
use red_knot_python_semantic::ProgramSettings;
|
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
use super::options::KnotTomlError;
|
use crate::combine::Combine;
|
||||||
|
use crate::metadata::pyproject::{Project, PyProject, PyProjectError};
|
||||||
|
use options::KnotTomlError;
|
||||||
|
use options::Options;
|
||||||
|
|
||||||
|
pub mod options;
|
||||||
|
pub mod pyproject;
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Eq)]
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
#[cfg_attr(test, derive(serde::Serialize))]
|
#[cfg_attr(test, derive(serde::Serialize))]
|
||||||
|
|
@ -222,7 +224,7 @@ mod tests {
|
||||||
use insta::assert_ron_snapshot;
|
use insta::assert_ron_snapshot;
|
||||||
use ruff_db::system::{SystemPathBuf, TestSystem};
|
use ruff_db::system::{SystemPathBuf, TestSystem};
|
||||||
|
|
||||||
use crate::project::{ProjectDiscoveryError, ProjectMetadata};
|
use crate::{ProjectDiscoveryError, ProjectMetadata};
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn project_without_pyproject() -> anyhow::Result<()> {
|
fn project_without_pyproject() -> anyhow::Result<()> {
|
||||||
|
|
@ -16,12 +16,12 @@ pub struct Options {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Options {
|
impl Options {
|
||||||
pub(super) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
|
pub(crate) fn from_toml_str(content: &str) -> Result<Self, KnotTomlError> {
|
||||||
let options = toml::from_str(content)?;
|
let options = toml::from_str(content)?;
|
||||||
Ok(options)
|
Ok(options)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn to_program_settings(
|
pub(crate) fn to_program_settings(
|
||||||
&self,
|
&self,
|
||||||
project_root: &SystemPath,
|
project_root: &SystemPath,
|
||||||
system: &dyn System,
|
system: &dyn System,
|
||||||
|
|
@ -1,7 +1,61 @@
|
||||||
|
use pep440_rs::{Version, VersionSpecifiers};
|
||||||
use serde::{Deserialize, Deserializer, Serialize};
|
use serde::{Deserialize, Deserializer, Serialize};
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use thiserror::Error;
|
use thiserror::Error;
|
||||||
|
|
||||||
|
use crate::metadata::options::Options;
|
||||||
|
|
||||||
|
/// A `pyproject.toml` as specified in PEP 517.
|
||||||
|
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
|
||||||
|
#[serde(rename_all = "kebab-case")]
|
||||||
|
pub struct PyProject {
|
||||||
|
/// PEP 621-compliant project metadata.
|
||||||
|
pub project: Option<Project>,
|
||||||
|
/// Tool-specific metadata.
|
||||||
|
pub tool: Option<Tool>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PyProject {
|
||||||
|
pub(crate) fn knot(&self) -> Option<&Options> {
|
||||||
|
self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Error, Debug)]
|
||||||
|
pub enum PyProjectError {
|
||||||
|
#[error(transparent)]
|
||||||
|
TomlSyntax(#[from] toml::de::Error),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PyProject {
|
||||||
|
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
|
||||||
|
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// PEP 621 project metadata (`project`).
|
||||||
|
///
|
||||||
|
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
|
||||||
|
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
|
||||||
|
#[serde(rename_all = "kebab-case")]
|
||||||
|
pub struct Project {
|
||||||
|
/// The name of the project
|
||||||
|
///
|
||||||
|
/// Note: Intentionally option to be more permissive during deserialization.
|
||||||
|
/// `PackageMetadata::from_pyproject` reports missing names.
|
||||||
|
pub name: Option<PackageName>,
|
||||||
|
/// The version of the project
|
||||||
|
pub version: Option<Version>,
|
||||||
|
/// The Python versions this project is compatible with.
|
||||||
|
pub requires_python: Option<VersionSpecifiers>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
|
||||||
|
#[serde(rename_all = "kebab-case")]
|
||||||
|
pub struct Tool {
|
||||||
|
pub knot: Option<Options>,
|
||||||
|
}
|
||||||
|
|
||||||
/// The normalized name of a package.
|
/// The normalized name of a package.
|
||||||
///
|
///
|
||||||
/// Converts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.
|
/// Converts the name to lowercase and collapses runs of `-`, `_`, and `.` down to a single `-`.
|
||||||
|
|
@ -1,460 +0,0 @@
|
||||||
#![allow(clippy::ref_option)]
|
|
||||||
|
|
||||||
use crate::db::Db;
|
|
||||||
use crate::db::ProjectDatabase;
|
|
||||||
use crate::project::files::{Index, Indexed, IndexedFiles, IndexedIter};
|
|
||||||
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
|
|
||||||
|
|
||||||
use red_knot_python_semantic::types::check_types;
|
|
||||||
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
|
|
||||||
use ruff_db::parsed::parsed_module;
|
|
||||||
use ruff_db::source::{source_text, SourceTextError};
|
|
||||||
use ruff_db::system::FileType;
|
|
||||||
use ruff_db::{
|
|
||||||
files::{system_path_to_file, File},
|
|
||||||
system::{walk_directory::WalkState, SystemPath},
|
|
||||||
};
|
|
||||||
use ruff_python_ast::PySourceType;
|
|
||||||
use ruff_text_size::TextRange;
|
|
||||||
use rustc_hash::{FxBuildHasher, FxHashSet};
|
|
||||||
use salsa::{Durability, Setter as _};
|
|
||||||
use std::borrow::Cow;
|
|
||||||
use std::sync::Arc;
|
|
||||||
|
|
||||||
pub mod combine;
|
|
||||||
mod files;
|
|
||||||
mod metadata;
|
|
||||||
pub mod options;
|
|
||||||
pub mod pyproject;
|
|
||||||
|
|
||||||
/// The project as a Salsa ingredient.
|
|
||||||
///
|
|
||||||
/// ## How is a project different from a program?
|
|
||||||
/// There are two (related) motivations:
|
|
||||||
///
|
|
||||||
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
|
|
||||||
/// without introducing a cyclic dependency. The project is defined in a higher level crate
|
|
||||||
/// where it can reference these setting types.
|
|
||||||
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
|
|
||||||
/// it remains the same project. That's why program is a narrowed view of the project only
|
|
||||||
/// holding on to the most fundamental settings required for checking.
|
|
||||||
#[salsa::input]
|
|
||||||
pub struct Project {
|
|
||||||
/// The files that are open in the project.
|
|
||||||
///
|
|
||||||
/// Setting the open files to a non-`None` value changes `check` to only check the
|
|
||||||
/// open files rather than all files in the project.
|
|
||||||
#[return_ref]
|
|
||||||
#[default]
|
|
||||||
open_fileset: Option<Arc<FxHashSet<File>>>,
|
|
||||||
|
|
||||||
/// The first-party files of this project.
|
|
||||||
#[default]
|
|
||||||
#[return_ref]
|
|
||||||
file_set: IndexedFiles,
|
|
||||||
|
|
||||||
/// The metadata describing the project, including the unresolved options.
|
|
||||||
#[return_ref]
|
|
||||||
pub metadata: ProjectMetadata,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Project {
    /// Creates the project input from its metadata.
    ///
    /// Durabilities: the project as a whole changes rarely (`MEDIUM`), while the
    /// open-file and indexed-file sets churn constantly (`LOW`), letting Salsa
    /// invalidate the cheap parts without touching the rest.
    pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
        Project::builder(metadata)
            .durability(Durability::MEDIUM)
            .open_fileset_durability(Durability::LOW)
            .file_set_durability(Durability::LOW)
            .new(db)
    }

    /// Returns the project's root directory.
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.metadata(db).root()
    }

    /// Returns the project's name.
    pub fn name(self, db: &dyn Db) -> &str {
        self.metadata(db).name()
    }

    /// Replaces the project's metadata (same root required) and forces a file re-index.
    pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
        tracing::debug!("Reloading project");
        // Reload must not move the project; a different root is a caller bug.
        assert_eq!(self.root(db), metadata.root());

        // Only touch the salsa input (and thereby invalidate dependents) when the
        // metadata actually changed.
        if &metadata != self.metadata(db) {
            self.set_metadata(db).to(metadata);
        }

        self.reload_files(db);
    }

    /// Checks all open files in the project and its dependencies.
    pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
        let project_span = tracing::debug_span!("Project::check");
        let _span = project_span.enter();

        tracing::debug!("Checking project '{name}'", name = self.name(db));
        // Shared accumulator for diagnostics produced by the per-file worker tasks.
        let result = Arc::new(std::sync::Mutex::new(Vec::new()));
        let inner_result = Arc::clone(&result);

        // Cloning the database gives each task its own handle for parallel queries.
        let db = db.clone();
        let project_span = project_span.clone();

        // `rayon::scope` blocks until every spawned per-file task has finished.
        rayon::scope(move |scope| {
            let files = ProjectFiles::new(&db, self);
            for file in &files {
                let result = inner_result.clone();
                let db = db.clone();
                let project_span = project_span.clone();

                scope.spawn(move |_| {
                    let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
                    let _entered = check_file_span.entered();

                    let file_diagnostics = check_file(&db, file);
                    result.lock().unwrap().extend(file_diagnostics);
                });
            }
        });

        // All task clones of `result` were dropped when the scope ended, so this
        // `Arc` is unique again and `into_inner` cannot fail.
        Arc::into_inner(result).unwrap().into_inner().unwrap()
    }

    /// Opens a file in the project.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the project.
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Opening file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the project.
    ///
    /// Returns `true` if the file was previously open.
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        tracing::debug!("Closing file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        // Avoid writing the input (and invalidating dependents) when nothing changed.
        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the project or `None` if the entire project should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_fileset(db).as_deref()
    }

    /// Sets the open files in the project.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the project.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        tracing::debug!("Set open project files (count: {})", open_files.len());

        self.set_open_fileset(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the project and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the project instead of just the open files.
    fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        tracing::debug!("Take open project files");

        // Salsa will cancel any pending queries and remove its own reference to `open_files`
        // so that the reference counter to `open_files` now drops to 1.
        let open_files = self.set_open_fileset(db).to(None);

        if let Some(open_files) = open_files {
            // Sole owner after the setter above, so unwrapping the `Arc` cannot fail.
            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }

    /// Returns `true` if the file is open in the project.
    ///
    /// A file is considered open when:
    /// * explicitly set as an open file using [`open_file`](Self::open_file)
    /// * It has a [`SystemPath`] and belongs to a package's `src` files
    /// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
    pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
        if let Some(open_files) = self.open_files(db) {
            open_files.contains(&file)
        } else if file.path(db).is_system_path() {
            self.contains_file(db, file)
        } else {
            file.path(db).is_system_virtual_path()
        }
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    /// Removes `file` from the project's indexed file set (no-op if the index is lazy).
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Removing file `{}` from project `{}`",
            file.path(db),
            self.name(db)
        );

        // If the files haven't been indexed yet, there's nothing to remove; the
        // next index will simply not discover the file.
        let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
            return;
        };

        index.remove(file);
    }

    /// Adds `file` to the project's indexed file set (no-op if the index is lazy).
    pub fn add_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Adding file `{}` to project `{}`",
            file.path(db),
            self.name(db)
        );

        // If the files haven't been indexed yet, the next index will pick the file up.
        let Some(mut index) = IndexedFiles::indexed_mut(db, self) else {
            return;
        };

        index.insert(file);
    }

    /// Returns the files belonging to this project.
    ///
    /// Walks the project directory on first access and caches the result;
    /// subsequent calls return the cached index until `reload_files` resets it.
    pub fn files(self, db: &dyn Db) -> Indexed<'_> {
        let files = self.file_set(db);

        let indexed = match files.get() {
            Index::Lazy(vacant) => {
                let _entered =
                    tracing::debug_span!("Project::index_files", package = %self.name(db))
                        .entered();

                let files = discover_project_files(db, self);
                tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
        };

        indexed
    }

    /// Schedules a re-index of the project's files for the next revision.
    pub fn reload_files(self, db: &mut dyn Db) {
        tracing::debug!("Reloading files for project `{}`", self.name(db));

        if !self.file_set(db).is_lazy() {
            // Force a re-index of the files in the next revision.
            self.set_file_set(db).to(IndexedFiles::lazy());
        }
    }
}
|
|
||||||
|
|
||||||
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
|
|
||||||
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
|
|
||||||
// Abort checking if there are IO errors.
|
|
||||||
let source = source_text(db.upcast(), file);
|
|
||||||
|
|
||||||
if let Some(read_error) = source.read_error() {
|
|
||||||
diagnostics.push(Box::new(IOErrorDiagnostic {
|
|
||||||
file,
|
|
||||||
error: read_error.clone(),
|
|
||||||
}));
|
|
||||||
return diagnostics;
|
|
||||||
}
|
|
||||||
|
|
||||||
let parsed = parsed_module(db.upcast(), file);
|
|
||||||
diagnostics.extend(parsed.errors().iter().map(|error| {
|
|
||||||
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
|
|
||||||
diagnostic
|
|
||||||
}));
|
|
||||||
|
|
||||||
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
|
|
||||||
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
|
|
||||||
boxed
|
|
||||||
}));
|
|
||||||
|
|
||||||
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
|
|
||||||
|
|
||||||
diagnostics
|
|
||||||
}
|
|
||||||
|
|
||||||
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
|
|
||||||
let paths = std::sync::Mutex::new(Vec::new());
|
|
||||||
|
|
||||||
db.system().walk_directory(project.root(db)).run(|| {
|
|
||||||
Box::new(|entry| {
|
|
||||||
match entry {
|
|
||||||
Ok(entry) => {
|
|
||||||
// Skip over any non python files to avoid creating too many entries in `Files`.
|
|
||||||
match entry.file_type() {
|
|
||||||
FileType::File => {
|
|
||||||
if entry
|
|
||||||
.path()
|
|
||||||
.extension()
|
|
||||||
.and_then(PySourceType::try_from_extension)
|
|
||||||
.is_some()
|
|
||||||
{
|
|
||||||
let mut paths = paths.lock().unwrap();
|
|
||||||
paths.push(entry.into_path());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
FileType::Directory | FileType::Symlink => {}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(error) => {
|
|
||||||
// TODO Handle error
|
|
||||||
tracing::error!("Failed to walk path: {error}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
WalkState::Continue
|
|
||||||
})
|
|
||||||
});
|
|
||||||
|
|
||||||
let paths = paths.into_inner().unwrap();
|
|
||||||
let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);
|
|
||||||
|
|
||||||
for path in paths {
|
|
||||||
// If this returns `None`, then the file was deleted between the `walk_directory` call and now.
|
|
||||||
// We can ignore this.
|
|
||||||
if let Ok(file) = system_path_to_file(db.upcast(), &path) {
|
|
||||||
files.insert(file);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
files
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The set of files `Project::check` iterates over.
///
/// Either the explicitly opened files, or the full (lazily indexed) project file set.
#[derive(Debug)]
enum ProjectFiles<'a> {
    /// Only the files explicitly opened in the project.
    OpenFiles(&'a FxHashSet<File>),
    /// All indexed first-party files of the project.
    Indexed(Indexed<'a>),
}
|
|
||||||
|
|
||||||
impl<'a> ProjectFiles<'a> {
|
|
||||||
fn new(db: &'a dyn Db, project: Project) -> Self {
|
|
||||||
if let Some(open_files) = project.open_files(db) {
|
|
||||||
ProjectFiles::OpenFiles(open_files)
|
|
||||||
} else {
|
|
||||||
ProjectFiles::Indexed(project.files(db))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
|
|
||||||
type Item = File;
|
|
||||||
type IntoIter = ProjectFilesIter<'a>;
|
|
||||||
|
|
||||||
fn into_iter(self) -> Self::IntoIter {
|
|
||||||
match self {
|
|
||||||
ProjectFiles::OpenFiles(files) => ProjectFilesIter::OpenFiles(files.iter()),
|
|
||||||
ProjectFiles::Indexed(indexed) => ProjectFilesIter::Indexed {
|
|
||||||
files: indexed.into_iter(),
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Iterator over [`ProjectFiles`], yielding each file by value.
enum ProjectFilesIter<'db> {
    /// Iterates the explicit open-file set.
    OpenFiles(std::collections::hash_set::Iter<'db, File>),
    /// Iterates the indexed project files.
    Indexed { files: IndexedIter<'db> },
}
|
|
||||||
|
|
||||||
impl Iterator for ProjectFilesIter<'_> {
|
|
||||||
type Item = File;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<Self::Item> {
|
|
||||||
match self {
|
|
||||||
ProjectFilesIter::OpenFiles(files) => files.next().copied(),
|
|
||||||
ProjectFilesIter::Indexed { files } => files.next(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Diagnostic emitted when a file's source text could not be read.
#[derive(Debug)]
pub struct IOErrorDiagnostic {
    // The file whose source text failed to load.
    file: File,
    // The underlying read error; its `Display` impl becomes the diagnostic message.
    error: SourceTextError,
}
|
|
||||||
|
|
||||||
impl Diagnostic for IOErrorDiagnostic {
|
|
||||||
fn id(&self) -> DiagnosticId {
|
|
||||||
DiagnosticId::Io
|
|
||||||
}
|
|
||||||
|
|
||||||
fn message(&self) -> Cow<str> {
|
|
||||||
self.error.to_string().into()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn file(&self) -> File {
|
|
||||||
self.file
|
|
||||||
}
|
|
||||||
|
|
||||||
fn range(&self) -> Option<TextRange> {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
fn severity(&self) -> Severity {
|
|
||||||
Severity::Error
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::project::{check_file, ProjectMetadata};
    use red_knot_python_semantic::types::check_types;
    use ruff_db::diagnostic::Diagnostic;
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::source_text;
    use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    /// Verifies that `check_file` short-circuits with an IO diagnostic (and never
    /// runs `check_types`) when the source text can't be read, and that the
    /// diagnostics recover once the file becomes readable again.
    #[test]
    fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
        let project = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/"));
        let mut db = TestDb::new(project);
        let path = SystemPath::new("test.py");

        db.write_file(path, "x = 10")?;
        let file = system_path_to_file(&db, path).unwrap();

        // Now the file gets deleted before we had a chance to read its source text.
        db.memory_file_system().remove_file(path)?;
        // Sync so the database notices the deletion.
        file.sync(&mut db);

        // The unreadable file reads back as empty text...
        assert_eq!(source_text(&db, file).as_str(), "");
        // ...and checking it yields exactly one IO diagnostic.
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec!["Failed to read file: No such file or directory".to_string()]
        );

        // Type checking must not have been attempted for the unreadable file.
        let events = db.take_salsa_events();
        assert_function_query_was_not_run(&db, check_types, file, &events);

        // The user now creates a new file with an empty text. The source text
        // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec![] as Vec<String>
        );

        Ok(())
    }
}
|
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
mod package_name;
|
|
||||||
|
|
||||||
use pep440_rs::{Version, VersionSpecifiers};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use thiserror::Error;
|
|
||||||
|
|
||||||
use crate::project::options::Options;
|
|
||||||
pub(crate) use package_name::PackageName;
|
|
||||||
|
|
||||||
/// A `pyproject.toml` as specified in PEP 517.
///
/// Both sections are optional so that partially-filled manifests still deserialize.
#[derive(Deserialize, Serialize, Debug, Default, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct PyProject {
    /// PEP 621-compliant project metadata.
    pub project: Option<Project>,
    /// Tool-specific metadata.
    pub tool: Option<Tool>,
}
|
|
||||||
|
|
||||||
impl PyProject {
|
|
||||||
pub(crate) fn knot(&self) -> Option<&Options> {
|
|
||||||
self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Errors that can occur when reading a `pyproject.toml`.
#[derive(Error, Debug)]
pub enum PyProjectError {
    /// The document is not valid TOML (or doesn't match the expected schema).
    #[error(transparent)]
    TomlSyntax(#[from] toml::de::Error),
}
|
|
||||||
|
|
||||||
impl PyProject {
|
|
||||||
pub(crate) fn from_toml_str(content: &str) -> Result<Self, PyProjectError> {
|
|
||||||
toml::from_str(content).map_err(PyProjectError::TomlSyntax)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// PEP 621 project metadata (`project`).
///
/// See <https://packaging.python.org/en/latest/specifications/pyproject-toml>.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "kebab-case")]
pub struct Project {
    /// The name of the project.
    ///
    /// Note: Intentionally optional to be more permissive during deserialization.
    /// `PackageMetadata::from_pyproject` reports missing names.
    pub name: Option<PackageName>,
    /// The version of the project.
    pub version: Option<Version>,
    /// The Python versions this project is compatible with (`requires-python`).
    pub requires_python: Option<VersionSpecifiers>,
}
|
|
||||||
|
|
||||||
/// The `[tool]` table of a `pyproject.toml`.
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case")]
pub struct Tool {
    /// The `[tool.knot]` options table, if configured.
    pub knot: Option<Options>,
}
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: root
|
expression: root
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: sub_project
|
expression: sub_project
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: root
|
expression: root
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: sub_project
|
expression: sub_project
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: root
|
expression: root
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: project
|
expression: project
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
---
|
---
|
||||||
source: crates/red_knot_workspace/src/project/metadata.rs
|
source: crates/red_knot_project/src/metadata.rs
|
||||||
expression: project
|
expression: project
|
||||||
---
|
---
|
||||||
ProjectMetadata(
|
ProjectMetadata(
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
use anyhow::{anyhow, Context};
|
use anyhow::{anyhow, Context};
|
||||||
use red_knot_project::db::ProjectDatabase;
|
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_project::project::ProjectMetadata;
|
|
||||||
use red_knot_python_semantic::{HasTy, SemanticModel};
|
use red_knot_python_semantic::{HasTy, SemanticModel};
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
use ruff_db::parsed::parsed_module;
|
use ruff_db::parsed::parsed_module;
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ use crate::edit::ToRangeExt;
|
||||||
use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler};
|
use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler};
|
||||||
use crate::server::{client::Notifier, Result};
|
use crate::server::{client::Notifier, Result};
|
||||||
use crate::session::DocumentSnapshot;
|
use crate::session::DocumentSnapshot;
|
||||||
use red_knot_project::db::{Db, ProjectDatabase};
|
use red_knot_project::{Db, ProjectDatabase};
|
||||||
use ruff_db::diagnostic::Severity;
|
use ruff_db::diagnostic::Severity;
|
||||||
use ruff_db::source::{line_index, source_text};
|
use ruff_db::source::{line_index, source_text};
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,7 @@ use crate::session::{DocumentSnapshot, Session};
|
||||||
|
|
||||||
use lsp_types::notification::Notification as LSPNotification;
|
use lsp_types::notification::Notification as LSPNotification;
|
||||||
use lsp_types::request::Request;
|
use lsp_types::request::Request;
|
||||||
use red_knot_project::db::ProjectDatabase;
|
use red_knot_project::ProjectDatabase;
|
||||||
|
|
||||||
/// A supertrait for any server request handler.
|
/// A supertrait for any server request handler.
|
||||||
pub(super) trait RequestHandler {
|
pub(super) trait RequestHandler {
|
||||||
|
|
|
||||||
|
|
@ -8,8 +8,7 @@ use std::sync::Arc;
|
||||||
use anyhow::anyhow;
|
use anyhow::anyhow;
|
||||||
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};
|
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};
|
||||||
|
|
||||||
use red_knot_project::db::ProjectDatabase;
|
use red_knot_project::{ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_project::project::ProjectMetadata;
|
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
use ruff_db::system::SystemPath;
|
use ruff_db::system::SystemPath;
|
||||||
use ruff_db::Db;
|
use ruff_db::Db;
|
||||||
|
|
|
||||||
|
|
@ -3,9 +3,9 @@ use std::any::Any;
|
||||||
use js_sys::Error;
|
use js_sys::Error;
|
||||||
use wasm_bindgen::prelude::*;
|
use wasm_bindgen::prelude::*;
|
||||||
|
|
||||||
use red_knot_project::db::{Db, ProjectDatabase};
|
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
|
||||||
use red_knot_project::project::options::{EnvironmentOptions, Options};
|
use red_knot_project::ProjectMetadata;
|
||||||
use red_knot_project::project::ProjectMetadata;
|
use red_knot_project::{Db, ProjectDatabase};
|
||||||
use ruff_db::diagnostic::Diagnostic;
|
use ruff_db::diagnostic::Diagnostic;
|
||||||
use ruff_db::files::{system_path_to_file, File};
|
use ruff_db::files::{system_path_to_file, File};
|
||||||
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
|
use ruff_db::system::walk_directory::WalkDirectoryBuilder;
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,9 @@
|
||||||
#![allow(clippy::disallowed_names)]
|
#![allow(clippy::disallowed_names)]
|
||||||
|
|
||||||
use rayon::ThreadPoolBuilder;
|
use rayon::ThreadPoolBuilder;
|
||||||
use red_knot_project::db::{Db, ProjectDatabase};
|
use red_knot_project::metadata::options::{EnvironmentOptions, Options};
|
||||||
use red_knot_project::project::options::{EnvironmentOptions, Options};
|
|
||||||
use red_knot_project::project::ProjectMetadata;
|
|
||||||
use red_knot_project::watch::{ChangeEvent, ChangedKind};
|
use red_knot_project::watch::{ChangeEvent, ChangedKind};
|
||||||
|
use red_knot_project::{Db, ProjectDatabase, ProjectMetadata};
|
||||||
use red_knot_python_semantic::PythonVersion;
|
use red_knot_python_semantic::PythonVersion;
|
||||||
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
||||||
use ruff_benchmark::TestFile;
|
use ruff_benchmark::TestFile;
|
||||||
|
|
|
||||||
|
|
@ -19,14 +19,14 @@ pub(crate) fn derive_impl(input: DeriveInput) -> syn::Result<proc_macro2::TokenS
|
||||||
.expect("Expected to handle named fields");
|
.expect("Expected to handle named fields");
|
||||||
|
|
||||||
quote_spanned!(
|
quote_spanned!(
|
||||||
ident.span() => crate::project::combine::Combine::combine_with(&mut self.#ident, other.#ident)
|
ident.span() => crate::combine::Combine::combine_with(&mut self.#ident, other.#ident)
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
Ok(quote! {
|
Ok(quote! {
|
||||||
#[automatically_derived]
|
#[automatically_derived]
|
||||||
impl crate::project::combine::Combine for #ident {
|
impl crate::combine::Combine for #ident {
|
||||||
fn combine_with(&mut self, other: Self) {
|
fn combine_with(&mut self, other: Self) {
|
||||||
#(
|
#(
|
||||||
#output
|
#output
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue