Remove workspace support (#15472)

This commit is contained in:
Micha Reiser 2025-01-15 09:03:38 +01:00 committed by GitHub
parent bec8441cf5
commit 18d5dbfb7f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
49 changed files with 1428 additions and 2326 deletions

View File

@ -4,7 +4,6 @@ extend-exclude = [
"crates/red_knot_vendored/vendor/**/*", "crates/red_knot_vendored/vendor/**/*",
"**/resources/**/*", "**/resources/**/*",
"**/snapshots/**/*", "**/snapshots/**/*",
"crates/red_knot_workspace/src/workspace/pyproject/package_name.rs"
] ]
[default.extend-words] [default.extend-words]
@ -25,3 +24,6 @@ extend-ignore-re = [
"(?Rm)^.*#\\s*spellchecker:disable-line$", "(?Rm)^.*#\\s*spellchecker:disable-line$",
"LICENSEs", "LICENSEs",
] ]
[default.extend-identifiers]
"FrIeNdLy" = "FrIeNdLy"

View File

@ -8,11 +8,11 @@ use crossbeam::channel as crossbeam_channel;
use python_version::PythonVersion; use python_version::PythonVersion;
use red_knot_python_semantic::SitePackages; use red_knot_python_semantic::SitePackages;
use red_knot_server::run_server; use red_knot_server::run_server;
use red_knot_workspace::db::RootDatabase; use red_knot_workspace::db::ProjectDatabase;
use red_knot_workspace::project::settings::Configuration;
use red_knot_workspace::project::ProjectMetadata;
use red_knot_workspace::watch; use red_knot_workspace::watch;
use red_knot_workspace::watch::WorkspaceWatcher; use red_knot_workspace::watch::ProjectWatcher;
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_db::diagnostic::Diagnostic; use ruff_db::diagnostic::Diagnostic;
use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf}; use ruff_db::system::{OsSystem, System, SystemPath, SystemPathBuf};
use salsa::plumbing::ZalsaDatabase; use salsa::plumbing::ZalsaDatabase;
@ -165,7 +165,7 @@ fn run() -> anyhow::Result<ExitStatus> {
let system = OsSystem::new(cwd.clone()); let system = OsSystem::new(cwd.clone());
let cli_configuration = args.to_configuration(&cwd); let cli_configuration = args.to_configuration(&cwd);
let workspace_metadata = WorkspaceMetadata::discover( let workspace_metadata = ProjectMetadata::discover(
system.current_directory(), system.current_directory(),
&system, &system,
Some(&cli_configuration), Some(&cli_configuration),
@ -173,7 +173,7 @@ fn run() -> anyhow::Result<ExitStatus> {
// TODO: Use the `program_settings` to compute the key for the database's persistent // TODO: Use the `program_settings` to compute the key for the database's persistent
// cache and load the cache if it exists. // cache and load the cache if it exists.
let mut db = RootDatabase::new(workspace_metadata, system)?; let mut db = ProjectDatabase::new(workspace_metadata, system)?;
let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration); let (main_loop, main_loop_cancellation_token) = MainLoop::new(cli_configuration);
@ -226,7 +226,7 @@ struct MainLoop {
receiver: crossbeam_channel::Receiver<MainLoopMessage>, receiver: crossbeam_channel::Receiver<MainLoopMessage>,
/// The file system watcher, if running in watch mode. /// The file system watcher, if running in watch mode.
watcher: Option<WorkspaceWatcher>, watcher: Option<ProjectWatcher>,
cli_configuration: Configuration, cli_configuration: Configuration,
} }
@ -246,21 +246,21 @@ impl MainLoop {
) )
} }
fn watch(mut self, db: &mut RootDatabase) -> anyhow::Result<ExitStatus> { fn watch(mut self, db: &mut ProjectDatabase) -> anyhow::Result<ExitStatus> {
tracing::debug!("Starting watch mode"); tracing::debug!("Starting watch mode");
let sender = self.sender.clone(); let sender = self.sender.clone();
let watcher = watch::directory_watcher(move |event| { let watcher = watch::directory_watcher(move |event| {
sender.send(MainLoopMessage::ApplyChanges(event)).unwrap(); sender.send(MainLoopMessage::ApplyChanges(event)).unwrap();
})?; })?;
self.watcher = Some(WorkspaceWatcher::new(watcher, db)); self.watcher = Some(ProjectWatcher::new(watcher, db));
self.run(db); self.run(db);
Ok(ExitStatus::Success) Ok(ExitStatus::Success)
} }
fn run(mut self, db: &mut RootDatabase) -> ExitStatus { fn run(mut self, db: &mut ProjectDatabase) -> ExitStatus {
self.sender.send(MainLoopMessage::CheckWorkspace).unwrap(); self.sender.send(MainLoopMessage::CheckWorkspace).unwrap();
let result = self.main_loop(db); let result = self.main_loop(db);
@ -270,7 +270,7 @@ impl MainLoop {
result result
} }
fn main_loop(&mut self, db: &mut RootDatabase) -> ExitStatus { fn main_loop(&mut self, db: &mut ProjectDatabase) -> ExitStatus {
// Schedule the first check. // Schedule the first check.
tracing::debug!("Starting main loop"); tracing::debug!("Starting main loop");
@ -282,7 +282,7 @@ impl MainLoop {
let db = db.clone(); let db = db.clone();
let sender = self.sender.clone(); let sender = self.sender.clone();
// Spawn a new task that checks the workspace. This needs to be done in a separate thread // Spawn a new task that checks the project. This needs to be done in a separate thread
// to prevent blocking the main loop here. // to prevent blocking the main loop here.
rayon::spawn(move || { rayon::spawn(move || {
if let Ok(result) = db.check() { if let Ok(result) = db.check() {

View File

@ -5,18 +5,18 @@ use std::time::{Duration, Instant};
use anyhow::{anyhow, Context}; use anyhow::{anyhow, Context};
use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages}; use red_knot_python_semantic::{resolve_module, ModuleName, Program, PythonVersion, SitePackages};
use red_knot_workspace::db::{Db, RootDatabase}; use red_knot_workspace::db::{Db, ProjectDatabase};
use red_knot_workspace::watch::{directory_watcher, ChangeEvent, WorkspaceWatcher}; use red_knot_workspace::project::settings::{Configuration, SearchPathConfiguration};
use red_knot_workspace::workspace::settings::{Configuration, SearchPathConfiguration}; use red_knot_workspace::project::ProjectMetadata;
use red_knot_workspace::workspace::WorkspaceMetadata; use red_knot_workspace::watch::{directory_watcher, ChangeEvent, ProjectWatcher};
use ruff_db::files::{system_path_to_file, File, FileError}; use ruff_db::files::{system_path_to_file, File, FileError};
use ruff_db::source::source_text; use ruff_db::source::source_text;
use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf}; use ruff_db::system::{OsSystem, SystemPath, SystemPathBuf};
use ruff_db::Upcast; use ruff_db::Upcast;
struct TestCase { struct TestCase {
db: RootDatabase, db: ProjectDatabase,
watcher: Option<WorkspaceWatcher>, watcher: Option<ProjectWatcher>,
changes_receiver: crossbeam::channel::Receiver<Vec<ChangeEvent>>, changes_receiver: crossbeam::channel::Receiver<Vec<ChangeEvent>>,
/// The temporary directory that contains the test files. /// The temporary directory that contains the test files.
/// We need to hold on to it in the test case or the temp files get deleted. /// We need to hold on to it in the test case or the temp files get deleted.
@ -26,15 +26,15 @@ struct TestCase {
} }
impl TestCase { impl TestCase {
fn workspace_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf { fn project_path(&self, relative: impl AsRef<SystemPath>) -> SystemPathBuf {
SystemPath::absolute(relative, self.db.workspace().root(&self.db)) SystemPath::absolute(relative, self.db.project().root(&self.db))
} }
fn root_path(&self) -> &SystemPath { fn root_path(&self) -> &SystemPath {
&self.root_dir &self.root_dir
} }
fn db(&self) -> &RootDatabase { fn db(&self) -> &ProjectDatabase {
&self.db &self.db
} }
@ -150,7 +150,7 @@ impl TestCase {
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let program = Program::get(self.db()); let program = Program::get(self.db());
let new_settings = configuration.to_settings(self.db.workspace().root(&self.db)); let new_settings = configuration.to_settings(self.db.project().root(&self.db));
self.configuration.search_paths = configuration; self.configuration.search_paths = configuration;
program.update_search_paths(&mut self.db, &new_settings)?; program.update_search_paths(&mut self.db, &new_settings)?;
@ -163,9 +163,8 @@ impl TestCase {
Ok(()) Ok(())
} }
fn collect_package_files(&self, path: &SystemPath) -> Vec<File> { fn collect_project_files(&self) -> Vec<File> {
let package = self.db().workspace().package(self.db(), path).unwrap(); let files = self.db().project().files(self.db());
let files = package.files(self.db());
let mut collected: Vec<_> = files.into_iter().collect(); let mut collected: Vec<_> = files.into_iter().collect();
collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap()); collected.sort_unstable_by_key(|file| file.path(self.db()).as_system_path().unwrap());
collected collected
@ -194,17 +193,17 @@ where
} }
trait SetupFiles { trait SetupFiles {
fn setup(self, root_path: &SystemPath, workspace_path: &SystemPath) -> anyhow::Result<()>; fn setup(self, root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()>;
} }
impl<const N: usize, P> SetupFiles for [(P, &'static str); N] impl<const N: usize, P> SetupFiles for [(P, &'static str); N]
where where
P: AsRef<SystemPath>, P: AsRef<SystemPath>,
{ {
fn setup(self, _root_path: &SystemPath, workspace_path: &SystemPath) -> anyhow::Result<()> { fn setup(self, _root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()> {
for (relative_path, content) in self { for (relative_path, content) in self {
let relative_path = relative_path.as_ref(); let relative_path = relative_path.as_ref();
let absolute_path = workspace_path.join(relative_path); let absolute_path = project_path.join(relative_path);
if let Some(parent) = absolute_path.parent() { if let Some(parent) = absolute_path.parent() {
std::fs::create_dir_all(parent).with_context(|| { std::fs::create_dir_all(parent).with_context(|| {
format!("Failed to create parent directory for file `{relative_path}`") format!("Failed to create parent directory for file `{relative_path}`")
@ -226,8 +225,8 @@ impl<F> SetupFiles for F
where where
F: FnOnce(&SystemPath, &SystemPath) -> anyhow::Result<()>, F: FnOnce(&SystemPath, &SystemPath) -> anyhow::Result<()>,
{ {
fn setup(self, root_path: &SystemPath, workspace_path: &SystemPath) -> anyhow::Result<()> { fn setup(self, root_path: &SystemPath, project_path: &SystemPath) -> anyhow::Result<()> {
self(root_path, workspace_path) self(root_path, project_path)
} }
} }
@ -235,7 +234,7 @@ fn setup<F>(setup_files: F) -> anyhow::Result<TestCase>
where where
F: SetupFiles, F: SetupFiles,
{ {
setup_with_search_paths(setup_files, |_root, _workspace_path| { setup_with_search_paths(setup_files, |_root, _project_path| {
SearchPathConfiguration::default() SearchPathConfiguration::default()
}) })
} }
@ -265,18 +264,18 @@ where
.simplified() .simplified()
.to_path_buf(); .to_path_buf();
let workspace_path = root_path.join("workspace"); let project_path = root_path.join("project");
std::fs::create_dir_all(workspace_path.as_std_path()) std::fs::create_dir_all(project_path.as_std_path())
.with_context(|| format!("Failed to create workspace directory `{workspace_path}`"))?; .with_context(|| format!("Failed to create project directory `{project_path}`"))?;
setup_files setup_files
.setup(&root_path, &workspace_path) .setup(&root_path, &project_path)
.context("Failed to setup test files")?; .context("Failed to setup test files")?;
let system = OsSystem::new(&workspace_path); let system = OsSystem::new(&project_path);
let search_paths = create_search_paths(&root_path, &workspace_path); let search_paths = create_search_paths(&root_path, &project_path);
for path in search_paths for path in search_paths
.extra_paths .extra_paths
@ -300,15 +299,15 @@ where
search_paths, search_paths,
}; };
let workspace = WorkspaceMetadata::discover(&workspace_path, &system, Some(&configuration))?; let project = ProjectMetadata::discover(&project_path, &system, Some(&configuration))?;
let db = RootDatabase::new(workspace, system)?; let db = ProjectDatabase::new(project, system)?;
let (sender, receiver) = crossbeam::channel::unbounded(); let (sender, receiver) = crossbeam::channel::unbounded();
let watcher = directory_watcher(move |events| sender.send(events).unwrap()) let watcher = directory_watcher(move |events| sender.send(events).unwrap())
.with_context(|| "Failed to create directory watcher")?; .with_context(|| "Failed to create directory watcher")?;
let watcher = WorkspaceWatcher::new(watcher, &db); let watcher = ProjectWatcher::new(watcher, &db);
assert!(!watcher.has_errored_paths()); assert!(!watcher.has_errored_paths());
let test_case = TestCase { let test_case = TestCase {
@ -359,12 +358,12 @@ fn update_file(path: impl AsRef<SystemPath>, content: &str) -> anyhow::Result<()
#[test] #[test]
fn new_file() -> anyhow::Result<()> { fn new_file() -> anyhow::Result<()> {
let mut case = setup([("bar.py", "")])?; let mut case = setup([("bar.py", "")])?;
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let bar_file = case.system_file(&bar_path).unwrap(); let bar_file = case.system_file(&bar_path).unwrap();
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound)); assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound));
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]); assert_eq!(&case.collect_project_files(), &[bar_file]);
std::fs::write(foo_path.as_std_path(), "print('Hello')")?; std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
@ -374,7 +373,7 @@ fn new_file() -> anyhow::Result<()> {
let foo = case.system_file(&foo_path).expect("foo.py to exist."); let foo = case.system_file(&foo_path).expect("foo.py to exist.");
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file, foo]); assert_eq!(&case.collect_project_files(), &[bar_file, foo]);
Ok(()) Ok(())
} }
@ -382,12 +381,12 @@ fn new_file() -> anyhow::Result<()> {
#[test] #[test]
fn new_ignored_file() -> anyhow::Result<()> { fn new_ignored_file() -> anyhow::Result<()> {
let mut case = setup([("bar.py", ""), (".ignore", "foo.py")])?; let mut case = setup([("bar.py", ""), (".ignore", "foo.py")])?;
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let bar_file = case.system_file(&bar_path).unwrap(); let bar_file = case.system_file(&bar_path).unwrap();
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound)); assert_eq!(case.system_file(&foo_path), Err(FileError::NotFound));
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]); assert_eq!(&case.collect_project_files(), &[bar_file]);
std::fs::write(foo_path.as_std_path(), "print('Hello')")?; std::fs::write(foo_path.as_std_path(), "print('Hello')")?;
@ -396,7 +395,7 @@ fn new_ignored_file() -> anyhow::Result<()> {
case.apply_changes(changes); case.apply_changes(changes);
assert!(case.system_file(&foo_path).is_ok()); assert!(case.system_file(&foo_path).is_ok());
assert_eq!(&case.collect_package_files(&bar_path), &[bar_file]); assert_eq!(&case.collect_project_files(), &[bar_file]);
Ok(()) Ok(())
} }
@ -405,11 +404,11 @@ fn new_ignored_file() -> anyhow::Result<()> {
fn changed_file() -> anyhow::Result<()> { fn changed_file() -> anyhow::Result<()> {
let foo_source = "print('Hello, world!')"; let foo_source = "print('Hello, world!')";
let mut case = setup([("foo.py", foo_source)])?; let mut case = setup([("foo.py", foo_source)])?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let foo = case.system_file(&foo_path)?; let foo = case.system_file(&foo_path)?;
assert_eq!(source_text(case.db(), foo).as_str(), foo_source); assert_eq!(source_text(case.db(), foo).as_str(), foo_source);
assert_eq!(&case.collect_package_files(&foo_path), &[foo]); assert_eq!(&case.collect_project_files(), &[foo]);
update_file(&foo_path, "print('Version 2')")?; update_file(&foo_path, "print('Version 2')")?;
@ -420,7 +419,7 @@ fn changed_file() -> anyhow::Result<()> {
case.apply_changes(changes); case.apply_changes(changes);
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')"); assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
assert_eq!(&case.collect_package_files(&foo_path), &[foo]); assert_eq!(&case.collect_project_files(), &[foo]);
Ok(()) Ok(())
} }
@ -429,12 +428,12 @@ fn changed_file() -> anyhow::Result<()> {
fn deleted_file() -> anyhow::Result<()> { fn deleted_file() -> anyhow::Result<()> {
let foo_source = "print('Hello, world!')"; let foo_source = "print('Hello, world!')";
let mut case = setup([("foo.py", foo_source)])?; let mut case = setup([("foo.py", foo_source)])?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let foo = case.system_file(&foo_path)?; let foo = case.system_file(&foo_path)?;
assert!(foo.exists(case.db())); assert!(foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[foo]); assert_eq!(&case.collect_project_files(), &[foo]);
std::fs::remove_file(foo_path.as_std_path())?; std::fs::remove_file(foo_path.as_std_path())?;
@ -443,7 +442,7 @@ fn deleted_file() -> anyhow::Result<()> {
case.apply_changes(changes); case.apply_changes(changes);
assert!(!foo.exists(case.db())); assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]); assert_eq!(&case.collect_project_files(), &[] as &[File]);
Ok(()) Ok(())
} }
@ -455,7 +454,7 @@ fn deleted_file() -> anyhow::Result<()> {
fn move_file_to_trash() -> anyhow::Result<()> { fn move_file_to_trash() -> anyhow::Result<()> {
let foo_source = "print('Hello, world!')"; let foo_source = "print('Hello, world!')";
let mut case = setup([("foo.py", foo_source)])?; let mut case = setup([("foo.py", foo_source)])?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let trash_path = case.root_path().join(".trash"); let trash_path = case.root_path().join(".trash");
std::fs::create_dir_all(trash_path.as_std_path())?; std::fs::create_dir_all(trash_path.as_std_path())?;
@ -463,7 +462,7 @@ fn move_file_to_trash() -> anyhow::Result<()> {
let foo = case.system_file(&foo_path)?; let foo = case.system_file(&foo_path)?;
assert!(foo.exists(case.db())); assert!(foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[foo]); assert_eq!(&case.collect_project_files(), &[foo]);
std::fs::rename( std::fs::rename(
foo_path.as_std_path(), foo_path.as_std_path(),
@ -475,58 +474,50 @@ fn move_file_to_trash() -> anyhow::Result<()> {
case.apply_changes(changes); case.apply_changes(changes);
assert!(!foo.exists(case.db())); assert!(!foo.exists(case.db()));
assert_eq!(&case.collect_package_files(&foo_path), &[] as &[File]); assert_eq!(&case.collect_project_files(), &[] as &[File]);
Ok(()) Ok(())
} }
/// Move a file from a non-workspace (non-watched) location into the workspace. /// Move a file from a non-project (non-watched) location into the project.
#[test] #[test]
fn move_file_to_workspace() -> anyhow::Result<()> { fn move_file_to_project() -> anyhow::Result<()> {
let mut case = setup([("bar.py", "")])?; let mut case = setup([("bar.py", "")])?;
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let bar = case.system_file(&bar_path).unwrap(); let bar = case.system_file(&bar_path).unwrap();
let foo_path = case.root_path().join("foo.py"); let foo_path = case.root_path().join("foo.py");
std::fs::write(foo_path.as_std_path(), "")?; std::fs::write(foo_path.as_std_path(), "")?;
let foo_in_workspace_path = case.workspace_path("foo.py"); let foo_in_project = case.project_path("foo.py");
assert!(case.system_file(&foo_path).is_ok()); assert!(case.system_file(&foo_path).is_ok());
assert_eq!(&case.collect_package_files(&bar_path), &[bar]); assert_eq!(&case.collect_project_files(), &[bar]);
assert!(case
.db()
.workspace()
.package(case.db(), &foo_path)
.is_none());
std::fs::rename(foo_path.as_std_path(), foo_in_workspace_path.as_std_path())?; std::fs::rename(foo_path.as_std_path(), foo_in_project.as_std_path())?;
let changes = case.stop_watch(event_for_file("foo.py")); let changes = case.stop_watch(event_for_file("foo.py"));
case.apply_changes(changes); case.apply_changes(changes);
let foo_in_workspace = case.system_file(&foo_in_workspace_path)?; let foo_in_project = case.system_file(&foo_in_project)?;
assert!(foo_in_workspace.exists(case.db())); assert!(foo_in_project.exists(case.db()));
assert_eq!( assert_eq!(&case.collect_project_files(), &[bar, foo_in_project]);
&case.collect_package_files(&foo_in_workspace_path),
&[bar, foo_in_workspace]
);
Ok(()) Ok(())
} }
/// Rename a workspace file. /// Rename a project file.
#[test] #[test]
fn rename_file() -> anyhow::Result<()> { fn rename_file() -> anyhow::Result<()> {
let mut case = setup([("foo.py", "")])?; let mut case = setup([("foo.py", "")])?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let foo = case.system_file(&foo_path)?; let foo = case.system_file(&foo_path)?;
assert_eq!(case.collect_package_files(&foo_path), [foo]); assert_eq!(case.collect_project_files(), [foo]);
std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?; std::fs::rename(foo_path.as_std_path(), bar_path.as_std_path())?;
@ -539,15 +530,15 @@ fn rename_file() -> anyhow::Result<()> {
let bar = case.system_file(&bar_path)?; let bar = case.system_file(&bar_path)?;
assert!(bar.exists(case.db())); assert!(bar.exists(case.db()));
assert_eq!(case.collect_package_files(&foo_path), [bar]); assert_eq!(case.collect_project_files(), [bar]);
Ok(()) Ok(())
} }
#[test] #[test]
fn directory_moved_to_workspace() -> anyhow::Result<()> { fn directory_moved_to_project() -> anyhow::Result<()> {
let mut case = setup([("bar.py", "import sub.a")])?; let mut case = setup([("bar.py", "import sub.a")])?;
let bar = case.system_file(case.workspace_path("bar.py")).unwrap(); let bar = case.system_file(case.project_path("bar.py")).unwrap();
let sub_original_path = case.root_path().join("sub"); let sub_original_path = case.root_path().join("sub");
let init_original_path = sub_original_path.join("__init__.py"); let init_original_path = sub_original_path.join("__init__.py");
@ -565,12 +556,9 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
); );
assert_eq!(sub_a_module, None); assert_eq!(sub_a_module, None);
assert_eq!( assert_eq!(case.collect_project_files(), &[bar]);
case.collect_package_files(&case.workspace_path("bar.py")),
&[bar]
);
let sub_new_path = case.workspace_path("sub"); let sub_new_path = case.project_path("sub");
std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path()) std::fs::rename(sub_original_path.as_std_path(), sub_new_path.as_std_path())
.with_context(|| "Failed to move sub directory")?; .with_context(|| "Failed to move sub directory")?;
@ -592,10 +580,7 @@ fn directory_moved_to_workspace() -> anyhow::Result<()> {
) )
.is_some()); .is_some());
assert_eq!( assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
case.collect_package_files(&case.workspace_path("bar.py")),
&[bar, init_file, a_file]
);
Ok(()) Ok(())
} }
@ -607,7 +592,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
("sub/__init__.py", ""), ("sub/__init__.py", ""),
("sub/a.py", ""), ("sub/a.py", ""),
])?; ])?;
let bar = case.system_file(case.workspace_path("bar.py")).unwrap(); let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(resolve_module( assert!(resolve_module(
case.db().upcast(), case.db().upcast(),
@ -615,7 +600,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
) )
.is_some()); .is_some());
let sub_path = case.workspace_path("sub"); let sub_path = case.project_path("sub");
let init_file = case let init_file = case
.system_file(sub_path.join("__init__.py")) .system_file(sub_path.join("__init__.py"))
.expect("__init__.py to exist"); .expect("__init__.py to exist");
@ -623,10 +608,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
.system_file(sub_path.join("a.py")) .system_file(sub_path.join("a.py"))
.expect("a.py to exist"); .expect("a.py to exist");
assert_eq!( assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
case.collect_package_files(&case.workspace_path("bar.py")),
&[bar, init_file, a_file]
);
std::fs::create_dir(case.root_path().join(".trash").as_std_path())?; std::fs::create_dir(case.root_path().join(".trash").as_std_path())?;
let trashed_sub = case.root_path().join(".trash/sub"); let trashed_sub = case.root_path().join(".trash/sub");
@ -647,10 +629,7 @@ fn directory_moved_to_trash() -> anyhow::Result<()> {
assert!(!init_file.exists(case.db())); assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db())); assert!(!a_file.exists(case.db()));
assert_eq!( assert_eq!(case.collect_project_files(), &[bar]);
case.collect_package_files(&case.workspace_path("bar.py")),
&[bar]
);
Ok(()) Ok(())
} }
@ -663,7 +642,7 @@ fn directory_renamed() -> anyhow::Result<()> {
("sub/a.py", ""), ("sub/a.py", ""),
])?; ])?;
let bar = case.system_file(case.workspace_path("bar.py")).unwrap(); let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(resolve_module( assert!(resolve_module(
case.db().upcast(), case.db().upcast(),
@ -676,7 +655,7 @@ fn directory_renamed() -> anyhow::Result<()> {
) )
.is_none()); .is_none());
let sub_path = case.workspace_path("sub"); let sub_path = case.project_path("sub");
let sub_init = case let sub_init = case
.system_file(sub_path.join("__init__.py")) .system_file(sub_path.join("__init__.py"))
.expect("__init__.py to exist"); .expect("__init__.py to exist");
@ -684,14 +663,11 @@ fn directory_renamed() -> anyhow::Result<()> {
.system_file(sub_path.join("a.py")) .system_file(sub_path.join("a.py"))
.expect("a.py to exist"); .expect("a.py to exist");
assert_eq!( assert_eq!(case.collect_project_files(), &[bar, sub_init, sub_a]);
case.collect_package_files(&sub_path),
&[bar, sub_init, sub_a]
);
let foo_baz = case.workspace_path("foo/baz"); let foo_baz = case.project_path("foo/baz");
std::fs::create_dir(case.workspace_path("foo").as_std_path())?; std::fs::create_dir(case.project_path("foo").as_std_path())?;
std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path()) std::fs::rename(sub_path.as_std_path(), foo_baz.as_std_path())
.with_context(|| "Failed to move the sub directory")?; .with_context(|| "Failed to move the sub directory")?;
@ -730,7 +706,7 @@ fn directory_renamed() -> anyhow::Result<()> {
assert!(foo_baz_a.exists(case.db())); assert!(foo_baz_a.exists(case.db()));
assert_eq!( assert_eq!(
case.collect_package_files(&sub_path), case.collect_project_files(),
&[bar, foo_baz_init, foo_baz_a] &[bar, foo_baz_init, foo_baz_a]
); );
@ -745,7 +721,7 @@ fn directory_deleted() -> anyhow::Result<()> {
("sub/a.py", ""), ("sub/a.py", ""),
])?; ])?;
let bar = case.system_file(case.workspace_path("bar.py")).unwrap(); let bar = case.system_file(case.project_path("bar.py")).unwrap();
assert!(resolve_module( assert!(resolve_module(
case.db().upcast(), case.db().upcast(),
@ -753,7 +729,7 @@ fn directory_deleted() -> anyhow::Result<()> {
) )
.is_some()); .is_some());
let sub_path = case.workspace_path("sub"); let sub_path = case.project_path("sub");
let init_file = case let init_file = case
.system_file(sub_path.join("__init__.py")) .system_file(sub_path.join("__init__.py"))
@ -761,10 +737,7 @@ fn directory_deleted() -> anyhow::Result<()> {
let a_file = case let a_file = case
.system_file(sub_path.join("a.py")) .system_file(sub_path.join("a.py"))
.expect("a.py to exist"); .expect("a.py to exist");
assert_eq!( assert_eq!(case.collect_project_files(), &[bar, init_file, a_file]);
case.collect_package_files(&sub_path),
&[bar, init_file, a_file]
);
std::fs::remove_dir_all(sub_path.as_std_path()) std::fs::remove_dir_all(sub_path.as_std_path())
.with_context(|| "Failed to remove the sub directory")?; .with_context(|| "Failed to remove the sub directory")?;
@ -782,20 +755,20 @@ fn directory_deleted() -> anyhow::Result<()> {
assert!(!init_file.exists(case.db())); assert!(!init_file.exists(case.db()));
assert!(!a_file.exists(case.db())); assert!(!a_file.exists(case.db()));
assert_eq!(case.collect_package_files(&sub_path), &[bar]); assert_eq!(case.collect_project_files(), &[bar]);
Ok(()) Ok(())
} }
#[test] #[test]
fn search_path() -> anyhow::Result<()> { fn search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths( let mut case =
[("bar.py", "import sub.a")], setup_with_search_paths([("bar.py", "import sub.a")], |root_path, _project_path| {
|root_path, _workspace_path| SearchPathConfiguration { SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])), site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default() ..SearchPathConfiguration::default()
}, }
)?; })?;
let site_packages = case.root_path().join("site_packages"); let site_packages = case.root_path().join("site_packages");
@ -812,8 +785,8 @@ fn search_path() -> anyhow::Result<()> {
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some()); assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_some());
assert_eq!( assert_eq!(
case.collect_package_files(&case.workspace_path("bar.py")), case.collect_project_files(),
&[case.system_file(case.workspace_path("bar.py")).unwrap()] &[case.system_file(case.project_path("bar.py")).unwrap()]
); );
Ok(()) Ok(())
@ -823,7 +796,7 @@ fn search_path() -> anyhow::Result<()> {
fn add_search_path() -> anyhow::Result<()> { fn add_search_path() -> anyhow::Result<()> {
let mut case = setup([("bar.py", "import sub.a")])?; let mut case = setup([("bar.py", "import sub.a")])?;
let site_packages = case.workspace_path("site_packages"); let site_packages = case.project_path("site_packages");
std::fs::create_dir_all(site_packages.as_std_path())?; std::fs::create_dir_all(site_packages.as_std_path())?;
assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_none()); assert!(resolve_module(case.db().upcast(), &ModuleName::new_static("a").unwrap()).is_none());
@ -848,13 +821,13 @@ fn add_search_path() -> anyhow::Result<()> {
#[test] #[test]
fn remove_search_path() -> anyhow::Result<()> { fn remove_search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths( let mut case =
[("bar.py", "import sub.a")], setup_with_search_paths([("bar.py", "import sub.a")], |root_path, _project_path| {
|root_path, _workspace_path| SearchPathConfiguration { SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])), site_packages: Some(SitePackages::Known(vec![root_path.join("site_packages")])),
..SearchPathConfiguration::default() ..SearchPathConfiguration::default()
}, }
)?; })?;
// Remove site packages from the search path settings. // Remove site packages from the search path settings.
let site_packages = case.root_path().join("site_packages"); let site_packages = case.root_path().join("site_packages");
@ -876,8 +849,8 @@ fn remove_search_path() -> anyhow::Result<()> {
#[test] #[test]
fn changed_versions_file() -> anyhow::Result<()> { fn changed_versions_file() -> anyhow::Result<()> {
let mut case = setup_with_search_paths( let mut case = setup_with_search_paths(
|root_path: &SystemPath, workspace_path: &SystemPath| { |root_path: &SystemPath, project_path: &SystemPath| {
std::fs::write(workspace_path.join("bar.py").as_std_path(), "import sub.a")?; std::fs::write(project_path.join("bar.py").as_std_path(), "import sub.a")?;
std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?; std::fs::create_dir_all(root_path.join("typeshed/stdlib").as_std_path())?;
std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?; std::fs::write(root_path.join("typeshed/stdlib/VERSIONS").as_std_path(), "")?;
std::fs::write( std::fs::write(
@ -887,7 +860,7 @@ fn changed_versions_file() -> anyhow::Result<()> {
Ok(()) Ok(())
}, },
|root_path, _workspace_path| SearchPathConfiguration { |root_path, _project_path| SearchPathConfiguration {
typeshed: Some(root_path.join("typeshed")), typeshed: Some(root_path.join("typeshed")),
..SearchPathConfiguration::default() ..SearchPathConfiguration::default()
}, },
@ -915,11 +888,11 @@ fn changed_versions_file() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
/// Watch a workspace that contains two files where one file is a hardlink to another. /// Watch a project that contains two files where one file is a hardlink to another.
/// ///
/// Setup: /// Setup:
/// ```text /// ```text
/// - workspace /// - project
/// |- foo.py /// |- foo.py
/// |- bar.py (hard link to foo.py) /// |- bar.py (hard link to foo.py)
/// ``` /// ```
@ -935,22 +908,22 @@ fn changed_versions_file() -> anyhow::Result<()> {
/// I haven't found any documentation that states the notification behavior on Windows but what /// I haven't found any documentation that states the notification behavior on Windows but what
/// we're seeing is that Windows only emits a single event, similar to Linux. /// we're seeing is that Windows only emits a single event, similar to Linux.
#[test] #[test]
fn hard_links_in_workspace() -> anyhow::Result<()> { fn hard_links_in_project() -> anyhow::Result<()> {
let mut case = setup(|_root: &SystemPath, workspace: &SystemPath| { let mut case = setup(|_root: &SystemPath, project: &SystemPath| {
let foo_path = workspace.join("foo.py"); let foo_path = project.join("foo.py");
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?; std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
// Create a hardlink to `foo` // Create a hardlink to `foo`
let bar_path = workspace.join("bar.py"); let bar_path = project.join("bar.py");
std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path()) std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path())
.context("Failed to create hard link from foo.py -> bar.py")?; .context("Failed to create hard link from foo.py -> bar.py")?;
Ok(()) Ok(())
})?; })?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let foo = case.system_file(&foo_path).unwrap(); let foo = case.system_file(&foo_path).unwrap();
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let bar = case.system_file(&bar_path).unwrap(); let bar = case.system_file(&bar_path).unwrap();
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')"); assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
@ -973,12 +946,12 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
Ok(()) Ok(())
} }
/// Watch a workspace that contains one file that is a hardlink to a file outside the workspace. /// Watch a project that contains one file that is a hardlink to a file outside the project.
/// ///
/// Setup: /// Setup:
/// ```text /// ```text
/// - foo.py /// - foo.py
/// - workspace /// - project
/// |- bar.py (hard link to /foo.py) /// |- bar.py (hard link to /foo.py)
/// ``` /// ```
/// ///
@ -996,7 +969,7 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
/// [source](https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-readdirectorychangesw) /// [source](https://learn.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-readdirectorychangesw)
/// ///
/// My interpretation of this is that Windows doesn't support observing changes made to /// My interpretation of this is that Windows doesn't support observing changes made to
/// hard linked files outside the workspace. /// hard linked files outside the project.
#[test] #[test]
#[cfg_attr( #[cfg_attr(
target_os = "linux", target_os = "linux",
@ -1006,13 +979,13 @@ fn hard_links_in_workspace() -> anyhow::Result<()> {
target_os = "windows", target_os = "windows",
ignore = "windows doesn't support observing changes to hard linked files." ignore = "windows doesn't support observing changes to hard linked files."
)] )]
fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> { fn hard_links_to_target_outside_project() -> anyhow::Result<()> {
let mut case = setup(|root: &SystemPath, workspace: &SystemPath| { let mut case = setup(|root: &SystemPath, project: &SystemPath| {
let foo_path = root.join("foo.py"); let foo_path = root.join("foo.py");
std::fs::write(foo_path.as_std_path(), "print('Version 1')")?; std::fs::write(foo_path.as_std_path(), "print('Version 1')")?;
// Create a hardlink to `foo` // Create a hardlink to `foo`
let bar_path = workspace.join("bar.py"); let bar_path = project.join("bar.py");
std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path()) std::fs::hard_link(foo_path.as_std_path(), bar_path.as_std_path())
.context("Failed to create hard link from foo.py -> bar.py")?; .context("Failed to create hard link from foo.py -> bar.py")?;
@ -1021,7 +994,7 @@ fn hard_links_to_target_outside_workspace() -> anyhow::Result<()> {
let foo_path = case.root_path().join("foo.py"); let foo_path = case.root_path().join("foo.py");
let foo = case.system_file(&foo_path).unwrap(); let foo = case.system_file(&foo_path).unwrap();
let bar_path = case.workspace_path("bar.py"); let bar_path = case.project_path("bar.py");
let bar = case.system_file(&bar_path).unwrap(); let bar = case.system_file(&bar_path).unwrap();
assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')"); assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
@ -1044,13 +1017,13 @@ mod unix {
//! Tests that make use of unix specific file-system features. //! Tests that make use of unix specific file-system features.
use super::*; use super::*;
/// Changes the metadata of the only file in the workspace. /// Changes the metadata of the only file in the project.
#[test] #[test]
fn changed_metadata() -> anyhow::Result<()> { fn changed_metadata() -> anyhow::Result<()> {
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
let mut case = setup([("foo.py", "")])?; let mut case = setup([("foo.py", "")])?;
let foo_path = case.workspace_path("foo.py"); let foo_path = case.project_path("foo.py");
let foo = case.system_file(&foo_path)?; let foo = case.system_file(&foo_path)?;
assert_eq!( assert_eq!(
@ -1086,14 +1059,14 @@ mod unix {
Ok(()) Ok(())
} }
/// A workspace path is a symlink to a file outside the workspace. /// A project path is a symlink to a file outside the project.
/// ///
/// Setup: /// Setup:
/// ```text /// ```text
/// - bar /// - bar
/// |- baz.py /// |- baz.py
/// ///
/// - workspace /// - project
/// |- bar -> /bar /// |- bar -> /bar
/// ``` /// ```
/// ///
@ -1115,7 +1088,7 @@ mod unix {
ignore = "FSEvents doesn't emit change events for symlinked directories outside of the watched paths." ignore = "FSEvents doesn't emit change events for symlinked directories outside of the watched paths."
)] )]
fn symlink_target_outside_watched_paths() -> anyhow::Result<()> { fn symlink_target_outside_watched_paths() -> anyhow::Result<()> {
let mut case = setup(|root: &SystemPath, workspace: &SystemPath| { let mut case = setup(|root: &SystemPath, project: &SystemPath| {
// Set up the symlink target. // Set up the symlink target.
let link_target = root.join("bar"); let link_target = root.join("bar");
std::fs::create_dir_all(link_target.as_std_path()) std::fs::create_dir_all(link_target.as_std_path())
@ -1124,8 +1097,8 @@ mod unix {
std::fs::write(baz_original.as_std_path(), "def baz(): ...") std::fs::write(baz_original.as_std_path(), "def baz(): ...")
.context("Failed to write link target file")?; .context("Failed to write link target file")?;
// Create a symlink inside the workspace // Create a symlink inside the project
let bar = workspace.join("bar"); let bar = project.join("bar");
std::os::unix::fs::symlink(link_target.as_std_path(), bar.as_std_path()) std::os::unix::fs::symlink(link_target.as_std_path(), bar.as_std_path())
.context("Failed to create symlink to bar package")?; .context("Failed to create symlink to bar package")?;
@ -1137,7 +1110,7 @@ mod unix {
&ModuleName::new_static("bar.baz").unwrap(), &ModuleName::new_static("bar.baz").unwrap(),
) )
.expect("Expected bar.baz to exist in site-packages."); .expect("Expected bar.baz to exist in site-packages.");
let baz_workspace = case.workspace_path("bar/baz.py"); let baz_project = case.project_path("bar/baz.py");
assert_eq!( assert_eq!(
source_text(case.db(), baz.file()).as_str(), source_text(case.db(), baz.file()).as_str(),
@ -1145,7 +1118,7 @@ mod unix {
); );
assert_eq!( assert_eq!(
baz.file().path(case.db()).as_system_path(), baz.file().path(case.db()).as_system_path(),
Some(&*baz_workspace) Some(&*baz_project)
); );
let baz_original = case.root_path().join("bar/baz.py"); let baz_original = case.root_path().join("bar/baz.py");
@ -1164,7 +1137,7 @@ mod unix {
); );
// Write to the symlink source. // Write to the symlink source.
update_file(baz_workspace, "def baz(): print('Version 3')") update_file(baz_project, "def baz(): print('Version 3')")
.context("Failed to update bar/baz.py")?; .context("Failed to update bar/baz.py")?;
let changes = case.stop_watch(event_for_file("baz.py")); let changes = case.stop_watch(event_for_file("baz.py"));
@ -1179,14 +1152,14 @@ mod unix {
Ok(()) Ok(())
} }
/// Workspace contains a symlink to another directory inside the workspace. /// Project contains a symlink to another directory inside the project.
/// Changes to files in the symlinked directory should be reflected /// Changes to files in the symlinked directory should be reflected
/// to all files. /// to all files.
/// ///
/// Setup: /// Setup:
/// ```text /// ```text
/// - workspace /// - project
/// | - bar -> /workspace/patched/bar /// | - bar -> /project/patched/bar
/// | /// |
/// | - patched /// | - patched
/// | |-- bar /// | |-- bar
@ -1195,10 +1168,10 @@ mod unix {
/// |-- foo.py /// |-- foo.py
/// ``` /// ```
#[test] #[test]
fn symlink_inside_workspace() -> anyhow::Result<()> { fn symlink_inside_project() -> anyhow::Result<()> {
let mut case = setup(|_root: &SystemPath, workspace: &SystemPath| { let mut case = setup(|_root: &SystemPath, project: &SystemPath| {
// Set up the symlink target. // Set up the symlink target.
let link_target = workspace.join("patched/bar"); let link_target = project.join("patched/bar");
std::fs::create_dir_all(link_target.as_std_path()) std::fs::create_dir_all(link_target.as_std_path())
.context("Failed to create link target directory")?; .context("Failed to create link target directory")?;
let baz_original = link_target.join("baz.py"); let baz_original = link_target.join("baz.py");
@ -1206,8 +1179,8 @@ mod unix {
.context("Failed to write link target file")?; .context("Failed to write link target file")?;
// Create a symlink inside site-packages // Create a symlink inside site-packages
let bar_in_workspace = workspace.join("bar"); let bar_in_project = project.join("bar");
std::os::unix::fs::symlink(link_target.as_std_path(), bar_in_workspace.as_std_path()) std::os::unix::fs::symlink(link_target.as_std_path(), bar_in_project.as_std_path())
.context("Failed to create symlink to bar package")?; .context("Failed to create symlink to bar package")?;
Ok(()) Ok(())
@ -1218,9 +1191,9 @@ mod unix {
&ModuleName::new_static("bar.baz").unwrap(), &ModuleName::new_static("bar.baz").unwrap(),
) )
.expect("Expected bar.baz to exist in site-packages."); .expect("Expected bar.baz to exist in site-packages.");
let bar_baz = case.workspace_path("bar/baz.py"); let bar_baz = case.project_path("bar/baz.py");
let patched_bar_baz = case.workspace_path("patched/bar/baz.py"); let patched_bar_baz = case.project_path("patched/bar/baz.py");
let patched_bar_baz_file = case.system_file(&patched_bar_baz).unwrap(); let patched_bar_baz_file = case.system_file(&patched_bar_baz).unwrap();
assert_eq!( assert_eq!(
@ -1279,7 +1252,7 @@ mod unix {
/// - site-packages /// - site-packages
/// | - bar/baz.py /// | - bar/baz.py
/// ///
/// - workspace /// - project
/// |-- .venv/lib/python3.12/site-packages -> /site-packages /// |-- .venv/lib/python3.12/site-packages -> /site-packages
/// | /// |
/// |-- foo.py /// |-- foo.py
@ -1287,7 +1260,7 @@ mod unix {
#[test] #[test]
fn symlinked_module_search_path() -> anyhow::Result<()> { fn symlinked_module_search_path() -> anyhow::Result<()> {
let mut case = setup_with_search_paths( let mut case = setup_with_search_paths(
|root: &SystemPath, workspace: &SystemPath| { |root: &SystemPath, project: &SystemPath| {
// Set up the symlink target. // Set up the symlink target.
let site_packages = root.join("site-packages"); let site_packages = root.join("site-packages");
let bar = site_packages.join("bar"); let bar = site_packages.join("bar");
@ -1298,7 +1271,7 @@ mod unix {
.context("Failed to write baz.py")?; .context("Failed to write baz.py")?;
// Symlink the site packages in the venv to the global site packages // Symlink the site packages in the venv to the global site packages
let venv_site_packages = workspace.join(".venv/lib/python3.12/site-packages"); let venv_site_packages = project.join(".venv/lib/python3.12/site-packages");
std::fs::create_dir_all(venv_site_packages.parent().unwrap()) std::fs::create_dir_all(venv_site_packages.parent().unwrap())
.context("Failed to create .venv directory")?; .context("Failed to create .venv directory")?;
std::os::unix::fs::symlink( std::os::unix::fs::symlink(
@ -1309,9 +1282,9 @@ mod unix {
Ok(()) Ok(())
}, },
|_root, workspace| SearchPathConfiguration { |_root, project| SearchPathConfiguration {
site_packages: Some(SitePackages::Known(vec![ site_packages: Some(SitePackages::Known(vec![
workspace.join(".venv/lib/python3.12/site-packages") project.join(".venv/lib/python3.12/site-packages")
])), ])),
..SearchPathConfiguration::default() ..SearchPathConfiguration::default()
}, },
@ -1323,7 +1296,7 @@ mod unix {
) )
.expect("Expected bar.baz to exist in site-packages."); .expect("Expected bar.baz to exist in site-packages.");
let baz_site_packages_path = let baz_site_packages_path =
case.workspace_path(".venv/lib/python3.12/site-packages/bar/baz.py"); case.project_path(".venv/lib/python3.12/site-packages/bar/baz.py");
let baz_site_packages = case.system_file(&baz_site_packages_path).unwrap(); let baz_site_packages = case.system_file(&baz_site_packages_path).unwrap();
let baz_original = case.root_path().join("site-packages/bar/baz.py"); let baz_original = case.root_path().join("site-packages/bar/baz.py");
let baz_original_file = case.system_file(&baz_original).unwrap(); let baz_original_file = case.system_file(&baz_original).unwrap();
@ -1372,13 +1345,15 @@ mod unix {
} }
#[test] #[test]
fn nested_packages_delete_root() -> anyhow::Result<()> { fn nested_projects_delete_root() -> anyhow::Result<()> {
let mut case = setup(|root: &SystemPath, workspace_root: &SystemPath| { let mut case = setup(|root: &SystemPath, project_root: &SystemPath| {
std::fs::write( std::fs::write(
workspace_root.join("pyproject.toml").as_std_path(), project_root.join("pyproject.toml").as_std_path(),
r#" r#"
[project] [project]
name = "inner" name = "inner"
[tool.knot]
"#, "#,
)?; )?;
@ -1387,120 +1362,24 @@ fn nested_packages_delete_root() -> anyhow::Result<()> {
r#" r#"
[project] [project]
name = "outer" name = "outer"
[tool.knot]
"#, "#,
)?; )?;
Ok(()) Ok(())
})?; })?;
assert_eq!( assert_eq!(case.db().project().root(case.db()), &*case.project_path(""));
case.db().workspace().root(case.db()),
&*case.workspace_path("")
);
std::fs::remove_file(case.workspace_path("pyproject.toml").as_std_path())?; std::fs::remove_file(case.project_path("pyproject.toml").as_std_path())?;
let changes = case.stop_watch(ChangeEvent::is_deleted); let changes = case.stop_watch(ChangeEvent::is_deleted);
case.apply_changes(changes); case.apply_changes(changes);
// It should now pick up the outer workspace. // It should now pick up the outer project.
assert_eq!(case.db().workspace().root(case.db()), case.root_path()); assert_eq!(case.db().project().root(case.db()), case.root_path());
Ok(())
}
#[test]
fn added_package() -> anyhow::Result<()> {
let mut case = setup([
(
"pyproject.toml",
r#"
[project]
name = "inner"
[tool.knot.workspace]
members = ["packages/*"]
"#,
),
(
"packages/a/pyproject.toml",
r#"
[project]
name = "a"
"#,
),
])?;
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
std::fs::create_dir(case.workspace_path("packages/b").as_std_path())
.context("failed to create folder for package 'b'")?;
// It seems that the file watcher won't pick up on file changes shortly after the folder
// was created... I suspect this is because most file watchers don't support recursive
// file watching. Instead, file-watching libraries manually implement recursive file watching
// by setting a watcher for each directory. But doing this obviously "lags" behind.
case.take_watch_changes();
std::fs::write(
case.workspace_path("packages/b/pyproject.toml")
.as_std_path(),
r#"
[project]
name = "b"
"#,
)
.context("failed to write pyproject.toml for package b")?;
let changes = case.stop_watch(event_for_file("pyproject.toml"));
case.apply_changes(changes);
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
Ok(())
}
#[test]
fn removed_package() -> anyhow::Result<()> {
let mut case = setup([
(
"pyproject.toml",
r#"
[project]
name = "inner"
[tool.knot.workspace]
members = ["packages/*"]
"#,
),
(
"packages/a/pyproject.toml",
r#"
[project]
name = "a"
"#,
),
(
"packages/b/pyproject.toml",
r#"
[project]
name = "b"
"#,
),
])?;
assert_eq!(case.db().workspace().packages(case.db()).len(), 3);
std::fs::remove_dir_all(case.workspace_path("packages/b").as_std_path())
.context("failed to remove package 'b'")?;
let changes = case.stop_watch(ChangeEvent::is_deleted);
case.apply_changes(changes);
assert_eq!(case.db().workspace().packages(case.db()).len(), 2);
Ok(()) Ok(())
} }

View File

@ -180,7 +180,7 @@ pub(crate) mod tests {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version: self.python_version, python_version: self.python_version,
python_platform: self.python_platform, python_platform: self.python_platform,
search_paths, search_paths,

View File

@ -1294,7 +1294,7 @@ mod tests {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version: PythonVersion::PY38, python_version: PythonVersion::PY38,
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings { search_paths: SearchPathSettings {
@ -1800,7 +1800,7 @@ not_a_directory
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version: PythonVersion::default(), python_version: PythonVersion::default(),
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings { search_paths: SearchPathSettings {

View File

@ -232,7 +232,7 @@ impl TestCaseBuilder<MockedTypeshed> {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version, python_version,
python_platform, python_platform,
search_paths: SearchPathSettings { search_paths: SearchPathSettings {
@ -290,7 +290,7 @@ impl TestCaseBuilder<VendoredTypeshed> {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version, python_version,
python_platform, python_platform,
search_paths: SearchPathSettings { search_paths: SearchPathSettings {

View File

@ -1,14 +1,13 @@
use crate::module_resolver::SearchPaths;
use crate::python_platform::PythonPlatform; use crate::python_platform::PythonPlatform;
use crate::python_version::PythonVersion; use crate::python_version::PythonVersion;
use crate::Db;
use anyhow::Context; use anyhow::Context;
use ruff_db::system::{SystemPath, SystemPathBuf};
use salsa::Durability; use salsa::Durability;
use salsa::Setter; use salsa::Setter;
use ruff_db::system::{SystemPath, SystemPathBuf};
use crate::module_resolver::SearchPaths;
use crate::Db;
#[salsa::input(singleton)] #[salsa::input(singleton)]
pub struct Program { pub struct Program {
pub python_version: PythonVersion, pub python_version: PythonVersion,
@ -21,25 +20,51 @@ pub struct Program {
} }
impl Program { impl Program {
pub fn from_settings(db: &dyn Db, settings: &ProgramSettings) -> anyhow::Result<Self> { pub fn from_settings(db: &dyn Db, settings: ProgramSettings) -> anyhow::Result<Self> {
let ProgramSettings { let ProgramSettings {
python_version, python_version,
python_platform, python_platform,
search_paths, search_paths,
} = settings; } = settings;
tracing::info!("Python version: Python {python_version}"); tracing::info!("Python version: Python {python_version}, platform: {python_platform}");
let search_paths = SearchPaths::from_settings(db, search_paths) let search_paths = SearchPaths::from_settings(db, &search_paths)
.with_context(|| "Invalid search path settings")?; .with_context(|| "Invalid search path settings")?;
Ok( Ok(
Program::builder(*python_version, python_platform.clone(), search_paths) Program::builder(python_version, python_platform, search_paths)
.durability(Durability::HIGH) .durability(Durability::HIGH)
.new(db), .new(db),
) )
} }
pub fn update_from_settings(
self,
db: &mut dyn Db,
settings: ProgramSettings,
) -> anyhow::Result<()> {
let ProgramSettings {
python_version,
python_platform,
search_paths,
} = settings;
if &python_platform != self.python_platform(db) {
tracing::debug!("Updating python platform: `{python_platform:?}`");
self.set_python_platform(db).to(python_platform);
}
if python_version != self.python_version(db) {
tracing::debug!("Updating python version: Python {python_version}");
self.set_python_version(db).to(python_version);
}
self.update_search_paths(db, &search_paths)?;
Ok(())
}
pub fn update_search_paths( pub fn update_search_paths(
self, self,
db: &mut dyn Db, db: &mut dyn Db,
@ -77,7 +102,7 @@ pub struct SearchPathSettings {
/// or pyright's stubPath configuration setting. /// or pyright's stubPath configuration setting.
pub extra_paths: Vec<SystemPathBuf>, pub extra_paths: Vec<SystemPathBuf>,
/// The root of the workspace, used for finding first-party modules. /// The root of the project, used for finding first-party modules.
pub src_root: SystemPathBuf, pub src_root: SystemPathBuf,
/// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types. /// Optional path to a "custom typeshed" directory on disk for us to use for standard-library types.

View File

@ -1,3 +1,5 @@
use std::fmt::{Display, Formatter};
/// The target platform to assume when resolving types. /// The target platform to assume when resolving types.
#[derive(Debug, Clone, Default, PartialEq, Eq)] #[derive(Debug, Clone, Default, PartialEq, Eq)]
#[cfg_attr( #[cfg_attr(
@ -17,3 +19,12 @@ pub enum PythonPlatform {
#[cfg_attr(feature = "serde", serde(untagged))] #[cfg_attr(feature = "serde", serde(untagged))]
Identifier(String), Identifier(String),
} }
impl Display for PythonPlatform {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
PythonPlatform::All => f.write_str("all"),
PythonPlatform::Identifier(name) => f.write_str(name),
}
}
}

View File

@ -86,13 +86,11 @@ fn background_request_task<'a, R: traits::BackgroundDocumentRequestHandler>(
return Box::new(|_, _| {}); return Box::new(|_, _| {});
}; };
let db = match path { let db = match path {
AnySystemPath::System(path) => { AnySystemPath::System(path) => match session.project_db_for_path(path.as_std_path()) {
match session.workspace_db_for_path(path.as_std_path()) {
Some(db) => db.clone(), Some(db) => db.clone(),
None => session.default_workspace_db().clone(), None => session.default_project_db().clone(),
} },
} AnySystemPath::SystemVirtual(_) => session.default_project_db().clone(),
AnySystemPath::SystemVirtual(_) => session.default_workspace_db().clone(),
}; };
let Some(snapshot) = session.take_snapshot(url) else { let Some(snapshot) = session.take_snapshot(url) else {

View File

@ -36,14 +36,14 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler {
match path { match path {
AnySystemPath::System(path) => { AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) { let db = match session.project_db_for_path_mut(path.as_std_path()) {
Some(db) => db, Some(db) => db,
None => session.default_workspace_db_mut(), None => session.default_project_db_mut(),
}; };
db.apply_changes(vec![ChangeEvent::file_content_changed(path)], None); db.apply_changes(vec![ChangeEvent::file_content_changed(path)], None);
} }
AnySystemPath::SystemVirtual(virtual_path) => { AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut(); let db = session.default_project_db_mut();
db.apply_changes(vec![ChangeEvent::ChangedVirtual(virtual_path)], None); db.apply_changes(vec![ChangeEvent::ChangedVirtual(virtual_path)], None);
} }
} }

View File

@ -34,7 +34,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler {
.with_failure_code(ErrorCode::InternalError)?; .with_failure_code(ErrorCode::InternalError)?;
if let AnySystemPath::SystemVirtual(virtual_path) = path { if let AnySystemPath::SystemVirtual(virtual_path) = path {
let db = session.default_workspace_db_mut(); let db = session.default_project_db_mut();
db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None); db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
} }

View File

@ -33,7 +33,7 @@ impl SyncNotificationHandler for DidCloseNotebookHandler {
.with_failure_code(lsp_server::ErrorCode::InternalError)?; .with_failure_code(lsp_server::ErrorCode::InternalError)?;
if let AnySystemPath::SystemVirtual(virtual_path) = path { if let AnySystemPath::SystemVirtual(virtual_path) = path {
let db = session.default_workspace_db_mut(); let db = session.default_project_db_mut();
db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None); db.apply_changes(vec![ChangeEvent::DeletedVirtual(virtual_path)], None);
} }

View File

@ -33,14 +33,14 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler {
match path { match path {
AnySystemPath::System(path) => { AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) { let db = match session.project_db_for_path_mut(path.as_std_path()) {
Some(db) => db, Some(db) => db,
None => session.default_workspace_db_mut(), None => session.default_project_db_mut(),
}; };
db.apply_changes(vec![ChangeEvent::Opened(path)], None); db.apply_changes(vec![ChangeEvent::Opened(path)], None);
} }
AnySystemPath::SystemVirtual(virtual_path) => { AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut(); let db = session.default_project_db_mut();
db.files().virtual_file(db, &virtual_path); db.files().virtual_file(db, &virtual_path);
} }
} }

View File

@ -41,14 +41,14 @@ impl SyncNotificationHandler for DidOpenNotebookHandler {
match path { match path {
AnySystemPath::System(path) => { AnySystemPath::System(path) => {
let db = match session.workspace_db_for_path_mut(path.as_std_path()) { let db = match session.project_db_for_path_mut(path.as_std_path()) {
Some(db) => db, Some(db) => db,
None => session.default_workspace_db_mut(), None => session.default_project_db_mut(),
}; };
db.apply_changes(vec![ChangeEvent::Opened(path)], None); db.apply_changes(vec![ChangeEvent::Opened(path)], None);
} }
AnySystemPath::SystemVirtual(virtual_path) => { AnySystemPath::SystemVirtual(virtual_path) => {
let db = session.default_workspace_db_mut(); let db = session.default_project_db_mut();
db.files().virtual_file(db, &virtual_path); db.files().virtual_file(db, &virtual_path);
} }
} }

View File

@ -11,7 +11,7 @@ use crate::edit::ToRangeExt;
use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler}; use crate::server::api::traits::{BackgroundDocumentRequestHandler, RequestHandler};
use crate::server::{client::Notifier, Result}; use crate::server::{client::Notifier, Result};
use crate::session::DocumentSnapshot; use crate::session::DocumentSnapshot;
use red_knot_workspace::db::{Db, RootDatabase}; use red_knot_workspace::db::{Db, ProjectDatabase};
use ruff_db::diagnostic::Severity; use ruff_db::diagnostic::Severity;
use ruff_db::source::{line_index, source_text}; use ruff_db::source::{line_index, source_text};
@ -28,7 +28,7 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
fn run_with_snapshot( fn run_with_snapshot(
snapshot: DocumentSnapshot, snapshot: DocumentSnapshot,
db: RootDatabase, db: ProjectDatabase,
_notifier: Notifier, _notifier: Notifier,
_params: DocumentDiagnosticParams, _params: DocumentDiagnosticParams,
) -> Result<DocumentDiagnosticReportResult> { ) -> Result<DocumentDiagnosticReportResult> {
@ -46,7 +46,7 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler {
} }
} }
fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &RootDatabase) -> Vec<Diagnostic> { fn compute_diagnostics(snapshot: &DocumentSnapshot, db: &ProjectDatabase) -> Vec<Diagnostic> {
let Some(file) = snapshot.file(db) else { let Some(file) = snapshot.file(db) else {
tracing::info!( tracing::info!(
"No file found for snapshot for `{}`", "No file found for snapshot for `{}`",

View File

@ -5,7 +5,7 @@ use crate::session::{DocumentSnapshot, Session};
use lsp_types::notification::Notification as LSPNotification; use lsp_types::notification::Notification as LSPNotification;
use lsp_types::request::Request; use lsp_types::request::Request;
use red_knot_workspace::db::RootDatabase; use red_knot_workspace::db::ProjectDatabase;
/// A supertrait for any server request handler. /// A supertrait for any server request handler.
pub(super) trait RequestHandler { pub(super) trait RequestHandler {
@ -34,7 +34,7 @@ pub(super) trait BackgroundDocumentRequestHandler: RequestHandler {
fn run_with_snapshot( fn run_with_snapshot(
snapshot: DocumentSnapshot, snapshot: DocumentSnapshot,
db: RootDatabase, db: ProjectDatabase,
notifier: Notifier, notifier: Notifier,
params: <<Self as RequestHandler>::RequestType as Request>::Params, params: <<Self as RequestHandler>::RequestType as Request>::Params,
) -> super::Result<<<Self as RequestHandler>::RequestType as Request>::Result>; ) -> super::Result<<<Self as RequestHandler>::RequestType as Request>::Result>;

View File

@ -8,8 +8,8 @@ use std::sync::Arc;
use anyhow::anyhow; use anyhow::anyhow;
use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url}; use lsp_types::{ClientCapabilities, TextDocumentContentChangeEvent, Url};
use red_knot_workspace::db::RootDatabase; use red_knot_workspace::db::ProjectDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata; use red_knot_workspace::project::ProjectMetadata;
use ruff_db::files::{system_path_to_file, File}; use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::SystemPath; use ruff_db::system::SystemPath;
use ruff_db::Db; use ruff_db::Db;
@ -28,7 +28,7 @@ pub(crate) mod index;
mod settings; mod settings;
// TODO(dhruvmanila): In general, the server shouldn't use any salsa queries directly and instead // TODO(dhruvmanila): In general, the server shouldn't use any salsa queries directly and instead
// should use methods on `RootDatabase`. // should use methods on `ProjectDatabase`.
/// The global state for the LSP /// The global state for the LSP
pub struct Session { pub struct Session {
@ -41,8 +41,9 @@ pub struct Session {
/// [`index_mut`]: Session::index_mut /// [`index_mut`]: Session::index_mut
index: Option<Arc<index::Index>>, index: Option<Arc<index::Index>>,
/// Maps workspace root paths to their respective databases. /// Maps workspace folders to their respective project databases.
workspaces: BTreeMap<PathBuf, RootDatabase>, projects_by_workspace_folder: BTreeMap<PathBuf, ProjectDatabase>,
/// The global position encoding, negotiated during LSP initialization. /// The global position encoding, negotiated during LSP initialization.
position_encoding: PositionEncoding, position_encoding: PositionEncoding,
/// Tracks what LSP features the client supports and doesn't support. /// Tracks what LSP features the client supports and doesn't support.
@ -68,14 +69,14 @@ impl Session {
let system = LSPSystem::new(index.clone()); let system = LSPSystem::new(index.clone());
// TODO(dhruvmanila): Get the values from the client settings // TODO(dhruvmanila): Get the values from the client settings
let metadata = WorkspaceMetadata::discover(system_path, &system, None)?; let metadata = ProjectMetadata::discover(system_path, &system, None)?;
// TODO(micha): Handle the case where the program settings are incorrect more gracefully. // TODO(micha): Handle the case where the program settings are incorrect more gracefully.
workspaces.insert(path, RootDatabase::new(metadata, system)?); workspaces.insert(path, ProjectDatabase::new(metadata, system)?);
} }
Ok(Self { Ok(Self {
position_encoding, position_encoding,
workspaces, projects_by_workspace_folder: workspaces,
index: Some(index), index: Some(index),
resolved_client_capabilities: Arc::new(ResolvedClientCapabilities::new( resolved_client_capabilities: Arc::new(ResolvedClientCapabilities::new(
client_capabilities, client_capabilities,
@ -87,38 +88,41 @@ impl Session {
// and `default_workspace_db_mut` but the borrow checker doesn't allow that. // and `default_workspace_db_mut` but the borrow checker doesn't allow that.
// https://github.com/astral-sh/ruff/pull/13041#discussion_r1726725437 // https://github.com/astral-sh/ruff/pull/13041#discussion_r1726725437
/// Returns a reference to the workspace [`RootDatabase`] corresponding to the given path, if /// Returns a reference to the project's [`ProjectDatabase`] corresponding to the given path, if
/// any. /// any.
pub(crate) fn workspace_db_for_path(&self, path: impl AsRef<Path>) -> Option<&RootDatabase> { pub(crate) fn project_db_for_path(&self, path: impl AsRef<Path>) -> Option<&ProjectDatabase> {
self.workspaces self.projects_by_workspace_folder
.range(..=path.as_ref().to_path_buf()) .range(..=path.as_ref().to_path_buf())
.next_back() .next_back()
.map(|(_, db)| db) .map(|(_, db)| db)
} }
/// Returns a mutable reference to the workspace [`RootDatabase`] corresponding to the given /// Returns a mutable reference to the project [`ProjectDatabase`] corresponding to the given
/// path, if any. /// path, if any.
pub(crate) fn workspace_db_for_path_mut( pub(crate) fn project_db_for_path_mut(
&mut self, &mut self,
path: impl AsRef<Path>, path: impl AsRef<Path>,
) -> Option<&mut RootDatabase> { ) -> Option<&mut ProjectDatabase> {
self.workspaces self.projects_by_workspace_folder
.range_mut(..=path.as_ref().to_path_buf()) .range_mut(..=path.as_ref().to_path_buf())
.next_back() .next_back()
.map(|(_, db)| db) .map(|(_, db)| db)
} }
/// Returns a reference to the default workspace [`RootDatabase`]. The default workspace is the /// Returns a reference to the default project [`ProjectDatabase`]. The default project is the
/// minimum root path in the workspace map. /// minimum root path in the project map.
pub(crate) fn default_workspace_db(&self) -> &RootDatabase { pub(crate) fn default_project_db(&self) -> &ProjectDatabase {
// SAFETY: Currently, red knot only support a single workspace. // SAFETY: Currently, red knot only support a single project.
self.workspaces.values().next().unwrap() self.projects_by_workspace_folder.values().next().unwrap()
} }
/// Returns a mutable reference to the default workspace [`RootDatabase`]. /// Returns a mutable reference to the default project [`ProjectDatabase`].
pub(crate) fn default_workspace_db_mut(&mut self) -> &mut RootDatabase { pub(crate) fn default_project_db_mut(&mut self) -> &mut ProjectDatabase {
// SAFETY: Currently, red knot only support a single workspace. // SAFETY: Currently, red knot only support a single project.
self.workspaces.values_mut().next().unwrap() self.projects_by_workspace_folder
.values_mut()
.next()
.unwrap()
} }
pub fn key_from_url(&self, url: Url) -> DocumentKey { pub fn key_from_url(&self, url: Url) -> DocumentKey {
@ -187,7 +191,7 @@ impl Session {
fn index_mut(&mut self) -> MutIndexGuard { fn index_mut(&mut self) -> MutIndexGuard {
let index = self.index.take().unwrap(); let index = self.index.take().unwrap();
for db in self.workspaces.values_mut() { for db in self.projects_by_workspace_folder.values_mut() {
// Remove the `index` from each database. This drops the count of `Arc<Index>` down to 1 // Remove the `index` from each database. This drops the count of `Arc<Index>` down to 1
db.system_mut() db.system_mut()
.as_any_mut() .as_any_mut()
@ -232,7 +236,7 @@ impl Drop for MutIndexGuard<'_> {
fn drop(&mut self) { fn drop(&mut self) {
if let Some(index) = self.index.take() { if let Some(index) = self.index.take() {
let index = Arc::new(index); let index = Arc::new(index);
for db in self.session.workspaces.values_mut() { for db in self.session.projects_by_workspace_folder.values_mut() {
db.system_mut() db.system_mut()
.as_any_mut() .as_any_mut()
.downcast_mut::<LSPSystem>() .downcast_mut::<LSPSystem>()
@ -267,7 +271,7 @@ impl DocumentSnapshot {
self.position_encoding self.position_encoding
} }
pub(crate) fn file(&self, db: &RootDatabase) -> Option<File> { pub(crate) fn file(&self, db: &ProjectDatabase) -> Option<File> {
match url_to_any_system_path(self.document_ref.file_url()).ok()? { match url_to_any_system_path(self.document_ref.file_url()).ok()? {
AnySystemPath::System(path) => system_path_to_file(db, path).ok(), AnySystemPath::System(path) => system_path_to_file(db, path).ok(),
AnySystemPath::SystemVirtual(virtual_path) => db AnySystemPath::SystemVirtual(virtual_path) => db

View File

@ -38,7 +38,7 @@ impl Db {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version: PythonVersion::default(), python_version: PythonVersion::default(),
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings::new(db.workspace_root.clone()), search_paths: SearchPathSettings::new(db.workspace_root.clone()),

View File

@ -3,9 +3,9 @@ use std::any::Any;
use js_sys::Error; use js_sys::Error;
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
use red_knot_workspace::db::{Db, RootDatabase}; use red_knot_workspace::db::{Db, ProjectDatabase};
use red_knot_workspace::workspace::settings::Configuration; use red_knot_workspace::project::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata; use red_knot_workspace::project::ProjectMetadata;
use ruff_db::diagnostic::Diagnostic; use ruff_db::diagnostic::Diagnostic;
use ruff_db::files::{system_path_to_file, File}; use ruff_db::files::{system_path_to_file, File};
use ruff_db::system::walk_directory::WalkDirectoryBuilder; use ruff_db::system::walk_directory::WalkDirectoryBuilder;
@ -33,7 +33,7 @@ pub fn run() {
#[wasm_bindgen] #[wasm_bindgen]
pub struct Workspace { pub struct Workspace {
db: RootDatabase, db: ProjectDatabase,
system: WasmSystem, system: WasmSystem,
} }
@ -42,7 +42,7 @@ impl Workspace {
#[wasm_bindgen(constructor)] #[wasm_bindgen(constructor)]
pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> { pub fn new(root: &str, settings: &Settings) -> Result<Workspace, Error> {
let system = WasmSystem::new(SystemPath::new(root)); let system = WasmSystem::new(SystemPath::new(root));
let workspace = WorkspaceMetadata::discover( let workspace = ProjectMetadata::discover(
SystemPath::new(root), SystemPath::new(root),
&system, &system,
Some(&Configuration { Some(&Configuration {
@ -52,7 +52,7 @@ impl Workspace {
) )
.map_err(into_error)?; .map_err(into_error)?;
let db = RootDatabase::new(workspace, system.clone()).map_err(into_error)?; let db = ProjectDatabase::new(workspace, system.clone()).map_err(into_error)?;
Ok(Self { db, system }) Ok(Self { db, system })
} }
@ -67,7 +67,7 @@ impl Workspace {
let file = system_path_to_file(&self.db, path).expect("File to exist"); let file = system_path_to_file(&self.db, path).expect("File to exist");
file.sync(&mut self.db); file.sync(&mut self.db);
self.db.workspace().open_file(&mut self.db, file); self.db.project().open_file(&mut self.db, file);
Ok(FileHandle { Ok(FileHandle {
file, file,
@ -95,7 +95,7 @@ impl Workspace {
pub fn close_file(&mut self, file_id: &FileHandle) -> Result<(), Error> { pub fn close_file(&mut self, file_id: &FileHandle) -> Result<(), Error> {
let file = file_id.file; let file = file_id.file;
self.db.workspace().close_file(&mut self.db, file); self.db.project().close_file(&mut self.db, file);
self.system self.system
.fs .fs
.remove_file(&file_id.path) .remove_file(&file_id.path)

View File

@ -1,7 +1,7 @@
use std::panic::RefUnwindSafe; use std::panic::RefUnwindSafe;
use std::sync::Arc; use std::sync::Arc;
use crate::workspace::{check_file, Workspace, WorkspaceMetadata}; use crate::project::{check_file, Project, ProjectMetadata};
use crate::DEFAULT_LINT_REGISTRY; use crate::DEFAULT_LINT_REGISTRY;
use red_knot_python_semantic::lint::{LintRegistry, RuleSelection}; use red_knot_python_semantic::lint::{LintRegistry, RuleSelection};
use red_knot_python_semantic::{Db as SemanticDb, Program}; use red_knot_python_semantic::{Db as SemanticDb, Program};
@ -17,28 +17,28 @@ mod changes;
#[salsa::db] #[salsa::db]
pub trait Db: SemanticDb + Upcast<dyn SemanticDb> { pub trait Db: SemanticDb + Upcast<dyn SemanticDb> {
fn workspace(&self) -> Workspace; fn project(&self) -> Project;
} }
#[salsa::db] #[salsa::db]
#[derive(Clone)] #[derive(Clone)]
pub struct RootDatabase { pub struct ProjectDatabase {
workspace: Option<Workspace>, project: Option<Project>,
storage: salsa::Storage<RootDatabase>, storage: salsa::Storage<ProjectDatabase>,
files: Files, files: Files,
system: Arc<dyn System + Send + Sync + RefUnwindSafe>, system: Arc<dyn System + Send + Sync + RefUnwindSafe>,
rule_selection: Arc<RuleSelection>, rule_selection: Arc<RuleSelection>,
} }
impl RootDatabase { impl ProjectDatabase {
pub fn new<S>(workspace: WorkspaceMetadata, system: S) -> anyhow::Result<Self> pub fn new<S>(project_metadata: ProjectMetadata, system: S) -> anyhow::Result<Self>
where where
S: System + 'static + Send + Sync + RefUnwindSafe, S: System + 'static + Send + Sync + RefUnwindSafe,
{ {
let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY); let rule_selection = RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY);
let mut db = Self { let mut db = Self {
workspace: None, project: None,
storage: salsa::Storage::default(), storage: salsa::Storage::default(),
files: Files::default(), files: Files::default(),
system: Arc::new(system), system: Arc::new(system),
@ -46,16 +46,17 @@ impl RootDatabase {
}; };
// Initialize the `Program` singleton // Initialize the `Program` singleton
Program::from_settings(&db, workspace.settings().program())?; let program_settings = project_metadata.to_program_settings();
Program::from_settings(&db, program_settings)?;
db.workspace = Some(Workspace::from_metadata(&db, workspace)); db.project = Some(Project::from_metadata(&db, project_metadata));
Ok(db) Ok(db)
} }
/// Checks all open files in the workspace and its dependencies. /// Checks all open files in the project and its dependencies.
pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> { pub fn check(&self) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
self.with_db(|db| db.workspace().check(db)) self.with_db(|db| db.project().check(db))
} }
pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> { pub fn check_file(&self, file: File) -> Result<Vec<Box<dyn Diagnostic>>, Cancelled> {
@ -77,13 +78,13 @@ impl RootDatabase {
pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled> pub(crate) fn with_db<F, T>(&self, f: F) -> Result<T, Cancelled>
where where
F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, F: FnOnce(&ProjectDatabase) -> T + std::panic::UnwindSafe,
{ {
Cancelled::catch(|| f(self)) Cancelled::catch(|| f(self))
} }
} }
impl Upcast<dyn SemanticDb> for RootDatabase { impl Upcast<dyn SemanticDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn SemanticDb + 'static) { fn upcast(&self) -> &(dyn SemanticDb + 'static) {
self self
} }
@ -93,7 +94,7 @@ impl Upcast<dyn SemanticDb> for RootDatabase {
} }
} }
impl Upcast<dyn SourceDb> for RootDatabase { impl Upcast<dyn SourceDb> for ProjectDatabase {
fn upcast(&self) -> &(dyn SourceDb + 'static) { fn upcast(&self) -> &(dyn SourceDb + 'static) {
self self
} }
@ -104,13 +105,13 @@ impl Upcast<dyn SourceDb> for RootDatabase {
} }
#[salsa::db] #[salsa::db]
impl SemanticDb for RootDatabase { impl SemanticDb for ProjectDatabase {
fn is_file_open(&self, file: File) -> bool { fn is_file_open(&self, file: File) -> bool {
let Some(workspace) = &self.workspace else { let Some(project) = &self.project else {
return false; return false;
}; };
workspace.is_file_open(self, file) project.is_file_open(self, file)
} }
fn rule_selection(&self) -> &RuleSelection { fn rule_selection(&self) -> &RuleSelection {
@ -123,7 +124,7 @@ impl SemanticDb for RootDatabase {
} }
#[salsa::db] #[salsa::db]
impl SourceDb for RootDatabase { impl SourceDb for ProjectDatabase {
fn vendored(&self) -> &VendoredFileSystem { fn vendored(&self) -> &VendoredFileSystem {
red_knot_vendored::file_system() red_knot_vendored::file_system()
} }
@ -138,7 +139,7 @@ impl SourceDb for RootDatabase {
} }
#[salsa::db] #[salsa::db]
impl salsa::Database for RootDatabase { impl salsa::Database for ProjectDatabase {
fn salsa_event(&self, event: &dyn Fn() -> Event) { fn salsa_event(&self, event: &dyn Fn() -> Event) {
if !tracing::enabled!(tracing::Level::TRACE) { if !tracing::enabled!(tracing::Level::TRACE) {
return; return;
@ -154,9 +155,9 @@ impl salsa::Database for RootDatabase {
} }
#[salsa::db] #[salsa::db]
impl Db for RootDatabase { impl Db for ProjectDatabase {
fn workspace(&self) -> Workspace { fn project(&self) -> Project {
self.workspace.unwrap() self.project.unwrap()
} }
} }
@ -174,7 +175,7 @@ pub(crate) mod tests {
use ruff_db::{Db as SourceDb, Upcast}; use ruff_db::{Db as SourceDb, Upcast};
use crate::db::Db; use crate::db::Db;
use crate::workspace::{Workspace, WorkspaceMetadata}; use crate::project::{Project, ProjectMetadata};
use crate::DEFAULT_LINT_REGISTRY; use crate::DEFAULT_LINT_REGISTRY;
#[salsa::db] #[salsa::db]
@ -186,11 +187,11 @@ pub(crate) mod tests {
system: TestSystem, system: TestSystem,
vendored: VendoredFileSystem, vendored: VendoredFileSystem,
rule_selection: RuleSelection, rule_selection: RuleSelection,
workspace: Option<Workspace>, project: Option<Project>,
} }
impl TestDb { impl TestDb {
pub(crate) fn new(workspace: WorkspaceMetadata) -> Self { pub(crate) fn new(project: ProjectMetadata) -> Self {
let mut db = Self { let mut db = Self {
storage: salsa::Storage::default(), storage: salsa::Storage::default(),
system: TestSystem::default(), system: TestSystem::default(),
@ -198,11 +199,11 @@ pub(crate) mod tests {
files: Files::default(), files: Files::default(),
events: Arc::default(), events: Arc::default(),
rule_selection: RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY), rule_selection: RuleSelection::from_registry(&DEFAULT_LINT_REGISTRY),
workspace: None, project: None,
}; };
let workspace = Workspace::from_metadata(&db, workspace); let project = Project::from_metadata(&db, project);
db.workspace = Some(workspace); db.project = Some(project);
db db
} }
} }
@ -280,8 +281,8 @@ pub(crate) mod tests {
#[salsa::db] #[salsa::db]
impl Db for TestDb { impl Db for TestDb {
fn workspace(&self) -> Workspace { fn project(&self) -> Project {
self.workspace.unwrap() self.project.unwrap()
} }
} }

View File

@ -1,8 +1,7 @@
use crate::db::{Db, RootDatabase}; use crate::db::{Db, ProjectDatabase};
use crate::watch; use crate::project::settings::Configuration;
use crate::project::{Project, ProjectMetadata};
use crate::watch::{ChangeEvent, CreatedKind, DeletedKind}; use crate::watch::{ChangeEvent, CreatedKind, DeletedKind};
use crate::workspace::settings::Configuration;
use crate::workspace::{Workspace, WorkspaceMetadata};
use red_knot_python_semantic::Program; use red_knot_python_semantic::Program;
use ruff_db::files::{system_path_to_file, File, Files}; use ruff_db::files::{system_path_to_file, File, Files};
use ruff_db::system::walk_directory::WalkState; use ruff_db::system::walk_directory::WalkState;
@ -10,25 +9,24 @@ use ruff_db::system::SystemPath;
use ruff_db::Db as _; use ruff_db::Db as _;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
impl RootDatabase { impl ProjectDatabase {
#[tracing::instrument(level = "debug", skip(self, changes, base_configuration))] #[tracing::instrument(level = "debug", skip(self, changes, base_configuration))]
pub fn apply_changes( pub fn apply_changes(
&mut self, &mut self,
changes: Vec<watch::ChangeEvent>, changes: Vec<ChangeEvent>,
base_configuration: Option<&Configuration>, base_configuration: Option<&Configuration>,
) { ) {
let mut workspace = self.workspace(); let mut project = self.project();
let workspace_path = workspace.root(self).to_path_buf(); let project_path = project.root(self).to_path_buf();
let program = Program::get(self); let program = Program::get(self);
let custom_stdlib_versions_path = program let custom_stdlib_versions_path = program
.custom_stdlib_search_path(self) .custom_stdlib_search_path(self)
.map(|path| path.join("VERSIONS")); .map(|path| path.join("VERSIONS"));
let mut workspace_change = false; // Are there structural changes to the project
let mut project_changed = false;
// Changes to a custom stdlib path's VERSIONS // Changes to a custom stdlib path's VERSIONS
let mut custom_stdlib_change = false; let mut custom_stdlib_change = false;
// Packages that need reloading
let mut changed_packages = FxHashSet::default();
// Paths that were added // Paths that were added
let mut added_paths = FxHashSet::default(); let mut added_paths = FxHashSet::default();
@ -36,13 +34,13 @@ impl RootDatabase {
let mut synced_files = FxHashSet::default(); let mut synced_files = FxHashSet::default();
let mut synced_recursively = FxHashSet::default(); let mut synced_recursively = FxHashSet::default();
let mut sync_path = |db: &mut RootDatabase, path: &SystemPath| { let mut sync_path = |db: &mut ProjectDatabase, path: &SystemPath| {
if synced_files.insert(path.to_path_buf()) { if synced_files.insert(path.to_path_buf()) {
File::sync_path(db, path); File::sync_path(db, path);
} }
}; };
let mut sync_recursively = |db: &mut RootDatabase, path: &SystemPath| { let mut sync_recursively = |db: &mut ProjectDatabase, path: &SystemPath| {
if synced_recursively.insert(path.to_path_buf()) { if synced_recursively.insert(path.to_path_buf()) {
Files::sync_recursively(db, path); Files::sync_recursively(db, path);
} }
@ -54,19 +52,8 @@ impl RootDatabase {
path.file_name(), path.file_name(),
Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml") Some(".gitignore" | ".ignore" | "ruff.toml" | ".ruff.toml" | "pyproject.toml")
) { ) {
// Changes to ignore files or settings can change the workspace structure or add/remove files // Changes to ignore files or settings can change the project structure or add/remove files.
// from packages. project_changed = true;
if let Some(package) = workspace.package(self, path) {
if package.root(self) == workspace.root(self)
|| matches!(change, ChangeEvent::Deleted { .. })
{
workspace_change = true;
}
changed_packages.insert(package);
} else {
workspace_change = true;
}
continue; continue;
} }
@ -77,10 +64,11 @@ impl RootDatabase {
} }
match change { match change {
watch::ChangeEvent::Changed { path, kind: _ } ChangeEvent::Changed { path, kind: _ } | ChangeEvent::Opened(path) => {
| watch::ChangeEvent::Opened(path) => sync_path(self, &path), sync_path(self, &path);
}
watch::ChangeEvent::Created { kind, path } => { ChangeEvent::Created { kind, path } => {
match kind { match kind {
CreatedKind::File => sync_path(self, &path), CreatedKind::File => sync_path(self, &path),
CreatedKind::Directory | CreatedKind::Any => { CreatedKind::Directory | CreatedKind::Any => {
@ -97,7 +85,7 @@ impl RootDatabase {
} }
} }
watch::ChangeEvent::Deleted { kind, path } => { ChangeEvent::Deleted { kind, path } => {
let is_file = match kind { let is_file = match kind {
DeletedKind::File => true, DeletedKind::File => true,
DeletedKind::Directory => { DeletedKind::Directory => {
@ -113,10 +101,8 @@ impl RootDatabase {
if is_file { if is_file {
sync_path(self, &path); sync_path(self, &path);
if let Some(package) = workspace.package(self, &path) {
if let Some(file) = self.files().try_system(self, &path) { if let Some(file) = self.files().try_system(self, &path) {
package.remove_file(self, file); project.remove_file(self, file);
}
} }
} else { } else {
sync_recursively(self, &path); sync_recursively(self, &path);
@ -128,69 +114,68 @@ impl RootDatabase {
custom_stdlib_change = true; custom_stdlib_change = true;
} }
if let Some(package) = workspace.package(self, &path) { // Perform a full-reload in case the deleted directory contained the pyproject.toml.
changed_packages.insert(package); // We may want to make this more clever in the future, to e.g. iterate over the
} else { // indexed files and remove the once that start with the same path, unless
workspace_change = true; // the deleted path is the project configuration.
} project_changed = true;
} }
} }
watch::ChangeEvent::CreatedVirtual(path) ChangeEvent::CreatedVirtual(path) | ChangeEvent::ChangedVirtual(path) => {
| watch::ChangeEvent::ChangedVirtual(path) => {
File::sync_virtual_path(self, &path); File::sync_virtual_path(self, &path);
} }
watch::ChangeEvent::DeletedVirtual(path) => { ChangeEvent::DeletedVirtual(path) => {
if let Some(virtual_file) = self.files().try_virtual_file(&path) { if let Some(virtual_file) = self.files().try_virtual_file(&path) {
virtual_file.close(self); virtual_file.close(self);
} }
} }
watch::ChangeEvent::Rescan => { ChangeEvent::Rescan => {
workspace_change = true; project_changed = true;
Files::sync_all(self); Files::sync_all(self);
break; break;
} }
} }
} }
if workspace_change { if project_changed {
match WorkspaceMetadata::discover(&workspace_path, self.system(), base_configuration) { match ProjectMetadata::discover(&project_path, self.system(), base_configuration) {
Ok(metadata) => { Ok(metadata) => {
if metadata.root() == workspace.root(self) { let program_settings = metadata.to_program_settings();
tracing::debug!("Reloading workspace after structural change");
// TODO: Handle changes in the program settings. let program = Program::get(self);
workspace.reload(self, metadata); if let Err(error) = program.update_from_settings(self, program_settings) {
tracing::error!("Failed to update the program settings, keeping the old program settings: {error}");
};
if metadata.root() == project.root(self) {
tracing::debug!("Reloading project after structural change");
project.reload(self, metadata);
} else { } else {
tracing::debug!("Replace workspace after structural change"); tracing::debug!("Replace project after structural change");
workspace = Workspace::from_metadata(self, metadata); project = Project::from_metadata(self, metadata);
self.workspace = Some(workspace); self.project = Some(project);
} }
} }
Err(error) => { Err(error) => {
tracing::error!( tracing::error!(
"Failed to load workspace, keeping old workspace configuration: {error}" "Failed to load project, keeping old project configuration: {error}"
); );
} }
} }
return; return;
} else if custom_stdlib_change { } else if custom_stdlib_change {
let search_paths = workspace.search_path_settings(self).clone(); let search_paths = project.metadata(self).to_program_settings().search_paths;
if let Err(error) = program.update_search_paths(self, &search_paths) { if let Err(error) = program.update_search_paths(self, &search_paths) {
tracing::error!("Failed to set the new search paths: {error}"); tracing::error!("Failed to set the new search paths: {error}");
} }
} }
let mut added_paths = added_paths.into_iter().filter(|path| { let mut added_paths = added_paths.into_iter();
let Some(package) = workspace.package(self, path) else {
return false;
};
// Skip packages that need reloading
!changed_packages.contains(&package)
});
// Use directory walking to discover newly added files. // Use directory walking to discover newly added files.
if let Some(path) = added_paths.next() { if let Some(path) = added_paths.next() {
@ -221,18 +206,12 @@ impl RootDatabase {
}); });
for path in added_paths.into_inner().unwrap() { for path in added_paths.into_inner().unwrap() {
let package = workspace.package(self, &path);
let file = system_path_to_file(self, &path); let file = system_path_to_file(self, &path);
if let (Some(package), Ok(file)) = (package, file) { if let Ok(file) = file {
package.add_file(self, file); project.add_file(self, file);
} }
} }
} }
// Reload
for package in changed_packages {
package.reload_files(self);
}
} }
} }

View File

@ -2,8 +2,8 @@ use red_knot_python_semantic::lint::{LintRegistry, LintRegistryBuilder};
use red_knot_python_semantic::register_lints; use red_knot_python_semantic::register_lints;
pub mod db; pub mod db;
pub mod project;
pub mod watch; pub mod watch;
pub mod workspace;
pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> = pub static DEFAULT_LINT_REGISTRY: std::sync::LazyLock<LintRegistry> =
std::sync::LazyLock::new(default_lints_registry); std::sync::LazyLock::new(default_lints_registry);

View File

@ -0,0 +1,459 @@
#![allow(clippy::ref_option)]
use crate::db::Db;
use crate::db::ProjectDatabase;
use crate::project::files::{Index, Indexed, IndexedFiles, IndexedIter};
pub use metadata::{ProjectDiscoveryError, ProjectMetadata};
use red_knot_python_semantic::types::check_types;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::system::FileType;
use ruff_db::{
files::{system_path_to_file, File},
system::{walk_directory::WalkState, SystemPath},
};
use ruff_python_ast::PySourceType;
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};
use std::borrow::Cow;
use std::sync::Arc;
mod files;
mod metadata;
mod pyproject;
pub mod settings;
/// The project as a Salsa ingredient.
///
/// ## How is a project different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The project is defined in a higher level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
///    it remains the same project. That's why program is a narrowed view of the project only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Project {
    /// The files that are open in the project.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the project.
    ///
    /// Stored as an `Arc` so `take_open_files` can move the set out once salsa has
    /// dropped its own reference (see `Arc::try_unwrap` there).
    #[return_ref]
    #[default]
    open_fileset: Option<Arc<FxHashSet<File>>>,

    /// The first-party files of this project.
    ///
    /// Indexed lazily: `Project::files` populates it on first access and
    /// `reload_files` resets it to the lazy state to force re-discovery.
    #[default]
    #[return_ref]
    file_set: IndexedFiles,

    /// The metadata describing the project, including the unresolved configuration.
    #[return_ref]
    pub metadata: ProjectMetadata,
}
impl Project {
pub fn from_metadata(db: &dyn Db, metadata: ProjectMetadata) -> Self {
Project::builder(metadata)
.durability(Durability::MEDIUM)
.open_fileset_durability(Durability::LOW)
.file_set_durability(Durability::LOW)
.new(db)
}
pub fn root(self, db: &dyn Db) -> &SystemPath {
self.metadata(db).root()
}
pub fn name(self, db: &dyn Db) -> &str {
self.metadata(db).name()
}
pub fn reload(self, db: &mut dyn Db, metadata: ProjectMetadata) {
tracing::debug!("Reloading project");
assert_eq!(self.root(db), metadata.root());
if &metadata != self.metadata(db) {
self.set_metadata(db).to(metadata);
}
self.reload_files(db);
}
/// Checks all open files in the project and its dependencies.
pub fn check(self, db: &ProjectDatabase) -> Vec<Box<dyn Diagnostic>> {
let project_span = tracing::debug_span!("Project::check");
let _span = project_span.enter();
tracing::debug!("Checking project '{name}'", name = self.name(db));
let result = Arc::new(std::sync::Mutex::new(Vec::new()));
let inner_result = Arc::clone(&result);
let db = db.clone();
let project_span = project_span.clone();
rayon::scope(move |scope| {
let files = ProjectFiles::new(&db, self);
for file in &files {
let result = inner_result.clone();
let db = db.clone();
let project_span = project_span.clone();
scope.spawn(move |_| {
let check_file_span = tracing::debug_span!(parent: &project_span, "check_file", file=%file.path(&db));
let _entered = check_file_span.entered();
let file_diagnostics = check_file(&db, file);
result.lock().unwrap().extend(file_diagnostics);
});
}
});
Arc::into_inner(result).unwrap().into_inner().unwrap()
}
/// Opens a file in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
pub fn open_file(self, db: &mut dyn Db, file: File) {
tracing::debug!("Opening file `{}`", file.path(db));
let mut open_files = self.take_open_files(db);
open_files.insert(file);
self.set_open_files(db, open_files);
}
/// Closes a file in the project.
pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
tracing::debug!("Closing file `{}`", file.path(db));
let mut open_files = self.take_open_files(db);
let removed = open_files.remove(&file);
if removed {
self.set_open_files(db, open_files);
}
removed
}
/// Returns the open files in the project or `None` if the entire project should be checked.
pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
self.open_fileset(db).as_deref()
}
/// Sets the open files in the project.
///
/// This changes the behavior of `check` to only check the open files rather than all files in the project.
#[tracing::instrument(level = "debug", skip(self, db))]
pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
tracing::debug!("Set open project files (count: {})", open_files.len());
self.set_open_fileset(db).to(Some(Arc::new(open_files)));
}
/// This takes the open files from the project and returns them.
///
/// This changes the behavior of `check` to check all files in the project instead of just the open files.
fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
tracing::debug!("Take open project files");
// Salsa will cancel any pending queries and remove its own reference to `open_files`
// so that the reference counter to `open_files` now drops to 1.
let open_files = self.set_open_fileset(db).to(None);
if let Some(open_files) = open_files {
Arc::try_unwrap(open_files).unwrap()
} else {
FxHashSet::default()
}
}
/// Returns `true` if the file is open in the project.
///
/// A file is considered open when:
/// * explicitly set as an open file using [`open_file`](Self::open_file)
/// * It has a [`SystemPath`] and belongs to a package's `src` files
/// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
if let Some(open_files) = self.open_files(db) {
open_files.contains(&file)
} else if file.path(db).is_system_path() {
self.contains_file(db, file)
} else {
file.path(db).is_system_virtual_path()
}
}
/// Returns `true` if `file` is a first-party file part of this package.
pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
self.files(db).contains(&file)
}
/// Removes `file` from the project's indexed file set.
///
/// Does nothing when the index has not been populated yet (a later indexing
/// run simply won't discover the file).
#[tracing::instrument(level = "debug", skip(db))]
pub fn remove_file(self, db: &mut dyn Db, file: File) {
    tracing::debug!(
        "Removing file `{}` from project `{}`",
        file.path(db),
        self.name(db)
    );

    if let Some(mut index) = IndexedFiles::indexed_mut(db, self) {
        index.remove(file);
    }
}
/// Adds `file` to the project's indexed file set.
///
/// Does nothing when the index has not been populated yet (a later indexing
/// run will discover the file on its own).
pub fn add_file(self, db: &mut dyn Db, file: File) {
    tracing::debug!(
        "Adding file `{}` to project `{}`",
        file.path(db),
        self.name(db)
    );

    if let Some(mut index) = IndexedFiles::indexed_mut(db, self) {
        index.insert(file);
    }
}
/// Returns the files belonging to this project.
///
/// The set is discovered lazily on first access and cached for subsequent
/// reads; use [`reload_files`](Self::reload_files) to invalidate it.
pub fn files(self, db: &dyn Db) -> Indexed<'_> {
    match self.file_set(db).get() {
        Index::Indexed(indexed) => indexed,
        Index::Lazy(vacant) => {
            // First access: walk the project directory and cache the result.
            let _entered =
                tracing::debug_span!("Project::index_files", package = %self.name(db))
                    .entered();

            let files = discover_project_files(db, self);
            tracing::info!("Found {} files in project `{}`", files.len(), self.name(db));
            vacant.set(files)
        }
    }
}
/// Marks the file index as stale so the files are re-discovered on the next
/// [`files`](Self::files) access.
pub fn reload_files(self, db: &mut dyn Db) {
    tracing::debug!("Reloading files for project `{}`", self.name(db));

    let already_lazy = self.file_set(db).is_lazy();
    if !already_lazy {
        // Force a re-index of the files in the next revision.
        self.set_file_set(db).to(IndexedFiles::lazy());
    }
}
}
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
if let Some(read_error) = source.read_error() {
diagnostics.push(Box::new(IOErrorDiagnostic {
file,
error: read_error.clone(),
}));
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
diagnostics.extend(parsed.errors().iter().map(|error| {
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
diagnostic
}));
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
boxed
}));
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
diagnostics
}
/// Walks the project's root directory and returns every Python source file as
/// a [`File`], for use as the project's indexed file set.
fn discover_project_files(db: &dyn Db, project: Project) -> FxHashSet<File> {
    let paths = std::sync::Mutex::new(Vec::new());

    db.system().walk_directory(project.root(db)).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Only record regular files with a recognized Python
                    // extension to avoid creating too many entries in `Files`.
                    let is_python_file = matches!(entry.file_type(), FileType::File)
                        && entry
                            .path()
                            .extension()
                            .and_then(PySourceType::try_from_extension)
                            .is_some();

                    if is_python_file {
                        paths.lock().unwrap().push(entry.into_path());
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // The file may have been deleted between the `walk_directory` call and
        // now; the lookup errors in that case and we simply skip the path.
        if let Ok(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
/// The set of files a check run iterates over: either only the explicitly
/// opened files or the project's full indexed file set.
#[derive(Debug)]
enum ProjectFiles<'a> {
    /// Only the files explicitly opened (e.g. in an editor).
    OpenFiles(&'a FxHashSet<File>),
    /// All indexed files of the project.
    Indexed(Indexed<'a>),
}
impl<'a> ProjectFiles<'a> {
    /// Selects the open-file set when one is tracked, the full index otherwise.
    fn new(db: &'a dyn Db, project: Project) -> Self {
        match project.open_files(db) {
            Some(open_files) => ProjectFiles::OpenFiles(open_files),
            None => ProjectFiles::Indexed(project.files(db)),
        }
    }
}
impl<'a> IntoIterator for &'a ProjectFiles<'a> {
    type Item = File;
    type IntoIter = ProjectFilesIter<'a>;

    /// Produces the iterator variant matching the underlying file source.
    fn into_iter(self) -> Self::IntoIter {
        match self {
            ProjectFiles::Indexed(indexed) => ProjectFilesIter::Indexed {
                files: indexed.into_iter(),
            },
            ProjectFiles::OpenFiles(files) => ProjectFilesIter::OpenFiles(files.iter()),
        }
    }
}
/// Iterator over the files yielded by [`ProjectFiles`].
enum ProjectFilesIter<'db> {
    /// Iterates the explicitly opened files.
    OpenFiles(std::collections::hash_set::Iter<'db, File>),
    /// Iterates the indexed project files.
    Indexed { files: IndexedIter<'db> },
}
impl Iterator for ProjectFilesIter<'_> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            ProjectFilesIter::Indexed { files } => files.next(),
            // The hash-set iterator yields `&File`; `File` is `Copy`.
            ProjectFilesIter::OpenFiles(files) => files.next().copied(),
        }
    }
}
/// Diagnostic emitted when a file's source text could not be read.
#[derive(Debug)]
pub struct IOErrorDiagnostic {
    // The file that failed to load.
    file: File,
    // The underlying read error.
    error: SourceTextError,
}
impl Diagnostic for IOErrorDiagnostic {
    fn id(&self) -> DiagnosticId {
        DiagnosticId::Io
    }

    fn message(&self) -> Cow<str> {
        Cow::from(self.error.to_string())
    }

    fn file(&self) -> File {
        self.file
    }

    fn range(&self) -> Option<TextRange> {
        // An IO error is not tied to a location inside the file.
        None
    }

    fn severity(&self) -> Severity {
        Severity::Error
    }
}
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::project::{check_file, ProjectMetadata};
    use red_knot_python_semantic::types::check_types;
    use ruff_db::diagnostic::Diagnostic;
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::source_text;
    use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    /// Verifies that a read failure short-circuits `check_file`: only the IO
    /// diagnostic is reported and `check_types` is never executed, and that a
    /// later re-creation of the file updates the diagnostics again.
    #[test]
    fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
        let project = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/"));
        let mut db = TestDb::new(project);
        let path = SystemPath::new("test.py");

        db.write_file(path, "x = 10")?;
        let file = system_path_to_file(&db, path).unwrap();

        // Now the file gets deleted before we had a chance to read its source text.
        db.memory_file_system().remove_file(path)?;
        file.sync(&mut db);

        // A missing file reads as empty source text.
        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec!["Failed to read file: No such file or directory".to_string()]
        );

        // The salsa events must be captured right after `check_file` so the
        // assertion below only sees queries triggered by that call.
        let events = db.take_salsa_events();
        assert_function_query_was_not_run(&db, check_types, file, &events);

        // The user now creates a new file with an empty text. The source text
        // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec![] as Vec<String>
        );

        Ok(())
    }
}

View File

@ -8,12 +8,12 @@ use salsa::Setter;
use ruff_db::files::File; use ruff_db::files::File;
use crate::db::Db; use crate::db::Db;
use crate::workspace::Package; use crate::project::Project;
/// Cheap cloneable hash set of files. /// Cheap cloneable hash set of files.
type FileSet = Arc<FxHashSet<File>>; type FileSet = Arc<FxHashSet<File>>;
/// The indexed files of a package. /// The indexed files of a project.
/// ///
/// The indexing happens lazily, but the files are then cached for subsequent reads. /// The indexing happens lazily, but the files are then cached for subsequent reads.
/// ///
@ -24,11 +24,11 @@ type FileSet = Arc<FxHashSet<File>>;
/// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to /// the indexed files must go through `IndexedMut`, which uses the Salsa setter `package.set_file_set` to
/// ensure that Salsa always knows when the set of indexed files have changed. /// ensure that Salsa always knows when the set of indexed files have changed.
#[derive(Debug)] #[derive(Debug)]
pub struct PackageFiles { pub struct IndexedFiles {
state: std::sync::Mutex<State>, state: std::sync::Mutex<State>,
} }
impl PackageFiles { impl IndexedFiles {
pub fn lazy() -> Self { pub fn lazy() -> Self {
Self { Self {
state: std::sync::Mutex::new(State::Lazy), state: std::sync::Mutex::new(State::Lazy),
@ -60,7 +60,7 @@ impl PackageFiles {
/// Returns a mutable view on the index that allows cheap in-place mutations. /// Returns a mutable view on the index that allows cheap in-place mutations.
/// ///
/// The changes are automatically written back to the database once the view is dropped. /// The changes are automatically written back to the database once the view is dropped.
pub(super) fn indexed_mut(db: &mut dyn Db, package: Package) -> Option<IndexedMut> { pub(super) fn indexed_mut(db: &mut dyn Db, project: Project) -> Option<IndexedMut> {
// Calling `zalsa_mut` cancels all pending salsa queries. This ensures that there are no pending // Calling `zalsa_mut` cancels all pending salsa queries. This ensures that there are no pending
// reads to the file set. // reads to the file set.
// TODO: Use a non-internal API instead https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries // TODO: Use a non-internal API instead https://salsa.zulipchat.com/#narrow/stream/333573-salsa-3.2E0/topic/Expose.20an.20API.20to.20cancel.20other.20queries
@ -79,7 +79,7 @@ impl PackageFiles {
// all clones must have been dropped at this point and the `Indexed` // all clones must have been dropped at this point and the `Indexed`
// can't outlive the database (constrained by the `db` lifetime). // can't outlive the database (constrained by the `db` lifetime).
let state = { let state = {
let files = package.file_set(db); let files = project.file_set(db);
let mut locked = files.state.lock().unwrap(); let mut locked = files.state.lock().unwrap();
std::mem::replace(&mut *locked, State::Lazy) std::mem::replace(&mut *locked, State::Lazy)
}; };
@ -93,14 +93,14 @@ impl PackageFiles {
Some(IndexedMut { Some(IndexedMut {
db: Some(db), db: Some(db),
package, project,
files: indexed, files: indexed,
did_change: false, did_change: false,
}) })
} }
} }
impl Default for PackageFiles { impl Default for IndexedFiles {
fn default() -> Self { fn default() -> Self {
Self::lazy() Self::lazy()
} }
@ -142,7 +142,7 @@ impl<'db> LazyFiles<'db> {
/// The indexed files of a package. /// The indexed files of a package.
/// ///
/// Note: This type is intentionally non-cloneable. Making it cloneable requires /// Note: This type is intentionally non-cloneable. Making it cloneable requires
/// revisiting the locking behavior in [`PackageFiles::indexed_mut`]. /// revisiting the locking behavior in [`IndexedFiles::indexed_mut`].
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub struct Indexed<'db> { pub struct Indexed<'db> {
files: FileSet, files: FileSet,
@ -169,13 +169,13 @@ impl<'a> IntoIterator for &'a Indexed<'_> {
} }
} }
/// A Mutable view of a package's indexed files. /// A Mutable view of a project's indexed files.
/// ///
/// Allows in-place mutation of the files without deep cloning the hash set. /// Allows in-place mutation of the files without deep cloning the hash set.
/// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually. /// The changes are written back when the mutable view is dropped or by calling [`Self::set`] manually.
pub(super) struct IndexedMut<'db> { pub(super) struct IndexedMut<'db> {
db: Option<&'db mut dyn Db>, db: Option<&'db mut dyn Db>,
package: Package, project: Project,
files: FileSet, files: FileSet,
did_change: bool, did_change: bool,
} }
@ -212,12 +212,12 @@ impl IndexedMut<'_> {
if self.did_change { if self.did_change {
// If there are changes, set the new file_set to trigger a salsa revision change. // If there are changes, set the new file_set to trigger a salsa revision change.
self.package self.project
.set_file_set(db) .set_file_set(db)
.to(PackageFiles::indexed(files)); .to(IndexedFiles::indexed(files));
} else { } else {
// The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state. // The `indexed_mut` replaced the `state` with Lazy. Restore it back to the indexed state.
*self.package.file_set(db).state.lock().unwrap() = State::Indexed(files); *self.project.file_set(db).state.lock().unwrap() = State::Indexed(files);
} }
} }
} }
@ -234,30 +234,24 @@ mod tests {
use crate::db::tests::TestDb; use crate::db::tests::TestDb;
use crate::db::Db; use crate::db::Db;
use crate::workspace::files::Index; use crate::project::files::Index;
use crate::workspace::WorkspaceMetadata; use crate::project::ProjectMetadata;
use ruff_db::files::system_path_to_file; use ruff_db::files::system_path_to_file;
use ruff_db::system::{DbWithTestSystem, SystemPathBuf}; use ruff_db::system::{DbWithTestSystem, SystemPathBuf};
use ruff_python_ast::name::Name; use ruff_python_ast::name::Name;
#[test] #[test]
fn re_entrance() -> anyhow::Result<()> { fn re_entrance() -> anyhow::Result<()> {
let metadata = WorkspaceMetadata::single_package( let metadata = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/test"));
Name::new_static("test"),
SystemPathBuf::from("/test"),
);
let mut db = TestDb::new(metadata); let mut db = TestDb::new(metadata);
db.write_file("test.py", "")?; db.write_file("test.py", "")?;
let package = db let project = db.project();
.workspace()
.package(&db, "/test")
.expect("test package to exist");
let file = system_path_to_file(&db, "test.py").unwrap(); let file = system_path_to_file(&db, "test.py").unwrap();
let files = match package.file_set(&db).get() { let files = match project.file_set(&db).get() {
Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])), Index::Lazy(lazy) => lazy.set(FxHashSet::from_iter([file])),
Index::Indexed(files) => files, Index::Indexed(files) => files,
}; };
@ -265,7 +259,7 @@ mod tests {
// Calling files a second time should not dead-lock. // Calling files a second time should not dead-lock.
// This can e.g. happen when `check_file` iterates over all files and // This can e.g. happen when `check_file` iterates over all files and
// `is_file_open` queries the open files. // `is_file_open` queries the open files.
let files_2 = package.file_set(&db).get(); let files_2 = project.file_set(&db).get();
match files_2 { match files_2 {
Index::Lazy(_) => { Index::Lazy(_) => {

View File

@ -0,0 +1,418 @@
use ruff_db::system::{System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use crate::project::pyproject::{PyProject, PyProjectError};
use crate::project::settings::Configuration;
use red_knot_python_semantic::ProgramSettings;
use thiserror::Error;
/// Metadata describing a project: its name, root directory and configuration.
#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct ProjectMetadata {
    // The project's name, taken from `pyproject.toml` or derived from the root
    // directory's name when no `pyproject.toml` exists.
    pub(super) name: Name,

    // The directory containing the project files.
    pub(super) root: SystemPathBuf,

    /// The resolved settings for this project.
    pub(super) configuration: Configuration,
}
impl ProjectMetadata {
    /// Creates a project with the given name and root that uses the default configuration options.
    pub fn new(name: Name, root: SystemPathBuf) -> Self {
        Self {
            name,
            root,
            configuration: Configuration::default(),
        }
    }

    /// Loads a project from a `pyproject.toml` file.
    ///
    /// Falls back to the root directory's name (or `"root"`) when the
    /// `[project]` table declares no name. `base_configuration`, when given,
    /// is layered on top of the defaults.
    pub(crate) fn from_pyproject(
        pyproject: PyProject,
        root: SystemPathBuf,
        base_configuration: Option<&Configuration>,
    ) -> Self {
        let name = pyproject.project.and_then(|project| project.name);
        let name = name
            .map(|name| Name::new(&*name))
            .unwrap_or_else(|| Name::new(root.file_name().unwrap_or("root")));

        // TODO: load configuration from pyproject.toml
        let mut configuration = Configuration::default();

        if let Some(base_configuration) = base_configuration {
            configuration.extend(base_configuration.clone());
        }

        Self {
            name,
            root,
            configuration,
        }
    }

    /// Discovers the closest project at `path` and returns its metadata.
    ///
    /// The algorithm traverses upwards in the `path`'s ancestor chain and uses the following precedence
    /// to resolve the project's root.
    ///
    /// 1. The closest `pyproject.toml` with a `tool.knot` section.
    /// 1. The closest `pyproject.toml`.
    /// 1. Fallback to use `path` as the root and use the default settings.
    pub fn discover(
        path: &SystemPath,
        system: &dyn System,
        base_configuration: Option<&Configuration>,
    ) -> Result<ProjectMetadata, ProjectDiscoveryError> {
        tracing::debug!("Searching for a project in '{path}'");

        if !system.is_directory(path) {
            return Err(ProjectDiscoveryError::NotADirectory(path.to_path_buf()));
        }

        // Remembers the closest ancestor with a `pyproject.toml` but no
        // `tool.knot` section, used as a fallback root.
        let mut closest_project: Option<ProjectMetadata> = None;

        for ancestor in path.ancestors() {
            let pyproject_path = ancestor.join("pyproject.toml");
            if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
                let pyproject = PyProject::from_str(&pyproject_str).map_err(|error| {
                    ProjectDiscoveryError::InvalidPyProject {
                        path: pyproject_path,
                        source: Box::new(error),
                    }
                })?;

                let has_knot_section = pyproject.knot().is_some();
                let metadata = ProjectMetadata::from_pyproject(
                    pyproject,
                    ancestor.to_path_buf(),
                    base_configuration,
                );

                if has_knot_section {
                    // A `tool.knot` section marks an explicit project root.
                    let project_root = ancestor;
                    tracing::debug!("Found project at '{}'", project_root);

                    return Ok(metadata);
                }

                // Not a project itself, keep looking for an enclosing project.
                if closest_project.is_none() {
                    closest_project = Some(metadata);
                }
            }
        }

        // No project found, but maybe a pyproject.toml was found.
        let metadata = if let Some(closest_project) = closest_project {
            tracing::debug!(
                "Project without `tool.knot` section: '{}'",
                closest_project.root()
            );

            closest_project
        } else {
            tracing::debug!("The ancestor directories contain no `pyproject.toml`. Falling back to a virtual project.");

            // Create a package with a default configuration
            Self {
                name: path.file_name().unwrap_or("root").into(),
                root: path.to_path_buf(),
                // TODO create the configuration from the pyproject toml
                configuration: base_configuration.cloned().unwrap_or_default(),
            }
        };

        Ok(metadata)
    }

    /// Returns the project's root directory.
    pub fn root(&self) -> &SystemPath {
        &self.root
    }

    /// Returns the project's name.
    pub fn name(&self) -> &str {
        &self.name
    }

    /// Returns the project's configuration.
    pub fn configuration(&self) -> &Configuration {
        &self.configuration
    }

    /// Resolves the configuration into program settings, using the project
    /// root as the first-party source root.
    pub fn to_program_settings(&self) -> ProgramSettings {
        self.configuration.to_program_settings(self.root())
    }
}
/// Errors that can occur while discovering a project.
#[derive(Debug, Error)]
pub enum ProjectDiscoveryError {
    /// The given discovery path does not point at a directory.
    #[error("project path '{0}' is not a directory")]
    NotADirectory(SystemPathBuf),

    /// A `pyproject.toml` was found but failed to parse.
    #[error("{path} is not a valid `pyproject.toml`: {source}")]
    InvalidPyProject {
        source: Box<PyProjectError>,
        path: SystemPathBuf,
    },
}
#[cfg(test)]
mod tests {
    //! Integration tests for project discovery

    use crate::snapshot_project;
    use anyhow::{anyhow, Context};
    use insta::assert_ron_snapshot;
    use ruff_db::system::{SystemPathBuf, TestSystem};

    use crate::project::{ProjectDiscoveryError, ProjectMetadata};

    /// No `pyproject.toml` anywhere: discovery falls back to a virtual
    /// project rooted at the given path.
    #[test]
    fn project_without_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
            .context("Failed to write files")?;

        let project = ProjectMetadata::discover(&root, &system, None)
            .context("Failed to discover project")?;

        assert_eq!(project.root(), &*root);

        snapshot_project!(project);

        Ok(())
    }

    /// A plain `pyproject.toml` (no `tool.knot`) still determines the root
    /// and name, and discovery from a subdirectory resolves the same project.
    #[test]
    fn project_with_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "backend"
"#,
                ),
                (root.join("db/__init__.py"), ""),
            ])
            .context("Failed to write files")?;

        let project = ProjectMetadata::discover(&root, &system, None)
            .context("Failed to discover project")?;

        assert_eq!(project.root(), &*root);
        snapshot_project!(project);

        // Discovering the same package from a subdirectory should give the same result
        let from_src = ProjectMetadata::discover(&root.join("db"), &system, None)
            .context("Failed to discover project from src sub-directory")?;

        assert_eq!(from_src, project);

        Ok(())
    }

    /// A syntactically broken `pyproject.toml` surfaces as an
    /// `InvalidPyProject` error rather than being ignored.
    #[test]
    fn project_with_invalid_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "backend"
[tool.knot
"#,
                ),
                (root.join("db/__init__.py"), ""),
            ])
            .context("Failed to write files")?;

        let Err(error) = ProjectMetadata::discover(&root, &system, None) else {
            return Err(anyhow!("Expected project discovery to fail because of invalid syntax in the pyproject.toml"));
        };

        assert_error_eq(
            &error,
            r#"/app/pyproject.toml is not a valid `pyproject.toml`: TOML parse error at line 5, column 31
|
5 | [tool.knot
| ^
invalid table header
expected `.`, `]`
"#,
        );

        Ok(())
    }

    /// Discovery started inside a nested project with its own `tool.knot`
    /// section resolves to the nested project, not the enclosing one.
    #[test]
    fn nested_projects_in_sub_project() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "project-root"
[tool.knot]
"#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
[project]
name = "nested-project"
[tool.knot]
"#,
                ),
            ])
            .context("Failed to write files")?;

        let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;

        snapshot_project!(sub_project);

        Ok(())
    }

    /// Discovery started at the outer root resolves the outer project even
    /// when a nested project exists below it.
    #[test]
    fn nested_projects_in_root_project() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "project-root"
[tool.knot]
"#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
[project]
name = "nested-project"
[tool.knot]
"#,
                ),
            ])
            .context("Failed to write files")?;

        let root = ProjectMetadata::discover(&root, &system, None)?;

        snapshot_project!(root);

        Ok(())
    }

    /// Without any `tool.knot` sections, the closest `pyproject.toml` wins:
    /// discovery from the nested package resolves the nested project.
    #[test]
    fn nested_projects_without_knot_sections() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "project-root"
"#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
[project]
name = "nested-project"
"#,
                ),
            ])
            .context("Failed to write files")?;

        let sub_project = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;

        snapshot_project!(sub_project);

        Ok(())
    }

    /// A `tool.knot` section only on the outer project makes the outer
    /// project win even when discovery starts in the nested package.
    #[test]
    fn nested_projects_with_outer_knot_section() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
[project]
name = "project-root"
[tool.knot]
"#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
[project]
name = "nested-project"
"#,
                ),
            ])
            .context("Failed to write files")?;

        let root = ProjectMetadata::discover(&root.join("packages/a"), &system, None)?;

        snapshot_project!(root);

        Ok(())
    }

    // Compares error messages with path separators normalized to `/` so the
    // assertion also holds on Windows.
    #[track_caller]
    fn assert_error_eq(error: &ProjectDiscoveryError, message: &str) {
        assert_eq!(error.to_string().replace('\\', "/"), message);
    }

    /// Snapshots a project but with all paths using unix separators.
    #[macro_export]
    macro_rules! snapshot_project {
        ($project:expr) => {{
            assert_ron_snapshot!($project,{
                ".root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
            });
        }};
    }
}

View File

@ -4,9 +4,7 @@ use pep440_rs::{Version, VersionSpecifiers};
use serde::Deserialize; use serde::Deserialize;
use thiserror::Error; use thiserror::Error;
use crate::workspace::metadata::WorkspaceDiscoveryError;
pub(crate) use package_name::PackageName; pub(crate) use package_name::PackageName;
use ruff_db::system::SystemPath;
/// A `pyproject.toml` as specified in PEP 517. /// A `pyproject.toml` as specified in PEP 517.
#[derive(Deserialize, Debug, Default, Clone)] #[derive(Deserialize, Debug, Default, Clone)]
@ -19,11 +17,8 @@ pub(crate) struct PyProject {
} }
impl PyProject { impl PyProject {
pub(crate) fn workspace(&self) -> Option<&Workspace> { pub(crate) fn knot(&self) -> Option<&Knot> {
self.tool self.tool.as_ref().and_then(|tool| tool.knot.as_ref())
.as_ref()
.and_then(|tool| tool.knot.as_ref())
.and_then(|knot| knot.workspace.as_ref())
} }
} }
@ -62,47 +57,9 @@ pub(crate) struct Tool {
pub knot: Option<Knot>, pub knot: Option<Knot>,
} }
// TODO(micha): Remove allow once we add knot settings.
// We can't use a unit struct here or deserializing `[tool.knot]` fails.
#[allow(clippy::empty_structs_with_brackets)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)] #[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)] #[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub(crate) struct Knot { pub(crate) struct Knot {}
pub(crate) workspace: Option<Workspace>,
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
pub(crate) struct Workspace {
pub(crate) members: Option<Vec<String>>,
pub(crate) exclude: Option<Vec<String>>,
}
impl Workspace {
pub(crate) fn members(&self) -> &[String] {
self.members.as_deref().unwrap_or_default()
}
pub(crate) fn exclude(&self) -> &[String] {
self.exclude.as_deref().unwrap_or_default()
}
pub(crate) fn is_excluded(
&self,
path: &SystemPath,
workspace_root: &SystemPath,
) -> Result<bool, WorkspaceDiscoveryError> {
for exclude in self.exclude() {
let full_glob =
glob::Pattern::new(workspace_root.join(exclude).as_str()).map_err(|error| {
WorkspaceDiscoveryError::InvalidMembersPattern {
raw_glob: exclude.clone(),
source: error,
}
})?;
if full_glob.matches_path(path.as_std_path()) {
return Ok(true);
}
}
Ok(false)
}
}

View File

@ -1,4 +1,3 @@
use crate::workspace::PackageMetadata;
use red_knot_python_semantic::{ use red_knot_python_semantic::{
ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages, ProgramSettings, PythonPlatform, PythonVersion, SearchPathSettings, SitePackages,
}; };
@ -9,17 +8,17 @@ use ruff_db::system::{SystemPath, SystemPathBuf};
/// The main difference to [`Configuration`] is that default values are filled in. /// The main difference to [`Configuration`] is that default values are filled in.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))] #[cfg_attr(test, derive(serde::Serialize))]
pub struct WorkspaceSettings { pub struct ProjectSettings {
pub(super) program: ProgramSettings, pub(super) program: ProgramSettings,
} }
impl WorkspaceSettings { impl ProjectSettings {
pub fn program(&self) -> &ProgramSettings { pub fn program(&self) -> &ProgramSettings {
&self.program &self.program
} }
} }
/// The configuration for the workspace or a package. /// The configuration for the project or a package.
#[derive(Debug, Default, Clone, PartialEq, Eq)] #[derive(Debug, Default, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))] #[cfg_attr(test, derive(serde::Serialize))]
pub struct Configuration { pub struct Configuration {
@ -34,17 +33,11 @@ impl Configuration {
self.search_paths.extend(with.search_paths); self.search_paths.extend(with.search_paths);
} }
pub fn to_workspace_settings( pub(super) fn to_program_settings(&self, first_party_root: &SystemPath) -> ProgramSettings {
&self, ProgramSettings {
workspace_root: &SystemPath,
_packages: &[PackageMetadata],
) -> WorkspaceSettings {
WorkspaceSettings {
program: ProgramSettings {
python_version: self.python_version.unwrap_or_default(), python_version: self.python_version.unwrap_or_default(),
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths: self.search_paths.to_settings(workspace_root), search_paths: self.search_paths.to_settings(first_party_root),
},
} }
} }
} }
@ -57,7 +50,7 @@ pub struct SearchPathConfiguration {
/// or pyright's stubPath configuration setting. /// or pyright's stubPath configuration setting.
pub extra_paths: Option<Vec<SystemPathBuf>>, pub extra_paths: Option<Vec<SystemPathBuf>>,
/// The root of the workspace, used for finding first-party modules. /// The root of the project, used for finding first-party modules.
pub src_root: Option<SystemPathBuf>, pub src_root: Option<SystemPathBuf>,
/// Optional path to a "typeshed" directory on disk for us to use for standard-library types. /// Optional path to a "typeshed" directory on disk for us to use for standard-library types.

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: root
---
ProjectMetadata(
name: Name("project-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: sub_project
---
ProjectMetadata(
name: Name("nested-project"),
root: "/app/packages/a",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("backend"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -0,0 +1,17 @@
---
source: crates/red_knot_workspace/src/project/metadata.rs
expression: project
---
ProjectMetadata(
name: Name("app"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
)

View File

@ -1,9 +1,9 @@
pub use project_watcher::ProjectWatcher;
use ruff_db::system::{SystemPath, SystemPathBuf, SystemVirtualPathBuf}; use ruff_db::system::{SystemPath, SystemPathBuf, SystemVirtualPathBuf};
pub use watcher::{directory_watcher, EventHandler, Watcher}; pub use watcher::{directory_watcher, EventHandler, Watcher};
pub use workspace_watcher::WorkspaceWatcher;
mod project_watcher;
mod watcher; mod watcher;
mod workspace_watcher;
/// Classification of a file system change event. /// Classification of a file system change event.
/// ///

View File

@ -8,11 +8,11 @@ use ruff_cache::{CacheKey, CacheKeyHasher};
use ruff_db::system::{SystemPath, SystemPathBuf}; use ruff_db::system::{SystemPath, SystemPathBuf};
use ruff_db::{Db as _, Upcast}; use ruff_db::{Db as _, Upcast};
use crate::db::{Db, RootDatabase}; use crate::db::{Db, ProjectDatabase};
use crate::watch::Watcher; use crate::watch::Watcher;
/// Wrapper around a [`Watcher`] that watches the relevant paths of a workspace. /// Wrapper around a [`Watcher`] that watches the relevant paths of a project.
pub struct WorkspaceWatcher { pub struct ProjectWatcher {
watcher: Watcher, watcher: Watcher,
/// The paths that need to be watched. This includes paths for which setting up file watching failed. /// The paths that need to be watched. This includes paths for which setting up file watching failed.
@ -25,9 +25,9 @@ pub struct WorkspaceWatcher {
cache_key: Option<u64>, cache_key: Option<u64>,
} }
impl WorkspaceWatcher { impl ProjectWatcher {
/// Create a new workspace watcher. /// Create a new project watcher.
pub fn new(watcher: Watcher, db: &RootDatabase) -> Self { pub fn new(watcher: Watcher, db: &ProjectDatabase) -> Self {
let mut watcher = Self { let mut watcher = Self {
watcher, watcher,
watched_paths: Vec::new(), watched_paths: Vec::new(),
@ -40,11 +40,11 @@ impl WorkspaceWatcher {
watcher watcher
} }
pub fn update(&mut self, db: &RootDatabase) { pub fn update(&mut self, db: &ProjectDatabase) {
let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect(); let search_paths: Vec<_> = system_module_search_paths(db.upcast()).collect();
let workspace_path = db.workspace().root(db).to_path_buf(); let project_path = db.project().root(db).to_path_buf();
let new_cache_key = Self::compute_cache_key(&workspace_path, &search_paths); let new_cache_key = Self::compute_cache_key(&project_path, &search_paths);
if self.cache_key == Some(new_cache_key) { if self.cache_key == Some(new_cache_key) {
return; return;
@ -56,7 +56,7 @@ impl WorkspaceWatcher {
// ```text // ```text
// - bar // - bar
// - baz.py // - baz.py
// - workspace // - project
// - bar -> /bar // - bar -> /bar
// - foo.py // - foo.py
// ``` // ```
@ -68,23 +68,23 @@ impl WorkspaceWatcher {
self.has_errored_paths = false; self.has_errored_paths = false;
let workspace_path = db let project_path = db
.system() .system()
.canonicalize_path(&workspace_path) .canonicalize_path(&project_path)
.unwrap_or(workspace_path); .unwrap_or(project_path);
// Find the non-overlapping module search paths and filter out paths that are already covered by the workspace. // Find the non-overlapping module search paths and filter out paths that are already covered by the project.
// Module search paths are already canonicalized. // Module search paths are already canonicalized.
let unique_module_paths = ruff_db::system::deduplicate_nested_paths( let unique_module_paths = ruff_db::system::deduplicate_nested_paths(
search_paths search_paths
.into_iter() .into_iter()
.filter(|path| !path.starts_with(&workspace_path)), .filter(|path| !path.starts_with(&project_path)),
) )
.map(SystemPath::to_path_buf); .map(SystemPath::to_path_buf);
// Now add the new paths, first starting with the workspace path and then // Now add the new paths, first starting with the project path and then
// adding the library search paths. // adding the library search paths.
for path in std::iter::once(workspace_path).chain(unique_module_paths) { for path in std::iter::once(project_path).chain(unique_module_paths) {
// Log a warning. It's not worth aborting if registering a single folder fails because // Log a warning. It's not worth aborting if registering a single folder fails because
// Ruff otherwise stills works as expected. // Ruff otherwise stills works as expected.
if let Err(error) = self.watcher.watch(&path) { if let Err(error) = self.watcher.watch(&path) {
@ -106,10 +106,10 @@ impl WorkspaceWatcher {
self.cache_key = Some(new_cache_key); self.cache_key = Some(new_cache_key);
} }
fn compute_cache_key(workspace_root: &SystemPath, search_paths: &[&SystemPath]) -> u64 { fn compute_cache_key(project_root: &SystemPath, search_paths: &[&SystemPath]) -> u64 {
let mut cache_key_hasher = CacheKeyHasher::new(); let mut cache_key_hasher = CacheKeyHasher::new();
search_paths.cache_key(&mut cache_key_hasher); search_paths.cache_key(&mut cache_key_hasher);
workspace_root.cache_key(&mut cache_key_hasher); project_root.cache_key(&mut cache_key_hasher);
cache_key_hasher.finish() cache_key_hasher.finish()
} }

View File

@ -1,665 +0,0 @@
#![allow(clippy::ref_option)]
use crate::db::Db;
use crate::db::RootDatabase;
use crate::workspace::files::{Index, Indexed, IndexedIter, PackageFiles};
pub use metadata::{PackageMetadata, WorkspaceDiscoveryError, WorkspaceMetadata};
use red_knot_python_semantic::types::check_types;
use red_knot_python_semantic::SearchPathSettings;
use ruff_db::diagnostic::{Diagnostic, DiagnosticId, ParseDiagnostic, Severity};
use ruff_db::parsed::parsed_module;
use ruff_db::source::{source_text, SourceTextError};
use ruff_db::system::FileType;
use ruff_db::{
files::{system_path_to_file, File},
system::{walk_directory::WalkState, SystemPath, SystemPathBuf},
};
use ruff_python_ast::{name::Name, PySourceType};
use ruff_text_size::TextRange;
use rustc_hash::{FxBuildHasher, FxHashSet};
use salsa::{Durability, Setter as _};
use std::borrow::Cow;
use std::iter::FusedIterator;
use std::{collections::BTreeMap, sync::Arc};
mod files;
mod metadata;
mod pyproject;
pub mod settings;
/// The project workspace as a Salsa ingredient.
///
/// A workspace consists of one or multiple packages. Packages can be nested. A file in a workspace
/// belongs to no or exactly one package (files can't belong to multiple packages).
///
/// How workspaces and packages are discovered is TBD. For now, a workspace can be any directory,
/// and it always contains a single package which has the same root as the workspace.
///
/// ## Examples
///
/// ```text
/// app-1/
///     pyproject.toml
///     src/
///         ... python files
///
/// app-2/
///     pyproject.toml
///     src/
///         ... python files
///
/// shared/
///     pyproject.toml
///     src/
///         ... python files
///
/// pyproject.toml
/// ```
///
/// The above project structure has three packages: `app-1`, `app-2`, and `shared`.
/// Each of the packages can define their own settings in their `pyproject.toml` file, but
/// they must be compatible. For example, each package can define a different `requires-python` range,
/// but the ranges must overlap.
///
/// ## How is a workspace different from a program?
/// There are two (related) motivations:
///
/// 1. Program is defined in `ruff_db` and it can't reference the settings types for the linter and formatter
///    without introducing a cyclic dependency. The workspace is defined in a higher level crate
///    where it can reference these setting types.
/// 2. Running `ruff check` with different target versions results in different programs (settings) but
///    it remains the same workspace. That's why program is a narrowed view of the workspace only
///    holding on to the most fundamental settings required for checking.
#[salsa::input]
pub struct Workspace {
    /// The path to the workspace's root directory.
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are open in the workspace.
    ///
    /// Setting the open files to a non-`None` value changes `check` to only check the
    /// open files rather than all files in the workspace.
    #[return_ref]
    #[default]
    open_fileset: Option<Arc<FxHashSet<File>>>,

    /// The (first-party) packages in this workspace.
    #[return_ref]
    package_tree: PackageTree,

    /// The unresolved search path configuration.
    #[return_ref]
    pub search_path_settings: SearchPathSettings,
}
/// A first-party package in a workspace.
#[salsa::input]
pub struct Package {
    /// The package's name.
    #[return_ref]
    pub name: Name,

    /// The path to the root directory of the package.
    #[return_ref]
    root_buf: SystemPathBuf,

    /// The files that are part of this package.
    ///
    /// Starts out lazy and is populated on first access (see [`Package::files`]).
    #[default]
    #[return_ref]
    file_set: PackageFiles,
    // TODO: Add the loaded settings.
}
impl Workspace {
    /// Creates the workspace (and its package inputs) from freshly discovered metadata.
    pub fn from_metadata(db: &dyn Db, metadata: WorkspaceMetadata) -> Self {
        let mut packages = BTreeMap::new();

        for package in metadata.packages {
            packages.insert(package.root.clone(), Package::from_metadata(db, package));
        }

        let program_settings = metadata.settings.program;

        // The workspace structure changes rarely (medium durability) but the set of
        // open files changes often (low durability).
        Workspace::builder(
            metadata.root,
            PackageTree(packages),
            program_settings.search_paths,
        )
        .durability(Durability::MEDIUM)
        .open_fileset_durability(Durability::LOW)
        .new(db)
    }

    /// The path to the workspace's root directory.
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Reloads the workspace from `metadata`, reusing the existing [`Package`] input
    /// for any package whose root is unchanged.
    ///
    /// # Panics
    /// Panics if `metadata` has a different root than this workspace.
    pub fn reload(self, db: &mut dyn Db, metadata: WorkspaceMetadata) {
        tracing::debug!("Reloading workspace");
        assert_eq!(self.root(db), metadata.root());

        let mut old_packages = self.package_tree(db).0.clone();
        let mut new_packages = BTreeMap::new();

        for package_metadata in metadata.packages {
            let path = package_metadata.root().to_path_buf();

            let package = if let Some(old_package) = old_packages.remove(&path) {
                old_package.update(db, package_metadata);
                old_package
            } else {
                Package::from_metadata(db, package_metadata)
            };

            new_packages.insert(path, package);
        }

        // Only write the search path settings input when they actually changed, to
        // avoid needless invalidation of dependent queries.
        if &metadata.settings.program.search_paths != self.search_path_settings(db) {
            self.set_search_path_settings(db)
                .to(metadata.settings.program.search_paths);
        }

        self.set_package_tree(db).to(PackageTree(new_packages));
    }

    /// Updates the package whose root matches `metadata`'s root.
    ///
    /// Returns an error if no package with that root exists in this workspace.
    pub fn update_package(self, db: &mut dyn Db, metadata: PackageMetadata) -> anyhow::Result<()> {
        let path = metadata.root().to_path_buf();

        if let Some(package) = self.package_tree(db).get(&path) {
            package.update(db, metadata);
            Ok(())
        } else {
            Err(anyhow::anyhow!("Package {path} not found"))
        }
    }

    /// The (first-party) packages in this workspace.
    pub fn packages(self, db: &dyn Db) -> &PackageTree {
        self.package_tree(db)
    }

    /// Returns the closest package to which the first-party `path` belongs.
    ///
    /// Returns `None` if the `path` is outside of any package or if `file` isn't a first-party file
    /// (e.g. third-party dependencies or `excluded`).
    pub fn package(self, db: &dyn Db, path: impl AsRef<SystemPath>) -> Option<Package> {
        let packages = self.package_tree(db);
        packages.get(path.as_ref())
    }

    /// Checks all open files in the workspace and its dependencies.
    pub fn check(self, db: &RootDatabase) -> Vec<Box<dyn Diagnostic>> {
        let workspace_span = tracing::debug_span!("check_workspace");
        let _span = workspace_span.enter();

        tracing::debug!("Checking workspace");

        let files = WorkspaceFiles::new(db, self);
        let result = Arc::new(std::sync::Mutex::new(Vec::new()));
        let inner_result = Arc::clone(&result);

        let db = db.clone();
        let workspace_span = workspace_span.clone();

        // Check files in parallel; each spawned task appends its diagnostics to the
        // shared result vector.
        rayon::scope(move |scope| {
            for file in &files {
                let result = inner_result.clone();
                let db = db.clone();
                let workspace_span = workspace_span.clone();

                scope.spawn(move |_| {
                    let check_file_span = tracing::debug_span!(parent: &workspace_span, "check_file", file=%file.path(&db));
                    let _entered = check_file_span.entered();

                    let file_diagnostics = check_file(&db, file);
                    result.lock().unwrap().extend(file_diagnostics);
                });
            }
        });

        // `rayon::scope` blocked until every task finished, so `result` is now the
        // only remaining reference.
        Arc::into_inner(result).unwrap().into_inner().unwrap()
    }

    /// Opens a file in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    pub fn open_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!("Opening file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        open_files.insert(file);
        self.set_open_files(db, open_files);
    }

    /// Closes a file in the workspace.
    ///
    /// Returns `true` if the file was previously open.
    pub fn close_file(self, db: &mut dyn Db, file: File) -> bool {
        tracing::debug!("Closing file `{}`", file.path(db));

        let mut open_files = self.take_open_files(db);
        let removed = open_files.remove(&file);

        if removed {
            self.set_open_files(db, open_files);
        }

        removed
    }

    /// Returns the open files in the workspace or `None` if the entire workspace should be checked.
    pub fn open_files(self, db: &dyn Db) -> Option<&FxHashSet<File>> {
        self.open_fileset(db).as_deref()
    }

    /// Sets the open files in the workspace.
    ///
    /// This changes the behavior of `check` to only check the open files rather than all files in the workspace.
    #[tracing::instrument(level = "debug", skip(self, db))]
    pub fn set_open_files(self, db: &mut dyn Db, open_files: FxHashSet<File>) {
        tracing::debug!("Set open workspace files (count: {})", open_files.len());

        self.set_open_fileset(db).to(Some(Arc::new(open_files)));
    }

    /// This takes the open files from the workspace and returns them.
    ///
    /// This changes the behavior of `check` to check all files in the workspace instead of just the open files.
    pub fn take_open_files(self, db: &mut dyn Db) -> FxHashSet<File> {
        tracing::debug!("Take open workspace files");

        // Salsa will cancel any pending queries and remove its own reference to `open_files`
        // so that the reference counter to `open_files` now drops to 1.
        let open_files = self.set_open_fileset(db).to(None);

        if let Some(open_files) = open_files {
            Arc::try_unwrap(open_files).unwrap()
        } else {
            FxHashSet::default()
        }
    }

    /// Returns `true` if the file is open in the workspace.
    ///
    /// A file is considered open when:
    /// * explicitly set as an open file using [`open_file`](Self::open_file)
    /// * It has a [`SystemPath`] and belongs to a package's `src` files
    /// * It has a [`SystemVirtualPath`](ruff_db::system::SystemVirtualPath)
    pub fn is_file_open(self, db: &dyn Db, file: File) -> bool {
        if let Some(open_files) = self.open_files(db) {
            open_files.contains(&file)
        } else if let Some(system_path) = file.path(db).as_system_path() {
            self.package(db, system_path)
                .is_some_and(|package| package.contains_file(db, file))
        } else {
            file.path(db).is_system_virtual_path()
        }
    }
}
impl Package {
    /// The path to the package's root directory.
    pub fn root(self, db: &dyn Db) -> &SystemPath {
        self.root_buf(db)
    }

    /// Returns `true` if `file` is a first-party file part of this package.
    pub fn contains_file(self, db: &dyn Db, file: File) -> bool {
        self.files(db).contains(&file)
    }

    /// Removes `file` from the package's file index.
    ///
    /// A no-op if the files haven't been indexed yet (they'll be re-discovered anyway).
    #[tracing::instrument(level = "debug", skip(db))]
    pub fn remove_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Removing file `{}` from package `{}`",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };

        index.remove(file);
    }

    /// Adds `file` to the package's file index.
    ///
    /// A no-op if the files haven't been indexed yet (they'll be re-discovered anyway).
    pub fn add_file(self, db: &mut dyn Db, file: File) {
        tracing::debug!(
            "Adding file `{}` to package `{}`",
            file.path(db),
            self.name(db)
        );

        let Some(mut index) = PackageFiles::indexed_mut(db, self) else {
            return;
        };

        index.insert(file);
    }

    /// Returns the files belonging to this package.
    ///
    /// Walks the package's directory on first access and caches the result.
    pub fn files(self, db: &dyn Db) -> Indexed<'_> {
        let files = self.file_set(db);

        let indexed = match files.get() {
            Index::Lazy(vacant) => {
                let _entered =
                    tracing::debug_span!("index_package_files", package = %self.name(db)).entered();

                let files = discover_package_files(db, self);
                tracing::info!("Found {} files in package `{}`", files.len(), self.name(db));
                vacant.set(files)
            }
            Index::Indexed(indexed) => indexed,
        };

        indexed
    }

    /// Creates the Salsa input for a discovered package.
    fn from_metadata(db: &dyn Db, metadata: PackageMetadata) -> Self {
        Self::builder(metadata.name, metadata.root)
            .durability(Durability::MEDIUM)
            .file_set_durability(Durability::LOW)
            .new(db)
    }

    /// Updates this package from `metadata`.
    ///
    /// # Panics
    /// Panics if `metadata` has a different root than this package.
    fn update(self, db: &mut dyn Db, metadata: PackageMetadata) {
        let root = self.root(db);
        assert_eq!(root, metadata.root());

        if self.name(db) != metadata.name() {
            self.set_name(db).to(metadata.name);
        }
    }

    /// Drops the cached file index so the files get re-discovered in the next revision.
    pub fn reload_files(self, db: &mut dyn Db) {
        tracing::debug!("Reloading files for package `{}`", self.name(db));

        if !self.file_set(db).is_lazy() {
            // Force a re-index of the files in the next revision.
            self.set_file_set(db).to(PackageFiles::lazy());
        }
    }
}
pub(super) fn check_file(db: &dyn Db, file: File) -> Vec<Box<dyn Diagnostic>> {
let mut diagnostics: Vec<Box<dyn Diagnostic>> = Vec::new();
// Abort checking if there are IO errors.
let source = source_text(db.upcast(), file);
if let Some(read_error) = source.read_error() {
diagnostics.push(Box::new(IOErrorDiagnostic {
file,
error: read_error.clone(),
}));
return diagnostics;
}
let parsed = parsed_module(db.upcast(), file);
diagnostics.extend(parsed.errors().iter().map(|error| {
let diagnostic: Box<dyn Diagnostic> = Box::new(ParseDiagnostic::new(file, error.clone()));
diagnostic
}));
diagnostics.extend(check_types(db.upcast(), file).iter().map(|diagnostic| {
let boxed: Box<dyn Diagnostic> = Box::new(diagnostic.clone());
boxed
}));
diagnostics.sort_unstable_by_key(|diagnostic| diagnostic.range().unwrap_or_default().start());
diagnostics
}
/// Walks the package's directory and collects all first-party Python files
/// (files whose extension maps to a [`PySourceType`]).
///
/// Directories belonging to a *different* (nested) package are skipped; their
/// files are indexed by that package instead.
fn discover_package_files(db: &dyn Db, package: Package) -> FxHashSet<File> {
    // The walk may run callbacks from multiple threads; collect paths behind a mutex.
    let paths = std::sync::Mutex::new(Vec::new());
    let packages = db.workspace().packages(db);

    db.system().walk_directory(package.root(db)).run(|| {
        Box::new(|entry| {
            match entry {
                Ok(entry) => {
                    // Skip over any non python files to avoid creating too many entries in `Files`.
                    match entry.file_type() {
                        FileType::File => {
                            if entry
                                .path()
                                .extension()
                                .and_then(PySourceType::try_from_extension)
                                .is_some()
                            {
                                let mut paths = paths.lock().unwrap();
                                paths.push(entry.into_path());
                            }
                        }
                        FileType::Directory | FileType::Symlink => {
                            // Don't traverse into nested packages (the workspace-package is an ancestor of all other packages)
                            if packages.get(entry.path()) != Some(package) {
                                return WalkState::Skip;
                            }
                        }
                    }
                }
                Err(error) => {
                    // TODO Handle error
                    tracing::error!("Failed to walk path: {error}");
                }
            }

            WalkState::Continue
        })
    });

    let paths = paths.into_inner().unwrap();
    let mut files = FxHashSet::with_capacity_and_hasher(paths.len(), FxBuildHasher);

    for path in paths {
        // If this returns `None`, then the file was deleted between the `walk_directory` call and now.
        // We can ignore this.
        if let Ok(file) = system_path_to_file(db.upcast(), &path) {
            files.insert(file);
        }
    }

    files
}
/// The set of files that a workspace check iterates over: either the explicitly
/// opened files, or the indexed files of every package.
#[derive(Debug)]
enum WorkspaceFiles<'a> {
    /// Only the files currently open in the workspace.
    OpenFiles(&'a FxHashSet<File>),
    /// The indexed file sets of each package.
    PackageFiles(Vec<Indexed<'a>>),
}
impl<'a> WorkspaceFiles<'a> {
    /// Selects the files to check: the explicitly opened files when any are
    /// registered, otherwise the indexed files of every package in the workspace.
    fn new(db: &'a dyn Db, workspace: Workspace) -> Self {
        match workspace.open_files(db) {
            Some(open_files) => Self::OpenFiles(open_files),
            None => {
                let per_package: Vec<_> = workspace
                    .packages(db)
                    .iter()
                    .map(|package| package.files(db))
                    .collect();
                Self::PackageFiles(per_package)
            }
        }
    }
}
impl<'a> IntoIterator for &'a WorkspaceFiles<'a> {
    type Item = File;
    type IntoIter = WorkspaceFilesIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        match self {
            WorkspaceFiles::OpenFiles(files) => WorkspaceFilesIter::OpenFiles(files.iter()),
            WorkspaceFiles::PackageFiles(indexed) => {
                // Prime the iterator with the first package's files (if any);
                // the remaining packages are consumed lazily by `next`.
                let mut remaining = indexed.iter();
                let first = remaining.next().map(IntoIterator::into_iter);
                WorkspaceFilesIter::PackageFiles {
                    current: first,
                    package_files: remaining,
                }
            }
        }
    }
}
/// Iterator over the files in a [`WorkspaceFiles`].
enum WorkspaceFilesIter<'db> {
    OpenFiles(std::collections::hash_set::Iter<'db, File>),
    PackageFiles {
        /// The per-package file indexes that haven't been drained yet.
        package_files: std::slice::Iter<'db, Indexed<'db>>,
        /// Iterator over the package currently being drained, if any.
        current: Option<IndexedIter<'db>>,
    },
}
impl Iterator for WorkspaceFilesIter<'_> {
    type Item = File;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            WorkspaceFilesIter::OpenFiles(files) => files.next().copied(),
            WorkspaceFilesIter::PackageFiles {
                package_files,
                current,
            } => loop {
                // Drain the current package's files first; when it runs dry,
                // move on to the next package. `?` ends the iteration once
                // every package is exhausted.
                match current.as_mut().and_then(Iterator::next) {
                    Some(file) => break Some(file),
                    None => *current = Some(package_files.next()?.into_iter()),
                }
            },
        }
    }
}
/// Diagnostic reported when a file's source text couldn't be read.
#[derive(Debug)]
pub struct IOErrorDiagnostic {
    /// The file that failed to read.
    file: File,
    /// The underlying read error.
    error: SourceTextError,
}
impl Diagnostic for IOErrorDiagnostic {
    fn id(&self) -> DiagnosticId {
        DiagnosticId::Io
    }

    fn message(&self) -> Cow<str> {
        Cow::Owned(self.error.to_string())
    }

    fn file(&self) -> File {
        self.file
    }

    fn range(&self) -> Option<TextRange> {
        // An IO error concerns the file as a whole, not a span within it.
        None
    }

    fn severity(&self) -> Severity {
        Severity::Error
    }
}
/// The workspace's packages, keyed (and therefore ordered) by their root directory.
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct PackageTree(BTreeMap<SystemPathBuf, Package>);
impl PackageTree {
    /// Returns the package whose root is the closest ancestor of (or equal to) `path`.
    pub fn get(&self, path: &SystemPath) -> Option<Package> {
        // The candidate is the greatest package root that sorts at or before `path`;
        // it matches only if it's actually an ancestor of `path`.
        self.0
            .range(..=path.to_path_buf())
            .next_back()
            .filter(|(package_path, _)| path.starts_with(*package_path))
            .map(|(_, package)| *package)
    }

    /// The number of packages in the tree.
    // The package table should never be empty, that's why `is_empty` makes little sense
    #[allow(clippy::len_without_is_empty)]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Iterates over the packages in root-path order.
    pub fn iter(&self) -> PackageTreeIter {
        PackageTreeIter(self.0.values())
    }
}
impl<'a> IntoIterator for &'a PackageTree {
type Item = Package;
type IntoIter = PackageTreeIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
/// Iterator over the packages in a [`PackageTree`], in root-path order.
pub struct PackageTreeIter<'a>(std::collections::btree_map::Values<'a, SystemPathBuf, Package>);

impl Iterator for PackageTreeIter<'_> {
    type Item = Package;

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().copied()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }

    fn last(mut self) -> Option<Self::Item> {
        self.0.next_back().copied()
    }
}

// `btree_map::Values` reports an exact length and is fused, so forwarding these
// marker traits is sound.
impl ExactSizeIterator for PackageTreeIter<'_> {}
impl FusedIterator for PackageTreeIter<'_> {}
#[cfg(test)]
mod tests {
    use crate::db::tests::TestDb;
    use crate::workspace::{check_file, WorkspaceMetadata};
    use red_knot_python_semantic::types::check_types;
    use ruff_db::diagnostic::Diagnostic;
    use ruff_db::files::system_path_to_file;
    use ruff_db::source::source_text;
    use ruff_db::system::{DbWithTestSystem, SystemPath, SystemPathBuf};
    use ruff_db::testing::assert_function_query_was_not_run;
    use ruff_python_ast::name::Name;

    /// `check_file` must report the IO error and skip type checking entirely
    /// when the file's source can't be read.
    #[test]
    fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
        let workspace =
            WorkspaceMetadata::single_package(Name::new_static("test"), SystemPathBuf::from("/"));
        let mut db = TestDb::new(workspace);
        let path = SystemPath::new("test.py");

        db.write_file(path, "x = 10")?;
        let file = system_path_to_file(&db, path).unwrap();

        // Now the file gets deleted before we had a chance to read its source text.
        db.memory_file_system().remove_file(path)?;
        file.sync(&mut db);

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec!["Failed to read file: No such file or directory".to_string()]
        );

        let events = db.take_salsa_events();
        assert_function_query_was_not_run(&db, check_types, file, &events);

        // The user now creates a new file with an empty text. The source text
        // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
        db.write_file(path, "").unwrap();

        assert_eq!(source_text(&db, file).as_str(), "");
        assert_eq!(
            check_file(&db, file)
                .into_iter()
                .map(|diagnostic| diagnostic.message().into_owned())
                .collect::<Vec<_>>(),
            vec![] as Vec<String>
        );

        Ok(())
    }
}

View File

@ -1,812 +0,0 @@
use ruff_db::system::{GlobError, System, SystemPath, SystemPathBuf};
use ruff_python_ast::name::Name;
use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet};
use thiserror::Error;
use crate::workspace::pyproject::{PyProject, PyProjectError, Workspace};
use crate::workspace::settings::{Configuration, WorkspaceSettings};
/// The resolved metadata of a discovered workspace: its root, its member
/// packages, and the settings derived from them.
#[derive(Debug, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct WorkspaceMetadata {
    /// The path to the workspace's root directory.
    pub(super) root: SystemPathBuf,

    /// The (first-party) packages in this workspace.
    pub(super) packages: Vec<PackageMetadata>,

    /// The resolved settings for this workspace.
    pub(super) settings: WorkspaceSettings,
}
/// A first-party package in a workspace.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct PackageMetadata {
    /// The package's name.
    pub(super) name: Name,

    /// The path to the root directory of the package.
    pub(super) root: SystemPathBuf,

    /// The package's configuration.
    pub(super) configuration: Configuration,
}
impl WorkspaceMetadata {
    /// Creates a workspace that consists of a single package located at `root`.
    pub fn single_package(name: Name, root: SystemPathBuf) -> Self {
        let package = PackageMetadata {
            name,
            root: root.clone(),
            configuration: Configuration::default(),
        };

        let packages = vec![package];
        let settings = packages[0]
            .configuration
            .to_workspace_settings(&root, &packages);

        Self {
            root,
            packages,
            settings,
        }
    }

    /// Discovers the closest workspace at `path` and returns its metadata.
    ///
    /// 1. Traverse upwards in the `path`'s ancestor chain and find the first `pyproject.toml`.
    /// 1. If the `pyproject.toml` contains no `knot.workspace` table, then keep traversing the `path`'s ancestor
    ///    chain until we find one or reach the root.
    /// 1. If we've found a workspace, then resolve the workspace's members and assert that the closest
    ///    package (the first found package without a `knot.workspace` table) is a member. If not, create
    ///    a single package workspace for the closest package.
    /// 1. If there's no `pyproject.toml` with a `knot.workspace` table, then create a single-package workspace.
    /// 1. If no ancestor directory contains any `pyproject.toml`, create an ad-hoc workspace for `path`
    ///    that consists of a single package and uses the default settings.
    pub fn discover(
        path: &SystemPath,
        system: &dyn System,
        base_configuration: Option<&Configuration>,
    ) -> Result<WorkspaceMetadata, WorkspaceDiscoveryError> {
        tracing::debug!("Searching for a workspace in '{path}'");

        if !system.is_directory(path) {
            return Err(WorkspaceDiscoveryError::NotADirectory(path.to_path_buf()));
        }

        // The package closest to `path` that has no `knot.workspace` table.
        let mut closest_package: Option<PackageMetadata> = None;

        for ancestor in path.ancestors() {
            let pyproject_path = ancestor.join("pyproject.toml");
            if let Ok(pyproject_str) = system.read_to_string(&pyproject_path) {
                let pyproject = PyProject::from_str(&pyproject_str).map_err(|error| {
                    WorkspaceDiscoveryError::InvalidPyProject {
                        path: pyproject_path,
                        source: Box::new(error),
                    }
                })?;

                let workspace_table = pyproject.workspace().cloned();
                let package = PackageMetadata::from_pyproject(
                    pyproject,
                    ancestor.to_path_buf(),
                    base_configuration,
                );

                if let Some(workspace_table) = workspace_table {
                    let workspace_root = ancestor;
                    tracing::debug!("Found workspace at '{}'", workspace_root);

                    match collect_packages(
                        package,
                        &workspace_table,
                        closest_package,
                        base_configuration,
                        system,
                    )? {
                        CollectedPackagesOrStandalone::Packages(mut packages) => {
                            let mut by_name =
                                FxHashMap::with_capacity_and_hasher(packages.len(), FxBuildHasher);

                            let mut workspace_package = None;

                            for package in &packages {
                                // Package names must be unique within a workspace.
                                if let Some(conflicting) = by_name.insert(package.name(), package) {
                                    return Err(WorkspaceDiscoveryError::DuplicatePackageNames {
                                        name: package.name().clone(),
                                        first: conflicting.root().to_path_buf(),
                                        second: package.root().to_path_buf(),
                                    });
                                }

                                // All member roots must live inside the workspace root.
                                if package.root() == workspace_root {
                                    workspace_package = Some(package);
                                } else if !package.root().starts_with(workspace_root) {
                                    return Err(WorkspaceDiscoveryError::PackageOutsideWorkspace {
                                        package_name: package.name().clone(),
                                        package_root: package.root().to_path_buf(),
                                        workspace_root: workspace_root.to_path_buf(),
                                    });
                                }
                            }

                            let workspace_package = workspace_package
                                .expect("workspace package to be part of the workspace's packages");

                            // The workspace-root package's configuration defines the workspace settings.
                            let settings = workspace_package
                                .configuration
                                .to_workspace_settings(workspace_root, &packages);

                            packages.sort_unstable_by(|a, b| a.root().cmp(b.root()));

                            return Ok(Self {
                                root: workspace_root.to_path_buf(),
                                packages,
                                settings,
                            });
                        }
                        CollectedPackagesOrStandalone::Standalone(package) => {
                            // The closest package isn't a member of this workspace;
                            // fall through to build a single-package workspace for it.
                            closest_package = Some(package);
                            break;
                        }
                    }
                }

                // Not a workspace itself, keep looking for an enclosing workspace.
                if closest_package.is_none() {
                    closest_package = Some(package);
                }
            }
        }

        // No workspace found, but maybe a pyproject.toml was found.
        let package = if let Some(enclosing_package) = closest_package {
            tracing::debug!("Single package workspace at '{}'", enclosing_package.root());
            enclosing_package
        } else {
            tracing::debug!("The ancestor directories contain no `pyproject.toml`. Falling back to a virtual project.");

            // Create a package with a default configuration
            PackageMetadata {
                name: path.file_name().unwrap_or("root").into(),
                root: path.to_path_buf(),
                // TODO create the configuration from the pyproject toml
                configuration: base_configuration.cloned().unwrap_or_default(),
            }
        };

        let root = package.root().to_path_buf();
        let packages = vec![package];
        let settings = packages[0]
            .configuration
            .to_workspace_settings(&root, &packages);

        Ok(Self {
            root,
            packages,
            settings,
        })
    }

    /// The path to the workspace's root directory.
    pub fn root(&self) -> &SystemPath {
        &self.root
    }

    /// The workspace's member packages.
    pub fn packages(&self) -> &[PackageMetadata] {
        &self.packages
    }

    /// The resolved settings for this workspace.
    pub fn settings(&self) -> &WorkspaceSettings {
        &self.settings
    }
}
impl PackageMetadata {
    /// Builds the metadata for the package whose `pyproject.toml` lives at `root`.
    ///
    /// The name falls back to the root directory's name (or `"root"`) when the
    /// `[project]` table declares none. `base_configuration`, when given, is layered
    /// into the package's configuration.
    pub(crate) fn from_pyproject(
        pyproject: PyProject,
        root: SystemPathBuf,
        base_configuration: Option<&Configuration>,
    ) -> Self {
        let name = match pyproject.project.and_then(|project| project.name) {
            Some(declared) => Name::new(&*declared),
            None => Name::new(root.file_name().unwrap_or("root")),
        };

        // TODO: load configuration from pyproject.toml
        let mut configuration = Configuration::default();
        if let Some(base) = base_configuration {
            configuration.extend(base.clone());
        }

        PackageMetadata {
            name,
            root,
            configuration,
        }
    }

    /// The package's name.
    pub fn name(&self) -> &Name {
        &self.name
    }

    /// The path to the package's root directory.
    pub fn root(&self) -> &SystemPath {
        &self.root
    }
}
/// Resolves the members of the workspace rooted at `workspace_package`.
///
/// Returns [`CollectedPackagesOrStandalone::Standalone`] when `closest_package`
/// (the package closest to the starting path) turns out not to be a member of this
/// workspace; the caller then falls back to a single-package workspace.
fn collect_packages(
    workspace_package: PackageMetadata,
    workspace_table: &Workspace,
    closest_package: Option<PackageMetadata>,
    base_configuration: Option<&Configuration>,
    system: &dyn System,
) -> Result<CollectedPackagesOrStandalone, WorkspaceDiscoveryError> {
    let workspace_root = workspace_package.root().to_path_buf();
    let mut member_paths = FxHashSet::default();

    // Expand every `members` glob relative to the workspace root.
    for glob in workspace_table.members() {
        let full_glob = workspace_package.root().join(glob);

        let matches = system.glob(full_glob.as_str()).map_err(|error| {
            WorkspaceDiscoveryError::InvalidMembersPattern {
                raw_glob: glob.clone(),
                source: error,
            }
        })?;

        for result in matches {
            let path = result?;
            let normalized = SystemPath::absolute(path, &workspace_root);

            // Skip over non-directory entries. E.g. Finder might end up creating a `.DS_STORE` file
            // that ends up matching `/projects/*`.
            if system.is_directory(&normalized) {
                member_paths.insert(normalized);
            } else {
                tracing::debug!("Ignoring non-directory workspace member '{normalized}'");
            }
        }
    }

    // The workspace root is always a member. Don't re-add it
    let mut packages = vec![workspace_package];
    member_paths.remove(&workspace_root);

    // Add the package that is closest to the current working directory except
    // if that package isn't a workspace member, then fallback to creating a single
    // package workspace.
    if let Some(closest_package) = closest_package {
        // the closest `pyproject.toml` isn't a member of this workspace because it is
        // explicitly excluded or simply not listed.
        // Create a standalone workspace.
        if !member_paths.remove(closest_package.root())
            || workspace_table.is_excluded(closest_package.root(), &workspace_root)?
        {
            tracing::debug!(
                "Ignoring workspace '{workspace_root}' because package '{package}' is not a member",
                package = closest_package.name()
            );
            return Ok(CollectedPackagesOrStandalone::Standalone(closest_package));
        }

        tracing::debug!("adding package '{}'", closest_package.name());
        packages.push(closest_package);
    }

    // Add all remaining member paths
    for member_path in member_paths {
        if workspace_table.is_excluded(&member_path, workspace_root.as_path())? {
            tracing::debug!("Ignoring excluded member '{member_path}'");
            continue;
        }

        let pyproject_path = member_path.join("pyproject.toml");
        let pyproject_str = match system.read_to_string(&pyproject_path) {
            Ok(pyproject_str) => pyproject_str,
            Err(error) => {
                // Hidden directories without a `pyproject.toml` are silently skipped;
                // any other read failure is a hard error.
                if error.kind() == std::io::ErrorKind::NotFound
                    && member_path
                        .file_name()
                        .is_some_and(|name| name.starts_with('.'))
                {
                    tracing::debug!(
                        "Ignore member '{member_path}' because it has no pyproject.toml and is hidden",
                    );
                    continue;
                }

                return Err(WorkspaceDiscoveryError::MemberFailedToReadPyProject {
                    package_root: member_path,
                    source: error,
                });
            }
        };

        let pyproject = PyProject::from_str(&pyproject_str).map_err(|error| {
            WorkspaceDiscoveryError::InvalidPyProject {
                source: Box::new(error),
                path: pyproject_path,
            }
        })?;

        // Members may not themselves declare a `knot.workspace` table.
        if pyproject.workspace().is_some() {
            return Err(WorkspaceDiscoveryError::NestedWorkspaces {
                package_root: member_path,
            });
        }

        let package = PackageMetadata::from_pyproject(pyproject, member_path, base_configuration);
        tracing::debug!(
            "Adding package '{}' at '{}'",
            package.name(),
            package.root()
        );

        packages.push(package);
    }

    // Ensure a stable, deterministic package order.
    packages.sort_unstable_by(|a, b| a.root().cmp(b.root()));
    Ok(CollectedPackagesOrStandalone::Packages(packages))
}
/// Result of [`collect_packages`]: either the full set of workspace members, or a
/// single standalone package that turned out not to be part of the workspace.
enum CollectedPackagesOrStandalone {
    Packages(Vec<PackageMetadata>),
    Standalone(PackageMetadata),
}
/// Errors that can occur while discovering a workspace and its member packages.
#[derive(Debug, Error)]
pub enum WorkspaceDiscoveryError {
    #[error("workspace path '{0}' is not a directory")]
    NotADirectory(SystemPathBuf),

    #[error("nested workspaces aren't supported but the package located at '{package_root}' defines a `knot.workspace` table")]
    NestedWorkspaces { package_root: SystemPathBuf },

    #[error("the workspace contains two packages named '{name}': '{first}' and '{second}'")]
    DuplicatePackageNames {
        name: Name,
        first: SystemPathBuf,
        second: SystemPathBuf,
    },

    #[error("the package '{package_name}' located at '{package_root}' is outside the workspace's root directory '{workspace_root}'")]
    PackageOutsideWorkspace {
        workspace_root: SystemPathBuf,
        package_name: Name,
        package_root: SystemPathBuf,
    },

    #[error(
        "failed to read the `pyproject.toml` for the package located at '{package_root}': {source}"
    )]
    MemberFailedToReadPyProject {
        package_root: SystemPathBuf,
        source: std::io::Error,
    },

    #[error("{path} is not a valid `pyproject.toml`: {source}")]
    InvalidPyProject {
        source: Box<PyProjectError>,
        path: SystemPathBuf,
    },

    #[error("invalid glob '{raw_glob}' in `tool.knot.workspace.members`: {source}")]
    InvalidMembersPattern {
        source: glob::PatternError,
        raw_glob: String,
    },

    #[error("failed to match member glob: {error}")]
    FailedToMatchGlob {
        #[from]
        error: GlobError,
    },
}
#[cfg(test)]
mod tests {
    //! Integration tests for workspace discovery.
    //!
    //! Each test writes a small file tree into an in-memory file system,
    //! runs `WorkspaceMetadata::discover`, and either snapshots the result
    //! or asserts the exact discovery-error message.

    use crate::snapshot_workspace;
    use anyhow::Context;
    use insta::assert_ron_snapshot;
    use ruff_db::system::{SystemPathBuf, TestSystem};

    use crate::workspace::{WorkspaceDiscoveryError, WorkspaceMetadata};

    /// A directory without any `pyproject.toml` still resolves to a
    /// workspace rooted at that directory.
    #[test]
    fn package_without_pyproject() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([(root.join("foo.py"), ""), (root.join("bar.py"), "")])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)
            .context("Failed to discover workspace")?;

        assert_eq!(workspace.root(), &*root);
        snapshot_workspace!(workspace);

        Ok(())
    }

    /// A `pyproject.toml` without a `knot.workspace` table is a
    /// single-package workspace, and discovery from a sub-directory
    /// finds the same workspace.
    #[test]
    fn single_package() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "backend"
                    "#,
                ),
                (root.join("db/__init__.py"), ""),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)
            .context("Failed to discover workspace")?;

        assert_eq!(workspace.root(), &*root);
        snapshot_workspace!(workspace);

        // Discovering the same package from a subdirectory should give the same result
        let from_src = WorkspaceMetadata::discover(&root.join("db"), &system, None)
            .context("Failed to discover workspace from the `db` sub-directory")?;

        assert_eq!(from_src, workspace);

        Ok(())
    }

    /// Packages matched by `tool.knot.workspace.members` become workspace
    /// members; discovery from inside a member yields the same workspace.
    #[test]
    fn workspace_members() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    exclude = ["packages/excluded"]
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "member-a"
                    "#,
                ),
                (
                    root.join("packages/x/pyproject.toml"),
                    r#"
                    [project]
                    name = "member-x"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)
            .context("Failed to discover workspace")?;

        assert_eq!(workspace.root(), &*root);
        snapshot_workspace!(workspace);

        // Discovering the same package from a member should give the same result
        let from_member = WorkspaceMetadata::discover(&root.join("packages/a"), &system, None)
            .context("Failed to discover workspace from member `a`")?;

        assert_eq!(from_member, workspace);

        Ok(())
    }

    /// A directory listed in `exclude` is not a member; discovering from
    /// inside it yields its own (different) single-package workspace.
    #[test]
    fn workspace_excluded() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    exclude = ["packages/excluded"]
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "member-a"
                    "#,
                ),
                (
                    root.join("packages/excluded/pyproject.toml"),
                    r#"
                    [project]
                    name = "member-x"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)
            .context("Failed to discover workspace")?;

        assert_eq!(workspace.root(), &*root);
        snapshot_workspace!(workspace);

        // Discovering the `workspace` for `excluded` should discover a single-package workspace
        let excluded_workspace =
            WorkspaceMetadata::discover(&root.join("packages/excluded"), &system, None)
                .context("Failed to discover workspace from the excluded directory")?;

        assert_ne!(excluded_workspace, workspace);

        Ok(())
    }

    /// Two members with the same `project.name` are rejected.
    #[test]
    fn workspace_non_unique_member_names() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "a"
                    "#,
                ),
                (
                    root.join("packages/b/pyproject.toml"),
                    r#"
                    [project]
                    name = "a"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
            "Discovery should error because the workspace contains two packages with the same names.",
        );

        assert_error_eq(&error, "the workspace contains two packages named 'a': '/app/packages/a' and '/app/packages/b'");

        Ok(())
    }

    /// A member that itself declares a `knot.workspace` table is rejected:
    /// workspaces cannot be nested.
    #[test]
    fn nested_workspaces() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (
                    root.join("packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "nested-workspace"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
            "Discovery should error because the workspace has a package that itself is a workspace",
        );

        assert_error_eq(&error, "nested workspaces aren't supported but the package located at '/app/packages/a' defines a `knot.workspace` table");

        Ok(())
    }

    /// A directory matching the members glob but lacking a `pyproject.toml`
    /// is a hard error (unless it's a hidden folder or plain file, see below).
    #[test]
    fn member_missing_pyproject_toml() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (root.join("packages/a/test.py"), ""),
            ])
            .context("Failed to write files")?;

        // Fixed typo in the panic message: `pypyroject.toml` -> `pyproject.toml`.
        let error = WorkspaceMetadata::discover(&root, &system, None)
            .expect_err("Discovery should error because member `a` has no `pyproject.toml`");

        assert_error_eq(&error, "failed to read the `pyproject.toml` for the package located at '/app/packages/a': No such file or directory");

        Ok(())
    }

    /// Folders that match the members pattern but don't have a pyproject.toml
    /// aren't valid members and discovery fails. However, don't fail
    /// if the folder name indicates that it is a hidden folder that might
    /// have been created by another tool
    #[test]
    fn member_pattern_matching_hidden_folder() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (root.join("packages/.hidden/a.py"), ""),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)?;

        snapshot_workspace!(workspace);

        Ok(())
    }

    /// A plain file (e.g. `.DS_STORE`) matching the members glob is
    /// silently skipped rather than treated as a broken member.
    #[test]
    fn member_pattern_matching_file() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["packages/*"]
                    "#,
                ),
                (root.join("packages/.DS_STORE"), ""),
            ])
            .context("Failed to write files")?;

        let workspace = WorkspaceMetadata::discover(&root, &system, None)?;

        snapshot_workspace!(&workspace);

        Ok(())
    }

    /// A members glob that escapes the workspace root (`../packages/*`)
    /// is rejected with `PackageOutsideWorkspace`.
    #[test]
    fn workspace_root_not_an_ancestor_of_member() -> anyhow::Result<()> {
        let system = TestSystem::default();
        let root = SystemPathBuf::from("/app");

        system
            .memory_file_system()
            .write_files([
                (
                    root.join("pyproject.toml"),
                    r#"
                    [project]
                    name = "workspace-root"

                    [tool.knot.workspace]
                    members = ["../packages/*"]
                    "#,
                ),
                (
                    root.join("../packages/a/pyproject.toml"),
                    r#"
                    [project]
                    name = "a"
                    "#,
                ),
            ])
            .context("Failed to write files")?;

        let error = WorkspaceMetadata::discover(&root, &system, None).expect_err(
            "Discovery should error because member `a` is outside the workspace's directory`",
        );

        assert_error_eq(&error, "the package 'a' located at '/packages/a' is outside the workspace's root directory '/app'");

        Ok(())
    }

    /// Compares the error's display string against `message`, normalizing
    /// Windows path separators so expectations can use forward slashes.
    #[track_caller]
    fn assert_error_eq(error: &WorkspaceDiscoveryError, message: &str) {
        assert_eq!(error.to_string().replace('\\', "/"), message);
    }

    /// Snapshots a workspace but with all paths using unix separators.
    #[macro_export]
    macro_rules! snapshot_workspace {
        ($workspace:expr) => {{
            assert_ron_snapshot!($workspace, {
                ".root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
                ".packages[].root" => insta::dynamic_redaction(|content, _content_path| {
                    content.as_str().unwrap().replace("\\", "/")
                }),
            });
        }};
    }
}

View File

@ -1,34 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: "&workspace"
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("workspace-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,34 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("workspace-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,34 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("app"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,34 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("backend"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,47 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("workspace-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
PackageMetadata(
name: Name("member-a"),
root: "/app/packages/a",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,60 +0,0 @@
---
source: crates/red_knot_workspace/src/workspace/metadata.rs
expression: workspace
---
WorkspaceMetadata(
root: "/app",
packages: [
PackageMetadata(
name: Name("workspace-root"),
root: "/app",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
PackageMetadata(
name: Name("member-a"),
root: "/app/packages/a",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
PackageMetadata(
name: Name("member-x"),
root: "/app/packages/x",
configuration: Configuration(
python_version: None,
search_paths: SearchPathConfiguration(
extra_paths: None,
src_root: None,
typeshed: None,
site_packages: None,
),
),
),
],
settings: WorkspaceSettings(
program: ProgramSettings(
python_version: "3.9",
python_platform: all,
search_paths: SearchPathSettings(
extra_paths: [],
src_root: "/app",
typeshed: None,
site_packages: Known([]),
),
),
),
)

View File

@ -1,7 +1,7 @@
use anyhow::{anyhow, Context}; use anyhow::{anyhow, Context};
use red_knot_python_semantic::{HasTy, SemanticModel}; use red_knot_python_semantic::{HasTy, SemanticModel};
use red_knot_workspace::db::RootDatabase; use red_knot_workspace::db::ProjectDatabase;
use red_knot_workspace::workspace::WorkspaceMetadata; use red_knot_workspace::project::ProjectMetadata;
use ruff_db::files::{system_path_to_file, File}; use ruff_db::files::{system_path_to_file, File};
use ruff_db::parsed::parsed_module; use ruff_db::parsed::parsed_module;
use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem}; use ruff_db::system::{SystemPath, SystemPathBuf, TestSystem};
@ -9,12 +9,12 @@ use ruff_python_ast::visitor::source_order;
use ruff_python_ast::visitor::source_order::SourceOrderVisitor; use ruff_python_ast::visitor::source_order::SourceOrderVisitor;
use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt}; use ruff_python_ast::{self as ast, Alias, Expr, Parameter, ParameterWithDefault, Stmt};
fn setup_db(workspace_root: &SystemPath, system: TestSystem) -> anyhow::Result<RootDatabase> { fn setup_db(workspace_root: &SystemPath, system: TestSystem) -> anyhow::Result<ProjectDatabase> {
let workspace = WorkspaceMetadata::discover(workspace_root, &system, None)?; let workspace = ProjectMetadata::discover(workspace_root, &system, None)?;
RootDatabase::new(workspace, system) ProjectDatabase::new(workspace, system)
} }
fn get_workspace_root() -> anyhow::Result<SystemPathBuf> { fn get_cargo_workspace_root() -> anyhow::Result<SystemPathBuf> {
Ok(SystemPathBuf::from(String::from_utf8( Ok(SystemPathBuf::from(String::from_utf8(
std::process::Command::new("cargo") std::process::Command::new("cargo")
.args(["locate-project", "--workspace", "--message-format", "plain"]) .args(["locate-project", "--workspace", "--message-format", "plain"])
@ -35,7 +35,7 @@ fn corpus_no_panic() -> anyhow::Result<()> {
#[test] #[test]
fn parser_no_panic() -> anyhow::Result<()> { fn parser_no_panic() -> anyhow::Result<()> {
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
run_corpus_tests(&format!( run_corpus_tests(&format!(
"{workspace_root}/crates/ruff_python_parser/resources/**/*.py" "{workspace_root}/crates/ruff_python_parser/resources/**/*.py"
)) ))
@ -43,7 +43,7 @@ fn parser_no_panic() -> anyhow::Result<()> {
#[test] #[test]
fn linter_af_no_panic() -> anyhow::Result<()> { fn linter_af_no_panic() -> anyhow::Result<()> {
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
run_corpus_tests(&format!( run_corpus_tests(&format!(
"{workspace_root}/crates/ruff_linter/resources/test/fixtures/[a-f]*/**/*.py" "{workspace_root}/crates/ruff_linter/resources/test/fixtures/[a-f]*/**/*.py"
)) ))
@ -51,7 +51,7 @@ fn linter_af_no_panic() -> anyhow::Result<()> {
#[test] #[test]
fn linter_gz_no_panic() -> anyhow::Result<()> { fn linter_gz_no_panic() -> anyhow::Result<()> {
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
run_corpus_tests(&format!( run_corpus_tests(&format!(
"{workspace_root}/crates/ruff_linter/resources/test/fixtures/[g-z]*/**/*.py" "{workspace_root}/crates/ruff_linter/resources/test/fixtures/[g-z]*/**/*.py"
)) ))
@ -60,7 +60,7 @@ fn linter_gz_no_panic() -> anyhow::Result<()> {
#[test] #[test]
#[ignore = "Enable running once there are fewer failures"] #[ignore = "Enable running once there are fewer failures"]
fn linter_stubs_no_panic() -> anyhow::Result<()> { fn linter_stubs_no_panic() -> anyhow::Result<()> {
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
run_corpus_tests(&format!( run_corpus_tests(&format!(
"{workspace_root}/crates/ruff_linter/resources/test/fixtures/**/*.pyi" "{workspace_root}/crates/ruff_linter/resources/test/fixtures/**/*.pyi"
)) ))
@ -69,7 +69,7 @@ fn linter_stubs_no_panic() -> anyhow::Result<()> {
#[test] #[test]
#[ignore = "Enable running over typeshed stubs once there are fewer failures"] #[ignore = "Enable running over typeshed stubs once there are fewer failures"]
fn typeshed_no_panic() -> anyhow::Result<()> { fn typeshed_no_panic() -> anyhow::Result<()> {
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
run_corpus_tests(&format!( run_corpus_tests(&format!(
"{workspace_root}/crates/red_knot_vendored/vendor/typeshed/**/*.pyi" "{workspace_root}/crates/red_knot_vendored/vendor/typeshed/**/*.pyi"
)) ))
@ -85,7 +85,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
let mut db = setup_db(&root, system.clone())?; let mut db = setup_db(&root, system.clone())?;
let workspace_root = get_workspace_root()?; let workspace_root = get_cargo_workspace_root()?;
let workspace_root = workspace_root.to_string(); let workspace_root = workspace_root.to_string();
let corpus = glob::glob(pattern).context("Failed to compile pattern")?; let corpus = glob::glob(pattern).context("Failed to compile pattern")?;
@ -163,7 +163,7 @@ fn run_corpus_tests(pattern: &str) -> anyhow::Result<()> {
Ok(()) Ok(())
} }
fn pull_types(db: &RootDatabase, file: File) { fn pull_types(db: &ProjectDatabase, file: File) {
let mut visitor = PullTypesVisitor::new(db, file); let mut visitor = PullTypesVisitor::new(db, file);
let ast = parsed_module(db, file); let ast = parsed_module(db, file);
@ -176,7 +176,7 @@ struct PullTypesVisitor<'db> {
} }
impl<'db> PullTypesVisitor<'db> { impl<'db> PullTypesVisitor<'db> {
fn new(db: &'db RootDatabase, file: File) -> Self { fn new(db: &'db ProjectDatabase, file: File) -> Self {
Self { Self {
model: SemanticModel::new(db, file), model: SemanticModel::new(db, file),
} }

View File

@ -2,10 +2,10 @@
use rayon::ThreadPoolBuilder; use rayon::ThreadPoolBuilder;
use red_knot_python_semantic::PythonVersion; use red_knot_python_semantic::PythonVersion;
use red_knot_workspace::db::{Db, RootDatabase}; use red_knot_workspace::db::{Db, ProjectDatabase};
use red_knot_workspace::project::settings::Configuration;
use red_knot_workspace::project::ProjectMetadata;
use red_knot_workspace::watch::{ChangeEvent, ChangedKind}; use red_knot_workspace::watch::{ChangeEvent, ChangedKind};
use red_knot_workspace::workspace::settings::Configuration;
use red_knot_workspace::workspace::WorkspaceMetadata;
use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use ruff_benchmark::criterion::{criterion_group, criterion_main, BatchSize, Criterion};
use ruff_benchmark::TestFile; use ruff_benchmark::TestFile;
use ruff_db::diagnostic::Diagnostic; use ruff_db::diagnostic::Diagnostic;
@ -15,7 +15,7 @@ use ruff_db::system::{MemoryFileSystem, SystemPath, SystemPathBuf, TestSystem};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
struct Case { struct Case {
db: RootDatabase, db: ProjectDatabase,
fs: MemoryFileSystem, fs: MemoryFileSystem,
re: File, re: File,
re_path: SystemPathBuf, re_path: SystemPathBuf,
@ -74,7 +74,7 @@ fn setup_case() -> Case {
.unwrap(); .unwrap();
let src_root = SystemPath::new("/src"); let src_root = SystemPath::new("/src");
let metadata = WorkspaceMetadata::discover( let metadata = ProjectMetadata::discover(
src_root, src_root,
&system, &system,
Some(&Configuration { Some(&Configuration {
@ -84,13 +84,13 @@ fn setup_case() -> Case {
) )
.unwrap(); .unwrap();
let mut db = RootDatabase::new(metadata, system).unwrap(); let mut db = ProjectDatabase::new(metadata, system).unwrap();
let tomllib_files: FxHashSet<File> = tomllib_filenames let tomllib_files: FxHashSet<File> = tomllib_filenames
.iter() .iter()
.map(|filename| system_path_to_file(&db, tomllib_path(filename)).unwrap()) .map(|filename| system_path_to_file(&db, tomllib_path(filename)).unwrap())
.collect(); .collect();
db.workspace().set_open_files(&mut db, tomllib_files); db.project().set_open_files(&mut db, tomllib_files);
let re_path = tomllib_path("_re.py"); let re_path = tomllib_path("_re.py");
let re = system_path_to_file(&db, &re_path).unwrap(); let re = system_path_to_file(&db, &re_path).unwrap();

View File

@ -11,7 +11,7 @@ use crate::Db;
/// ///
/// We currently create roots for: /// We currently create roots for:
/// * static module resolution paths /// * static module resolution paths
/// * the workspace root /// * the project root
/// ///
/// The main usage of file roots is to determine a file's durability. But it can also be used /// The main usage of file roots is to determine a file's durability. But it can also be used
/// to make a salsa query dependent on whether a file in a root has changed without writing any /// to make a salsa query dependent on whether a file in a root has changed without writing any
@ -43,17 +43,17 @@ impl FileRoot {
#[derive(Copy, Clone, Debug, Eq, PartialEq)] #[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum FileRootKind { pub enum FileRootKind {
/// The root of a workspace. /// The root of a project.
Workspace, Project,
/// A non-workspace module resolution search path. /// A non-project module resolution search path.
LibrarySearchPath, LibrarySearchPath,
} }
impl FileRootKind { impl FileRootKind {
const fn durability(self) -> Durability { const fn durability(self) -> Durability {
match self { match self {
FileRootKind::Workspace => Durability::LOW, FileRootKind::Project => Durability::LOW,
FileRootKind::LibrarySearchPath => Durability::HIGH, FileRootKind::LibrarySearchPath => Durability::HIGH,
} }
} }

View File

@ -50,7 +50,7 @@ impl ModuleDb {
let db = Self::default(); let db = Self::default();
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version, python_version,
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths, search_paths,

View File

@ -115,7 +115,7 @@ fn setup_db() -> TestDb {
Program::from_settings( Program::from_settings(
&db, &db,
&ProgramSettings { ProgramSettings {
python_version: PythonVersion::default(), python_version: PythonVersion::default(),
python_platform: PythonPlatform::default(), python_platform: PythonPlatform::default(),
search_paths: SearchPathSettings::new(src_root), search_paths: SearchPathSettings::new(src_root),