mirror of https://github.com/astral-sh/ruff
clear source texts
This commit is contained in:
parent e352a50b74
commit d1f16703ab
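The commit turns the salsa `source_text` query's result into a clearable handle: `SourceText` now wraps an `ArcSwapOption<SourceTextInner>`, callers take a snapshot with `SourceText::load()` (a `SourceTextRef`, which derefs to `str` like the old type did), and `SourceText::clear()` lets the project drop a file's text once it has been checked. A rough caller-side sketch of the new API, based only on the hunks below; the two helper functions are illustrative, not part of the commit:

    use ruff_db::Db;
    use ruff_db::files::File;
    use ruff_db::source::{SourceTextRef, source_text};

    // Reading a file's text: the query now returns a handle, and `load()`
    // snapshots the current contents as a `SourceTextRef`.
    fn text_len(db: &dyn Db, file: File) -> usize {
        let text: SourceTextRef = source_text(db, file).load();
        text.as_str().len()
    }

    // Releasing a file's text after checking it, mirroring the `source.clear()`
    // call added to the project-checking code in this diff. The handle itself
    // stays cached by salsa (`no_eq`, pointer-based equality).
    fn release_text(db: &dyn Db, file: File) {
        source_text(db, file).clear();
    }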
@@ -141,7 +141,7 @@ fn benchmark_incremental(criterion: &mut Criterion) {
             &case.file_path,
             format!(
                 "{}\n# A comment\n",
-                source_text(&case.db, case.file).as_str()
+                source_text(&case.db, case.file).load().as_str()
             ),
         )
         .unwrap();
@@ -8,10 +8,11 @@ use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
 use ruff_text_size::{TextRange, TextSize};

 use crate::diagnostic::stylesheet::{DiagnosticStylesheet, fmt_styled};
+use crate::source::SourceTextRef;
 use crate::{
     Db,
     files::File,
-    source::{SourceText, line_index, source_text},
+    source::{line_index, source_text},
     system::SystemPath,
 };

@@ -644,7 +645,7 @@ impl FileResolver for &dyn Db {

     fn input(&self, file: File) -> Input {
         Input {
-            text: source_text(*self, file),
+            text: source_text(*self, file).load(),
             line_index: line_index(*self, file),
         }
     }
@@ -657,7 +658,7 @@ impl FileResolver for &dyn Db {
 /// line index for efficiently querying its contents.
 #[derive(Clone, Debug)]
 pub struct Input {
-    pub(crate) text: SourceText,
+    pub(crate) text: SourceTextRef,
     pub(crate) line_index: LineIndex,
 }

@@ -2158,7 +2159,7 @@ watermelon
         let span = self.path(path);

         let file = span.expect_ty_file();
-        let text = source_text(&self.db, file);
+        let text = source_text(&self.db, file).load();
         let line_index = line_index(&self.db, file);
         let source = SourceCode::new(text.as_str(), &line_index);

@@ -30,7 +30,7 @@ pub fn parsed_module(db: &dyn Db, file: File) -> ParsedModule {
 }

 pub fn parsed_module_impl(db: &dyn Db, file: File) -> Parsed<ModModule> {
-    let source = source_text(db, file);
+    let source = source_text(db, file).load();
     let ty = file.source_type(db);

     let target_version = db.python_version();
@@ -1,6 +1,7 @@
 use std::ops::Deref;
 use std::sync::Arc;

+use arc_swap::ArcSwapOption;
 use countme::Count;

 use ruff_notebook::Notebook;
@@ -11,7 +12,7 @@ use crate::Db;
 use crate::files::{File, FilePath};

 /// Reads the source text of a python text file (must be valid UTF8) or notebook.
-#[salsa::tracked]
+#[salsa::tracked(no_eq)]
 pub fn source_text(db: &dyn Db, file: File) -> SourceText {
     let path = file.path(db);
     let _span = tracing::trace_span!("source_text", file = %path).entered();
@@ -38,11 +39,11 @@ pub fn source_text(db: &dyn Db, file: File) -> SourceText {
     };

     SourceText {
-        inner: Arc::new(SourceTextInner {
+        inner: Arc::new(ArcSwapOption::new(Some(Arc::new(SourceTextInner {
             kind,
             read_error,
-            count: Count::new(),
-        }),
+            _count: Count::new(),
+        })))),
     }
 }

@@ -65,12 +66,37 @@ fn is_notebook(path: &FilePath) -> bool {
 /// The file containing the source text can either be a text file or a notebook.
 ///
 /// Cheap cloneable in `O(1)`.
-#[derive(Clone, Eq, PartialEq)]
+#[derive(Clone)]
 pub struct SourceText {
+    inner: Arc<ArcSwapOption<SourceTextInner>>,
+}
+
+impl PartialEq for SourceText {
+    fn eq(&self, other: &Self) -> bool {
+        Arc::ptr_eq(&self.inner, &other.inner)
+    }
+}
+
+impl Eq for SourceText {}
+
+impl SourceText {
+    pub fn load(&self) -> SourceTextRef {
+        SourceTextRef {
+            inner: self.inner.load_full().unwrap(),
+        }
+    }
+
+    pub fn clear(&self) {
+        self.inner.store(None);
+    }
+}
+
+#[derive(Clone, PartialEq, Eq)]
+pub struct SourceTextRef {
     inner: Arc<SourceTextInner>,
 }

-impl SourceText {
+impl SourceTextRef {
     /// Returns the python code as a `str`.
     pub fn as_str(&self) -> &str {
         match &self.inner.kind {
@@ -98,7 +124,7 @@ impl SourceText {
     }
 }

-impl Deref for SourceText {
+impl Deref for SourceTextRef {
     type Target = str;

     fn deref(&self) -> &str {
@@ -106,7 +132,7 @@ impl Deref for SourceText {
     }
 }

-impl std::fmt::Debug for SourceText {
+impl std::fmt::Debug for SourceTextRef {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let mut dbg = f.debug_tuple("SourceText");

@@ -123,11 +149,18 @@ impl std::fmt::Debug for SourceText {
     }
 }

-#[derive(Eq, PartialEq)]
 struct SourceTextInner {
-    count: Count<SourceText>,
     kind: SourceTextKind,
     read_error: Option<SourceTextError>,
+    _count: Count<SourceText>,
+}
+
+impl Eq for SourceTextInner {}
+
+impl PartialEq for SourceTextInner {
+    fn eq(&self, other: &Self) -> bool {
+        self.kind.eq(&other.kind) && self.read_error.eq(&other.read_error)
+    }
 }

 #[derive(Eq, PartialEq)]
@@ -161,7 +194,7 @@ pub enum SourceTextError {
 pub fn line_index(db: &dyn Db, file: File) -> LineIndex {
     let _span = tracing::trace_span!("line_index", ?file).entered();

-    let source = source_text(db, file);
+    let source = source_text(db, file).load();

     LineIndex::from_source_text(&source)
 }
@@ -188,11 +221,11 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        assert_eq!(source_text(&db, file).as_str(), "x = 10");
+        assert_eq!(source_text(&db, file).load().as_str(), "x = 10");

         db.write_file(path, "x = 20").unwrap();

-        assert_eq!(source_text(&db, file).as_str(), "x = 20");
+        assert_eq!(source_text(&db, file).load().as_str(), "x = 20");

         Ok(())
     }
@@ -206,13 +239,13 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();

-        assert_eq!(source_text(&db, file).as_str(), "x = 10");
+        assert_eq!(source_text(&db, file).load().as_str(), "x = 10");

         // Change the file permission only
         file.set_permissions(&mut db).to(Some(0o777));

         db.clear_salsa_events();
-        assert_eq!(source_text(&db, file).as_str(), "x = 10");
+        assert_eq!(source_text(&db, file).load().as_str(), "x = 10");

         let events = db.take_salsa_events();

@@ -234,7 +267,7 @@ mod tests {

         let file = system_path_to_file(&db, path).unwrap();
         let index = line_index(&db, file);
-        let source = source_text(&db, file);
+        let source = source_text(&db, file).load();

         assert_eq!(index.line_count(), 2);
         assert_eq!(
@@ -276,7 +309,7 @@ mod tests {
         )?;

         let file = system_path_to_file(&db, path).unwrap();
-        let source = source_text(&db, file);
+        let source = source_text(&db, file).load();

         assert!(source.is_notebook());
         assert_eq!(source.as_str(), "x = 10\n");
@@ -21,6 +21,7 @@
 //! represents the lint-rule analysis phase. In the future, these steps may be separated into
 //! distinct passes over the AST.

+use std::borrow::Cow;
 use std::cell::RefCell;
 use std::path::Path;

@@ -667,8 +668,8 @@ impl SemanticSyntaxContext for Checker<'_> {
         }
     }

-    fn source(&self) -> &str {
-        self.source()
+    fn source(&self) -> Cow<'_, str> {
+        Cow::Borrowed(self.source())
     }

     fn future_annotations_or_stub(&self) -> bool {
@@ -172,7 +172,7 @@ pub fn formatted_file(db: &dyn Db, file: File) -> Result<Option<String>, FormatM
     }

     let comment_ranges = CommentRanges::from(parsed.tokens());
-    let source = source_text(db.upcast(), file);
+    let source = source_text(db.upcast(), file).load();

     let formatted = format_node(&parsed, &comment_ranges, &source, options)?;
     let printed = formatted.print()?;
@@ -3,6 +3,7 @@
 //! This checker is not responsible for traversing the AST itself. Instead, its
 //! [`SemanticSyntaxChecker::visit_stmt`] and [`SemanticSyntaxChecker::visit_expr`] methods should
 //! be called in a parent `Visitor`'s `visit_stmt` and `visit_expr` methods, respectively.
+use std::borrow::Cow;
 use std::fmt::Display;

 use ruff_python_ast::{
@@ -1698,7 +1699,7 @@ pub trait SemanticSyntaxContext {
     fn python_version(&self) -> PythonVersion;

     /// Returns the source text under analysis.
-    fn source(&self) -> &str;
+    fn source(&self) -> Cow<'_, str>;

     /// Return the [`TextRange`] at which a name is declared as `global` in the current scope.
     fn global(&self, name: &str) -> Option<TextRange>;
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::cell::RefCell;
 use std::cmp::Ordering;
 use std::fmt::{Formatter, Write};
@@ -519,8 +520,8 @@ impl SemanticSyntaxContext for SemanticSyntaxCheckerVisitor<'_> {
         self.diagnostics.borrow_mut().push(error);
     }

-    fn source(&self) -> &str {
-        self.source
+    fn source(&self) -> Cow<'_, str> {
+        Cow::Borrowed(self.source)
     }

     fn global(&self, _name: &str) -> Option<TextRange> {
@@ -643,7 +643,7 @@ fn changed_file() -> anyhow::Result<()> {
     let foo_path = case.project_path("foo.py");

     let foo = case.system_file(&foo_path)?;
-    assert_eq!(source_text(case.db(), foo).as_str(), foo_source);
+    assert_eq!(source_text(case.db(), foo).load().as_str(), foo_source);
     case.assert_indexed_project_files([foo]);

     update_file(&foo_path, "print('Version 2')")?;
@@ -654,7 +654,10 @@ fn changed_file() -> anyhow::Result<()> {

     case.apply_changes(changes, None);

-    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
+    assert_eq!(
+        source_text(case.db(), foo).load().as_str(),
+        "print('Version 2')"
+    );
     case.assert_indexed_project_files([foo]);

     Ok(())
@@ -1262,8 +1265,14 @@ fn hard_links_in_project() -> anyhow::Result<()> {
     let bar_path = case.project_path("bar.py");
     let bar = case.system_file(&bar_path).unwrap();

-    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
-    assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 1')");
+    assert_eq!(
+        source_text(case.db(), foo).load().as_str(),
+        "print('Version 1')"
+    );
+    assert_eq!(
+        source_text(case.db(), bar).load().as_str(),
+        "print('Version 1')"
+    );
     case.assert_indexed_project_files([bar, foo]);

     // Write to the hard link target.
@@ -1273,11 +1282,17 @@ fn hard_links_in_project() -> anyhow::Result<()> {

     case.apply_changes(changes, None);

-    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 2')");
+    assert_eq!(
+        source_text(case.db(), foo).load().as_str(),
+        "print('Version 2')"
+    );

     // macOS is the only platform that emits events for every hardlink.
     if cfg!(target_os = "macos") {
-        assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");
+        assert_eq!(
+            source_text(case.db(), bar).load().as_str(),
+            "print('Version 2')"
+        );
     }

     Ok(())
@@ -1334,8 +1349,14 @@ fn hard_links_to_target_outside_project() -> anyhow::Result<()> {
     let bar_path = case.project_path("bar.py");
     let bar = case.system_file(&bar_path).unwrap();

-    assert_eq!(source_text(case.db(), foo).as_str(), "print('Version 1')");
-    assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 1')");
+    assert_eq!(
+        source_text(case.db(), foo).load().as_str(),
+        "print('Version 1')"
+    );
+    assert_eq!(
+        source_text(case.db(), bar).load().as_str(),
+        "print('Version 1')"
+    );

     // Write to the hard link target.
     update_file(foo_path, "print('Version 2')").context("Failed to update foo.py")?;
@@ -1344,7 +1365,10 @@ fn hard_links_to_target_outside_project() -> anyhow::Result<()> {

     case.apply_changes(changes, None);

-    assert_eq!(source_text(case.db(), bar).as_str(), "print('Version 2')");
+    assert_eq!(
+        source_text(case.db(), bar).load().as_str(),
+        "print('Version 2')"
+    );

     Ok(())
 }
@@ -1450,7 +1474,10 @@ mod unix {
         let baz_project = case.project_path("bar/baz.py");
         let baz_file = baz.file().unwrap();

-        assert_eq!(source_text(case.db(), baz_file).as_str(), "def baz(): ...");
+        assert_eq!(
+            source_text(case.db(), baz_file).load().as_str(),
+            "def baz(): ..."
+        );
         assert_eq!(
             baz_file.path(case.db()).as_system_path(),
             Some(&*baz_project)
@@ -1467,7 +1494,7 @@ mod unix {
         case.apply_changes(changes, None);

         assert_eq!(
-            source_text(case.db(), baz_file).as_str(),
+            source_text(case.db(), baz_file).load().as_str(),
             "def baz(): print('Version 2')"
         );

@@ -1480,7 +1507,7 @@ mod unix {
         case.apply_changes(changes, None);

         assert_eq!(
-            source_text(case.db(), baz_file).as_str(),
+            source_text(case.db(), baz_file).load().as_str(),
             "def baz(): print('Version 3')"
         );

@@ -1533,11 +1560,14 @@ mod unix {
         let patched_bar_baz_file = case.system_file(&patched_bar_baz).unwrap();

         assert_eq!(
-            source_text(case.db(), patched_bar_baz_file).as_str(),
+            source_text(case.db(), patched_bar_baz_file).load().as_str(),
             "def baz(): ..."
         );

-        assert_eq!(source_text(case.db(), baz_file).as_str(), "def baz(): ...");
+        assert_eq!(
+            source_text(case.db(), baz_file).load().as_str(),
+            "def baz(): ..."
+        );
         assert_eq!(baz_file.path(case.db()).as_system_path(), Some(&*bar_baz));

         case.assert_indexed_project_files([patched_bar_baz_file]);
@@ -1564,10 +1594,10 @@ mod unix {
         //
         // That's why I think it's fine to not support this case for now.

-        let patched_baz_text = source_text(case.db(), patched_bar_baz_file);
+        let patched_baz_text = source_text(case.db(), patched_bar_baz_file).load();
         let did_update_patched_baz = patched_baz_text.as_str() == "def baz(): print('Version 2')";

-        let bar_baz_text = source_text(case.db(), baz_file);
+        let bar_baz_text = source_text(case.db(), baz_file).load();
         let did_update_bar_baz = bar_baz_text.as_str() == "def baz(): print('Version 2')";

         assert!(
@@ -1641,12 +1671,12 @@ mod unix {
         let baz_original_file = case.system_file(&baz_original).unwrap();

         assert_eq!(
-            source_text(case.db(), baz_original_file).as_str(),
+            source_text(case.db(), baz_original_file).load().as_str(),
             "def baz(): ..."
         );

         assert_eq!(
-            source_text(case.db(), baz_site_packages).as_str(),
+            source_text(case.db(), baz_site_packages).load().as_str(),
             "def baz(): ..."
         );
         assert_eq!(
@@ -1665,7 +1695,7 @@ mod unix {
         case.apply_changes(changes, None);

         assert_eq!(
-            source_text(case.db(), baz_original_file).as_str(),
+            source_text(case.db(), baz_original_file).load().as_str(),
             "def baz(): print('Version 2')"
         );

@@ -1677,7 +1707,7 @@ mod unix {
         // it doesn't seem worth doing considering that as prominent tools like PyCharm don't support it.
         // Pyright does support it, thanks to chokidar.
         assert_ne!(
-            source_text(case.db(), baz_site_packages).as_str(),
+            source_text(case.db(), baz_site_packages).load().as_str(),
             "def baz(): print('Version 2')"
         );

@@ -216,7 +216,7 @@ mod tests {
         fn inlay_hints(&self) -> String {
             let hints = inlay_hints(&self.db, self.file, self.range);

-            let mut buf = source_text(&self.db, self.file).as_str().to_string();
+            let mut buf = source_text(&self.db, self.file).load().as_str().to_string();

             let mut offset = 0;

@@ -479,7 +479,7 @@ impl Project {
         // Abort checking if there are IO errors.
         let source = source_text(db.upcast(), file);

-        if let Some(read_error) = source.read_error() {
+        if let Some(read_error) = source.load().read_error() {
             diagnostics.push(
                 IOErrorDiagnostic {
                     file: Some(file),
@@ -525,6 +525,8 @@ impl Project {
             .open_fileset(db)
             .is_none_or(|files| !files.contains(&file))
         {
+            source.clear();
+
             // Drop the AST now that we are done checking this file. It is not currently open,
             // so it is unlikely to be accessed again soon. If any queries need to access the AST
             // from across files, it will be re-parsed.
@@ -747,7 +749,7 @@ mod tests {
         db.memory_file_system().remove_file(path)?;
         file.sync(&mut db);

-        assert_eq!(source_text(&db, file).as_str(), "");
+        assert_eq!(source_text(&db, file).load().as_str(), "");
         assert_eq!(
             db.project()
                 .check_file_impl(&db, file)
@@ -764,7 +766,7 @@ mod tests {
         // content returned by `source_text` remains unchanged, but the diagnostics should get updated.
         db.write_file(path, "").unwrap();

-        assert_eq!(source_text(&db, file).as_str(), "");
+        assert_eq!(source_text(&db, file).load().as_str(), "");
         assert_eq!(
             db.project()
                 .check_file_impl(&db, file)
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::cell::{OnceCell, RefCell};
 use std::sync::Arc;

@@ -2274,8 +2275,9 @@ impl SemanticSyntaxContext for SemanticIndexBuilder<'_, '_> {
         self.python_version
     }

-    fn source(&self) -> &str {
-        self.source_text().as_str()
+    fn source(&self) -> Cow<'_, str> {
+        let source_text = self.source_text().load();
+        Cow::Owned(source_text.as_str().to_string())
     }

     // We handle the one syntax error that relies on this method (`LoadBeforeGlobalDeclaration`)
@@ -2364,7 +2366,7 @@ impl SemanticSyntaxContext for SemanticIndexBuilder<'_, '_> {
     }

     fn in_notebook(&self) -> bool {
-        self.source_text().is_notebook()
+        self.source_text().load().is_notebook()
     }

     fn report_semantic_error(&self, error: SemanticSyntaxError) {
|
|||
|
|
@ -89,7 +89,7 @@ declare_lint! {
|
|||
#[salsa::tracked(returns(ref))]
|
||||
pub(crate) fn suppressions(db: &dyn Db, file: File) -> Suppressions {
|
||||
let parsed = parsed_module(db.upcast(), file).load(db.upcast());
|
||||
let source = source_text(db.upcast(), file);
|
||||
let source = source_text(db.upcast(), file).load();
|
||||
|
||||
let mut builder = SuppressionsBuilder::new(&source, db.lint_registry());
|
||||
let mut line_start = TextSize::default();
|
||||
|
|
|
|||
|
|
@@ -32,7 +32,7 @@ impl TypeDefinition<'_> {
         match self {
             Self::Module(module) => {
                 let file = module.file()?;
-                let source = source_text(db.upcast(), file);
+                let source = source_text(db.upcast(), file).load();
                 Some(FileRange::new(file, TextRange::up_to(source.text_len())))
             }
             Self::Class(definition)
@@ -137,7 +137,7 @@ pub(crate) fn parse_string_annotation(
     let _span = tracing::trace_span!("parse_string_annotation", string=?string_expr.range(), ?file)
         .entered();

-    let source = source_text(db.upcast(), file);
+    let source = source_text(db.upcast(), file).load();

     if let Some(string_literal) = string_expr.as_single_part_string() {
         let prefix = string_literal.flags.prefix();
@@ -31,14 +31,14 @@ impl ToLink for NavigationTarget {
     ) -> Option<lsp_types::LocationLink> {
         let file = self.file();
         let uri = file_to_url(db.upcast(), file)?;
-        let source = source_text(db.upcast(), file);
+        let source = source_text(db.upcast(), file).load();
         let index = line_index(db.upcast(), file);

         let target_range = self.full_range().to_lsp_range(&source, &index, encoding);
         let selection_range = self.focus_range().to_lsp_range(&source, &index, encoding);

         let src = src.map(|src| {
-            let source = source_text(db.upcast(), src.file());
+            let source = source_text(db.upcast(), src.file()).load();
             let index = line_index(db.upcast(), src.file());

             src.range().to_lsp_range(&source, &index, encoding)
@@ -163,7 +163,7 @@ impl FileRangeExt for FileRange {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location> {
         let file = self.file();
         let uri = file_to_url(db, file)?;
-        let source = source_text(db.upcast(), file);
+        let source = source_text(db.upcast(), file).load();
         let line_index = line_index(db.upcast(), file);

         let range = self.range().to_lsp_range(&source, &line_index, encoding);
@@ -180,7 +180,7 @@ fn to_lsp_diagnostic(
     let range = if let Some(span) = diagnostic.primary_span() {
         let file = span.expect_ty_file();
         let index = line_index(db.upcast(), file);
-        let source = source_text(db.upcast(), file);
+        let source = source_text(db.upcast(), file).load();

         span.range()
             .map(|range| range.to_lsp_range(&source, &index, encoding))
@@ -39,7 +39,7 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler {
             return Ok(None);
         };

-        let source = source_text(db, file);
+        let source = source_text(db, file).load();
         let line_index = line_index(db, file);
         let offset = params.text_document_position.position.to_text_size(
             &source,
@@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler {
             return Ok(None);
         };

-        let source = source_text(db, file);
+        let source = source_text(db, file).load();
         let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
             &source,
@@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
             return Ok(None);
         };

-        let source = source_text(db, file);
+        let source = source_text(db, file).load();
         let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
             &source,
@@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler {
         };

         let index = line_index(db, file);
-        let source = source_text(db, file);
+        let source = source_text(db, file).load();

         let range = params
             .range
@@ -37,7 +37,7 @@
 use crate::db::Db;
 use ruff_db::files::File;
 use ruff_db::parsed::parsed_module;
-use ruff_db::source::{SourceText, line_index, source_text};
+use ruff_db::source::{SourceTextRef, line_index, source_text};
 use ruff_python_trivia::{CommentRanges, Cursor};
 use ruff_source_file::{LineIndex, OneIndexed};
 use ruff_text_size::{Ranged, TextRange, TextSize};
@@ -49,13 +49,13 @@ use std::str::FromStr;
 #[derive(Debug)]
 pub(crate) struct InlineFileAssertions {
     comment_ranges: CommentRanges,
-    source: SourceText,
+    source: SourceTextRef,
     lines: LineIndex,
 }

 impl InlineFileAssertions {
     pub(crate) fn from_file(db: &Db, file: File) -> Self {
-        let source = source_text(db, file);
+        let source = source_text(db, file).load();
         let lines = line_index(db, file);
         let parsed = parsed_module(db, file).load(db);
         let comment_ranges = CommentRanges::from(parsed.tokens());
@@ -6,7 +6,7 @@ use crate::diagnostic::SortedDiagnostics;
 use colored::Colorize;
 use ruff_db::diagnostic::{Diagnostic, DiagnosticId};
 use ruff_db::files::File;
-use ruff_db::source::{SourceText, line_index, source_text};
+use ruff_db::source::{SourceTextRef, line_index, source_text};
 use ruff_source_file::{LineIndex, OneIndexed};
 use std::cmp::Ordering;
 use std::ops::Range;
@@ -204,14 +204,14 @@ fn discard_todo_metadata(ty: &str) -> std::borrow::Cow<'_, str> {

 struct Matcher {
     line_index: LineIndex,
-    source: SourceText,
+    source: SourceTextRef,
 }

 impl Matcher {
     fn from_file(db: &Db, file: File) -> Self {
         Self {
             line_index: line_index(db, file),
-            source: source_text(db, file),
+            source: source_text(db, file).load(),
         }
     }

@@ -219,7 +219,7 @@ impl Workspace {

     #[wasm_bindgen(js_name = "sourceText")]
     pub fn source_text(&self, file_id: &FileHandle) -> Result<String, Error> {
-        let source_text = ruff_db::source::source_text(&self.db, file_id.file);
+        let source_text = ruff_db::source::source_text(&self.db, file_id.file).load();

         Ok(source_text.to_string())
     }
@@ -230,7 +230,7 @@ impl Workspace {
         file_id: &FileHandle,
         position: Position,
     ) -> Result<Vec<LocationLink>, Error> {
-        let source = source_text(&self.db, file_id.file);
+        let source = source_text(&self.db, file_id.file).load();
         let index = line_index(&self.db, file_id.file);

         let offset = position.to_text_size(&source, &index, self.position_encoding)?;
@@ -269,7 +269,7 @@ impl Workspace {

     #[wasm_bindgen]
     pub fn hover(&self, file_id: &FileHandle, position: Position) -> Result<Option<Hover>, Error> {
-        let source = source_text(&self.db, file_id.file);
+        let source = source_text(&self.db, file_id.file).load();
         let index = line_index(&self.db, file_id.file);

         let offset = position.to_text_size(&source, &index, self.position_encoding)?;
@@ -299,7 +299,7 @@ impl Workspace {
         file_id: &FileHandle,
         position: Position,
     ) -> Result<Vec<Completion>, Error> {
-        let source = source_text(&self.db, file_id.file);
+        let source = source_text(&self.db, file_id.file).load();
         let index = line_index(&self.db, file_id.file);

         let offset = position.to_text_size(&source, &index, self.position_encoding)?;
@@ -317,7 +317,7 @@ impl Workspace {
     #[wasm_bindgen(js_name = "inlayHints")]
     pub fn inlay_hints(&self, file_id: &FileHandle, range: Range) -> Result<Vec<InlayHint>, Error> {
         let index = line_index(&self.db, file_id.file);
-        let source = source_text(&self.db, file_id.file);
+        let source = source_text(&self.db, file_id.file).load();

         let result = inlay_hints(
             &self.db,
@@ -440,7 +440,7 @@ impl Range {
         position_encoding: PositionEncoding,
     ) -> Self {
         let index = line_index(db.upcast(), file_range.file());
-        let source = source_text(db.upcast(), file_range.file());
+        let source = source_text(db.upcast(), file_range.file()).load();

         Self::from_text_range(file_range.range(), &index, &source, position_encoding)
     }