[ty] Implement lsp support for string annotations (#21577)

Fixes https://github.com/astral-sh/ty/issues/1009

## Summary

This adds support for:

* semantic-tokens (syntax highlighting)
* goto-type **(partially implemented, but want to land as-is)**
* goto-declaration
* goto-definition (falls out of goto-declaration)
* hover **(limited by goto-type)**
* find-references
* rename-references (falls out of find-references)

There are 3 major things being introduced here:

* `TypeInferenceBuilder::string_annotations` is a `FxHashSet` of exprs
which were determined to be string annotations during inference. It's
bubbled up in `extras`, which should keep the overhead minimal since the
set is empty in most contexts.
* Very happy to hear if this is too hacky and if I should do something
better, but it's IMO important that we get an authoritative answer on
whether something is a string annotation or not.
* `SemanticModel::enter_string_annotation` checks if the expr was marked
by `TypeInferenceBuilder::string_annotations` and then parses the
sub-AST and produces a sub-SemanticModel that sets
`SemanticModel::in_string_annotation_expr`. That string expr is what the
model consults whenever we need to query e.g. the scope of the current
expression (otherwise the code would constantly panic, as the sub-AST
nodes are not in the current File's AST)
* This hazard consequently encouraged me to refactor a bunch of code to
replace uses of file/db with SemanticModel to minimize hazards (it is no
longer as safe to randomly materialize a SemanticModel in the middle of
analysis, you need to thread through the one you have in case it has
`in_string_annotation_expr` set).
* `GotoTarget::StringAnnotationSubexpr` (and a semantic-tokens impl)
which involves invoking `SemanticModel::enter_string_annotation` before
invoking the same kind of subroutine a normal expression would.
* goto-type (and consequently displaying the type in hover) is the main
hole here, because we can only get the type when the queried
subexpression is the entire string annotation (i.e. we can get the type
of `"int"` but not the parts of `"int | str"`). This is shippable IMO.

## Test Plan

Messed around in IDE, wrote a ton of tests.
This commit is contained in:
Aria Desires 2025-11-25 08:31:04 -05:00 committed by GitHub
parent 15cb41c1f9
commit 66d233134f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
18 changed files with 1344 additions and 130 deletions

View File

@ -3,6 +3,7 @@ use crate::references::{ReferencesMode, references};
use crate::{Db, ReferenceTarget};
use ruff_db::files::File;
use ruff_text_size::TextSize;
use ty_python_semantic::SemanticModel;
/// Find all document highlights for a symbol at the given position.
/// Document highlights are limited to the current file only.
@ -13,9 +14,10 @@ pub fn document_highlights(
) -> Option<Vec<ReferenceTarget>> {
let parsed = ruff_db::parsed::parsed_module(db, file);
let module = parsed.load(db);
let model = SemanticModel::new(db, file);
// Get the definitions for the symbol at the cursor position
let goto_target = find_goto_target(&module, offset)?;
let goto_target = find_goto_target(&model, &module, offset)?;
// Use DocumentHighlights mode which limits search to current file only
references(db, file, &goto_target, ReferencesMode::DocumentHighlights)

View File

@ -190,6 +190,24 @@ pub(crate) enum GotoTarget<'a> {
/// The call of the callable
call: &'a ast::ExprCall,
},
/// Go to on a sub-expression of a string annotation's sub-AST
///
/// ```py
/// x: "int | None"
/// ^^^^
/// ```
///
/// This is equivalent to `GotoTarget::Expression` but the expression
/// isn't actually in the AST.
StringAnnotationSubexpr {
/// The string literal that is a string annotation.
string_expr: &'a ast::ExprStringLiteral,
/// The range to query in the sub-AST for the sub-expression.
subrange: TextRange,
/// If the expression is a Name of some kind this is the name (just a cached result).
name: Option<String>,
},
}
/// The resolved definitions for a `GotoTarget`
@ -227,7 +245,7 @@ impl<'db> DefinitionsOrTargets<'db> {
/// In this case it basically returns exactly what was found.
pub(crate) fn declaration_targets(
self,
db: &'db dyn crate::Db,
db: &'db dyn ty_python_semantic::Db,
) -> Option<crate::NavigationTargets> {
match self {
DefinitionsOrTargets::Definitions(definitions) => {
@ -243,7 +261,7 @@ impl<'db> DefinitionsOrTargets<'db> {
/// if the definition we have is found in a stub file.
pub(crate) fn definition_targets(
self,
db: &'db dyn crate::Db,
db: &'db dyn ty_python_semantic::Db,
) -> Option<crate::NavigationTargets> {
match self {
DefinitionsOrTargets::Definitions(definitions) => {
@ -322,14 +340,30 @@ impl GotoTarget<'_> {
| GotoTarget::TypeParamTypeVarTupleName(_)
| GotoTarget::NonLocal { .. }
| GotoTarget::Globals { .. } => return None,
GotoTarget::StringAnnotationSubexpr {
string_expr,
subrange,
..
} => {
let (subast, _submodel) = model.enter_string_annotation(string_expr)?;
let submod = subast.syntax();
let subnode = covering_node(submod.into(), *subrange).node();
// The type checker knows the type of the full annotation but nothing else
if AnyNodeRef::from(&*submod.body) == subnode {
string_expr.inferred_type(model)
} else {
// TODO: force the typechecker to tell us its secrets
// (it computes but then immediately discards these types)
return None;
}
}
GotoTarget::BinOp { expression, .. } => {
let (_, ty) =
ty_python_semantic::definitions_for_bin_op(model.db(), model, expression)?;
let (_, ty) = ty_python_semantic::definitions_for_bin_op(model, expression)?;
ty
}
GotoTarget::UnaryOp { expression, .. } => {
let (_, ty) =
ty_python_semantic::definitions_for_unary_op(model.db(), model, expression)?;
let (_, ty) = ty_python_semantic::definitions_for_unary_op(model, expression)?;
ty
}
};
@ -343,7 +377,7 @@ impl GotoTarget<'_> {
model: &SemanticModel,
) -> Option<String> {
if let GotoTarget::Call { call, .. } = self {
call_type_simplified_by_overloads(model.db(), model, call)
call_type_simplified_by_overloads(model, call)
} else {
None
}
@ -367,14 +401,10 @@ impl GotoTarget<'_> {
alias_resolution: ImportAliasResolution,
) -> Option<DefinitionsOrTargets<'db>> {
use crate::NavigationTarget;
let db = model.db();
let file = model.file();
match self {
GotoTarget::Expression(expression) => {
definitions_for_expression(model, expression).map(DefinitionsOrTargets::Definitions)
}
// For already-defined symbols, they are their own definitions
GotoTarget::FunctionDef(function) => Some(DefinitionsOrTargets::Definitions(vec![
ResolvedDefinition::Definition(function.definition(model)),
@ -395,8 +425,7 @@ impl GotoTarget<'_> {
let symbol_name = alias.name.as_str();
Some(DefinitionsOrTargets::Definitions(
definitions_for_imported_symbol(
db,
file,
model,
import_from,
symbol_name,
alias_resolution,
@ -423,7 +452,7 @@ impl GotoTarget<'_> {
let alias_range = alias.asname.as_ref().unwrap().range;
Some(DefinitionsOrTargets::Targets(
crate::NavigationTargets::single(NavigationTarget {
file,
file: model.file(),
focus_range: alias_range,
full_range: alias.range(),
}),
@ -436,7 +465,7 @@ impl GotoTarget<'_> {
keyword,
call_expression,
} => Some(DefinitionsOrTargets::Definitions(
definitions_for_keyword_argument(db, file, keyword, call_expression),
definitions_for_keyword_argument(model, keyword, call_expression),
)),
// For exception variables, they are their own definitions (like parameters)
@ -451,7 +480,10 @@ impl GotoTarget<'_> {
if let Some(rest_name) = &pattern_mapping.rest {
let range = rest_name.range;
Some(DefinitionsOrTargets::Targets(
crate::NavigationTargets::single(NavigationTarget::new(file, range)),
crate::NavigationTargets::single(NavigationTarget::new(
model.file(),
range,
)),
))
} else {
None
@ -463,7 +495,10 @@ impl GotoTarget<'_> {
if let Some(name) = &pattern_as.name {
let range = name.range;
Some(DefinitionsOrTargets::Targets(
crate::NavigationTargets::single(NavigationTarget::new(file, range)),
crate::NavigationTargets::single(NavigationTarget::new(
model.file(),
range,
)),
))
} else {
None
@ -488,19 +523,40 @@ impl GotoTarget<'_> {
GotoTarget::BinOp { expression, .. } => {
let (definitions, _) =
ty_python_semantic::definitions_for_bin_op(db, model, expression)?;
ty_python_semantic::definitions_for_bin_op(model, expression)?;
Some(DefinitionsOrTargets::Definitions(definitions))
}
GotoTarget::UnaryOp { expression, .. } => {
let (definitions, _) =
ty_python_semantic::definitions_for_unary_op(db, model, expression)?;
ty_python_semantic::definitions_for_unary_op(model, expression)?;
Some(DefinitionsOrTargets::Definitions(definitions))
}
_ => None,
// String annotations sub-expressions require us to recurse into the sub-AST
GotoTarget::StringAnnotationSubexpr {
string_expr,
subrange,
..
} => {
let (subast, submodel) = model.enter_string_annotation(string_expr)?;
let subexpr = covering_node(subast.syntax().into(), *subrange)
.node()
.as_expr_ref()?;
definitions_for_expression(&submodel, &subexpr)
.map(DefinitionsOrTargets::Definitions)
}
// TODO: implement these
GotoTarget::PatternKeywordArgument(..)
| GotoTarget::PatternMatchStarName(..)
| GotoTarget::TypeParamTypeVarName(..)
| GotoTarget::TypeParamParamSpecName(..)
| GotoTarget::TypeParamTypeVarTupleName(..)
| GotoTarget::NonLocal { .. }
| GotoTarget::Globals { .. } => None,
}
}
@ -519,6 +575,7 @@ impl GotoTarget<'_> {
ast::ExprRef::Attribute(attr) => Some(Cow::Borrowed(attr.attr.as_str())),
_ => None,
},
GotoTarget::StringAnnotationSubexpr { name, .. } => name.as_deref().map(Cow::Borrowed),
GotoTarget::FunctionDef(function) => Some(Cow::Borrowed(function.name.as_str())),
GotoTarget::ClassDef(class) => Some(Cow::Borrowed(class.name.as_str())),
GotoTarget::Parameter(parameter) => Some(Cow::Borrowed(parameter.name.as_str())),
@ -579,6 +636,7 @@ impl GotoTarget<'_> {
/// Creates a `GotoTarget` from a `CoveringNode` and an offset within the node
pub(crate) fn from_covering_node<'a>(
model: &SemanticModel,
covering_node: &crate::find_node::CoveringNode<'a>,
offset: TextSize,
tokens: &Tokens,
@ -778,6 +836,31 @@ impl GotoTarget<'_> {
Some(GotoTarget::Expression(unary.into()))
}
node @ AnyNodeRef::ExprStringLiteral(string_expr) => {
// Check if we've clicked on a sub-GotoTarget inside a string annotation's sub-AST
if let Some((subast, submodel)) = model.enter_string_annotation(string_expr)
&& let Some(GotoTarget::Expression(subexpr)) = find_goto_target_impl(
&submodel,
subast.tokens(),
subast.syntax().into(),
offset,
)
{
let name = match subexpr {
ast::ExprRef::Name(name) => Some(name.id.to_string()),
ast::ExprRef::Attribute(attr) => Some(attr.attr.to_string()),
_ => None,
};
Some(GotoTarget::StringAnnotationSubexpr {
string_expr,
subrange: subexpr.range(),
name,
})
} else {
node.as_expr_ref().map(GotoTarget::Expression)
}
}
node => {
// Check if this is seemingly a callable being invoked (the `x` in `x(...)`)
let parent = covering_node.parent();
@ -813,6 +896,7 @@ impl Ranged for GotoTarget<'_> {
GotoTarget::ImportModuleComponent {
component_range, ..
} => *component_range,
GotoTarget::StringAnnotationSubexpr { subrange, .. } => *subrange,
GotoTarget::ImportModuleAlias { alias } => alias.asname.as_ref().unwrap().range,
GotoTarget::ExceptVariable(except) => except.name.as_ref().unwrap().range,
GotoTarget::KeywordArgument { keyword, .. } => keyword.arg.as_ref().unwrap().range,
@ -833,7 +917,7 @@ impl Ranged for GotoTarget<'_> {
/// Converts a collection of `ResolvedDefinition` items into `NavigationTarget` items.
fn convert_resolved_definitions_to_targets(
db: &dyn crate::Db,
db: &dyn ty_python_semantic::Db,
definitions: Vec<ty_python_semantic::ResolvedDefinition<'_>>,
) -> Vec<crate::NavigationTarget> {
definitions
@ -872,11 +956,9 @@ fn definitions_for_expression<'db>(
expression: &ruff_python_ast::ExprRef<'_>,
) -> Option<Vec<ResolvedDefinition<'db>>> {
match expression {
ast::ExprRef::Name(name) => Some(definitions_for_name(model.db(), model.file(), name)),
ast::ExprRef::Name(name) => Some(definitions_for_name(model, name)),
ast::ExprRef::Attribute(attribute) => Some(ty_python_semantic::definitions_for_attribute(
model.db(),
model.file(),
attribute,
model, attribute,
)),
_ => None,
}
@ -887,7 +969,7 @@ fn definitions_for_callable<'db>(
call: &ast::ExprCall,
) -> Vec<ResolvedDefinition<'db>> {
// Attempt to refine to a specific call
let signature_info = call_signature_details(model.db(), model, call);
let signature_info = call_signature_details(model, call);
signature_info
.into_iter()
.filter_map(|signature| signature.definition.map(ResolvedDefinition::Definition))
@ -896,7 +978,7 @@ fn definitions_for_callable<'db>(
/// Shared helper to map and convert resolved definitions into navigation targets.
fn definitions_to_navigation_targets<'db>(
db: &dyn crate::Db,
db: &dyn ty_python_semantic::Db,
stub_mapper: Option<&StubMapper<'db>>,
mut definitions: Vec<ty_python_semantic::ResolvedDefinition<'db>>,
) -> Option<crate::NavigationTargets> {
@ -911,12 +993,21 @@ fn definitions_to_navigation_targets<'db>(
}
}
pub(crate) fn find_goto_target(
parsed: &ParsedModuleRef,
pub(crate) fn find_goto_target<'a>(
model: &'a SemanticModel,
parsed: &'a ParsedModuleRef,
offset: TextSize,
) -> Option<GotoTarget<'_>> {
let token = parsed
.tokens()
) -> Option<GotoTarget<'a>> {
find_goto_target_impl(model, parsed.tokens(), parsed.syntax().into(), offset)
}
pub(crate) fn find_goto_target_impl<'a>(
model: &'a SemanticModel,
tokens: &'a Tokens,
syntax: AnyNodeRef<'a>,
offset: TextSize,
) -> Option<GotoTarget<'a>> {
let token = tokens
.at_offset(offset)
.max_by_key(|token| match token.kind() {
TokenKind::Name
@ -937,18 +1028,18 @@ pub(crate) fn find_goto_target(
return None;
}
let covering_node = covering_node(parsed.syntax().into(), token.range())
let covering_node = covering_node(syntax, token.range())
.find_first(|node| {
node.is_identifier() || node.is_expression() || node.is_stmt_import_from()
})
.ok()?;
GotoTarget::from_covering_node(&covering_node, offset, parsed.tokens())
GotoTarget::from_covering_node(model, &covering_node, offset, tokens)
}
/// Helper function to resolve a module name and create a navigation target.
fn definitions_for_module<'db>(
model: &SemanticModel,
model: &SemanticModel<'db>,
module: Option<&str>,
level: u32,
) -> Option<DefinitionsOrTargets<'db>> {

View File

@ -16,9 +16,9 @@ pub fn goto_declaration(
offset: TextSize,
) -> Option<RangedValue<NavigationTargets>> {
let module = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&module, offset)?;
let model = SemanticModel::new(db, file);
let goto_target = find_goto_target(&model, &module, offset)?;
let declaration_targets = goto_target
.get_definition_targets(&model, ImportAliasResolution::ResolveAliases)?
.declaration_targets(db)?;
@ -889,6 +889,190 @@ def another_helper(path):
");
}
#[test]
fn goto_declaration_string_annotation1() {
let test = cursor_test(
r#"
a: "MyCla<CURSOR>ss" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:4:7
|
2 | a: "MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:5
|
2 | a: "MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_declaration_string_annotation2() {
let test = cursor_test(
r#"
a: "None | MyCl<CURSOR>ass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_declaration_string_annotation3() {
let test = cursor_test(
r#"
a: "None |<CURSOR> MyClass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @"No goto target found");
}
#[test]
fn goto_declaration_string_annotation4() {
let test = cursor_test(
r#"
a: "None | MyClass<CURSOR>" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_declaration_string_annotation5() {
let test = cursor_test(
r#"
a: "None | MyClass"<CURSOR> = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @"No goto target found");
}
#[test]
fn goto_declaration_string_annotation_dangling1() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass |" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @"No goto target found");
}
#[test]
fn goto_declaration_string_annotation_dangling2() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass | No" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:4:7
|
2 | a: "MyClass | No" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:5
|
2 | a: "MyClass | No" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_declaration_string_annotation_dangling3() {
let test = cursor_test(
r#"
a: "MyClass | N<CURSOR>o" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_declaration(), @"No goto target found");
}
#[test]
fn goto_declaration_nested_instance_attribute() {
let test = cursor_test(

View File

@ -17,8 +17,8 @@ pub fn goto_definition(
offset: TextSize,
) -> Option<RangedValue<NavigationTargets>> {
let module = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&module, offset)?;
let model = SemanticModel::new(db, file);
let goto_target = find_goto_target(&model, &module, offset)?;
let definition_targets = goto_target
.get_definition_targets(&model, ImportAliasResolution::ResolveAliases)?
.definition_targets(db)?;

View File

@ -3,6 +3,7 @@ use crate::references::{ReferencesMode, references};
use crate::{Db, ReferenceTarget};
use ruff_db::files::File;
use ruff_text_size::TextSize;
use ty_python_semantic::SemanticModel;
/// Find all references to a symbol at the given position.
/// Search for references across all files in the project.
@ -14,9 +15,10 @@ pub fn goto_references(
) -> Option<Vec<ReferenceTarget>> {
let parsed = ruff_db::parsed::parsed_module(db, file);
let module = parsed.load(db);
let model = SemanticModel::new(db, file);
// Get the definitions for the symbol at the cursor position
let goto_target = find_goto_target(&module, offset)?;
let goto_target = find_goto_target(&model, &module, offset)?;
let mode = if include_declaration {
ReferencesMode::References
@ -710,6 +712,194 @@ cls = MyClass
");
}
#[test]
fn references_string_annotation1() {
let test = cursor_test(
r#"
a: "MyCla<CURSOR>ss" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @r#"
info[references]: Reference 1
--> main.py:2:5
|
2 | a: "MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
info[references]: Reference 2
--> main.py:4:7
|
2 | a: "MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
"#);
}
#[test]
fn references_string_annotation2() {
let test = cursor_test(
r#"
a: "None | MyCl<CURSOR>ass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @r#"
info[references]: Reference 1
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
info[references]: Reference 2
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
"#);
}
#[test]
fn references_string_annotation3() {
let test = cursor_test(
r#"
a: "None |<CURSOR> MyClass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_string_annotation4() {
let test = cursor_test(
r#"
a: "None | MyClass<CURSOR>" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @r#"
info[references]: Reference 1
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
info[references]: Reference 2
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
"#);
}
#[test]
fn references_string_annotation5() {
let test = cursor_test(
r#"
a: "None | MyClass"<CURSOR> = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_string_annotation_dangling1() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass |" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_string_annotation_dangling2() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass | No" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @r#"
info[references]: Reference 1
--> main.py:2:5
|
2 | a: "MyClass | No" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
info[references]: Reference 2
--> main.py:4:7
|
2 | a: "MyClass | No" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
"#);
}
#[test]
fn references_string_annotation_dangling3() {
let test = cursor_test(
r#"
a: "MyClass | N<CURSOR>o" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn test_multi_file_function_references() {
let test = CursorTest::builder()

View File

@ -11,9 +11,9 @@ pub fn goto_type_definition(
offset: TextSize,
) -> Option<RangedValue<NavigationTargets>> {
let module = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&module, offset)?;
let model = SemanticModel::new(db, file);
let goto_target = find_goto_target(&model, &module, offset)?;
let ty = goto_target.inferred_type(&model)?;
tracing::debug!("Inferred type of covering node is {}", ty.display(db));
@ -744,6 +744,226 @@ mod tests {
assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
}
#[test]
fn goto_type_string_annotation1() {
let test = cursor_test(
r#"
a: "MyCla<CURSOR>ss" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> main.py:4:7
|
2 | a: "MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:5
|
2 | a: "MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_type_string_annotation2() {
let test = cursor_test(
r#"
a: "None | MyCl<CURSOR>ass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
}
#[test]
fn goto_type_string_annotation3() {
let test = cursor_test(
r#"
a: "None |<CURSOR> MyClass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:4
|
2 | a: "None | MyClass" = 1
| ^^^^^^^^^^^^^^^^
3 |
4 | class MyClass:
|
info[goto-type-definition]: Type definition
--> stdlib/types.pyi:950:11
|
948 | if sys.version_info >= (3, 10):
949 | @final
950 | class NoneType:
| ^^^^^^^^
951 | """The type of the None singleton."""
|
info: Source
--> main.py:2:4
|
2 | a: "None | MyClass" = 1
| ^^^^^^^^^^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_type_string_annotation4() {
let test = cursor_test(
r#"
a: "None | MyClass<CURSOR>" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
}
#[test]
fn goto_type_string_annotation5() {
let test = cursor_test(
r#"
a: "None | MyClass"<CURSOR> = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> main.py:4:7
|
2 | a: "None | MyClass" = 1
3 |
4 | class MyClass:
| ^^^^^^^
5 | """some docs"""
|
info: Source
--> main.py:2:4
|
2 | a: "None | MyClass" = 1
| ^^^^^^^^^^^^^^^^
3 |
4 | class MyClass:
|
info[goto-type-definition]: Type definition
--> stdlib/types.pyi:950:11
|
948 | if sys.version_info >= (3, 10):
949 | @final
950 | class NoneType:
| ^^^^^^^^
951 | """The type of the None singleton."""
|
info: Source
--> main.py:2:4
|
2 | a: "None | MyClass" = 1
| ^^^^^^^^^^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_type_string_annotation_dangling1() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass |" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> stdlib/ty_extensions.pyi:20:1
|
19 | # Types
20 | Unknown = object()
| ^^^^^^^
21 | AlwaysTruthy = object()
22 | AlwaysFalsy = object()
|
info: Source
--> main.py:2:4
|
2 | a: "MyClass |" = 1
| ^^^^^^^^^^^
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn goto_type_string_annotation_dangling2() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass | No" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
}
#[test]
fn goto_type_string_annotation_dangling3() {
let test = cursor_test(
r#"
a: "MyClass | N<CURSOR>o" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.goto_type_definition(), @"No goto target found");
}
#[test]
fn goto_type_on_keyword_argument() {
let test = cursor_test(

View File

@ -11,7 +11,8 @@ use ty_python_semantic::{DisplaySettings, SemanticModel};
pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Hover<'_>>> {
let parsed = parsed_module(db, file).load(db);
let goto_target = find_goto_target(&parsed, offset)?;
let model = SemanticModel::new(db, file);
let goto_target = find_goto_target(&model, &parsed, offset)?;
if let GotoTarget::Expression(expr) = goto_target {
if expr.is_literal_expr() {
@ -19,7 +20,6 @@ pub fn hover(db: &dyn Db, file: File, offset: TextSize) -> Option<RangedValue<Ho
}
}
let model = SemanticModel::new(db, file);
let docs = goto_target
.get_definition_targets(
&model,
@ -904,6 +904,191 @@ mod tests {
");
}
#[test]
fn hover_string_annotation1() {
let test = cursor_test(
r#"
a: "MyCla<CURSOR>ss" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @r#"
MyClass
---------------------------------------------
some docs
---------------------------------------------
```python
MyClass
```
---
some docs
---------------------------------------------
info[hover]: Hovered content is
--> main.py:2:5
|
2 | a: "MyClass" = 1
| ^^^^^-^
| | |
| | Cursor offset
| source
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn hover_string_annotation2() {
let test = cursor_test(
r#"
a: "None | MyCl<CURSOR>ass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @r#"
some docs
---------------------------------------------
some docs
---------------------------------------------
info[hover]: Hovered content is
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn hover_string_annotation3() {
let test = cursor_test(
r#"
a: "None |<CURSOR> MyClass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_string_annotation4() {
let test = cursor_test(
r#"
a: "None | MyClass<CURSOR>" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @r#"
some docs
---------------------------------------------
some docs
---------------------------------------------
info[hover]: Hovered content is
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^- Cursor offset
| |
| source
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn hover_string_annotation5() {
let test = cursor_test(
r#"
a: "None | MyClass"<CURSOR> = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_string_annotation_dangling1() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass |" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_string_annotation_dangling2() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass | No" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @r#"
some docs
---------------------------------------------
some docs
---------------------------------------------
info[hover]: Hovered content is
--> main.py:2:5
|
2 | a: "MyClass | No" = 1
| ^^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | class MyClass:
|
"#);
}
#[test]
fn hover_string_annotation_dangling3() {
let test = cursor_test(
r#"
a: "MyClass | N<CURSOR>o" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_overload_type_disambiguated1() {
let test = CursorTest::builder()

View File

@ -122,10 +122,10 @@ fn references_for_file(
) {
let parsed = ruff_db::parsed::parsed_module(db, file);
let module = parsed.load(db);
let model = SemanticModel::new(db, file);
let mut finder = LocalReferencesFinder {
db,
file,
model: &model,
target_definitions,
references,
mode,
@ -157,8 +157,7 @@ fn is_symbol_externally_visible(goto_target: &GotoTarget<'_>) -> bool {
/// AST visitor to find all references to a specific symbol by comparing semantic definitions
struct LocalReferencesFinder<'a> {
db: &'a dyn Db,
file: File,
model: &'a SemanticModel<'a>,
tokens: &'a Tokens,
target_definitions: &'a [NavigationTarget],
references: &'a mut Vec<ReferenceTarget>,
@ -227,6 +226,22 @@ impl<'a> SourceOrderVisitor<'a> for LocalReferencesFinder<'a> {
self.check_identifier_reference(rest_name);
}
}
AnyNodeRef::ExprStringLiteral(string_expr) if self.should_include_declaration() => {
// Highlight the sub-AST of a string annotation
if let Some((sub_ast, sub_model)) = self.model.enter_string_annotation(string_expr)
{
let mut sub_finder = LocalReferencesFinder {
model: &sub_model,
target_definitions: self.target_definitions,
references: self.references,
mode: self.mode,
tokens: sub_ast.tokens(),
target_text: self.target_text,
ancestors: Vec::new(),
};
sub_finder.visit_expr(sub_ast.expr());
}
}
AnyNodeRef::Alias(alias) if self.should_include_declaration() => {
// Handle import alias declarations
if let Some(asname) = &alias.asname {
@ -285,15 +300,13 @@ impl LocalReferencesFinder<'_> {
// the node is fine here. Offsets matter only for import statements
// where the identifier might be a multi-part module name.
let offset = covering_node.node().start();
if let Some(goto_target) =
GotoTarget::from_covering_node(covering_node, offset, self.tokens)
GotoTarget::from_covering_node(self.model, covering_node, offset, self.tokens)
{
// Get the definitions for this goto target
let model = SemanticModel::new(self.db, self.file);
if let Some(current_definitions_nav) = goto_target
.get_definition_targets(&model, ImportAliasResolution::PreserveAliases)
.and_then(|definitions| definitions.declaration_targets(self.db))
.get_definition_targets(self.model, ImportAliasResolution::PreserveAliases)
.and_then(|definitions| definitions.declaration_targets(self.model.db()))
{
let current_definitions: Vec<NavigationTarget> =
current_definitions_nav.into_iter().collect();
@ -302,7 +315,7 @@ impl LocalReferencesFinder<'_> {
// Determine if this is a read or write reference
let kind = self.determine_reference_kind(covering_node);
let target =
ReferenceTarget::new(self.file, covering_node.node().range(), kind);
ReferenceTarget::new(self.model.file(), covering_node.node().range(), kind);
self.references.push(target);
}
}

View File

@ -12,7 +12,7 @@ pub fn can_rename(db: &dyn Db, file: File, offset: TextSize) -> Option<ruff_text
let model = SemanticModel::new(db, file);
// Get the definitions for the symbol at the offset
let goto_target = find_goto_target(&module, offset)?;
let goto_target = find_goto_target(&model, &module, offset)?;
// Don't allow renaming of import module components
if matches!(
@ -59,9 +59,10 @@ pub fn rename(
) -> Option<Vec<ReferenceTarget>> {
let parsed = ruff_db::parsed::parsed_module(db, file);
let module = parsed.load(db);
let model = SemanticModel::new(db, file);
// Get the definitions for the symbol at the offset
let goto_target = find_goto_target(&module, offset)?;
let goto_target = find_goto_target(&model, &module, offset)?;
// Clients shouldn't call us with an empty new name, but just in case...
if new_name.is_empty() {
@ -338,6 +339,162 @@ class DataProcessor:
");
}
#[test]
// Cursor placed inside the identifier within a simple string annotation
// (`"MyCla<CURSOR>ss"`): renaming should update both the name inside the
// string annotation and the class definition it refers to.
fn rename_string_annotation1() {
let test = cursor_test(
r#"
a: "MyCla<CURSOR>ss" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @r#"
info[rename]: Rename symbol (found 2 locations)
--> main.py:2:5
|
2 | a: "MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
| -------
5 | """some docs"""
|
"#);
}
#[test]
// Cursor inside an identifier that is a sub-expression of a union in a
// string annotation (`"None | MyCl<CURSOR>ass"`): the rename should still
// find both the annotation occurrence and the class definition.
fn rename_string_annotation2() {
let test = cursor_test(
r#"
a: "None | MyCl<CURSOR>ass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @r#"
info[rename]: Rename symbol (found 2 locations)
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
| -------
5 | """some docs"""
|
"#);
}
#[test]
// Cursor on the `|` operator inside a string annotation rather than on an
// identifier: there is no renameable symbol at that position.
fn rename_string_annotation3() {
let test = cursor_test(
r#"
a: "None |<CURSOR> MyClass" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @"Cannot rename");
}
#[test]
// Cursor at the very end of the identifier, just before the closing quote
// (`"None | MyClass<CURSOR>"`): still counts as being on the identifier,
// so the rename succeeds in both locations.
fn rename_string_annotation4() {
let test = cursor_test(
r#"
a: "None | MyClass<CURSOR>" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @r#"
info[rename]: Rename symbol (found 2 locations)
--> main.py:2:12
|
2 | a: "None | MyClass" = 1
| ^^^^^^^
3 |
4 | class MyClass:
| -------
5 | """some docs"""
|
"#);
}
#[test]
// Cursor placed after the closing quote of the string annotation — outside
// the annotation's sub-AST entirely — so nothing can be renamed there.
fn rename_string_annotation5() {
let test = cursor_test(
r#"
a: "None | MyClass"<CURSOR> = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @"Cannot rename");
}
#[test]
// The annotation `"MyClass |"` has a dangling `|` with no right-hand side.
// Renaming is refused here — presumably because the annotation doesn't
// parse as a valid expression (contrast with dangling2 below, where the
// union has a syntactically valid right-hand name). NOTE(review): confirm
// this is the intended behavior rather than an error-recovery gap.
fn rename_string_annotation_dangling1() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass |" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @"Cannot rename");
}
#[test]
// `"MyClass | No"` is syntactically a valid union even though `No` doesn't
// resolve to anything; with the cursor on `MyClass` the rename of that
// (resolvable) name succeeds in both locations.
fn rename_string_annotation_dangling2() {
let test = cursor_test(
r#"
a: "MyCl<CURSOR>ass | No" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @r#"
info[rename]: Rename symbol (found 2 locations)
--> main.py:2:5
|
2 | a: "MyClass | No" = 1
| ^^^^^^^
3 |
4 | class MyClass:
| -------
5 | """some docs"""
|
"#);
}
#[test]
// Same annotation as dangling2, but the cursor is on `No`, which has no
// definition anywhere in the file — so there is nothing to rename.
fn rename_string_annotation_dangling3() {
let test = cursor_test(
r#"
a: "MyClass | N<CURSOR>o" = 1
class MyClass:
"""some docs"""
"#,
);
assert_snapshot!(test.rename("MyNewClass"), @"Cannot rename");
}
#[test]
fn test_cannot_rename_import_module_component() {
// Test that we cannot rename parts of module names in import statements

View File

@ -187,9 +187,9 @@ impl Deref for SemanticTokens {
/// Pass None to get tokens for the entire file.
pub fn semantic_tokens(db: &dyn Db, file: File, range: Option<TextRange>) -> SemanticTokens {
let parsed = parsed_module(db, file).load(db);
let semantic_model = SemanticModel::new(db, file);
let model = SemanticModel::new(db, file);
let mut visitor = SemanticTokenVisitor::new(&semantic_model, file, range);
let mut visitor = SemanticTokenVisitor::new(&model, range);
visitor.visit_body(parsed.suite());
SemanticTokens::new(visitor.tokens)
@ -197,8 +197,7 @@ pub fn semantic_tokens(db: &dyn Db, file: File, range: Option<TextRange>) -> Sem
/// AST visitor that collects semantic tokens.
struct SemanticTokenVisitor<'db> {
semantic_model: &'db SemanticModel<'db>,
file: File,
model: &'db SemanticModel<'db>,
tokens: Vec<SemanticToken>,
in_class_scope: bool,
in_type_annotation: bool,
@ -207,14 +206,9 @@ struct SemanticTokenVisitor<'db> {
}
impl<'db> SemanticTokenVisitor<'db> {
fn new(
semantic_model: &'db SemanticModel<'db>,
file: File,
range_filter: Option<TextRange>,
) -> Self {
fn new(model: &'db SemanticModel<'db>, range_filter: Option<TextRange>) -> Self {
Self {
semantic_model,
file,
model,
tokens: Vec::new(),
in_class_scope: false,
in_target_creating_definition: false,
@ -265,7 +259,7 @@ impl<'db> SemanticTokenVisitor<'db> {
fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) {
// First try to classify the token based on its definition kind.
let definition = definition_for_name(self.semantic_model.db(), self.file, name);
let definition = definition_for_name(self.model, name);
if let Some(definition) = definition {
let name_str = name.id.as_str();
@ -275,7 +269,7 @@ impl<'db> SemanticTokenVisitor<'db> {
}
// Fall back to type-based classification.
let ty = name.inferred_type(self.semantic_model);
let ty = name.inferred_type(self.model);
let name_str = name.id.as_str();
self.classify_from_type_and_name_str(ty, name_str)
}
@ -286,7 +280,7 @@ impl<'db> SemanticTokenVisitor<'db> {
name_str: &str,
) -> Option<(SemanticTokenType, SemanticTokenModifier)> {
let mut modifiers = SemanticTokenModifier::empty();
let db = self.semantic_model.db();
let db = self.model.db();
let model = SemanticModel::new(db, definition.file(db));
match definition.kind(db) {
@ -712,12 +706,12 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
for alias in &import.names {
if let Some(asname) = &alias.asname {
// For aliased imports (from X import Y as Z), classify Z based on what Y is
let ty = alias.inferred_type(self.semantic_model);
let ty = alias.inferred_type(self.model);
let (token_type, modifiers) = self.classify_from_alias_type(ty, asname);
self.add_token(asname, token_type, modifiers);
} else {
// For direct imports (from X import Y), use semantic classification
let ty = alias.inferred_type(self.semantic_model);
let ty = alias.inferred_type(self.model);
let (token_type, modifiers) =
self.classify_from_alias_type(ty, &alias.name);
self.add_token(&alias.name, token_type, modifiers);
@ -837,7 +831,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
self.visit_expr(&attr.value);
// Then add token for the attribute name (e.g., 'path' in 'os.path')
let ty = expr.inferred_type(self.semantic_model);
let ty = expr.inferred_type(self.model);
let (token_type, modifiers) =
Self::classify_from_type_for_attribute(ty, &attr.attr);
self.add_token(&attr.attr, token_type, modifiers);
@ -881,6 +875,17 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
self.visit_expr(&named.value);
}
ast::Expr::StringLiteral(string_expr) => {
// Highlight the sub-AST of a string annotation
if let Some((sub_ast, sub_model)) = self.model.enter_string_annotation(string_expr)
{
let mut sub_visitor = SemanticTokenVisitor::new(&sub_model, None);
sub_visitor.visit_expr(sub_ast.expr());
self.tokens.extend(sub_visitor.tokens);
} else {
walk_expr(self, expr);
}
}
_ => {
// For all other expression types, let the default visitor handle them
walk_expr(self, expr);
@ -1564,6 +1569,44 @@ from mymodule import CONSTANT, my_function, MyClass
"#);
}
#[test]
// Semantic-token highlighting of string annotations. Expectations exercised:
// - `y: "int"`: the string annotation's contents are highlighted as a Class
//   token (`int`), not as a plain String.
// - `z = "int"`: a string that is NOT an annotation stays a String token.
// - `w1`: each identifier inside a union annotation gets its own token.
// - `w2`: an unresolved name (`sr`) inside an annotation falls back to the
//   Variable classification rather than Class.
// - `w3`: an annotation that fails to parse (`"int | "`) is left as a plain
//   String token.
fn test_str_annotation() {
let test = SemanticTokenTest::new(
r#"
x: int = 1
y: "int" = 1
z = "int"
w1: "int | str" = "hello"
w2: "int | sr" = "hello"
w3: "int | " = "hello"
"#,
);
let tokens = test.highlight_file();
assert_snapshot!(test.to_snapshot(&tokens), @r#"
"x" @ 1..2: Variable [definition]
"int" @ 4..7: Class
"1" @ 10..11: Number
"y" @ 12..13: Variable [definition]
"int" @ 16..19: Class
"1" @ 23..24: Number
"z" @ 25..26: Variable [definition]
"\"int\"" @ 29..34: String
"w1" @ 35..37: Variable [definition]
"int" @ 40..43: Class
"str" @ 46..49: Class
"\"hello\"" @ 53..60: String
"w2" @ 61..63: Variable [definition]
"int" @ 66..69: Class
"sr" @ 72..74: Variable
"\"hello\"" @ 78..85: String
"w3" @ 86..88: Variable [definition]
"\"int | \"" @ 90..98: String
"\"hello\"" @ 101..108: String
"#);
}
#[test]
fn test_attribute_classification() {
let test = SemanticTokenTest::new(

View File

@ -74,7 +74,7 @@ pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<Signa
// Get signature details from the semantic analyzer.
let signature_details: Vec<CallSignatureDetails<'_>> =
call_signature_details(db, &model, call_expr);
call_signature_details(&model, call_expr);
if signature_details.is_empty() {
return None;

View File

@ -11,12 +11,12 @@ use crate::cached_vendored_root;
/// other language server providers (like hover, completion, and signature help) to find
/// docstrings for functions that resolve to stubs.
pub(crate) struct StubMapper<'db> {
db: &'db dyn crate::Db,
db: &'db dyn ty_python_semantic::Db,
cached_vendored_root: Option<SystemPathBuf>,
}
impl<'db> StubMapper<'db> {
pub(crate) fn new(db: &'db dyn crate::Db) -> Self {
pub(crate) fn new(db: &'db dyn ty_python_semantic::Db) -> Self {
let cached_vendored_root = cached_vendored_root(db);
Self {
db,

View File

@ -1,7 +1,8 @@
use ruff_db::files::{File, FilePath};
use ruff_db::source::line_index;
use ruff_python_ast as ast;
use ruff_db::source::{line_index, source_text};
use ruff_python_ast::{self as ast, ExprStringLiteral, ModExpression};
use ruff_python_ast::{Expr, ExprRef, HasNodeIndex, name::Name};
use ruff_python_parser::Parsed;
use ruff_source_file::LineIndex;
use rustc_hash::FxHashMap;
@ -15,18 +16,34 @@ use crate::types::ide_support::all_declarations_and_bindings;
use crate::types::ide_support::{Member, all_members};
use crate::types::{Type, binding_type, infer_scope_types};
/// The primary interface the LSP should use for querying semantic information about a [`File`].
///
/// Although you can in principle freely construct this type given a `db` and `file`, you should
/// try to construct this at the start of your analysis and thread the same instance through
/// the full analysis.
///
/// The primary reason for this is that it manages traversing into the sub-ASTs of string
/// annotations (see [`Self::enter_string_annotation`]). When you do this you will be handling
/// AST nodes that don't belong to the file's AST (or *any* file's AST). These kinds of nodes
/// will result in panics and confusing results if handed to the wrong subsystem. `SemanticModel`
/// methods will automatically handle using the string literal's AST node when necessary.
pub struct SemanticModel<'db> {
db: &'db dyn Db,
file: File,
/// If `Some` then this `SemanticModel` is for analyzing the sub-AST of a string annotation.
/// This expression will be used as a witness to the scope/location we're analyzing.
in_string_annotation_expr: Option<Box<Expr>>,
}
impl<'db> SemanticModel<'db> {
pub fn new(db: &'db dyn Db, file: File) -> Self {
Self { db, file }
Self {
db,
file,
in_string_annotation_expr: None,
}
}
// TODO we don't actually want to expose the Db directly to lint rules, but we need to find a
// solution for exposing information from types
pub fn db(&self) -> &'db dyn Db {
self.db
}
@ -213,10 +230,10 @@ impl<'db> SemanticModel<'db> {
completions
}
fn scope(&self, node: ast::AnyNodeRef<'_>) -> Option<FileScopeId> {
/// Get the scope of the given node (handles string annotations)
pub fn scope(&self, node: ast::AnyNodeRef<'_>) -> Option<FileScopeId> {
let index = semantic_index(self.db, self.file);
match node {
match self.node_in_ast(node) {
ast::AnyNodeRef::Identifier(identifier) => index.try_expression_scope_id(identifier),
node => match node.as_expr_ref() {
// If we couldn't identify a specific
@ -227,6 +244,82 @@ impl<'db> SemanticModel<'db> {
},
}
}
/// Get a "safe" [`ast::AnyNodeRef`] to use for referring to the given (sub-)AST node.
///
/// When this model is analyzing the sub-AST of a string annotation, the string
/// literal's own node is returned instead (sub-AST nodes don't belong to the
/// file's AST and would confuse or panic downstream lookups). Otherwise the
/// input node is passed through unchanged.
pub fn node_in_ast<'a>(&'a self, node: ast::AnyNodeRef<'a>) -> ast::AnyNodeRef<'a> {
    match &self.in_string_annotation_expr {
        Some(annotation) => (&**annotation).into(),
        None => node,
    }
}
/// Get a "safe" [`Expr`] to use for referring to the given (sub-)expression.
///
/// When this model is analyzing the sub-AST of a string annotation, the string
/// literal's own expression is returned instead; otherwise the input is passed
/// through unchanged.
pub fn expr_in_ast<'a>(&'a self, expr: &'a Expr) -> &'a Expr {
    match &self.in_string_annotation_expr {
        Some(annotation) => annotation,
        None => expr,
    }
}
/// Get a "safe" [`ExprRef`] to use for referring to the given (sub-)expression.
///
/// When this model is analyzing the sub-AST of a string annotation, an
/// [`ExprRef`] to the string literal's own expression is returned instead;
/// otherwise the input reference is passed through unchanged.
pub fn expr_ref_in_ast<'a>(&'a self, expr: ExprRef<'a>) -> ExprRef<'a> {
    match &self.in_string_annotation_expr {
        Some(annotation) => ExprRef::from(annotation),
        None => expr,
    }
}
/// Given a string expression, determine if it's a string annotation, and if it is,
/// yield the parsed sub-AST and a sub-model that knows it's analyzing a sub-AST.
///
/// Analysis of the sub-AST should only be done with the sub-model, or else things
/// may return nonsense results or even panic!
///
/// Returns `None` when: we are already inside a string annotation, inference did
/// not mark this expression as a string annotation, the string is not a
/// single-part literal, or the annotation's contents fail to parse.
pub fn enter_string_annotation(
    &self,
    string_expr: &ExprStringLiteral,
) -> Option<(Parsed<ModExpression>, Self)> {
    // String annotations can't contain string annotations
    if self.in_string_annotation_expr.is_some() {
        return None;
    }

    // Ask the inference engine whether this is actually a string annotation
    let expr = ExprRef::StringLiteral(string_expr);
    let index = semantic_index(self.db, self.file);
    let file_scope = index.expression_scope_id(&expr);
    let scope = file_scope.to_scope_id(self.db, self.file);
    if !infer_scope_types(self.db, scope).is_string_annotation(expr) {
        return None;
    }

    // Parse the sub-AST and create a semantic model that knows it's in a sub-AST
    //
    // The string_annotation will be used as the expr/node for any query that needs
    // to look up a node in the AST to prevent panics, because these sub-AST nodes
    // are not in the File's AST!
    let source = source_text(self.db, self.file);
    // `as_single_part_string` yields `None` for implicitly-concatenated strings,
    // so those bail out here via `?`.
    let string_literal = string_expr.as_single_part_string()?;
    let ast =
        ruff_python_parser::parse_string_annotation(source.as_str(), string_literal).ok()?;
    let model = Self {
        db: self.db,
        file: self.file,
        // The cloned literal acts as the "witness" expression for scope/location
        // queries made against the sub-model (see `node_in_ast` and friends).
        in_string_annotation_expr: Some(Box::new(Expr::StringLiteral(string_expr.clone()))),
    };
    Some((ast, model))
}
}
/// The type and definition (if available) of a symbol.
@ -315,7 +408,7 @@ pub trait HasDefinition {
impl HasType for ast::ExprRef<'_> {
fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> {
let index = semantic_index(model.db, model.file);
let file_scope = index.expression_scope_id(self);
let file_scope = index.expression_scope_id(&model.expr_ref_in_ast(*self));
let scope = file_scope.to_scope_id(model.db, model.file);
infer_scope_types(model.db, scope).expression_type(*self)

View File

@ -19,7 +19,7 @@ use crate::types::{
TypeVarBoundOrConstraints, class::CodeGeneratorKind,
};
use crate::{Db, DisplaySettings, HasType, NameKind, SemanticModel};
use ruff_db::files::{File, FileRange};
use ruff_db::files::FileRange;
use ruff_db::parsed::parsed_module;
use ruff_python_ast::name::Name;
use ruff_python_ast::{self as ast};
@ -512,11 +512,10 @@ pub fn all_members<'db>(db: &'db dyn Db, ty: Type<'db>) -> FxHashSet<Member<'db>
/// Returns the first definition kind that is reachable for this name in its scope.
/// This is useful for IDE features like semantic tokens.
pub fn definition_for_name<'db>(
db: &'db dyn Db,
file: File,
model: &SemanticModel<'db>,
name: &ast::ExprName,
) -> Option<Definition<'db>> {
let definitions = definitions_for_name(db, file, name);
let definitions = definitions_for_name(model, name);
// Find the first valid definition and return its kind
for declaration in definitions {
@ -531,15 +530,18 @@ pub fn definition_for_name<'db>(
/// Returns all definitions for a name. If any definitions are imports, they
/// are resolved (recursively) to the original definitions or module files.
pub fn definitions_for_name<'db>(
db: &'db dyn Db,
file: File,
model: &SemanticModel<'db>,
name: &ast::ExprName,
) -> Vec<ResolvedDefinition<'db>> {
let db = model.db();
let file = model.file();
let index = semantic_index(db, file);
let name_str = name.id.as_str();
// Get the scope for this name expression
let file_scope = index.expression_scope_id(&ast::ExprRef::from(name));
let Some(file_scope) = model.scope(name.into()) else {
return vec![];
};
let mut all_definitions = Vec::new();
@ -716,19 +718,18 @@ fn is_float_or_complex_annotation(db: &dyn Db, ty: UnionType, name: &str) -> boo
/// changing the corresponding logic in the semantic analyzer to conditionally
/// handle this case through the use of mode flags.
pub fn definitions_for_attribute<'db>(
db: &'db dyn Db,
file: File,
model: &SemanticModel<'db>,
attribute: &ast::ExprAttribute,
) -> Vec<ResolvedDefinition<'db>> {
let db = model.db();
let name_str = attribute.attr.as_str();
let model = SemanticModel::new(db, file);
let mut resolved = Vec::new();
// Determine the type of the LHS
let lhs_ty = attribute.value.inferred_type(&model);
let lhs_ty = attribute.value.inferred_type(model);
let tys = match lhs_ty {
Type::Union(union) => union.elements(db).to_vec(),
Type::Union(union) => union.elements(model.db()).to_vec(),
_ => vec![lhs_ty],
};
@ -861,13 +862,12 @@ pub fn definitions_for_attribute<'db>(
/// Returns definitions for a keyword argument in a call expression.
/// This resolves the keyword argument to the corresponding parameter(s) in the callable's signature(s).
pub fn definitions_for_keyword_argument<'db>(
db: &'db dyn Db,
file: File,
model: &SemanticModel<'db>,
keyword: &ast::Keyword,
call_expr: &ast::ExprCall,
) -> Vec<ResolvedDefinition<'db>> {
let model = SemanticModel::new(db, file);
let func_type = call_expr.func.inferred_type(&model);
let db = model.db();
let func_type = call_expr.func.inferred_type(model);
let Some(keyword_name) = keyword.arg.as_ref() else {
return Vec::new();
@ -918,16 +918,15 @@ pub fn definitions_for_keyword_argument<'db>(
/// aliases (like "x" in "from a import b as x") are resolved to their targets or kept
/// as aliases.
pub fn definitions_for_imported_symbol<'db>(
db: &'db dyn Db,
file: File,
model: &SemanticModel<'db>,
import_node: &ast::StmtImportFrom,
symbol_name: &str,
alias_resolution: ImportAliasResolution,
) -> Vec<ResolvedDefinition<'db>> {
let mut visited = FxHashSet::default();
resolve_definition::resolve_from_import_definitions(
db,
file,
model.db(),
model.file(),
import_node,
symbol_name,
&mut visited,
@ -983,7 +982,6 @@ impl CallSignatureDetails<'_> {
/// `CallSignatureDetails` objects, each representing one possible signature
/// (in case of overloads or union types).
pub fn call_signature_details<'db>(
db: &'db dyn Db,
model: &SemanticModel<'db>,
call_expr: &ast::ExprCall,
) -> Vec<CallSignatureDetails<'db>> {
@ -991,16 +989,16 @@ pub fn call_signature_details<'db>(
// Use into_callable to handle all the complex type conversions
if let Some(callable_type) = func_type
.try_upcast_to_callable(db)
.map(|callables| callables.into_type(db))
.try_upcast_to_callable(model.db())
.map(|callables| callables.into_type(model.db()))
{
let call_arguments =
CallArguments::from_arguments(&call_expr.arguments, |_, splatted_value| {
splatted_value.inferred_type(model)
});
let bindings = callable_type
.bindings(db)
.match_parameters(db, &call_arguments);
.bindings(model.db())
.match_parameters(model.db(), &call_arguments);
// Extract signature details from all callable bindings
bindings
@ -1009,7 +1007,7 @@ pub fn call_signature_details<'db>(
.map(|binding| {
let argument_to_parameter_mapping = binding.argument_matches().to_vec();
let signature = binding.signature;
let display_details = signature.display(db).to_string_parts();
let display_details = signature.display(model.db()).to_string_parts();
let parameter_label_offsets = display_details.parameter_ranges;
let parameter_names = display_details.parameter_names;
@ -1040,11 +1038,11 @@ pub fn call_signature_details<'db>(
/// so it has a "worse" display than say `Type::FunctionLiteral` or `Type::BoundMethod`,
/// which this analysis would naturally wipe away. The contexts this function
/// succeeds in are those where we would print a complicated/ugly type anyway.
pub fn call_type_simplified_by_overloads<'db>(
db: &'db dyn Db,
model: &SemanticModel<'db>,
pub fn call_type_simplified_by_overloads(
model: &SemanticModel,
call_expr: &ast::ExprCall,
) -> Option<String> {
let db = model.db();
let func_type = call_expr.func.inferred_type(model);
// Use into_callable to handle all the complex type conversions
@ -1090,18 +1088,17 @@ pub fn call_type_simplified_by_overloads<'db>(
/// Returns the definitions of the binary operation along with its callable type.
pub fn definitions_for_bin_op<'db>(
db: &'db dyn Db,
model: &SemanticModel<'db>,
binary_op: &ast::ExprBinOp,
) -> Option<(Vec<ResolvedDefinition<'db>>, Type<'db>)> {
let left_ty = binary_op.left.inferred_type(model);
let right_ty = binary_op.right.inferred_type(model);
let Ok(bindings) = Type::try_call_bin_op(db, left_ty, binary_op.op, right_ty) else {
let Ok(bindings) = Type::try_call_bin_op(model.db(), left_ty, binary_op.op, right_ty) else {
return None;
};
let callable_type = promote_literals_for_self(db, bindings.callable_type());
let callable_type = promote_literals_for_self(model.db(), bindings.callable_type());
let definitions: Vec<_> = bindings
.into_iter()
@ -1118,7 +1115,6 @@ pub fn definitions_for_bin_op<'db>(
/// Returns the definitions for an unary operator along with their callable types.
pub fn definitions_for_unary_op<'db>(
db: &'db dyn Db,
model: &SemanticModel<'db>,
unary_op: &ast::ExprUnaryOp,
) -> Option<(Vec<ResolvedDefinition<'db>>, Type<'db>)> {
@ -1132,7 +1128,7 @@ pub fn definitions_for_unary_op<'db>(
};
let bindings = match operand_ty.try_call_dunder(
db,
model.db(),
unary_dunder_method,
CallArguments::none(),
TypeContext::default(),
@ -1141,7 +1137,7 @@ pub fn definitions_for_unary_op<'db>(
Err(CallDunderError::MethodNotAvailable) if unary_op.op == ast::UnaryOp::Not => {
// The runtime falls back to `__len__` for `not` if `__bool__` is not defined.
match operand_ty.try_call_dunder(
db,
model.db(),
"__len__",
CallArguments::none(),
TypeContext::default(),
@ -1160,7 +1156,7 @@ pub fn definitions_for_unary_op<'db>(
) => *bindings,
};
let callable_type = promote_literals_for_self(db, bindings.callable_type());
let callable_type = promote_literals_for_self(model.db(), bindings.callable_type());
let definitions = bindings
.into_iter()
@ -1245,7 +1241,7 @@ pub fn inlay_hint_call_argument_details<'db>(
model: &SemanticModel<'db>,
call_expr: &ast::ExprCall,
) -> Option<InlayHintCallArgumentDetails> {
let signature_details = call_signature_details(db, model, call_expr);
let signature_details = call_signature_details(model, call_expr);
if signature_details.is_empty() {
return None;

View File

@ -39,7 +39,7 @@
use ruff_db::parsed::{ParsedModuleRef, parsed_module};
use ruff_python_ast as ast;
use ruff_text_size::Ranged;
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use salsa;
use salsa::plumbing::AsId;
@ -554,6 +554,9 @@ pub(crate) struct ScopeInference<'db> {
#[derive(Debug, Eq, PartialEq, get_size2::GetSize, salsa::Update, Default)]
struct ScopeInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// Is this a cycle-recovery inference result, and if so, what kind?
cycle_recovery: Option<CycleRecovery<'db>>,
@ -598,6 +601,16 @@ impl<'db> ScopeInference<'db> {
.as_ref()
.and_then(|extra| extra.cycle_recovery.map(CycleRecovery::fallback_type))
}
/// Returns whether the given expression is a string annotation
/// (the string in `x: "int | None"`).
pub(crate) fn is_string_annotation(&self, expression: impl Into<ExpressionNodeKey>) -> bool {
    // Most regions have no `extra` payload at all; absent it, nothing was
    // recorded as a string annotation.
    self.extra
        .as_ref()
        .is_some_and(|extra| extra.string_annotations.contains(&expression.into()))
}
}
/// The inferred types for a definition region.
@ -629,6 +642,9 @@ pub(crate) struct DefinitionInference<'db> {
#[derive(Debug, Eq, PartialEq, get_size2::GetSize, salsa::Update, Default)]
struct DefinitionInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// Is this a cycle-recovery inference result, and if so, what kind?
cycle_recovery: Option<CycleRecovery<'db>>,
@ -765,6 +781,9 @@ pub(crate) struct ExpressionInference<'db> {
/// Extra data that only exists for few inferred expression regions.
#[derive(Debug, Eq, PartialEq, salsa::Update, get_size2::GetSize, Default)]
struct ExpressionInferenceExtra<'db> {
/// String annotations found in this region
string_annotations: FxHashSet<ExpressionNodeKey>,
/// The types of every binding in this expression region.
///
/// Only very few expression regions have bindings (around 0.1%).

View File

@ -220,6 +220,9 @@ pub(super) struct TypeInferenceBuilder<'db, 'ast> {
/// The types of every expression in this region.
expressions: FxHashMap<ExpressionNodeKey, Type<'db>>,
/// Expressions that are string annotations
string_annotations: FxHashSet<ExpressionNodeKey>,
/// The scope this region is part of.
scope: ScopeId<'db>,
@ -325,6 +328,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
deferred_state: DeferredExpressionState::None,
multi_inference_state: MultiInferenceState::Panic,
expressions: FxHashMap::default(),
string_annotations: FxHashSet::default(),
bindings: VecMap::default(),
declarations: VecMap::default(),
typevar_binding_context: None,
@ -365,6 +369,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
self.context.extend(&extra.diagnostics);
self.deferred
.extend(extra.deferred.iter().copied(), self.multi_inference_state);
self.string_annotations
.extend(extra.string_annotations.iter().copied());
}
}
@ -381,6 +387,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
if let Some(extra) = &inference.extra {
self.context.extend(&extra.diagnostics);
self.extend_cycle_recovery(extra.cycle_recovery);
self.string_annotations
.extend(extra.string_annotations.iter().copied());
if !matches!(self.region, InferenceRegion::Scope(..)) {
self.bindings
@ -11708,6 +11716,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let Self {
context,
mut expressions,
string_annotations,
scope,
bindings,
declarations,
@ -11742,7 +11751,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
);
let extra =
(cycle_recovery.is_some() || !bindings.is_empty() || !diagnostics.is_empty() || !all_definitely_bound).then(|| {
(!string_annotations.is_empty() || cycle_recovery.is_some() || !bindings.is_empty() || !diagnostics.is_empty() || !all_definitely_bound).then(|| {
if bindings.len() > 20 {
tracing::debug!(
"Inferred expression region `{:?}` contains {} bindings. Lookups by linear scan might be slow.",
@ -11752,6 +11761,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
}
Box::new(ExpressionInferenceExtra {
string_annotations,
bindings: bindings.into_boxed_slice(),
diagnostics,
cycle_recovery,
@ -11775,6 +11785,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let Self {
context,
mut expressions,
string_annotations,
scope,
bindings,
declarations,
@ -11797,11 +11808,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let diagnostics = context.finish();
let extra = (!diagnostics.is_empty()
|| !string_annotations.is_empty()
|| cycle_recovery.is_some()
|| undecorated_type.is_some()
|| !deferred.is_empty())
.then(|| {
Box::new(DefinitionInferenceExtra {
string_annotations,
cycle_recovery,
deferred: deferred.into_boxed_slice(),
diagnostics,
@ -11842,6 +11855,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let Self {
context,
string_annotations,
mut expressions,
scope,
cycle_recovery,
@ -11869,12 +11883,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
let _ = scope;
let diagnostics = context.finish();
let extra = (!diagnostics.is_empty() || cycle_recovery.is_some()).then(|| {
Box::new(ScopeInferenceExtra {
cycle_recovery,
diagnostics,
})
});
let extra =
(!string_annotations.is_empty() || !diagnostics.is_empty() || cycle_recovery.is_some())
.then(|| {
Box::new(ScopeInferenceExtra {
string_annotations,
cycle_recovery,
diagnostics,
})
});
expressions.shrink_to_fit();

View File

@ -373,6 +373,8 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
) -> TypeAndQualifiers<'db> {
match parse_string_annotation(&self.context, string) {
Some(parsed) => {
self.string_annotations
.insert(ruff_python_ast::ExprRef::StringLiteral(string).into());
// String annotations are always evaluated in the deferred context.
self.infer_annotation_expression(
parsed.expr(),

View File

@ -522,6 +522,8 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
) -> Type<'db> {
match parse_string_annotation(&self.context, string) {
Some(parsed) => {
self.string_annotations
.insert(ruff_python_ast::ExprRef::StringLiteral(string).into());
// String annotations are always evaluated in the deferred context.
self.infer_type_expression_with_state(
parsed.expr(),