From 13375d0e42bcf7e4dac346d8fad991b97222371d Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Thu, 30 Oct 2025 16:44:51 -0400 Subject: [PATCH 001/180] [ty] Use the top materialization of classes for narrowing in class-patterns for `match` statements (#21150) --- .../mdtest/exhaustiveness_checking.md | 19 +++++ .../resources/mdtest/narrow/match.md | 75 +++++++++++++++++++ .../reachability_constraints.rs | 5 +- crates/ty_python_semantic/src/types/narrow.rs | 22 ++++-- 4 files changed, 113 insertions(+), 8 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md index 7218359750..29b267024b 100644 --- a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md +++ b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md @@ -182,6 +182,11 @@ def match_non_exhaustive(x: Color): ## `isinstance` checks +```toml +[environment] +python-version = "3.12" +``` + ```py from typing import assert_never @@ -189,6 +194,9 @@ class A: ... class B: ... class C: ... 
+class GenericClass[T]: + x: T + def if_else_exhaustive(x: A | B | C): if isinstance(x, A): pass @@ -253,6 +261,17 @@ def match_non_exhaustive(x: A | B | C): # this diagnostic is correct: the inferred type of `x` is `B & ~A & ~C` assert_never(x) # error: [type-assertion-failure] + +# Note: no invalid-return-type diagnostic; the `match` is exhaustive +def match_exhaustive_generic[T](obj: GenericClass[T]) -> GenericClass[T]: + match obj: + case GenericClass(x=42): + reveal_type(obj) # revealed: GenericClass[T@match_exhaustive_generic] + return obj + case GenericClass(x=x): + reveal_type(x) # revealed: @Todo(`match` pattern definition types) + reveal_type(obj) # revealed: GenericClass[T@match_exhaustive_generic] + return obj ``` ## `isinstance` checks with generics diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/match.md b/crates/ty_python_semantic/resources/mdtest/narrow/match.md index ee51d50af2..f0c107851b 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/match.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/match.md @@ -69,6 +69,81 @@ match x: reveal_type(x) # revealed: object ``` +## Class patterns with generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never + +class Covariant[T]: + def get(self) -> T: + raise NotImplementedError + +def f(x: Covariant[int]): + match x: + case Covariant(): + reveal_type(x) # revealed: Covariant[int] + case _: + reveal_type(x) # revealed: Never + assert_never(x) +``` + +## Class patterns with generic `@final` classes + +These work the same as non-`@final` classes. 
+ +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never, final + +@final +class Covariant[T]: + def get(self) -> T: + raise NotImplementedError + +def f(x: Covariant[int]): + match x: + case Covariant(): + reveal_type(x) # revealed: Covariant[int] + case _: + reveal_type(x) # revealed: Never + assert_never(x) +``` + +## Class patterns where the class pattern does not resolve to a class + +In general this does not allow for narrowing, but we make an exception for `Any`. This is to support +[real ecosystem code](https://github.com/jax-ml/jax/blob/d2ce04b6c3d03ae18b145965b8b8b92e09e8009c/jax/_src/pallas/mosaic_gpu/lowering.py#L3372-L3387) +found in `jax`. + +```py +from typing import Any + +X = Any + +def f(obj: object): + match obj: + case int(): + reveal_type(obj) # revealed: int + case X(): + reveal_type(obj) # revealed: Any & ~int + +def g(obj: object, Y: Any): + match obj: + case int(): + reveal_type(obj) # revealed: int + case Y(): + reveal_type(obj) # revealed: Any & ~int +``` + ## Value patterns Value patterns are evaluated by equality, which is overridable. 
Therefore successfully matching on diff --git a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs index af3ef642e3..1224190209 100644 --- a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs +++ b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs @@ -771,8 +771,9 @@ impl ReachabilityConstraints { truthiness } PatternPredicateKind::Class(class_expr, kind) => { - let class_ty = - infer_expression_type(db, *class_expr, TypeContext::default()).to_instance(db); + let class_ty = infer_expression_type(db, *class_expr, TypeContext::default()) + .as_class_literal() + .map(|class| Type::instance(db, class.top_materialization(db))); class_ty.map_or(Truthiness::Ambiguous, |class_ty| { if subject_ty.is_subtype_of(db, class_ty) { diff --git a/crates/ty_python_semantic/src/types/narrow.rs b/crates/ty_python_semantic/src/types/narrow.rs index 736272cb4a..5b709551f5 100644 --- a/crates/ty_python_semantic/src/types/narrow.rs +++ b/crates/ty_python_semantic/src/types/narrow.rs @@ -11,8 +11,9 @@ use crate::types::enums::{enum_member_literals, enum_metadata}; use crate::types::function::KnownFunction; use crate::types::infer::infer_same_file_expression_type; use crate::types::{ - ClassLiteral, ClassType, IntersectionBuilder, KnownClass, SubclassOfInner, SubclassOfType, - Truthiness, Type, TypeContext, TypeVarBoundOrConstraints, UnionBuilder, infer_expression_types, + ClassLiteral, ClassType, IntersectionBuilder, KnownClass, SpecialFormType, SubclassOfInner, + SubclassOfType, Truthiness, Type, TypeContext, TypeVarBoundOrConstraints, UnionBuilder, + infer_expression_types, }; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; @@ -962,11 +963,20 @@ impl<'db, 'ast> NarrowingConstraintsBuilder<'db, 'ast> { let subject = place_expr(subject.node_ref(self.db, self.module))?; let place = self.expect_place(&subject); - let ty = 
infer_same_file_expression_type(self.db, cls, TypeContext::default(), self.module) - .to_instance(self.db)?; + let class_type = + infer_same_file_expression_type(self.db, cls, TypeContext::default(), self.module); - let ty = ty.negate_if(self.db, !is_positive); - Some(NarrowingConstraints::from_iter([(place, ty)])) + let narrowed_type = match class_type { + Type::ClassLiteral(class) => { + Type::instance(self.db, class.top_materialization(self.db)) + .negate_if(self.db, !is_positive) + } + dynamic @ Type::Dynamic(_) => dynamic, + Type::SpecialForm(SpecialFormType::Any) => Type::any(), + _ => return None, + }; + + Some(NarrowingConstraints::from_iter([(place, narrowed_type)])) } fn evaluate_match_pattern_value( From 3be3a10a2fc6ddaafc13d1f5bca566282285be5f Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Thu, 30 Oct 2025 23:19:59 +0000 Subject: [PATCH 002/180] [ty] Don't provide completions when in class or function definition (#21146) --- crates/ty_ide/src/completion.rs | 111 ++++++++++++++++++++++++++++++-- 1 file changed, 106 insertions(+), 5 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 99debdf4d9..6118b4c85b 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -212,7 +212,10 @@ pub fn completion<'db>( offset: TextSize, ) -> Vec> { let parsed = parsed_module(db, file).load(db); - if is_in_comment(&parsed, offset) || is_in_string(&parsed, offset) { + + let tokens = tokens_start_before(parsed.tokens(), offset); + + if is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file) { return vec![]; } @@ -829,8 +832,7 @@ fn find_typed_text( /// Whether the given offset within the parsed module is within /// a comment or not. 
-fn is_in_comment(parsed: &ParsedModuleRef, offset: TextSize) -> bool { - let tokens = tokens_start_before(parsed.tokens(), offset); +fn is_in_comment(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| t.kind().is_comment()) } @@ -839,8 +841,7 @@ fn is_in_comment(parsed: &ParsedModuleRef, offset: TextSize) -> bool { /// /// Note that this will return `false` when positioned within an /// interpolation block in an f-string or a t-string. -fn is_in_string(parsed: &ParsedModuleRef, offset: TextSize) -> bool { - let tokens = tokens_start_before(parsed.tokens(), offset); +fn is_in_string(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| { matches!( t.kind(), @@ -849,6 +850,29 @@ fn is_in_string(parsed: &ParsedModuleRef, offset: TextSize) -> bool { }) } +/// If the tokens end with `class f` or `def f` we return true. +/// If the tokens end with `class` or `def`, we return false. +/// This is fine because we don't provide completions anyway. +fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { + tokens + .len() + .checked_sub(2) + .and_then(|i| tokens.get(i)) + .is_some_and(|t| { + if matches!( + t.kind(), + TokenKind::Def | TokenKind::Class | TokenKind::Type + ) { + true + } else if t.kind() == TokenKind::Name { + let source = source_text(db, file); + &source[t.range()] == "type" + } else { + false + } + }) +} + /// Order completions according to the following rules: /// /// 1) Names with no underscore prefix @@ -4058,6 +4082,83 @@ def f[T](x: T): test.build().contains("__repr__"); } + #[test] + fn no_completions_in_function_def_name() { + let builder = completion_test_builder( + "\ +def f + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_function_def_empty_name() { + let builder = completion_test_builder( + "\ +def + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_class_def_name() { + let builder = 
completion_test_builder( + "\ +class f + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_class_def_empty_name() { + let builder = completion_test_builder( + "\ +class + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_type_def_name() { + let builder = completion_test_builder( + "\ +type f = int + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_maybe_type_def_name() { + let builder = completion_test_builder( + "\ +type f + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + + #[test] + fn no_completions_in_type_def_empty_name() { + let builder = completion_test_builder( + "\ +type + ", + ); + + builder.auto_import().build().not_contains("fabs"); + } + /// A way to create a simple single-file (named `main.py`) completion test /// builder. /// From 8737a2d5f5138d855ef4b3ff6982bd7684324eab Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Thu, 30 Oct 2025 17:06:29 -0700 Subject: [PATCH 003/180] Bump v0.14.3 (#21152) - **Upgrade to rooster==0.1.1** - **Changelog for v0.14.3** - **Bump v0.14.3** --- CHANGELOG.md | 55 +++++++++++++++++++++++++++++++ Cargo.lock | 6 ++-- README.md | 6 ++-- crates/ruff/Cargo.toml | 2 +- crates/ruff_linter/Cargo.toml | 2 +- crates/ruff_wasm/Cargo.toml | 2 +- docs/integrations.md | 8 ++--- docs/tutorial.md | 2 +- pyproject.toml | 2 +- scripts/benchmarks/pyproject.toml | 2 +- scripts/release.sh | 2 +- 11 files changed, 72 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4689757c34..e7d5ed2e2b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,60 @@ # Changelog +## 0.14.3 + +Released on 2025-10-30. 
+ +### Preview features + +- Respect `--output-format` with `--watch` ([#21097](https://github.com/astral-sh/ruff/pull/21097)) +- \[`pydoclint`\] Fix false positive on explicit exception re-raising (`DOC501`, `DOC502`) ([#21011](https://github.com/astral-sh/ruff/pull/21011)) +- \[`pyflakes`\] Revert to stable behavior if imports for module lie in alternate branches for `F401` ([#20878](https://github.com/astral-sh/ruff/pull/20878)) +- \[`pylint`\] Implement `stop-iteration-return` (`PLR1708`) ([#20733](https://github.com/astral-sh/ruff/pull/20733)) +- \[`ruff`\] Add support for additional eager conversion patterns (`RUF065`) ([#20657](https://github.com/astral-sh/ruff/pull/20657)) + +### Bug fixes + +- Fix finding keyword range for clause header after statement ending with semicolon ([#21067](https://github.com/astral-sh/ruff/pull/21067)) +- Fix syntax error false positive on nested alternative patterns ([#21104](https://github.com/astral-sh/ruff/pull/21104)) +- \[`ISC001`\] Fix panic when string literals are unclosed ([#21034](https://github.com/astral-sh/ruff/pull/21034)) +- \[`flake8-django`\] Apply `DJ001` to annotated fields ([#20907](https://github.com/astral-sh/ruff/pull/20907)) +- \[`flake8-pyi`\] Fix `PYI034` to not trigger on metaclasses (`PYI034`) ([#20881](https://github.com/astral-sh/ruff/pull/20881)) +- \[`flake8-type-checking`\] Fix `TC003` false positive with `future-annotations` ([#21125](https://github.com/astral-sh/ruff/pull/21125)) +- \[`pyflakes`\] Fix false positive for `__class__` in lambda expressions within class definitions (`F821`) ([#20564](https://github.com/astral-sh/ruff/pull/20564)) +- \[`pyupgrade`\] Fix false positive for `TypeVar` with default on Python \<3.13 (`UP046`,`UP047`) ([#21045](https://github.com/astral-sh/ruff/pull/21045)) + +### Rule changes + +- Add missing docstring sections to the numpy list ([#20931](https://github.com/astral-sh/ruff/pull/20931)) +- \[`airflow`\] Extend `airflow.models..Param` check (`AIR311`) 
([#21043](https://github.com/astral-sh/ruff/pull/21043)) +- \[`airflow`\] Warn that `airflow....DAG.create_dagrun` has been removed (`AIR301`) ([#21093](https://github.com/astral-sh/ruff/pull/21093)) +- \[`refurb`\] Preserve digit separators in `Decimal` constructor (`FURB157`) ([#20588](https://github.com/astral-sh/ruff/pull/20588)) + +### Server + +- Avoid sending an unnecessary "clear diagnostics" message for clients supporting pull diagnostics ([#21105](https://github.com/astral-sh/ruff/pull/21105)) + +### Documentation + +- \[`flake8-bandit`\] Fix correct example for `S308` ([#21128](https://github.com/astral-sh/ruff/pull/21128)) + +### Other changes + +- Clearer error message when `line-length` goes beyond threshold ([#21072](https://github.com/astral-sh/ruff/pull/21072)) + +### Contributors + +- [@danparizher](https://github.com/danparizher) +- [@jvacek](https://github.com/jvacek) +- [@ntBre](https://github.com/ntBre) +- [@augustelalande](https://github.com/augustelalande) +- [@prakhar1144](https://github.com/prakhar1144) +- [@TaKO8Ki](https://github.com/TaKO8Ki) +- [@dylwil3](https://github.com/dylwil3) +- [@fatelei](https://github.com/fatelei) +- [@ShaharNaveh](https://github.com/ShaharNaveh) +- [@Lee-W](https://github.com/Lee-W) + ## 0.14.2 Released on 2025-10-23. 
diff --git a/Cargo.lock b/Cargo.lock index 070f471e4e..38eff20a3f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2835,7 +2835,7 @@ dependencies = [ [[package]] name = "ruff" -version = "0.14.2" +version = "0.14.3" dependencies = [ "anyhow", "argfile", @@ -3092,7 +3092,7 @@ dependencies = [ [[package]] name = "ruff_linter" -version = "0.14.2" +version = "0.14.3" dependencies = [ "aho-corasick", "anyhow", @@ -3447,7 +3447,7 @@ dependencies = [ [[package]] name = "ruff_wasm" -version = "0.14.2" +version = "0.14.3" dependencies = [ "console_error_panic_hook", "console_log", diff --git a/README.md b/README.md index 92d707838a..dcb399dd83 100644 --- a/README.md +++ b/README.md @@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh powershell -c "irm https://astral.sh/ruff/install.ps1 | iex" # For a specific version. -curl -LsSf https://astral.sh/ruff/0.14.2/install.sh | sh -powershell -c "irm https://astral.sh/ruff/0.14.2/install.ps1 | iex" +curl -LsSf https://astral.sh/ruff/0.14.3/install.sh | sh +powershell -c "irm https://astral.sh/ruff/0.14.3/install.ps1 | iex" ``` You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff), @@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.3 hooks: # Run the linter. 
- id: ruff-check diff --git a/crates/ruff/Cargo.toml b/crates/ruff/Cargo.toml index c1511d805b..e977d5223e 100644 --- a/crates/ruff/Cargo.toml +++ b/crates/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff" -version = "0.14.2" +version = "0.14.3" publish = true authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_linter/Cargo.toml b/crates/ruff_linter/Cargo.toml index bc25d4574f..0826f28fbc 100644 --- a/crates/ruff_linter/Cargo.toml +++ b/crates/ruff_linter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_linter" -version = "0.14.2" +version = "0.14.3" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_wasm/Cargo.toml b/crates/ruff_wasm/Cargo.toml index f399ef1007..2dc77f3b8e 100644 --- a/crates/ruff_wasm/Cargo.toml +++ b/crates/ruff_wasm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_wasm" -version = "0.14.2" +version = "0.14.3" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/docs/integrations.md b/docs/integrations.md index 441845a474..f37ce29852 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -80,7 +80,7 @@ You can add the following configuration to `.gitlab-ci.yml` to run a `ruff forma stage: build interruptible: true image: - name: ghcr.io/astral-sh/ruff:0.14.2-alpine + name: ghcr.io/astral-sh/ruff:0.14.3-alpine before_script: - cd $CI_PROJECT_DIR - ruff --version @@ -106,7 +106,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.3 hooks: # Run the linter. - id: ruff-check @@ -119,7 +119,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook: ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.3 hooks: # Run the linter. 
- id: ruff-check @@ -133,7 +133,7 @@ To avoid running on Jupyter Notebooks, remove `jupyter` from the list of allowed ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.3 hooks: # Run the linter. - id: ruff-check diff --git a/docs/tutorial.md b/docs/tutorial.md index f3f2a8b3dd..4b0c43ac06 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -369,7 +369,7 @@ This tutorial has focused on Ruff's command-line interface, but Ruff can also be ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.2 + rev: v0.14.3 hooks: # Run the linter. - id: ruff diff --git a/pyproject.toml b/pyproject.toml index 91b5430a8e..28c9c93b39 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "ruff" -version = "0.14.2" +version = "0.14.3" description = "An extremely fast Python linter and code formatter, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] readme = "README.md" diff --git a/scripts/benchmarks/pyproject.toml b/scripts/benchmarks/pyproject.toml index df9f38e5db..92a8da6ea7 100644 --- a/scripts/benchmarks/pyproject.toml +++ b/scripts/benchmarks/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "scripts" -version = "0.14.2" +version = "0.14.3" description = "" authors = ["Charles Marsh "] diff --git a/scripts/release.sh b/scripts/release.sh index ad97c20c8a..ae8d22f525 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -12,7 +12,7 @@ project_root="$(dirname "$script_root")" echo "Updating metadata with rooster..." cd "$project_root" uvx --python 3.12 --isolated -- \ - rooster@0.1.0 release "$@" + rooster@0.1.1 release "$@" echo "Updating lockfile..." 
cargo update -p ruff From 4b758b374689f0ce8d96a25629786ae0fb9a8a03 Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Fri, 31 Oct 2025 00:43:50 +0000 Subject: [PATCH 004/180] [ty] Fix tests for definition completions (#21153) --- crates/ty_ide/src/completion.rs | 51 ++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 23 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 6118b4c85b..273a148ef3 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -854,23 +854,25 @@ fn is_in_string(tokens: &[Token]) -> bool { /// If the tokens end with `class` or `def`, we return false. /// This is fine because we don't provide completions anyway. fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { + let is_definition_keyword = |token: &Token| { + if matches!( + token.kind(), + TokenKind::Def | TokenKind::Class | TokenKind::Type + ) { + true + } else if token.kind() == TokenKind::Name { + let source = source_text(db, file); + &source[token.range()] == "type" + } else { + false + } + }; + tokens .len() .checked_sub(2) .and_then(|i| tokens.get(i)) - .is_some_and(|t| { - if matches!( - t.kind(), - TokenKind::Def | TokenKind::Class | TokenKind::Type - ) { - true - } else if t.kind() == TokenKind::Name { - let source = source_text(db, file); - &source[t.range()] == "type" - } else { - false - } - }) + .is_some_and(is_definition_keyword) } /// Order completions according to the following rules: @@ -4090,18 +4092,19 @@ def f ", ); - builder.auto_import().build().not_contains("fabs"); + assert!(builder.auto_import().build().completions().is_empty()); } #[test] - fn no_completions_in_function_def_empty_name() { + fn completions_in_function_def_empty_name() { let builder = completion_test_builder( "\ def ", ); - builder.auto_import().build().not_contains("fabs"); + // This is okay because the ide will not request completions when the cursor is in this position. 
+ assert!(!builder.auto_import().build().completions().is_empty()); } #[test] @@ -4112,18 +4115,19 @@ class f ", ); - builder.auto_import().build().not_contains("fabs"); + assert!(builder.auto_import().build().completions().is_empty()); } #[test] - fn no_completions_in_class_def_empty_name() { + fn completions_in_class_def_empty_name() { let builder = completion_test_builder( "\ class ", ); - builder.auto_import().build().not_contains("fabs"); + // This is okay because the ide will not request completions when the cursor is in this position. + assert!(!builder.auto_import().build().completions().is_empty()); } #[test] @@ -4134,7 +4138,7 @@ type f = int ", ); - builder.auto_import().build().not_contains("fabs"); + assert!(builder.auto_import().build().completions().is_empty()); } #[test] @@ -4145,18 +4149,19 @@ type f ", ); - builder.auto_import().build().not_contains("fabs"); + assert!(builder.auto_import().build().completions().is_empty()); } #[test] - fn no_completions_in_type_def_empty_name() { + fn completions_in_type_def_empty_name() { let builder = completion_test_builder( "\ type ", ); - builder.auto_import().build().not_contains("fabs"); + // This is okay because the ide will not request completions when the cursor is in this position. 
+ assert!(!builder.auto_import().build().completions().is_empty()); } /// A way to create a simple single-file (named `main.py`) completion test From 4b026c2a553caff2a25641c14f9cdc8153ee3a63 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Fri, 31 Oct 2025 02:16:43 +0100 Subject: [PATCH 005/180] Fix missing diagnostics for notebooks (#21156) --- .../ruff_server/src/server/api/diagnostics.rs | 23 ++----------------- .../server/api/notifications/did_change.rs | 6 ++++- .../api/notifications/did_change_notebook.rs | 5 +++- .../notifications/did_change_watched_files.rs | 10 ++++++-- .../src/server/api/notifications/did_close.rs | 2 +- .../src/server/api/notifications/did_open.rs | 12 +++++++++- .../api/notifications/did_open_notebook.rs | 5 +++- 7 files changed, 35 insertions(+), 28 deletions(-) diff --git a/crates/ruff_server/src/server/api/diagnostics.rs b/crates/ruff_server/src/server/api/diagnostics.rs index 2c8faab6db..6f8efe47e8 100644 --- a/crates/ruff_server/src/server/api/diagnostics.rs +++ b/crates/ruff_server/src/server/api/diagnostics.rs @@ -1,7 +1,4 @@ -use lsp_types::Url; - use crate::{ - Session, lint::DiagnosticsMap, session::{Client, DocumentQuery, DocumentSnapshot}, }; @@ -22,21 +19,10 @@ pub(super) fn generate_diagnostics(snapshot: &DocumentSnapshot) -> DiagnosticsMa } pub(super) fn publish_diagnostics_for_document( - session: &Session, - url: &Url, + snapshot: &DocumentSnapshot, client: &Client, ) -> crate::server::Result<()> { - // Publish diagnostics if the client doesn't support pull diagnostics - if session.resolved_client_capabilities().pull_diagnostics { - return Ok(()); - } - - let snapshot = session - .take_snapshot(url.clone()) - .ok_or_else(|| anyhow::anyhow!("Unable to take snapshot for document with URL {url}")) - .with_failure_code(lsp_server::ErrorCode::InternalError)?; - - for (uri, diagnostics) in generate_diagnostics(&snapshot) { + for (uri, diagnostics) in generate_diagnostics(snapshot) { client .send_notification::( 
lsp_types::PublishDiagnosticsParams { @@ -52,14 +38,9 @@ pub(super) fn publish_diagnostics_for_document( } pub(super) fn clear_diagnostics_for_document( - session: &Session, query: &DocumentQuery, client: &Client, ) -> crate::server::Result<()> { - if session.resolved_client_capabilities().pull_diagnostics { - return Ok(()); - } - client .send_notification::( lsp_types::PublishDiagnosticsParams { diff --git a/crates/ruff_server/src/server/api/notifications/did_change.rs b/crates/ruff_server/src/server/api/notifications/did_change.rs index 5ac7a1f606..8e77cb593f 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change.rs +++ b/crates/ruff_server/src/server/api/notifications/did_change.rs @@ -31,7 +31,11 @@ impl super::SyncNotificationHandler for DidChange { .update_text_document(&key, content_changes, new_version) .with_failure_code(ErrorCode::InternalError)?; - publish_diagnostics_for_document(session, &key.into_url(), client)?; + // Publish diagnostics if the client doesn't support pull diagnostics + if !session.resolved_client_capabilities().pull_diagnostics { + let snapshot = session.take_snapshot(key.into_url()).unwrap(); + publish_diagnostics_for_document(&snapshot, client)?; + } Ok(()) } diff --git a/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs b/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs index da11755d71..d092ccacb8 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs +++ b/crates/ruff_server/src/server/api/notifications/did_change_notebook.rs @@ -27,7 +27,10 @@ impl super::SyncNotificationHandler for DidChangeNotebook { .with_failure_code(ErrorCode::InternalError)?; // publish new diagnostics - publish_diagnostics_for_document(session, &key.into_url(), client)?; + let snapshot = session + .take_snapshot(key.into_url()) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; Ok(()) } diff --git 
a/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs b/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs index cb157d81f9..bc97231411 100644 --- a/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs +++ b/crates/ruff_server/src/server/api/notifications/did_change_watched_files.rs @@ -31,13 +31,19 @@ impl super::SyncNotificationHandler for DidChangeWatchedFiles { } else { // publish diagnostics for text documents for url in session.text_document_urls() { - publish_diagnostics_for_document(session, url, client)?; + let snapshot = session + .take_snapshot(url.clone()) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; } } // always publish diagnostics for notebook files (since they don't use pull diagnostics) for url in session.notebook_document_urls() { - publish_diagnostics_for_document(session, url, client)?; + let snapshot = session + .take_snapshot(url.clone()) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; } } diff --git a/crates/ruff_server/src/server/api/notifications/did_close.rs b/crates/ruff_server/src/server/api/notifications/did_close.rs index 5a482c4fcc..a3075a4846 100644 --- a/crates/ruff_server/src/server/api/notifications/did_close.rs +++ b/crates/ruff_server/src/server/api/notifications/did_close.rs @@ -27,7 +27,7 @@ impl super::SyncNotificationHandler for DidClose { ); return Ok(()); }; - clear_diagnostics_for_document(session, snapshot.query(), client)?; + clear_diagnostics_for_document(snapshot.query(), client)?; session .close_document(&key) diff --git a/crates/ruff_server/src/server/api/notifications/did_open.rs b/crates/ruff_server/src/server/api/notifications/did_open.rs index fa5f6b92df..41a6fb6cf8 100644 --- a/crates/ruff_server/src/server/api/notifications/did_open.rs +++ b/crates/ruff_server/src/server/api/notifications/did_open.rs @@ -1,5 +1,6 @@ use 
crate::TextDocument; use crate::server::Result; +use crate::server::api::LSPResult; use crate::server::api::diagnostics::publish_diagnostics_for_document; use crate::session::{Client, Session}; use lsp_types as types; @@ -29,7 +30,16 @@ impl super::SyncNotificationHandler for DidOpen { session.open_text_document(uri.clone(), document); - publish_diagnostics_for_document(session, &uri, client)?; + // Publish diagnostics if the client doesn't support pull diagnostics + if !session.resolved_client_capabilities().pull_diagnostics { + let snapshot = session + .take_snapshot(uri.clone()) + .ok_or_else(|| { + anyhow::anyhow!("Unable to take snapshot for document with URL {uri}") + }) + .with_failure_code(lsp_server::ErrorCode::InternalError)?; + publish_diagnostics_for_document(&snapshot, client)?; + } Ok(()) } diff --git a/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs b/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs index 3ce27168e4..a75e88ecc5 100644 --- a/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs +++ b/crates/ruff_server/src/server/api/notifications/did_open_notebook.rs @@ -40,7 +40,10 @@ impl super::SyncNotificationHandler for DidOpenNotebook { session.open_notebook_document(uri.clone(), notebook); // publish diagnostics - publish_diagnostics_for_document(session, &uri, client)?; + let snapshot = session + .take_snapshot(uri) + .expect("snapshot should be available"); + publish_diagnostics_for_document(&snapshot, client)?; Ok(()) } From 3585c96ea551366f97f5d1b4743b2f0d648b9e9c Mon Sep 17 00:00:00 2001 From: Ben Beasley Date: Fri, 31 Oct 2025 12:53:18 +0000 Subject: [PATCH 006/180] Update etcetera to 0.11.0 (#21160) --- Cargo.lock | 38 ++++++++++++++------------------------ Cargo.toml | 2 +- 2 files changed, 15 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 38eff20a3f..af119dab7e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -243,7 +243,7 @@ dependencies = [ "bitflags 
2.9.4", "cexpr", "clang-sys", - "itertools 0.10.5", + "itertools 0.13.0", "log", "prettyplease", "proc-macro2", @@ -633,7 +633,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -642,7 +642,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1007,7 +1007,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.60.2", + "windows-sys 0.61.0", ] [[package]] @@ -1093,7 +1093,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.0", ] [[package]] @@ -1115,13 +1115,12 @@ dependencies = [ [[package]] name = "etcetera" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c7b13d0780cb82722fd59f6f57f925e143427e4a75313a6c77243bf5326ae6" +checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96" dependencies = [ "cfg-if", - "home", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -1366,15 +1365,6 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" -[[package]] -name = "home" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" -dependencies = [ - "windows-sys 0.59.0", -] - [[package]] name = "html-escape" version = "0.2.13" @@ -1563,7 +1553,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.0", "serde", "serde_core", ] @@ -1690,7 +1680,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1754,7 +1744,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3545,7 +3535,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.61.0", ] [[package]] @@ -3941,7 +3931,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.61.0", ] [[package]] @@ -5021,7 +5011,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.61.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 1cce423668..935196f6a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ dashmap = { version = "6.0.1" } dir-test = { version = "0.4.0" } dunce = { version = "1.0.5" } drop_bomb = { version = "0.1.5" } -etcetera = { version = "0.10.0" } +etcetera = { version = "0.11.0" } fern = { version = "0.7.0" } filetime = { version = "0.2.23" } getrandom = { version = "0.3.1" } From 735ec0c1f97d5b80f1161835ffb3a784d3eebcac Mon Sep 17 00:00:00 2001 From: Mahmoud Saada Date: Fri, 31 Oct 2025 08:55:17 -0400 Subject: [PATCH 007/180] [ty] Fix generic inference for non-dataclass inheriting from generic dataclass (#21159) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Summary Fixes https://github.com/astral-sh/ty/issues/1427 This PR fixes a regression introduced in alpha.24 where 
non-dataclass children of generic dataclasses lost generic type parameter information during `__init__` synthesis. The issue occurred because when looking up inherited members in the MRO, the child class's `inherited_generic_context` was correctly passed down, but `own_synthesized_member()` (which synthesizes dataclass `__init__` methods) didn't accept this parameter. It only used `self.inherited_generic_context(db)`, which returned the parent's context instead of the child's. The fix threads the child's generic context through to the synthesis logic, allowing proper generic type inference for inherited dataclass constructors. ## Test Plan - Added regression test for non-dataclass inheriting from generic dataclass - Verified the exact repro case from the issue now works - All 277 mdtest tests passing - Clippy clean - Manually verified with Python runtime, mypy, and pyright - all accept this code pattern ## Verification Tested against multiple type checkers: - ✅ Python runtime: Code works correctly - ✅ mypy: No issues found - ✅ pyright: 0 errors, 0 warnings - ✅ ty alpha.23: Worked (before regression) - ❌ ty alpha.24: Regression - ✅ ty with this fix: Works correctly --------- Co-authored-by: Claude Co-authored-by: David Peter --- .../mdtest/dataclasses/dataclasses.md | 34 +++++++++++++++++++ crates/ty_python_semantic/src/types/class.rs | 8 +++-- 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md index e7171b6dd4..d8619851a2 100644 --- a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md +++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md @@ -838,6 +838,40 @@ class WrappedIntAndExtraData[T](Wrap[int]): reveal_type(WrappedIntAndExtraData[bytes].__init__) ``` +### Non-dataclass inheriting from generic dataclass + +This is a regression test for . 
+ +When a non-dataclass inherits from a generic dataclass, the generic type parameters should still be +properly inferred when calling the inherited `__init__` method. + +```py +from dataclasses import dataclass + +@dataclass +class ParentDataclass[T]: + value: T + +# Non-dataclass inheriting from generic dataclass +class ChildOfParentDataclass[T](ParentDataclass[T]): ... + +def uses_dataclass[T](x: T) -> ChildOfParentDataclass[T]: + return ChildOfParentDataclass(x) + +# TODO: ParentDataclass.__init__ should show generic types, not Unknown +# revealed: (self: ParentDataclass[Unknown], value: Unknown) -> None +reveal_type(ParentDataclass.__init__) + +# revealed: (self: ParentDataclass[T@ChildOfParentDataclass], value: T@ChildOfParentDataclass) -> None +reveal_type(ChildOfParentDataclass.__init__) + +result_int = uses_dataclass(42) +reveal_type(result_int) # revealed: ChildOfParentDataclass[Literal[42]] + +result_str = uses_dataclass("hello") +reveal_type(result_str) # revealed: ChildOfParentDataclass[Literal["hello"]] +``` + ## Descriptor-typed fields ### Same type in `__get__` and `__set__` diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 75190a3c3a..4f8ee4c1fc 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -2176,7 +2176,8 @@ impl<'db> ClassLiteral<'db> { }); if member.is_undefined() { - if let Some(synthesized_member) = self.own_synthesized_member(db, specialization, name) + if let Some(synthesized_member) = + self.own_synthesized_member(db, specialization, inherited_generic_context, name) { return Member::definitely_declared(synthesized_member); } @@ -2192,6 +2193,7 @@ impl<'db> ClassLiteral<'db> { self, db: &'db dyn Db, specialization: Option>, + inherited_generic_context: Option>, name: &str, ) -> Option> { let dataclass_params = self.dataclass_params(db); @@ -2320,7 +2322,7 @@ impl<'db> ClassLiteral<'db> { let signature = match name { 
"__new__" | "__init__" => Signature::new_generic( - self.inherited_generic_context(db), + inherited_generic_context.or_else(|| self.inherited_generic_context(db)), Parameters::new(parameters), return_ty, ), @@ -2702,7 +2704,7 @@ impl<'db> ClassLiteral<'db> { name: &str, policy: MemberLookupPolicy, ) -> PlaceAndQualifiers<'db> { - if let Some(member) = self.own_synthesized_member(db, specialization, name) { + if let Some(member) = self.own_synthesized_member(db, specialization, None, name) { Place::bound(member).into() } else { KnownClass::TypedDictFallback From 172e8d4ae060f2d1749b4627d3672254c6bb1366 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Fri, 31 Oct 2025 10:29:24 -0400 Subject: [PATCH 008/180] [ty] Support implicit imports of submodules in `__init__.pyi` (#20855) This is a second take at the implicit imports approach, allowing `from . import submodule` in an `__init__.pyi` to create the `mypackage.submodule` attribute everyhere. This implementation operates inside of the available_submodule_attributes subsystem instead of as a re-export rule. The upside of this is we are no longer purely syntactic, and absolute from imports that happen to target submodules work (an intentional discussed deviation from pyright which demands a relative from import). Also we don't re-export functions or classes. The downside(?) of this is star imports no longer see these attributes (this may be either good or bad. I believe it's not a huge lift to make it work with star imports but it's some non-trivial reworking). I've also intentionally made `import mypackage.submodule` not trigger this rule although it's trivial to change that. I've tried to cover as many relevant cases as possible for discussion in the new test file I've added (there are some random overlaps with existing tests but trying to add them piecemeal felt confusing and weird, so I just made a dedicated file for this extension to the rules). 
Fixes https://github.com/astral-sh/ty/issues/133 ## Summary ## Test Plan --- crates/ruff_db/src/files.rs | 5 + .../mdtest/import/nonstandard_conventions.md | 824 ++++++++++++++++++ .../ty_python_semantic/src/semantic_index.rs | 87 +- .../src/semantic_index/builder.rs | 17 +- crates/ty_python_semantic/src/types.rs | 61 +- 5 files changed, 982 insertions(+), 12 deletions(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md diff --git a/crates/ruff_db/src/files.rs b/crates/ruff_db/src/files.rs index 754b65642a..4d57162c7c 100644 --- a/crates/ruff_db/src/files.rs +++ b/crates/ruff_db/src/files.rs @@ -470,6 +470,11 @@ impl File { self.source_type(db).is_stub() } + /// Returns `true` if the file is an `__init__.pyi` + pub fn is_package_stub(self, db: &dyn Db) -> bool { + self.path(db).as_str().ends_with("__init__.pyi") + } + pub fn source_type(self, db: &dyn Db) -> PySourceType { match self.path(db) { FilePath::System(path) => path diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md new file mode 100644 index 0000000000..848eaae387 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -0,0 +1,824 @@ +# Nonstandard Import Conventions + +This document covers ty-specific extensions to the +[standard import conventions](https://typing.python.org/en/latest/spec/distributing.html#import-conventions). + +It's a common idiom for a package's `__init__.py(i)` to include several imports like +`from . import mysubmodule`, with the intent that the `mypackage.mysubmodule` attribute should work +for anyone who only imports `mypackage`. + +In the context of a `.py` we handle this well through our general attempts to faithfully implement +import side-effects. 
However for `.pyi` files we are expected to apply
+[a more strict set of rules](https://typing.python.org/en/latest/spec/distributing.html#import-conventions)
+to encourage intentional API design. Although `.pyi` files are explicitly designed to work with
+typecheckers, which ostensibly should all enforce these strict rules, every typechecker has its own
+de facto "extensions" to them and so a few idioms like `from . import mysubmodule` have found their
+way into `.pyi` files too.
+
+Thus for the sake of compatibility, we need to define our own "extensions". Any extensions we define
+here have several competing concerns:
+
+- Extensions should ideally be kept narrow to continue to encourage explicit API design
+- Extensions should be easy to explain, document, and understand
+- Extensions should ideally still be a subset of runtime behaviour (if it works in a stub, it works
+  at runtime)
+- Extensions should ideally not make `.pyi` files more permissive than `.py` files (if it works in a
+  stub, it works in an impl)
+
+To that end we define the following extension:
+
+> If an `__init__.pyi` for `mypackage` contains a `from...import` targeting a direct submodule of
+> `mypackage`, then that submodule should be available as an attribute of `mypackage`.
+
+## Relative `from` Import of Direct Submodule in `__init__`
+
+The `from . import submodule` idiom in an `__init__.pyi` is fairly explicit and we should definitely
+support it.
+
+`mypackage/__init__.pyi`:
+
+```pyi
+from . import imported
+```
+
+`mypackage/imported.pyi`:
+
+```pyi
+X: int = 42
+```
+
+`mypackage/fails.pyi`:
+
+```pyi
+Y: int = 47
+```
+
+`main.py`:
+
+```py
+import mypackage
+
+reveal_type(mypackage.imported.X)  # revealed: int
+# error: "has no member `fails`"
+reveal_type(mypackage.fails.Y)  # revealed: Unknown
+```
+
+## Relative `from` Import of Direct Submodule in `__init__` (Non-Stub Check)
+
+`mypackage/__init__.py`:
+
+```py
+from .
import imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Absolute `from` Import of Direct Submodule in `__init__` + +If an absolute `from...import` happens to import a submodule, it works just as well as a relative +one. + +`mypackage/__init__.pyi`: + +```pyi +from mypackage import imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`mypackage/fails.pyi`: + +```pyi +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Absolute `from` Import of Direct Submodule in `__init__` (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from mypackage import imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Import of Direct Submodule in `__init__` + +An `import` that happens to import a submodule does not expose the submodule as an attribute. (This +is an arbitrary decision and can be changed easily!) 
+ +`mypackage/__init__.pyi`: + +```pyi +import mypackage.imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this is probably safe to allow, as it's an unambiguous import of a submodule +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## Import of Direct Submodule in `__init__` (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +import mypackage.imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this is probably safe to allow, as it's an unambiguous import of a submodule +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## Relative `from` Import of Nested Submodule in `__init__` + +`from .submodule import nested` in an `__init__.pyi` is currently not supported as a way to expose +`mypackage.submodule` or `mypackage.submodule.nested` but it could be. 
+ +`mypackage/__init__.pyi`: + +```pyi +from .submodule import nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to allow +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Relative `from` Import of Nested Submodule in `__init__` (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from .submodule import nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Absolute `from` Import of Nested Submodule in `__init__` + +`from mypackage.submodule import nested` in an `__init__.pyi` is currently not supported as a way to +expose `mypackage.submodule` or `mypackage.submodule.nested` but it could be. 
+ +`mypackage/__init__.pyi`: + +```pyi +from mypackage.submodule import nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Absolute `from` Import of Nested Submodule in `__init__` (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from mypackage.submodule import nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Import of Nested Submodule in `__init__` + +`import mypackage.submodule.nested` in an `__init__.pyi` is currently not supported as a way to +expose `mypackage.submodule` or `mypackage.submodule.nested` but it could be. 
+ +`mypackage/__init__.pyi`: + +```pyi +import mypackage.submodule.nested +``` + +`mypackage/submodule/__init__.pyi`: + +```pyi +``` + +`mypackage/submodule/nested.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support, and is probably safe to do as it's unambiguous +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Import of Nested Submodule in `__init__` (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +import mypackage.submodule.nested +``` + +`mypackage/submodule/__init__.py`: + +```py +``` + +`mypackage/submodule/nested.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support, and is probably safe to do as it's unambiguous +# error: "has no member `submodule`" +reveal_type(mypackage.submodule) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested) # revealed: Unknown +# error: "has no member `submodule`" +reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Mismatched Alias + +Renaming the submodule to something else disables the `__init__.pyi` idiom. + +`mypackage/__init__.pyi`: + +```pyi +from . import imported as imported_m +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +# error: "has no member `imported_m`" +reveal_type(mypackage.imported_m.X) # revealed: Unknown +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Mismatched Alias (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from . 
import imported as imported_m +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support, as it works at runtime +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +reveal_type(mypackage.imported_m.X) # revealed: int +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Matched Alias + +The `__init__.pyi` idiom should definitely always work if the submodule is renamed to itself, as +this is the re-export idiom. + +`mypackage/__init__.pyi`: + +```pyi +from . import imported as imported +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +``` + +## Relative `from` Import of Direct Submodule in `__init__`, Matched Alias (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from . import imported as imported +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +reveal_type(mypackage.imported.X) # revealed: int +``` + +## Star Import Unaffected + +Even if the `__init__` idiom is in effect, star imports do not pick it up. (This is an arbitrary +decision that mostly fell out of the implementation details and can be changed!) + +`mypackage/__init__.pyi`: + +```pyi +from . import imported +Z: int = 17 +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +from mypackage import * + +# TODO: this would be nice to support (available_submodule_attributes isn't visible to `*` imports) +# error: "`imported` used when not defined" +reveal_type(imported.X) # revealed: Unknown +reveal_type(Z) # revealed: int +``` + +## Star Import Unaffected (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from . 
import imported + +Z: int = 17 +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +from mypackage import * + +reveal_type(imported.X) # revealed: int +reveal_type(Z) # revealed: int +``` + +## `from` Import of Non-Submodule + +A from import that terminates in a non-submodule should not expose the intermediate submodules as +attributes. This is an arbitrary decision but on balance probably safe and correct, as otherwise it +would be hard for a stub author to be intentional about the submodules being exposed as attributes. + +`mypackage/__init__.pyi`: + +```pyi +from .imported import X +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## `from` Import of Non-Submodule (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from .imported import X +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage + +# TODO: this would be nice to support, as it works at runtime +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## `from` Import of Other Package's Submodule + +`from mypackage import submodule` from outside the package is not modeled as a side-effect on +`mypackage`, even in the importing file (this could be changed!). 
+ +`mypackage/__init__.pyi`: + +```pyi +``` + +`mypackage/imported.pyi`: + +```pyi +X: int = 42 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +# TODO: this would be nice to support, but it's dangerous with available_submodule_attributes +reveal_type(imported.X) # revealed: int +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## `from` Import of Other Package's Submodule (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +``` + +`mypackage/imported.py`: + +```py +X: int = 42 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +# TODO: this would be nice to support, as it works at runtime +reveal_type(imported.X) # revealed: int +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown +``` + +## `from` Import of Sibling Module + +`from . import submodule` from a sibling module is not modeled as a side-effect on `mypackage` or a +re-export from `submodule`. + +`mypackage/__init__.pyi`: + +```pyi +``` + +`mypackage/imported.pyi`: + +```pyi +from . import fails +X: int = 42 +``` + +`mypackage/fails.pyi`: + +```pyi +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +reveal_type(imported.X) # revealed: int +# error: "has no member `fails`" +reveal_type(imported.fails.Y) # revealed: Unknown +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## `from` Import of Sibling Module (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +``` + +`mypackage/imported.py`: + +```py +from . 
import fails + +X: int = 42 +``` + +`mypackage/fails.py`: + +```py +Y: int = 47 +``` + +`main.py`: + +```py +import mypackage +from mypackage import imported + +reveal_type(imported.X) # revealed: int +reveal_type(imported.fails.Y) # revealed: int +# error: "has no member `fails`" +reveal_type(mypackage.fails.Y) # revealed: Unknown +``` + +## Fractal Re-export Nameclash Problems + +This precise configuration of: + +- a subpackage that defines a submodule with its own name +- that in turn defines a function/class with its own name +- and re-exporting that name through every layer using `from` imports and `__all__` + +Can easily result in the typechecker getting "confused" and thinking imports of the name from the +top-level package are referring to the subpackage and not the function/class. This issue can be +found with the `lobpcg` function in `scipy.sparse.linalg`. + +This kind of failure mode is why the rule is restricted to *direct* submodule imports, as anything +more powerful than that in the current implementation strategy quickly gets the functions and +submodules mixed up. + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/funcmod.pyi`: + +```pyi +__all__ = ["funcmod"] + +def funcmod(x: int) -> int: ... 
+``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +## Fractal Re-export Nameclash Problems (Non-Stub Check) + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/__init__.py`: + +```py +from .funcmod import funcmod + +__all__ = ["funcmod"] +``` + +`mypackage/funcmod/funcmod.py`: + +```py +__all__ = ["funcmod"] + +def funcmod(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` diff --git a/crates/ty_python_semantic/src/semantic_index.rs b/crates/ty_python_semantic/src/semantic_index.rs index 558243f59c..a654873db3 100644 --- a/crates/ty_python_semantic/src/semantic_index.rs +++ b/crates/ty_python_semantic/src/semantic_index.rs @@ -6,12 +6,12 @@ use ruff_db::parsed::parsed_module; use ruff_index::{IndexSlice, IndexVec}; use ruff_python_ast::NodeIndex; +use ruff_python_ast::name::Name; use ruff_python_parser::semantic_errors::SemanticSyntaxError; use rustc_hash::{FxHashMap, FxHashSet}; use salsa::Update; use salsa::plumbing::AsId; -use crate::Db; use crate::module_name::ModuleName; use crate::node_key::NodeKey; use crate::semantic_index::ast_ids::AstIds; @@ -28,6 +28,7 @@ use crate::semantic_index::scope::{ use crate::semantic_index::symbol::ScopedSymbolId; use crate::semantic_index::use_def::{EnclosingSnapshotKey, ScopedEnclosingSnapshotId, UseDefMap}; use crate::semantic_model::HasTrackedScope; +use crate::{Db, Module, resolve_module}; pub mod ast_ids; mod builder; @@ -75,20 +76,73 @@ pub(crate) fn place_table<'db>(db: &'db dyn Db, scope: ScopeId<'db>) -> Arc(db: &'db dyn Db, file: File) -> Arc> { semantic_index(db, file).imported_modules.clone() } +/// Returns the set of relative submodules that are explicitly imported anywhere in +/// `importing_module`. +/// +/// This set only considers `from...import` statements (but it could also include `import`). 
+/// It also only returns a non-empty result for `__init__.pyi` files. +/// See [`ModuleLiteralType::available_submodule_attributes`] for discussion +/// of why this analysis is intentionally limited. +/// +/// This function specifically implements the rule that if an `__init__.pyi` file +/// contains a `from...import` that imports a direct submodule of the package, +/// that submodule should be available as an attribute of the package. +/// +/// While we endeavour to accurately model import side-effects for `.py` files, we intentionally +/// limit them for `.pyi` files to encourage more intentional API design. The standard escape +/// hatches for this are the `import x as x` idiom or listing them in `__all__`, but in practice +/// some other idioms are popular. +/// +/// In particular, many packages have their `__init__` include lines like +/// `from . import subpackage`, with the intent that `mypackage.subpackage` should be +/// available for anyone who only does `import mypackage`. +#[salsa::tracked(returns(deref), heap_size=ruff_memory_usage::heap_size)] +pub(crate) fn imported_relative_submodules_of_stub_package<'db>( + db: &'db dyn Db, + importing_module: Module<'db>, +) -> Box<[ModuleName]> { + let Some(file) = importing_module.file(db) else { + return Box::default(); + }; + if !file.is_package_stub(db) { + return Box::default(); + } + semantic_index(db, file) + .maybe_imported_modules + .iter() + .filter_map(|import| { + let mut submodule = ModuleName::from_identifier_parts( + db, + file, + import.from_module.as_deref(), + import.level, + ) + .ok()?; + // We only actually care if this is a direct submodule of the package + // so this part should actually be exactly the importing module. + let importing_module_name = importing_module.name(db); + if importing_module_name != &submodule { + return None; + } + submodule.extend(&ModuleName::new(import.submodule.as_str())?); + // Throw out the result if this doesn't resolve to an actual module. 
+ // This is quite expensive, but we've gone through a lot of hoops to + // get here so it won't happen too much. + resolve_module(db, &submodule)?; + // Return only the relative part + submodule.relative_to(importing_module_name) + }) + .collect() +} + /// Returns the use-def map for a specific `scope`. /// /// Using [`use_def_map`] over [`semantic_index`] has the advantage that @@ -230,6 +284,9 @@ pub(crate) struct SemanticIndex<'db> { /// The set of modules that are imported anywhere within this file. imported_modules: Arc>, + /// `from...import` statements within this file that might import a submodule. + maybe_imported_modules: FxHashSet, + /// Flags about the global scope (code usage impacting inference) has_future_annotations: bool, @@ -243,6 +300,16 @@ pub(crate) struct SemanticIndex<'db> { generator_functions: FxHashSet, } +/// A `from...import` that may be an import of a module +/// +/// Later analysis will determine if it is. +#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, get_size2::GetSize)] +pub(crate) struct MaybeModuleImport { + level: u32, + from_module: Option, + submodule: Name, +} + impl<'db> SemanticIndex<'db> { /// Returns the place table for a specific scope. 
/// diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index 8107f9c122..5645fed7d4 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -47,7 +47,9 @@ use crate::semantic_index::symbol::{ScopedSymbolId, Symbol}; use crate::semantic_index::use_def::{ EnclosingSnapshotKey, FlowSnapshot, ScopedEnclosingSnapshotId, UseDefMapBuilder, }; -use crate::semantic_index::{ExpressionsScopeMap, SemanticIndex, VisibleAncestorsIter}; +use crate::semantic_index::{ + ExpressionsScopeMap, MaybeModuleImport, SemanticIndex, VisibleAncestorsIter, +}; use crate::semantic_model::HasTrackedScope; use crate::unpack::{EvaluationMode, Unpack, UnpackKind, UnpackPosition, UnpackValue}; use crate::{Db, Program}; @@ -111,6 +113,7 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap>, expressions_by_node: FxHashMap>, imported_modules: FxHashSet, + maybe_imported_modules: FxHashSet, /// Hashset of all [`FileScopeId`]s that correspond to [generator functions]. 
/// /// [generator functions]: https://docs.python.org/3/glossary.html#term-generator @@ -148,6 +151,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap::default(), expressions_by_node: FxHashMap::default(), + maybe_imported_modules: FxHashSet::default(), imported_modules: FxHashSet::default(), generator_functions: FxHashSet::default(), @@ -1262,6 +1266,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { self.scopes_by_node.shrink_to_fit(); self.generator_functions.shrink_to_fit(); self.enclosing_snapshots.shrink_to_fit(); + self.maybe_imported_modules.shrink_to_fit(); SemanticIndex { place_tables, @@ -1274,6 +1279,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { scopes_by_node: self.scopes_by_node, use_def_maps, imported_modules: Arc::new(self.imported_modules), + maybe_imported_modules: self.maybe_imported_modules, has_future_annotations: self.has_future_annotations, enclosing_snapshots: self.enclosing_snapshots, semantic_syntax_errors: self.semantic_syntax_errors.into_inner(), @@ -1558,6 +1564,15 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { (&alias.name.id, false) }; + // If there's no alias or a redundant alias, record this as a potential import of a submodule + if alias.asname.is_none() || is_reexported { + self.maybe_imported_modules.insert(MaybeModuleImport { + level: node.level, + from_module: node.module.clone().map(Into::into), + submodule: alias.name.clone().into(), + }); + } + // Look for imports `from __future__ import annotations`, ignore `as ...` // We intentionally don't enforce the rules about location of `__future__` // imports here, we assume the user's intent was to apply the `__future__` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 6b48499e9b..be3816ac12 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -39,7 +39,9 @@ use crate::place::{ use 
crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::semantic_index::place::ScopedPlaceId; use crate::semantic_index::scope::ScopeId; -use crate::semantic_index::{imported_modules, place_table, semantic_index}; +use crate::semantic_index::{ + imported_modules, imported_relative_submodules_of_stub_package, place_table, semantic_index, +}; use crate::suppression::check_suppressions; use crate::types::bound_super::BoundSuperType; use crate::types::call::{Binding, Bindings, CallArguments, CallableBinding}; @@ -10830,11 +10832,68 @@ impl<'db> ModuleLiteralType<'db> { self._importing_file(db) } + /// Get the submodule attributes we believe to be defined on this module. + /// + /// Note that `ModuleLiteralType` is per-importing-file, so this analysis + /// includes "imports the importing file has performed". + /// + /// + /// # Danger! Powerful Hammer! + /// + /// These results immediately make the attribute always defined in the importing file, + /// shadowing any other attribute in the module with the same name, even if the + /// non-submodule-attribute is in fact always the one defined in practice. + /// + /// Intuitively this means `available_submodule_attributes` "win all tie-breaks", + /// with the idea that if we're ever confused about complicated code then usually + /// the import is the thing people want in scope. + /// + /// However this "always defined, always shadows" rule if applied too aggressively + /// creates VERY confusing conclusions that break perfectly reasonable code. + /// + /// For instance, consider a package which has a `myfunc` submodule which defines a + /// `myfunc` function (a common idiom). If the package "re-exports" this function + /// (`from .myfunc import myfunc`), then at runtime in python + /// `from mypackage import myfunc` should import the function and not the submodule. 
+    ///
+    /// However, if we were to consider `from mypackage import myfunc` as introducing
+    /// the attribute `mypackage.myfunc` in `available_submodule_attributes`, we would
+    /// fail to ever resolve the function. This is because `available_submodule_attributes`
+    /// is *so early* and *so powerful* in our analysis that **this conclusion would be
+    /// used when actually resolving `from mypackage import myfunc`**!
+    ///
+    /// This currently cannot be fixed by considering the actual symbols defined in `mypackage`,
+    /// because `available_submodule_attributes` is an *input* to that analysis.
+    ///
+    /// We should therefore avoid marking something as an `available_submodule_attribute`
+    /// when the import could be importing a non-submodule (a function, class, or value).
+    ///
+    ///
+    /// # Rules
+    ///
+    /// We have two rules for whether a submodule attribute is defined:
+    ///
+    /// * If the importing file includes `import x.y` then `x.y` is defined in the importing file.
+    /// This is an easy rule to justify because `import` can only ever import a module, and so
+    /// *should* shadow any non-submodule of the same name.
+    ///
+    /// * If the module is an `__init__.pyi` for `mypackage`, and it contains a `from...import`
+    /// that normalizes to `from mypackage import submodule`, then `mypackage.submodule` is
+    /// defined in all files. This supports the `from . import submodule` idiom. Critically,
+    /// we do *not* allow `from mypackage.nested import submodule` to affect `mypackage`.
+    /// The idea here is that `from mypackage import submodule` *from mypackage itself* can
+    /// only ever reasonably be an import of a submodule. It doesn't make any sense to import
+    /// a function or class from yourself! (You *can* do it but... why? Don't? Please?)
fn available_submodule_attributes(&self, db: &'db dyn Db) -> impl Iterator { self.importing_file(db) .into_iter() .flat_map(|file| imported_modules(db, file)) .filter_map(|submodule_name| submodule_name.relative_to(self.module(db).name(db))) + .chain( + imported_relative_submodules_of_stub_package(db, self.module(db)) + .iter() + .cloned(), + ) .filter_map(|relative_submodule| relative_submodule.components().next().map(Name::from)) } From 3179b052215260e1c573b67d7d48afe20135c994 Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Fri, 31 Oct 2025 10:49:59 -0400 Subject: [PATCH 009/180] [ty] don't assume in diagnostic messages that a TypedDict key error is about subscript access (#21166) ## Summary Before this PR, we would emit diagnostics like "Invalid key access" for a TypedDict literal with invalid key, which doesn't make sense since there's no "access" in that case. This PR just adjusts the wording to be more general, and adjusts the documentation of the lint rule too. I noticed this in the playground and thought it would be a quick fix. As usual, it turned out to be a bit more subtle than I expected, but for now I chose to punt on the complexity. We may ultimately want to have different rules for invalid subscript vs invalid TypedDict literal, because an invalid key in a TypedDict literal is low severity: it's a typo detector, but not actually a type error. But then there's another wrinkle there: if the TypedDict is `closed=True`, then it _is_ a type error. So would we want to separate the open and closed cases into separate rules, too? I decided to leave this as a question for future. If we wanted to use separate rules, or use specific wording for each case instead of the generalized wording I chose here, that would also involve a bit of extra work to distinguish the cases, since we use a generic set of functions for reporting these errors. ## Test Plan Added and updated mdtests. 
--- crates/ty/docs/rules.md | 110 ++++++++++-------- ...ict`_-_Diagnostics_(e5289abf5c570c29).snap | 74 +++++++++--- .../resources/mdtest/typed_dict.md | 48 ++++---- .../src/types/diagnostic.rs | 18 ++- ty.schema.json | 4 +- 5 files changed, 155 insertions(+), 99 deletions(-) diff --git a/crates/ty/docs/rules.md b/crates/ty/docs/rules.md index 858f1f0c7c..4218eee1af 100644 --- a/crates/ty/docs/rules.md +++ b/crates/ty/docs/rules.md @@ -474,7 +474,7 @@ an atypical memory layout. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -501,7 +501,7 @@ func("foo") # error: [invalid-argument-type] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -529,7 +529,7 @@ a: int = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -563,7 +563,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -599,7 +599,7 @@ asyncio.run(main()) Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -623,7 +623,7 @@ class A(42): ... # error: [invalid-base] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -650,7 +650,7 @@ with 1: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -679,7 +679,7 @@ a: str Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -723,7 +723,7 @@ except ZeroDivisionError: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -762,11 +762,15 @@ Added in 0 **What it does** -Checks for subscript accesses with invalid keys. +Checks for subscript accesses with invalid keys and `TypedDict` construction with an +unknown key. **Why is this bad?** -Using an invalid key will raise a `KeyError` at runtime. 
+Subscripting with an invalid key will raise a `KeyError` at runtime. + +Creating a `TypedDict` with an unknown key is likely a mistake; if the `TypedDict` is +`closed=true` it also violates the expectations of the type. **Examples** @@ -779,6 +783,10 @@ class Person(TypedDict): alice = Person(name="Alice", age=30) alice["height"] # KeyError: 'height' + +bob: Person = { "name": "Bob", "age": 30 } # typo! + +carol = Person(name="Carol", age=25) # typo! ``` ## `invalid-legacy-type-variable` @@ -787,7 +795,7 @@ alice["height"] # KeyError: 'height' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -822,7 +830,7 @@ def f(t: TypeVar("U")): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -888,7 +896,7 @@ TypeError: can only inherit from a NamedTuple type and Generic Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -938,7 +946,7 @@ def foo(x: int) -> int: ... 
Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -998,7 +1006,7 @@ TypeError: Protocols can only inherit from other protocols, got Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1047,7 +1055,7 @@ def g(): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1072,7 +1080,7 @@ def func() -> int: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1130,7 +1138,7 @@ TODO #14889 Default level: error · Added in 0.0.1-alpha.6 · Related issues · -View source +View source @@ -1157,7 +1165,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1187,7 +1195,7 @@ TYPE_CHECKING = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1217,7 +1225,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1251,7 +1259,7 @@ f(10) # Error Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1285,7 +1293,7 @@ class C: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1320,7 +1328,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1345,7 +1353,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x' Default level: error · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1378,7 +1386,7 @@ alice["age"] # KeyError Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1407,7 +1415,7 @@ func("string") # error: [no-matching-overload] Default level: error · Added in 0.0.1-alpha.1 · 
Related issues · -View source +View source @@ -1431,7 +1439,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1457,7 +1465,7 @@ for i in 34: # TypeError: 'int' object is not iterable Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1484,7 +1492,7 @@ f(1, x=2) # Error raised here Default level: error · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -1542,7 +1550,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1572,7 +1580,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1601,7 +1609,7 @@ class B(A): ... # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1628,7 +1636,7 @@ f("foo") # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1656,7 +1664,7 @@ def _(x: int): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1702,7 +1710,7 @@ class A: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1729,7 +1737,7 @@ f(x=1, y=2) # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1757,7 +1765,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1782,7 +1790,7 @@ import foo # ModuleNotFoundError: No module named 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1807,7 +1815,7 @@ print(x) # NameError: name 'x' is not defined Default level: error · 
Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1844,7 +1852,7 @@ b1 < b2 < b1 # exception raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1872,7 +1880,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2026,7 +2034,7 @@ a = 20 / 0 # type: ignore Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2086,7 +2094,7 @@ A()[0] # TypeError: 'A' object is not subscriptable Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2118,7 +2126,7 @@ from module import a # ImportError: cannot import name 'a' from 'module' Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2145,7 +2153,7 @@ cast(int, f()) # Redundant Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2169,7 +2177,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined Default level: warn · Added in 0.0.1-alpha.15 · Related issues · -View source +View source @@ -2227,7 +2235,7 @@ def g(): Default level: warn · Added in 0.0.1-alpha.7 · Related issues · -View source +View source @@ -2266,7 +2274,7 @@ class D(C): ... # error: [unsupported-base] Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2353,7 +2361,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime. 
Default level: ignore · Added in 0.0.1-alpha.1 · Related issues · -View source +View source diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap index b80700fa08..155b4ea618 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap @@ -37,20 +37,24 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/typed_dict.md 23 | 24 | def write_to_non_literal_string_key(person: Person, str_key: str): 25 | person[str_key] = "Alice" # error: [invalid-key] -26 | from typing_extensions import ReadOnly -27 | -28 | class Employee(TypedDict): -29 | id: ReadOnly[int] -30 | name: str +26 | +27 | def create_with_invalid_string_key(): +28 | alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} # error: [invalid-key] +29 | bob = Person(name="Bob", age=25, unknown="Bar") # error: [invalid-key] +30 | from typing_extensions import ReadOnly 31 | -32 | def write_to_readonly_key(employee: Employee): -33 | employee["id"] = 42 # error: [invalid-assignment] +32 | class Employee(TypedDict): +33 | id: ReadOnly[int] +34 | name: str +35 | +36 | def write_to_readonly_key(employee: Employee): +37 | employee["id"] = 42 # error: [invalid-assignment] ``` # Diagnostics ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:8:5 | 7 | def access_invalid_literal_string_key(person: Person): @@ -66,7 +70,7 @@ info: rule `invalid-key` is enabled by default ``` ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:13:5 | 12 
| def access_invalid_key(person: Person): @@ -82,7 +86,7 @@ info: rule `invalid-key` is enabled by default ``` ``` -error[invalid-key]: TypedDict `Person` cannot be indexed with a key of type `str` +error[invalid-key]: Invalid key for TypedDict `Person` of type `str` --> src/mdtest_snippet.py:16:12 | 15 | def access_with_str_key(person: Person, str_key: str): @@ -123,7 +127,7 @@ info: rule `invalid-assignment` is enabled by default ``` ``` -error[invalid-key]: Invalid key access on TypedDict `Person` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:22:5 | 21 | def write_to_non_existing_key(person: Person): @@ -145,7 +149,39 @@ error[invalid-key]: Cannot access `Person` with a key of type `str`. Only string 24 | def write_to_non_literal_string_key(person: Person, str_key: str): 25 | person[str_key] = "Alice" # error: [invalid-key] | ^^^^^^^ -26 | from typing_extensions import ReadOnly +26 | +27 | def create_with_invalid_string_key(): + | +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:28:21 + | +27 | def create_with_invalid_string_key(): +28 | alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} # error: [invalid-key] + | -----------------------------^^^^^^^^^-------- + | | | + | | Unknown key "unknown" + | TypedDict `Person` +29 | bob = Person(name="Bob", age=25, unknown="Bar") # error: [invalid-key] +30 | from typing_extensions import ReadOnly + | +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:29:11 + | +27 | def create_with_invalid_string_key(): +28 | alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} # error: [invalid-key] +29 | bob = Person(name="Bob", age=25, unknown="Bar") # error: [invalid-key] + | ------ TypedDict `Person` ^^^^^^^^^^^^^ Unknown key "unknown" +30 | from typing_extensions import ReadOnly | info: 
rule `invalid-key` is enabled by default @@ -153,21 +189,21 @@ info: rule `invalid-key` is enabled by default ``` error[invalid-assignment]: Cannot assign to key "id" on TypedDict `Employee` - --> src/mdtest_snippet.py:33:5 + --> src/mdtest_snippet.py:37:5 | -32 | def write_to_readonly_key(employee: Employee): -33 | employee["id"] = 42 # error: [invalid-assignment] +36 | def write_to_readonly_key(employee: Employee): +37 | employee["id"] = 42 # error: [invalid-assignment] | -------- ^^^^ key is marked read-only | | | TypedDict `Employee` | info: Item declaration - --> src/mdtest_snippet.py:29:5 + --> src/mdtest_snippet.py:33:5 | -28 | class Employee(TypedDict): -29 | id: ReadOnly[int] +32 | class Employee(TypedDict): +33 | id: ReadOnly[int] | ----------------- Read-only item declared here -30 | name: str +34 | name: str | info: rule `invalid-assignment` is enabled by default diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index d810a79efe..042d6317a2 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -29,7 +29,7 @@ alice: Person = {"name": "Alice", "age": 30} reveal_type(alice["name"]) # revealed: str reveal_type(alice["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(alice["non_existing"]) # revealed: Unknown ``` @@ -41,7 +41,7 @@ bob = Person(name="Bob", age=25) reveal_type(bob["name"]) # revealed: str reveal_type(bob["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(bob["non_existing"]) # revealed: Unknown ``` @@ -69,7 +69,7 @@ def name_or_age() -> 
Literal["name", "age"]: carol: Person = {NAME: "Carol", AGE: 20} reveal_type(carol[NAME]) # revealed: str -# error: [invalid-key] "TypedDict `Person` cannot be indexed with a key of type `str`" +# error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`" reveal_type(carol[non_literal()]) # revealed: Unknown reveal_type(carol[name_or_age()]) # revealed: str | int | None @@ -81,7 +81,7 @@ def _(): CAPITALIZED_NAME = "Name" -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "Name" - did you mean "name"?" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "Name" - did you mean "name"?" # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" dave: Person = {CAPITALIZED_NAME: "Dave", "age": 20} @@ -104,9 +104,9 @@ eve2a: Person = {"age": 22} # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2b = Person(age=22) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3a: Person = {"name": "Eve", "age": 25, "extra": True} -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3b = Person(name="Eve", age=25, extra=True) ``` @@ -157,10 +157,10 @@ bob["name"] = None Assignments to non-existing keys are disallowed: ```py -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" alice["extra"] = True -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" bob["extra"] = True ``` @@ -185,10 +185,10 @@ alice: Person = {"inner": {"name": "Alice", "age": 30}} 
reveal_type(alice["inner"]["name"]) # revealed: str reveal_type(alice["inner"]["age"]) # revealed: int | None -# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "non_existing"" +# error: [invalid-key] "Invalid key for TypedDict `Inner`: Unknown key "non_existing"" reveal_type(alice["inner"]["non_existing"]) # revealed: Unknown -# error: [invalid-key] "Invalid key access on TypedDict `Inner`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Inner`: Unknown key "extra"" alice: Person = {"inner": {"name": "Alice", "age": 30, "extra": 1}} ``` @@ -267,22 +267,22 @@ a_person = {"name": None, "age": 30} All of these have an extra field that is not defined in the `TypedDict`: ```py -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" alice4: Person = {"name": "Alice", "age": 30, "extra": True} -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" Person(name="Alice", age=30, extra=True) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" Person({"name": "Alice", "age": 30, "extra": True}) -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" accepts_person({"name": "Alice", "age": 30, "extra": True}) # TODO: this should be an error house.owner = {"name": "Alice", "age": 30, "extra": True} a_person: Person -# error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" a_person = {"name": "Alice", "age": 30, "extra": True} -# error: [invalid-key] "Invalid key 
access on TypedDict `Person`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" (a_person := {"name": "Alice", "age": 30, "extra": True}) ``` @@ -323,7 +323,7 @@ user2 = User({"name": "Bob"}) # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `User`: value of type `None`" user3 = User({"name": None, "age": 25}) -# error: [invalid-key] "Invalid key access on TypedDict `User`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `User`: Unknown key "extra"" user4 = User({"name": "Charlie", "age": 30, "extra": True}) ``` @@ -360,7 +360,7 @@ invalid = OptionalPerson(name=123) Extra fields are still not allowed, even with `total=False`: ```py -# error: [invalid-key] "Invalid key access on TypedDict `OptionalPerson`: Unknown key "extra"" +# error: [invalid-key] "Invalid key for TypedDict `OptionalPerson`: Unknown key "extra"" invalid_extra = OptionalPerson(name="George", extra=True) ``` @@ -503,10 +503,10 @@ def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", reveal_type(person[union_of_keys]) # revealed: int | None | str - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "non_existing"" + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(person["non_existing"]) # revealed: Unknown - # error: [invalid-key] "TypedDict `Person` cannot be indexed with a key of type `str`" + # error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`" reveal_type(person[str_key]) # revealed: Unknown # No error here: @@ -530,7 +530,7 @@ def _(person: Person): person["name"] = "Alice" person["age"] = 30 - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "naem" - did you mean "name"?" + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "naem" - did you mean "name"?" 
person["naem"] = "Alice" def _(person: Person): @@ -646,7 +646,7 @@ def _(p: Person) -> None: reveal_type(p.setdefault("name", "Alice")) # revealed: str reveal_type(p.setdefault("extra", "default")) # revealed: str - # error: [invalid-key] "Invalid key access on TypedDict `Person`: Unknown key "extraz" - did you mean "extra"?" + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extraz" - did you mean "extra"?" reveal_type(p.setdefault("extraz", "value")) # revealed: Unknown ``` @@ -1015,6 +1015,10 @@ def write_to_non_existing_key(person: Person): def write_to_non_literal_string_key(person: Person, str_key: str): person[str_key] = "Alice" # error: [invalid-key] + +def create_with_invalid_string_key(): + alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"} # error: [invalid-key] + bob = Person(name="Bob", age=25, unknown="Bar") # error: [invalid-key] ``` Assignment to `ReadOnly` keys: diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs index 7db83b9b88..2dd75e57aa 100644 --- a/crates/ty_python_semantic/src/types/diagnostic.rs +++ b/crates/ty_python_semantic/src/types/diagnostic.rs @@ -572,10 +572,14 @@ declare_lint! { // Added in #19763. declare_lint! { /// ## What it does - /// Checks for subscript accesses with invalid keys. + /// Checks for subscript accesses with invalid keys and `TypedDict` construction with an + /// unknown key. /// /// ## Why is this bad? - /// Using an invalid key will raise a `KeyError` at runtime. + /// Subscripting with an invalid key will raise a `KeyError` at runtime. + /// + /// Creating a `TypedDict` with an unknown key is likely a mistake; if the `TypedDict` is + /// `closed=true` it also violates the expectations of the type. /// /// ## Examples /// ```python @@ -587,9 +591,13 @@ declare_lint! 
{ /// /// alice = Person(name="Alice", age=30) /// alice["height"] # KeyError: 'height' + /// + /// bob: Person = { "name": "Bob", "age": 30 } # typo! + /// + /// carol = Person(name="Carol", age=25) # typo! /// ``` pub(crate) static INVALID_KEY = { - summary: "detects invalid subscript accesses", + summary: "detects invalid subscript accesses or TypedDict literal keys", status: LintStatus::stable("0.0.1-alpha.17"), default_level: Level::Error, } @@ -2966,7 +2974,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( let typed_dict_name = typed_dict_ty.display(db); let mut diagnostic = builder.into_diagnostic(format_args!( - "Invalid key access on TypedDict `{typed_dict_name}`", + "Invalid key for TypedDict `{typed_dict_name}`", )); diagnostic.annotate( @@ -2989,7 +2997,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( diagnostic } _ => builder.into_diagnostic(format_args!( - "TypedDict `{}` cannot be indexed with a key of type `{}`", + "Invalid key for TypedDict `{}` of type `{}`", typed_dict_ty.display(db), key_ty.display(db), )), diff --git a/ty.schema.json b/ty.schema.json index 270241fb28..55d5bdf996 100644 --- a/ty.schema.json +++ b/ty.schema.json @@ -584,8 +584,8 @@ ] }, "invalid-key": { - "title": "detects invalid subscript accesses", - "description": "## What it does\nChecks for subscript accesses with invalid keys.\n\n## Why is this bad?\nUsing an invalid key will raise a `KeyError` at runtime.\n\n## Examples\n```python\nfrom typing import TypedDict\n\nclass Person(TypedDict):\n name: str\n age: int\n\nalice = Person(name=\"Alice\", age=30)\nalice[\"height\"] # KeyError: 'height'\n```", + "title": "detects invalid subscript accesses or TypedDict literal keys", + "description": "## What it does\nChecks for subscript accesses with invalid keys and `TypedDict` construction with an\nunknown key.\n\n## Why is this bad?\nSubscripting with an invalid key will raise a `KeyError` at runtime.\n\nCreating a `TypedDict` with an unknown key is likely a 
mistake; if the `TypedDict` is\n`closed=true` it also violates the expectations of the type.\n\n## Examples\n```python\nfrom typing import TypedDict\n\nclass Person(TypedDict):\n name: str\n age: int\n\nalice = Person(name=\"Alice\", age=30)\nalice[\"height\"] # KeyError: 'height'\n\nbob: Person = { \"name\": \"Bob\", \"age\": 30 } # typo!\n\ncarol = Person(name=\"Carol\", age=25) # typo!\n```", "default": "error", "oneOf": [ { From 1baf98aab3e07355c62390668865c54be6258f5a Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Fri, 31 Oct 2025 10:50:54 -0400 Subject: [PATCH 010/180] [ty] Fix `is_disjoint_from` with `@final` classes (#21167) ## Summary We currently perform a subtyping check instead of the intended subclass check (and the subtyping check is confusingly named `is_subclass_of`). This showed up in https://github.com/astral-sh/ruff/pull/21070. --- .../type_properties/is_disjoint_from.md | 25 +++++++++++++++++++ crates/ty_python_semantic/src/types/class.rs | 11 +++++--- 2 files changed, 33 insertions(+), 3 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md index d80a2b5b82..dfad076726 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md @@ -87,6 +87,31 @@ static_assert(is_disjoint_from(memoryview, Foo)) static_assert(is_disjoint_from(type[memoryview], type[Foo])) ``` +## Specialized `@final` types + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import final +from ty_extensions import static_assert, is_disjoint_from + +@final +class Foo[T]: + def get(self) -> T: + raise NotImplementedError + +class A: ... +class B: ... + +static_assert(not is_disjoint_from(Foo[A], Foo[B])) + +# TODO: `int` and `str` are disjoint bases, so these should be disjoint. 
+static_assert(not is_disjoint_from(Foo[int], Foo[str])) +``` + ## "Disjoint base" builtin types Most other builtins can be subclassed and can even be used in multiple inheritance. However, builtin diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 4f8ee4c1fc..c3ff51e47f 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -637,12 +637,17 @@ impl<'db> ClassType<'db> { return true; } - // Optimisation: if either class is `@final`, we only need to do one `is_subclass_of` call. if self.is_final(db) { - return self.is_subclass_of(db, other); + return self + .iter_mro(db) + .filter_map(ClassBase::into_class) + .any(|class| class.class_literal(db).0 == other.class_literal(db).0); } if other.is_final(db) { - return other.is_subclass_of(db, self); + return other + .iter_mro(db) + .filter_map(ClassBase::into_class) + .any(|class| class.class_literal(db).0 == self.class_literal(db).0); } // Two disjoint bases can only coexist in an MRO if one is a subclass of the other. From cf4e82d4b0ea4087b91ef3aade1159127689ca85 Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Fri, 31 Oct 2025 10:53:37 -0400 Subject: [PATCH 011/180] [ty] Add and test when constraint sets are satisfied by their typevars (#21129) This PR adds a new `satisfied_by_all_typevar` method, which implements one of the final steps of actually using these dang constraint sets. Constraint sets exist to help us check assignability and subtyping of types in the presence of typevars. We construct a constraint set describing the conditions under which assignability holds between the two types. Then we check whether that constraint set is satisfied for the valid specializations of the relevant typevars (which is this new method). 
We also add a new `ty_extensions.ConstraintSet` method so that we can test this method's behavior in mdtests, before hooking it up to the rest of the specialization inference machinery. --- .../satisfied_by_all_typevars.md | 220 ++++++++++++++++++ crates/ty_python_semantic/src/types.rs | 57 ++++- .../ty_python_semantic/src/types/call/bind.rs | 44 +++- .../src/types/constraints.rs | 107 ++++++++- .../ty_python_semantic/src/types/display.rs | 3 + .../ty_extensions/ty_extensions.pyi | 10 + 6 files changed, 425 insertions(+), 16 deletions(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md new file mode 100644 index 0000000000..8d9f563250 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md @@ -0,0 +1,220 @@ +# Constraint set satisfaction + +```toml +[environment] +python-version = "3.12" +``` + +Constraint sets exist to help us check assignability and subtyping of types in the presence of +typevars. We construct a constraint set describing the conditions under which assignability holds +between the two types. Then we check whether that constraint set is satisfied for the valid +specializations of the relevant typevars. This file tests that final step. + +## Inferable vs non-inferable typevars + +Typevars can appear in _inferable_ or _non-inferable_ positions. + +When a typevar is in an inferable position, the constraint set only needs to be satisfied for _some_ +valid specialization. The most common inferable position occurs when invoking a generic function: +all of the function's typevars are inferable, because we want to use the argument types to infer +which specialization is being invoked. 
+ +When a typevar is in a non-inferable position, the constraint set must be satisfied for _every_ +valid specialization. The most common non-inferable position occurs in the body of a generic +function or class: here we don't know in advance what type the typevar will be specialized to, and +so we have to ensure that the body is valid for all possible specializations. + +```py +def f[T](t: T) -> T: + # In the function body, T is non-inferable. All assignability checks involving T must be + # satisfied for _all_ valid specializations of T. + return t + +# When invoking the function, T is inferable — we attempt to infer a specialization that is valid +# for the particular arguments that are passed to the function. Assignability checks (in particular, +# that the argument type is assignable to the parameter type) only need to succeed for _at least +# one_ specialization. +f(1) +``` + +In all of the examples below, for ease of reproducibility, we explicitly list the typevars that are +inferable in each `satisfied_by_all_typevars` call; any typevar not listed is assumed to be +non-inferable. + +## Unbounded typevar + +If a typevar has no bound or constraints, then it can specialize to any type. In an inferable +position, that means we just need a single type (any type at all!) that satisfies the constraint +set. In a non-inferable position, that means the constraint set must be satisfied for every possible +type. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... 
+ +def unbounded[T](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Never) is a valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Super). + static_assert(not ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Base). + static_assert(not ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # (T = Sub) is a valid specialization, which satisfies (T ≤ Sub). + static_assert(ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) +``` + +## Typevar with an upper bound + +If a typevar has an upper bound, then it must specialize to a type that is a subtype of that bound. +For an inferable typevar, that means we need a single type that satisfies both the constraint set +and the upper bound. 
For a non-inferable typevar, that means the constraint set must be satisfied +for every type that satisfies the upper bound. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... + +def bounded[T: Base](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # Every valid specialization satisfies (T ≤ Base). Since (Base ≤ Super), every valid + # specialization also satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # Every valid specialization satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # (T = Sub) is a valid specialization, which satisfies (T ≤ Sub). + static_assert(ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) + + # (T = Never) is a valid specialization, which satisfies (T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). 
+ static_assert(not constraints.satisfied_by_all_typevars()) + + # Never is the only type that satisfies both (T ≤ Base) and (T ≤ Unrelated). So there is no + # valid specialization that satisfies (T ≤ Unrelated ∧ T ≠ Never). + constraints = constraints & ~ConstraintSet.range(Never, T, Never) + static_assert(not constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not constraints.satisfied_by_all_typevars()) +``` + +## Constrained typevar + +If a typevar has constraints, then it must specialize to one of those specific types. (Not to a +subtype of one of those types!) For an inferable typevar, that means we need the constraint set to +be satisfied by any one of the constraints. For a non-inferable typevar, that means we need the +constraint set to be satisfied by all of those constraints. + +```py +from typing import final, Never +from ty_extensions import ConstraintSet, static_assert + +class Super: ... +class Base(Super): ... +class Sub(Base): ... + +@final +class Unrelated: ... + +def constrained[T: (Base, Unrelated)](): + static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(ConstraintSet.always().satisfied_by_all_typevars()) + + static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.never().satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T ≤ Unrelated). + static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Unrelated). + static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Super). + static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Super). 
+ static_assert(not ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars()) + + # (T = Base) is a valid specialization, which satisfies (T ≤ Base). + static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Unrelated) is a valid specialization, which does not satisfy (T ≤ Base). + static_assert(not ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars()) + + # Neither (T = Base) nor (T = Unrelated) satisfy (T ≤ Sub). + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(not ConstraintSet.range(Never, T, Sub).satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T ≤ Super ∨ T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Super) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T ≤ Base ∨ T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Base) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T ≤ Sub ∨ T ≤ Unrelated). + constraints = ConstraintSet.range(Never, T, Sub) | ConstraintSet.range(Never, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T ≤ Sub ∨ T ≤ Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T = Super ∨ T = Unrelated). 
+ constraints = ConstraintSet.range(Super, T, Super) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T = Super ∨ T = Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) + + # (T = Base) and (T = Unrelated) both satisfy (T = Base ∨ T = Unrelated). + constraints = ConstraintSet.range(Base, T, Base) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + static_assert(constraints.satisfied_by_all_typevars()) + + # (T = Unrelated) is a valid specialization, which satisfies (T = Sub ∨ T = Unrelated). + constraints = ConstraintSet.range(Sub, T, Sub) | ConstraintSet.range(Unrelated, T, Unrelated) + static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T])) + # (T = Base) is a valid specialization, which does not satisfy (T = Sub ∨ T = Unrelated). + static_assert(not constraints.satisfied_by_all_typevars()) +``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index be3816ac12..a4eb563e6a 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -4161,6 +4161,14 @@ impl<'db> Type<'db> { )) .into() } + Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked)) + if name == "satisfied_by_all_typevars" => + { + Place::bound(Type::KnownBoundMethod( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(tracked), + )) + .into() + } Type::ClassLiteral(class) if name == "__get__" && class.is_known(db, KnownClass::FunctionType) => @@ -6923,6 +6931,7 @@ impl<'db> Type<'db> { | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) ) | Type::DataclassDecorator(_) | Type::DataclassTransformer(_) @@ 
-7074,7 +7083,8 @@ impl<'db> Type<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) | Type::DataclassDecorator(_) | Type::DataclassTransformer(_) @@ -10339,6 +10349,7 @@ pub enum KnownBoundMethodType<'db> { ConstraintSetAlways, ConstraintSetNever, ConstraintSetImpliesSubtypeOf(TrackedConstraintSet<'db>), + ConstraintSetSatisfiedByAllTypeVars(TrackedConstraintSet<'db>), } pub(super) fn walk_method_wrapper_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -10366,7 +10377,8 @@ pub(super) fn walk_method_wrapper_type<'db, V: visitor::TypeVisitor<'db> + ?Size | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => {} + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => {} } } @@ -10434,6 +10446,10 @@ impl<'db> KnownBoundMethodType<'db> { | ( KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(true), ( @@ -10446,7 +10462,8 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), KnownBoundMethodType::FunctionTypeDunderGet(_) | KnownBoundMethodType::FunctionTypeDunderCall(_) | 
KnownBoundMethodType::PropertyDunderGet(_) @@ -10456,7 +10473,8 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(false), } } @@ -10509,6 +10527,10 @@ impl<'db> KnownBoundMethodType<'db> { ( KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(left_constraints), KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(right_constraints), + ) + | ( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(left_constraints), + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(right_constraints), ) => left_constraints .constraints(db) .iff(db, right_constraints.constraints(db)), @@ -10523,7 +10545,8 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), KnownBoundMethodType::FunctionTypeDunderGet(_) | KnownBoundMethodType::FunctionTypeDunderCall(_) | KnownBoundMethodType::PropertyDunderGet(_) @@ -10533,7 +10556,8 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_), + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_), ) => ConstraintSet::from(false), } } @@ -10557,7 +10581,8 @@ impl<'db> KnownBoundMethodType<'db> { | KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways 
| KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => self, + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => self, } } @@ -10573,7 +10598,10 @@ impl<'db> KnownBoundMethodType<'db> { KnownBoundMethodType::ConstraintSetRange | KnownBoundMethodType::ConstraintSetAlways | KnownBoundMethodType::ConstraintSetNever - | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) => KnownClass::ConstraintSet, + | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_) + | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => { + KnownClass::ConstraintSet + } } } @@ -10712,6 +10740,19 @@ impl<'db> KnownBoundMethodType<'db> { Some(KnownClass::ConstraintSet.to_instance(db)), ))) } + + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => { + Either::Right(std::iter::once(Signature::new( + Parameters::new([Parameter::keyword_only(Name::new_static("inferable")) + .type_form() + .with_annotated_type(UnionType::from_elements( + db, + [Type::homogeneous_tuple(db, Type::any()), Type::none(db)], + )) + .with_default_type(Type::none(db))]), + Some(KnownClass::Bool.to_instance(db)), + ))) + } } } } diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 1b4629b301..b0a5cc1b91 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -9,6 +9,7 @@ use std::fmt; use itertools::{Either, Itertools}; use ruff_db::parsed::parsed_module; use ruff_python_ast::name::Name; +use rustc_hash::FxHashSet; use smallvec::{SmallVec, smallvec, smallvec_inline}; use super::{Argument, CallArguments, CallError, CallErrorKind, InferContext, Signature, Type}; @@ -35,9 +36,10 @@ use crate::types::signatures::{Parameter, ParameterForm, ParameterKind, Paramete use crate::types::tuple::{TupleLength, TupleType}; use crate::types::{ BoundMethodType, 
ClassLiteral, DataclassFlags, DataclassParams, FieldInstance, - KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, PropertyInstanceType, - SpecialFormType, TrackedConstraintSet, TypeAliasType, TypeContext, UnionBuilder, UnionType, - WrapperDescriptorKind, enums, ide_support, infer_isolated_expression, todo_type, + KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, NominalInstanceType, + PropertyInstanceType, SpecialFormType, TrackedConstraintSet, TypeAliasType, TypeContext, + UnionBuilder, UnionType, WrapperDescriptorKind, enums, ide_support, infer_isolated_expression, + todo_type, }; use ruff_db::diagnostic::{Annotation, Diagnostic, SubDiagnostic, SubDiagnosticSeverity}; use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; @@ -1174,6 +1176,42 @@ impl<'db> Bindings<'db> { )); } + Type::KnownBoundMethod( + KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(tracked), + ) => { + let extract_inferable = |instance: &NominalInstanceType<'db>| { + if instance.has_known_class(db, KnownClass::NoneType) { + // Caller explicitly passed None, so no typevars are inferable. + return Some(FxHashSet::default()); + } + instance + .tuple_spec(db)? + .fixed_elements() + .map(|ty| { + ty.as_typevar() + .map(|bound_typevar| bound_typevar.identity(db)) + }) + .collect() + }; + + let inferable = match overload.parameter_types() { + // Caller did not provide argument, so no typevars are inferable. 
+ [None] => FxHashSet::default(), + [Some(Type::NominalInstance(instance))] => { + match extract_inferable(instance) { + Some(inferable) => inferable, + None => continue, + } + } + _ => continue, + }; + + let result = tracked + .constraints(db) + .satisfied_by_all_typevars(db, InferableTypeVars::One(&inferable)); + overload.set_return_type(Type::BooleanLiteral(result)); + } + Type::ClassLiteral(class) => match class.known(db) { Some(KnownClass::Bool) => match overload.parameter_types() { [Some(arg)] => overload.set_return_type(arg.bool(db).into_type(db)), diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index ef7632ff2e..ee66cd85f3 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ -65,7 +65,10 @@ use salsa::plumbing::AsId; use crate::Db; use crate::types::generics::InferableTypeVars; -use crate::types::{BoundTypeVarInstance, IntersectionType, Type, TypeRelation, UnionType}; +use crate::types::{ + BoundTypeVarInstance, IntersectionType, Type, TypeRelation, TypeVarBoundOrConstraints, + UnionType, +}; /// An extension trait for building constraint sets from [`Option`] values. pub(crate) trait OptionConstraintsExtension { @@ -256,6 +259,28 @@ impl<'db> ConstraintSet<'db> { } } + /// Returns whether this constraint set is satisfied by all of the typevars that it mentions. + /// + /// Each typevar has a set of _valid specializations_, which is defined by any upper bound or + /// constraints that the typevar has. + /// + /// Each typevar is also either _inferable_ or _non-inferable_. (You provide a list of the + /// `inferable` typevars; all others are considered non-inferable.) For an inferable typevar, + /// then there must be _some_ valid specialization that satisfies the constraint set. For a + /// non-inferable typevar, then _all_ valid specializations must satisfy it. 
+ /// + /// Note that we don't have to consider typevars that aren't mentioned in the constraint set, + /// since the constraint set cannot be affected by any typevars that it does not mention. That + /// means that those additional typevars trivially satisfy the constraint set, regardless of + /// whether they are inferable or not. + pub(crate) fn satisfied_by_all_typevars( + self, + db: &'db dyn Db, + inferable: InferableTypeVars<'_, 'db>, + ) -> bool { + self.node.satisfied_by_all_typevars(db, inferable) + } + /// Updates this constraint set to hold the union of itself and another constraint set. pub(crate) fn union(&mut self, db: &'db dyn Db, other: Self) -> Self { self.node = self.node.or(db, other.node); @@ -746,6 +771,13 @@ impl<'db> Node<'db> { .or(db, self.negate(db).and(db, else_node)) } + fn satisfies(self, db: &'db dyn Db, other: Self) -> Self { + let simplified_self = self.simplify(db); + let implication = simplified_self.implies(db, other); + let (simplified, domain) = implication.simplify_and_domain(db); + simplified.and(db, domain) + } + fn when_subtype_of_given( self, db: &'db dyn Db, @@ -767,10 +799,48 @@ impl<'db> Node<'db> { _ => return lhs.when_subtype_of(db, rhs, inferable).node, }; - let simplified_self = self.simplify(db); - let implication = simplified_self.implies(db, constraint); - let (simplified, domain) = implication.simplify_and_domain(db); - simplified.and(db, domain) + self.satisfies(db, constraint) + } + + fn satisfied_by_all_typevars( + self, + db: &'db dyn Db, + inferable: InferableTypeVars<'_, 'db>, + ) -> bool { + match self { + Node::AlwaysTrue => return true, + Node::AlwaysFalse => return false, + Node::Interior(_) => {} + } + + let mut typevars = FxHashSet::default(); + self.for_each_constraint(db, &mut |constraint| { + typevars.insert(constraint.typevar(db)); + }); + + for typevar in typevars { + // Determine which valid specializations of this typevar satisfy the constraint set. 
+ let valid_specializations = typevar.valid_specializations(db).node; + let when_satisfied = valid_specializations + .satisfies(db, self) + .and(db, valid_specializations); + let satisfied = if typevar.is_inferable(db, inferable) { + // If the typevar is inferable, then we only need one valid specialization to + // satisfy the constraint set. + !when_satisfied.is_never_satisfied() + } else { + // If the typevar is non-inferable, then we need _all_ valid specializations to + // satisfy the constraint set. + when_satisfied + .iff(db, valid_specializations) + .is_always_satisfied(db) + }; + if !satisfied { + return false; + } + } + + true } /// Returns a new BDD that returns the same results as `self`, but with some inputs fixed to @@ -1861,6 +1931,33 @@ impl<'db> SatisfiedClauses<'db> { } } +/// Returns a constraint set describing the valid specializations of a typevar. +impl<'db> BoundTypeVarInstance<'db> { + pub(crate) fn valid_specializations(self, db: &'db dyn Db) -> ConstraintSet<'db> { + match self.typevar(db).bound_or_constraints(db) { + None => ConstraintSet::from(true), + Some(TypeVarBoundOrConstraints::UpperBound(bound)) => ConstraintSet::constrain_typevar( + db, + self, + Type::Never, + bound, + TypeRelation::Assignability, + ), + Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + constraints.elements(db).iter().when_any(db, |constraint| { + ConstraintSet::constrain_typevar( + db, + self, + *constraint, + *constraint, + TypeRelation::Assignability, + ) + }) + } + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/ty_python_semantic/src/types/display.rs b/crates/ty_python_semantic/src/types/display.rs index 7748dd3ab5..8500c142e8 100644 --- a/crates/ty_python_semantic/src/types/display.rs +++ b/crates/ty_python_semantic/src/types/display.rs @@ -535,6 +535,9 @@ impl Display for DisplayRepresentation<'_> { Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)) => { f.write_str("bound method 
`ConstraintSet.implies_subtype_of`") } + Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars( + _, + )) => f.write_str("bound method `ConstraintSet.satisfied_by_all_typevars`"), Type::WrapperDescriptor(kind) => { let (method, object) = match kind { WrapperDescriptorKind::FunctionTypeDunderGet => ("__get__", "function"), diff --git a/crates/ty_vendored/ty_extensions/ty_extensions.pyi b/crates/ty_vendored/ty_extensions/ty_extensions.pyi index 79cda64bef..d23554f0ae 100644 --- a/crates/ty_vendored/ty_extensions/ty_extensions.pyi +++ b/crates/ty_vendored/ty_extensions/ty_extensions.pyi @@ -67,6 +67,16 @@ class ConstraintSet: .. _subtype: https://typing.python.org/en/latest/spec/concepts.html#subtype-supertype-and-type-equivalence """ + def satisfied_by_all_typevars( + self, *, inferable: tuple[Any, ...] | None = None + ) -> bool: + """ + Returns whether this constraint set is satisfied by all of the typevars + that it mentions. You must provide a tuple of the typevars that should + be considered `inferable`. All other typevars mentioned in the + constraint set will be considered non-inferable. + """ + def __bool__(self) -> bool: ... def __eq__(self, other: ConstraintSet) -> bool: ... def __ne__(self, other: ConstraintSet) -> bool: ... From 1d6ae8596a0acd2d84582a2c1cb29db4e89d505f Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Fri, 31 Oct 2025 10:58:09 -0400 Subject: [PATCH 012/180] [ty] Prefer exact matches when solving constrained type variables (#21165) ## Summary The solver is currently order-dependent, and will choose a supertype over the exact type if it appears earlier in the list of constraints. We could be smarter and try to choose the most precise subtype, but I imagine this is something the new constraint solver will fix anyways, and this fixes the issue showing up on https://github.com/astral-sh/ruff/pull/21070. 
--- .../mdtest/generics/legacy/functions.md | 25 +++++++++++++++++++ .../ty_python_semantic/src/types/generics.rs | 8 ++++++ 2 files changed, 33 insertions(+) diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md index 9745fdca21..2bbe85b5ec 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md @@ -545,3 +545,28 @@ def f(x: T, y: Not[T]) -> T: y = x # error: [invalid-assignment] return x ``` + +## Prefer exact matches for constrained typevars + +```py +from typing import TypeVar + +class Base: ... +class Sub(Base): ... + +# We solve to `Sub`, regardless of the order of constraints. +T = TypeVar("T", Base, Sub) +T2 = TypeVar("T2", Sub, Base) + +def f(x: T) -> list[T]: + return [x] + +def f2(x: T2) -> list[T2]: + return [x] + +x: list[Sub] = f(Sub()) +reveal_type(x) # revealed: list[Sub] + +y: list[Sub] = f2(Sub()) +reveal_type(y) # revealed: list[Sub] +``` diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 59216ca607..8485931ff2 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -1483,6 +1483,14 @@ impl<'db> SpecializationBuilder<'db> { self.add_type_mapping(bound_typevar, ty); } Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + // Prefer an exact match first. 
+ for constraint in constraints.elements(self.db) { + if ty == *constraint { + self.add_type_mapping(bound_typevar, ty); + return Ok(()); + } + } + for constraint in constraints.elements(self.db) { if ty .when_assignable_to(self.db, *constraint, self.inferable) From 0c2cf7586903040436237b03aebc5bc9f0c62735 Mon Sep 17 00:00:00 2001 From: David Peter Date: Fri, 31 Oct 2025 16:00:30 +0100 Subject: [PATCH 013/180] [ty] Do not promote literals in contravariant position (#21164) ## Summary closes https://github.com/astral-sh/ty/issues/1463 ## Test Plan Regression tests --- .../resources/mdtest/literal_promotion.md | 32 ++++++++++ crates/ty_python_semantic/src/types.rs | 60 ++++++++++++++----- .../src/types/signatures.rs | 15 ++--- 3 files changed, 84 insertions(+), 23 deletions(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/literal_promotion.md diff --git a/crates/ty_python_semantic/resources/mdtest/literal_promotion.md b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md new file mode 100644 index 0000000000..726ca59d20 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md @@ -0,0 +1,32 @@ +# Literal promotion + +There are certain places where we promote literals to their common supertype: + +```py +reveal_type([1, 2, 3]) # revealed: list[Unknown | int] +reveal_type({"a", "b", "c"}) # revealed: set[Unknown | str] +``` + +This promotion should not take place if the literal type appears in contravariant position: + +```py +from typing import Callable, Literal + +def in_negated_position(non_zero_number: int): + if non_zero_number == 0: + raise ValueError() + + reveal_type(non_zero_number) # revealed: int & ~Literal[0] + + reveal_type([non_zero_number]) # revealed: list[Unknown | (int & ~Literal[0])] + +def in_parameter_position(callback: Callable[[Literal[1]], None]): + reveal_type(callback) # revealed: (Literal[1], /) -> None + + reveal_type([callback]) # revealed: list[Unknown | ((Literal[1], /) -> None)] + 
+def double_negation(callback: Callable[[Callable[[Literal[1]], None]], None]): + reveal_type(callback) # revealed: ((Literal[1], /) -> None, /) -> None + + reveal_type([callback]) # revealed: list[Unknown | (((int, /) -> None, /) -> None)] +``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index a4eb563e6a..be2fb264d8 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1270,7 +1270,11 @@ impl<'db> Type<'db> { /// /// It also avoids literal promotion if a literal type annotation was provided as type context. pub(crate) fn promote_literals(self, db: &'db dyn Db, tcx: TypeContext<'db>) -> Type<'db> { - self.apply_type_mapping(db, &TypeMapping::PromoteLiterals, tcx) + self.apply_type_mapping( + db, + &TypeMapping::PromoteLiterals(PromoteLiteralsMode::On), + tcx, + ) } /// Like [`Type::promote_literals`], but does not recurse into nested types. @@ -6765,7 +6769,7 @@ impl<'db> Type<'db> { self } } - TypeMapping::PromoteLiterals + TypeMapping::PromoteLiterals(_) | TypeMapping::ReplaceParameterDefaults | TypeMapping::BindLegacyTypevars(_) => self, TypeMapping::Materialize(materialization_kind) => { @@ -6779,7 +6783,7 @@ impl<'db> Type<'db> { } TypeMapping::Specialization(_) | TypeMapping::PartialSpecialization(_) | - TypeMapping::PromoteLiterals | + TypeMapping::PromoteLiterals(_) | TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. 
} | TypeMapping::Materialize(_) | @@ -6790,7 +6794,7 @@ impl<'db> Type<'db> { let function = Type::FunctionLiteral(function.apply_type_mapping_impl(db, type_mapping, tcx, visitor)); match type_mapping { - TypeMapping::PromoteLiterals => function.promote_literals_impl(db, tcx), + TypeMapping::PromoteLiterals(PromoteLiteralsMode::On) => function.promote_literals_impl(db, tcx), _ => function } } @@ -6867,13 +6871,9 @@ impl<'db> Type<'db> { builder = builder.add_positive(positive.apply_type_mapping_impl(db, type_mapping, tcx, visitor)); } - let flipped_mapping = match type_mapping { - TypeMapping::Materialize(materialization_kind) => &TypeMapping::Materialize(materialization_kind.flip()), - _ => type_mapping, - }; for negative in intersection.negative(db) { builder = - builder.add_negative(negative.apply_type_mapping_impl(db, flipped_mapping, tcx, visitor)); + builder.add_negative(negative.apply_type_mapping_impl(db, &type_mapping.flip(), tcx, visitor)); } builder.build() } @@ -6902,8 +6902,9 @@ impl<'db> Type<'db> { TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. } | TypeMapping::Materialize(_) | - TypeMapping::ReplaceParameterDefaults => self, - TypeMapping::PromoteLiterals => self.promote_literals_impl(db, tcx) + TypeMapping::ReplaceParameterDefaults | + TypeMapping::PromoteLiterals(PromoteLiteralsMode::Off) => self, + TypeMapping::PromoteLiterals(PromoteLiteralsMode::On) => self.promote_literals_impl(db, tcx) } Type::Dynamic(_) => match type_mapping { @@ -6912,7 +6913,7 @@ impl<'db> Type<'db> { TypeMapping::BindLegacyTypevars(_) | TypeMapping::BindSelf(_) | TypeMapping::ReplaceSelf { .. 
} | - TypeMapping::PromoteLiterals | + TypeMapping::PromoteLiterals(_) | TypeMapping::ReplaceParameterDefaults => self, TypeMapping::Materialize(materialization_kind) => match materialization_kind { MaterializationKind::Top => Type::object(), @@ -7456,6 +7457,21 @@ fn apply_specialization_cycle_initial<'db>( Type::Never } +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize)] +pub enum PromoteLiteralsMode { + On, + Off, +} + +impl PromoteLiteralsMode { + const fn flip(self) -> Self { + match self { + PromoteLiteralsMode::On => PromoteLiteralsMode::Off, + PromoteLiteralsMode::Off => PromoteLiteralsMode::On, + } + } +} + /// A mapping that can be applied to a type, producing another type. This is applied inductively to /// the components of complex types. /// @@ -7470,7 +7486,7 @@ pub enum TypeMapping<'a, 'db> { PartialSpecialization(PartialSpecialization<'a, 'db>), /// Replaces any literal types with their corresponding promoted type form (e.g. `Literal["string"]` /// to `str`, or `def _() -> int` to `Callable[[], int]`). - PromoteLiterals, + PromoteLiterals(PromoteLiteralsMode), /// Binds a legacy typevar with the generic context (class, function, type alias) that it is /// being used in. BindLegacyTypevars(BindingContext<'db>), @@ -7495,7 +7511,7 @@ impl<'db> TypeMapping<'_, 'db> { match self { TypeMapping::Specialization(_) | TypeMapping::PartialSpecialization(_) - | TypeMapping::PromoteLiterals + | TypeMapping::PromoteLiterals(_) | TypeMapping::BindLegacyTypevars(_) | TypeMapping::Materialize(_) | TypeMapping::ReplaceParameterDefaults => context, @@ -7521,6 +7537,22 @@ impl<'db> TypeMapping<'_, 'db> { ), } } + + /// Returns a new `TypeMapping` that should be applied in contravariant positions. 
+ pub(crate) fn flip(&self) -> Self { + match self { + TypeMapping::Materialize(materialization_kind) => { + TypeMapping::Materialize(materialization_kind.flip()) + } + TypeMapping::PromoteLiterals(mode) => TypeMapping::PromoteLiterals(mode.flip()), + TypeMapping::Specialization(_) + | TypeMapping::PartialSpecialization(_) + | TypeMapping::BindLegacyTypevars(_) + | TypeMapping::BindSelf(_) + | TypeMapping::ReplaceSelf { .. } + | TypeMapping::ReplaceParameterDefaults => self.clone(), + } + } } /// A Salsa-tracked constraint set. This is only needed to have something appropriately small to diff --git a/crates/ty_python_semantic/src/types/signatures.rs b/crates/ty_python_semantic/src/types/signatures.rs index b0ff205e48..11979100bb 100644 --- a/crates/ty_python_semantic/src/types/signatures.rs +++ b/crates/ty_python_semantic/src/types/signatures.rs @@ -509,20 +509,17 @@ impl<'db> Signature<'db> { tcx: TypeContext<'db>, visitor: &ApplyTypeMappingVisitor<'db>, ) -> Self { - let flipped_mapping = match type_mapping { - TypeMapping::Materialize(materialization_kind) => { - &TypeMapping::Materialize(materialization_kind.flip()) - } - _ => type_mapping, - }; Self { generic_context: self .generic_context .map(|context| type_mapping.update_signature_generic_context(db, context)), definition: self.definition, - parameters: self - .parameters - .apply_type_mapping_impl(db, flipped_mapping, tcx, visitor), + parameters: self.parameters.apply_type_mapping_impl( + db, + &type_mapping.flip(), + tcx, + visitor, + ), return_ty: self .return_ty .map(|ty| ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor)), From 9d7da914b9c7fdc11f3334f45fb00a58c70b0bd2 Mon Sep 17 00:00:00 2001 From: chiri Date: Fri, 31 Oct 2025 18:10:14 +0300 Subject: [PATCH 014/180] Improve `extend` docs (#21135) Co-authored-by: Micha Reiser --- crates/ruff_workspace/src/options.rs | 15 +++++++++++---- ruff.schema.json | 2 +- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git 
a/crates/ruff_workspace/src/options.rs b/crates/ruff_workspace/src/options.rs index 47ee0fe738..708d6dcf0b 100644 --- a/crates/ruff_workspace/src/options.rs +++ b/crates/ruff_workspace/src/options.rs @@ -59,13 +59,20 @@ pub struct Options { )] pub cache_dir: Option, - /// A path to a local `pyproject.toml` file to merge into this + /// A path to a local `pyproject.toml` or `ruff.toml` file to merge into this /// configuration. User home directory and environment variables will be /// expanded. /// - /// To resolve the current `pyproject.toml` file, Ruff will first resolve - /// this base configuration file, then merge in any properties defined - /// in the current configuration file. + /// To resolve the current configuration file, Ruff will first load + /// this base configuration file, then merge in properties defined + /// in the current configuration file. Most settings follow simple override + /// behavior where the child value replaces the parent value. However, + /// rule selection (`lint.select` and `lint.ignore`) has special merging + /// behavior: if the child configuration specifies `lint.select`, it + /// establishes a new baseline rule set and the parent's `lint.ignore` + /// rules are discarded; if the child configuration omits `lint.select`, + /// the parent's rule selection is inherited and both parent and child + /// `lint.ignore` rules are accumulated together. #[option( default = r#"null"#, value_type = "str", diff --git a/ruff.schema.json b/ruff.schema.json index 04ef3fcc3d..a16e91fbd7 100644 --- a/ruff.schema.json +++ b/ruff.schema.json @@ -71,7 +71,7 @@ "deprecated": true }, "extend": { - "description": "A path to a local `pyproject.toml` file to merge into this\nconfiguration. 
User home directory and environment variables will be\nexpanded.\n\nTo resolve the current `pyproject.toml` file, Ruff will first resolve\nthis base configuration file, then merge in any properties defined\nin the current configuration file.", + "description": "A path to a local `pyproject.toml` or `ruff.toml` file to merge into this\nconfiguration. User home directory and environment variables will be\nexpanded.\n\nTo resolve the current configuration file, Ruff will first load\nthis base configuration file, then merge in properties defined\nin the current configuration file. Most settings follow simple override\nbehavior where the child value replaces the parent value. However,\nrule selection (`lint.select` and `lint.ignore`) has special merging\nbehavior: if the child configuration specifies `lint.select`, it\nestablishes a new baseline rule set and the parent's `lint.ignore`\nrules are discarded; if the child configuration omits `lint.select`,\nthe parent's rule selection is inherited and both parent and child\n`lint.ignore` rules are accumulated together.", "type": [ "string", "null" From 1d111c878085eed772315aa0fa440b78c4977ed0 Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Fri, 31 Oct 2025 11:12:06 -0400 Subject: [PATCH 015/180] [ty] prefer declared type on invalid TypedDict creation (#21168) ## Summary In general, when we have an invalid assignment (inferred assigned type is not assignable to declared type), we fall back to inferring the declared type, since the declared type is a more explicit declaration of the programmer's intent. This also maintains the invariant that our inferred type for a name is always assignable to the declared type for that same name. 
For example: ```py x: str = 1 reveal_type(x) # revealed: str ``` We weren't following this pattern for dictionary literals inferred (via type context) as a typed dictionary; if the literal was not valid for the annotated TypedDict type, we would just fall back to the normal inferred type of the dict literal, effectively ignoring the annotation, and resulting in inferred type not assignable to declared type. ## Test Plan Added mdtest assertions. --- .../resources/mdtest/typed_dict.md | 9 +++++++++ .../ty_python_semantic/src/types/infer/builder.rs | 10 ++++------ crates/ty_python_semantic/src/types/typed_dict.rs | 15 ++++----------- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index 042d6317a2..8be6de4ef3 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -99,15 +99,24 @@ eve1a: Person = {"name": b"Eve", "age": None} # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`" eve1b = Person(name=b"Eve", age=None) +reveal_type(eve1a) # revealed: Person +reveal_type(eve1b) # revealed: Person + # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2a: Person = {"age": 22} # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2b = Person(age=22) +reveal_type(eve2a) # revealed: Person +reveal_type(eve2b) # revealed: Person + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3a: Person = {"name": "Eve", "age": 25, "extra": True} # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3b = Person(name="Eve", age=25, extra=True) + +reveal_type(eve3a) # revealed: Person +reveal_type(eve3b) # revealed: Person ``` Also, the value types ​​declared in a 
`TypedDict` affect generic call inference: diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index edf8581bcd..ea3f739f22 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -6103,9 +6103,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { && let Some(typed_dict) = tcx .filter_union(self.db(), Type::is_typed_dict) .as_typed_dict() - && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict) { - return ty; + self.infer_typed_dict_expression(dict, typed_dict); + return Type::TypedDict(typed_dict); } // Avoid false positives for the functional `TypedDict` form, which is currently @@ -6130,7 +6130,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { &mut self, dict: &ast::ExprDict, typed_dict: TypedDictType<'db>, - ) -> Option> { + ) { let ast::ExprDict { range: _, node_index: _, @@ -6153,9 +6153,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { validate_typed_dict_dict_literal(&self.context, typed_dict, dict, dict.into(), |expr| { self.expression_type(expr) - }) - .ok() - .map(|_| Type::TypedDict(typed_dict)) + }); } // Infer the type of a collection literal expression. diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs index e29b836d8a..632d2a2933 100644 --- a/crates/ty_python_semantic/src/types/typed_dict.rs +++ b/crates/ty_python_semantic/src/types/typed_dict.rs @@ -389,7 +389,7 @@ fn validate_from_keywords<'db, 'ast>( provided_keys } -/// Validates a `TypedDict` dictionary literal assignment, +/// Validates a `TypedDict` dictionary literal assignment, emitting any needed diagnostics. /// e.g. 
`person: Person = {"name": "Alice", "age": 30}` pub(super) fn validate_typed_dict_dict_literal<'db>( context: &InferContext<'db, '_>, @@ -397,8 +397,7 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( dict_expr: &ast::ExprDict, error_node: AnyNodeRef, expression_type_fn: impl Fn(&ast::Expr) -> Type<'db>, -) -> Result, OrderSet<&'db str>> { - let mut valid = true; +) { let mut provided_keys = OrderSet::new(); // Validate each key-value pair in the dictionary literal @@ -411,7 +410,7 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( let value_type = expression_type_fn(&item.value); - valid &= validate_typed_dict_key_assignment( + validate_typed_dict_key_assignment( context, typed_dict, key_str, @@ -424,11 +423,5 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( } } - valid &= validate_typed_dict_required_keys(context, typed_dict, &provided_keys, error_node); - - if valid { - Ok(provided_keys) - } else { - Err(provided_keys) - } + validate_typed_dict_required_keys(context, typed_dict, &provided_keys, error_node); } From b93d8f2b9fa834ab7c672d801c725c0031d9408e Mon Sep 17 00:00:00 2001 From: chiri Date: Fri, 31 Oct 2025 18:16:09 +0300 Subject: [PATCH 016/180] [`refurb`] Preserve argument ordering in autofix (`FURB103`) (#20790) Fixes https://github.com/astral-sh/ruff/issues/20785 --- .../resources/test/fixtures/refurb/FURB103.py | 8 +++++++ .../rules/refurb/rules/write_whole_file.rs | 1 - ...es__refurb__tests__FURB103_FURB103.py.snap | 11 +++++++++ ...rb__tests__preview_FURB103_FURB103.py.snap | 23 ++++++++++++++++++- ...rb__tests__write_whole_file_python_39.snap | 11 +++++++++ crates/ruff_python_ast/src/nodes.rs | 2 +- 6 files changed, 53 insertions(+), 3 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py index b6d8e1d034..35d9600d41 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py +++ 
b/crates/ruff_linter/resources/test/fixtures/refurb/FURB103.py @@ -145,3 +145,11 @@ with open("file.txt", "w") as f: with open("file.txt", "w") as f: for line in text: f.write(line) + +# See: https://github.com/astral-sh/ruff/issues/20785 +import json + +data = {"price": 100} + +with open("test.json", "wb") as f: + f.write(json.dumps(data, indent=4).encode("utf-8")) \ No newline at end of file diff --git a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs index bbee6dcb5a..da99733efd 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs @@ -5,7 +5,6 @@ use ruff_python_ast::{ relocate::relocate_expr, visitor::{self, Visitor}, }; - use ruff_python_codegen::Generator; use ruff_text_size::{Ranged, TextRange}; diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap index dfb111341e..74f3749953 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap @@ -134,3 +134,14 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo 75 | f.write(foobar) | help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` + +FURB103 `open` and `write` should be replaced by `Path("test.json")....` + --> FURB103.py:154:6 + | +152 | data = {"price": 100} +153 | +154 | with open("test.json", "wb") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +155 | f.write(json.dumps(data, indent=4).encode("utf-8")) + | +help: Replace with `Path("test.json")....` diff --git 
a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap index eef0992839..8148035435 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap @@ -257,4 +257,25 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")` 75 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n") 76 | 77 | # Non-errors. -78 | +78 | + +FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....` + --> FURB103.py:154:6 + | +152 | data = {"price": 100} +153 | +154 | with open("test.json", "wb") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +155 | f.write(json.dumps(data, indent=4).encode("utf-8")) + | +help: Replace with `Path("test.json")....` +148 | +149 | # See: https://github.com/astral-sh/ruff/issues/20785 +150 | import json +151 + import pathlib +152 | +153 | data = {"price": 100} +154 | + - with open("test.json", "wb") as f: + - f.write(json.dumps(data, indent=4).encode("utf-8")) +155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8")) diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap index 81eea0c159..140a274468 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap @@ -104,3 +104,14 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba 51 | # writes a single 
time to file and that bit they can replace. | help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))` + +FURB103 `open` and `write` should be replaced by `Path("test.json")....` + --> FURB103.py:154:6 + | +152 | data = {"price": 100} +153 | +154 | with open("test.json", "wb") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +155 | f.write(json.dumps(data, indent=4).encode("utf-8")) + | +help: Replace with `Path("test.json")....` diff --git a/crates/ruff_python_ast/src/nodes.rs b/crates/ruff_python_ast/src/nodes.rs index f71f420d09..5cb58e7f05 100644 --- a/crates/ruff_python_ast/src/nodes.rs +++ b/crates/ruff_python_ast/src/nodes.rs @@ -3372,7 +3372,7 @@ impl Arguments { pub fn arguments_source_order(&self) -> impl Iterator> { let args = self.args.iter().map(ArgOrKeyword::Arg); let keywords = self.keywords.iter().map(ArgOrKeyword::Keyword); - args.merge_by(keywords, |left, right| left.start() < right.start()) + args.merge_by(keywords, |left, right| left.start() <= right.start()) } pub fn inner_range(&self) -> TextRange { From bb40c3436120defba5cf72da9c771d585f7f62ae Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Fri, 31 Oct 2025 11:48:28 -0400 Subject: [PATCH 017/180] [ty] Use declared attribute types as type context (#21143) ## Summary For example: ```py class X: x: list[int | str] def _(x: X): x.x = [1] ``` Resolves https://github.com/astral-sh/ty/issues/1375. 
--- .../resources/mdtest/bidirectional.md | 112 +++++++- .../resources/mdtest/call/union.md | 43 --- .../src/types/infer/builder.rs | 256 ++++++++++++++---- 3 files changed, 304 insertions(+), 107 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/bidirectional.md b/crates/ty_python_semantic/resources/mdtest/bidirectional.md index 627492855f..1cc3dba162 100644 --- a/crates/ty_python_semantic/resources/mdtest/bidirectional.md +++ b/crates/ty_python_semantic/resources/mdtest/bidirectional.md @@ -185,12 +185,12 @@ Declared attribute types: ```py class E: - e: list[Literal[1]] + a: list[Literal[1]] + b: list[Literal[1]] def _(e: E): - # TODO: Implement attribute type context. - # error: [invalid-assignment] "Object of type `list[Unknown | int]` is not assignable to attribute `e` of type `list[Literal[1]]`" - e.e = [1] + e.a = [1] + E.b = [1] ``` Function return types: @@ -200,6 +200,41 @@ def f() -> list[Literal[1]]: return [1] ``` +## Instance attribute + +```toml +[environment] +python-version = "3.12" +``` + +Both meta and class/instance attribute annotations are used as type context: + +```py +from typing import Literal, Any + +class DataDescriptor: + def __get__(self, instance: object, owner: type | None = None) -> list[Literal[1]]: + return [] + + def __set__(self, instance: object, value: list[Literal[1]]) -> None: + pass + +def lst[T](x: T) -> list[T]: + return [x] + +def _(flag: bool): + class Meta(type): + if flag: + x: DataDescriptor = DataDescriptor() + + class C(metaclass=Meta): + x: list[int | None] + + def _(c: C): + c.x = lst(1) + C.x = lst(1) +``` + ## Class constructor parameters ```toml @@ -226,3 +261,72 @@ A(f(1)) # error: [invalid-argument-type] "Argument to bound method `__init__` is incorrect: Expected `list[int | None]`, found `list[list[Unknown]]`" A(f([])) ``` + +## Multi-inference diagnostics + +```toml +[environment] +python-version = "3.12" +``` + +Diagnostics unrelated to the type-context are only reported once: + 
+`call.py`: + +```py +def f[T](x: T) -> list[T]: + return [x] + +def a(x: list[bool], y: list[bool]): ... +def b(x: list[int], y: list[int]): ... +def c(x: list[int], y: list[int]): ... +def _(x: int): + if x == 0: + y = a + elif x == 1: + y = b + else: + y = c + + if x == 0: + z = True + + y(f(True), [True]) + + # error: [possibly-unresolved-reference] "Name `z` used when possibly not defined" + y(f(True), [z]) +``` + +`call_standalone_expression.py`: + +```py +def f(_: str): ... +def g(_: str): ... +def _(a: object, b: object, flag: bool): + if flag: + x = f + else: + x = g + + # error: [unsupported-operator] "Operator `>` is not supported for types `object` and `object`" + x(f"{'a' if a > b else 'b'}") +``` + +`attribute_assignment.py`: + +```py +from typing import TypedDict + +class TD(TypedDict): + y: int + +class X: + td: TD + +def _(x: X, flag: bool): + if flag: + y = 1 + + # error: [possibly-unresolved-reference] "Name `y` used when possibly not defined" + x.td = {"y": y} +``` diff --git a/crates/ty_python_semantic/resources/mdtest/call/union.md b/crates/ty_python_semantic/resources/mdtest/call/union.md index 1a4079204d..69695c3f5c 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/union.md +++ b/crates/ty_python_semantic/resources/mdtest/call/union.md @@ -281,46 +281,3 @@ def _(flag: bool): # we currently consider `TypedDict` instances to be subtypes of `dict` f({"y": 1}) ``` - -Diagnostics unrelated to the type-context are only reported once: - -`expression.py`: - -```py -def f[T](x: T) -> list[T]: - return [x] - -def a(x: list[bool], y: list[bool]): ... -def b(x: list[int], y: list[int]): ... -def c(x: list[int], y: list[int]): ... -def _(x: int): - if x == 0: - y = a - elif x == 1: - y = b - else: - y = c - - if x == 0: - z = True - - y(f(True), [True]) - - # error: [possibly-unresolved-reference] "Name `z` used when possibly not defined" - y(f(True), [z]) -``` - -`standalone_expression.py`: - -```py -def f(_: str): ... -def g(_: str): ... 
-def _(a: object, b: object, flag: bool): - if flag: - x = f - else: - x = g - - # error: [unsupported-operator] "Operator `>` is not supported for types `object` and `object`" - x(f"{'a' if a > b else 'b'}") -``` diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index ea3f739f22..f58f093a44 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -2924,12 +2924,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { for item in items { let target = item.optional_vars.as_deref(); if let Some(target) = target { - self.infer_target(target, &item.context_expr, |builder| { + self.infer_target(target, &item.context_expr, |builder, tcx| { // TODO: `infer_with_statement_definition` reports a diagnostic if `ctx_manager_ty` isn't a context manager // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `with not_context_manager as a.x: ... builder - .infer_standalone_expression(&item.context_expr, TypeContext::default()) + .infer_standalone_expression(&item.context_expr, tcx) .enter(builder.db()) }); } else { @@ -3393,8 +3393,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = assignment; for target in targets { - self.infer_target(target, value, |builder| { - builder.infer_standalone_expression(value, TypeContext::default()) + self.infer_target(target, value, |builder, tcx| { + builder.infer_standalone_expression(value, tcx) }); } } @@ -3410,13 +3410,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { /// `target`. 
fn infer_target(&mut self, target: &ast::Expr, value: &ast::Expr, infer_value_expr: F) where - F: Fn(&mut Self) -> Type<'db>, + F: Fn(&mut Self, TypeContext<'db>) -> Type<'db>, { - let assigned_ty = match target { - ast::Expr::Name(_) => None, - _ => Some(infer_value_expr(self)), - }; - self.infer_target_impl(target, value, assigned_ty); + match target { + ast::Expr::Name(_) => { + self.infer_target_impl(target, value, None); + } + + _ => self.infer_target_impl( + target, + value, + Some(&|builder, tcx| infer_value_expr(builder, tcx)), + ), + } } /// Make sure that the subscript assignment `obj[slice] = value` is valid. @@ -3568,30 +3574,68 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target: &ast::ExprAttribute, object_ty: Type<'db>, attribute: &str, - value_ty: Type<'db>, + infer_value_ty: &dyn Fn(&mut Self, TypeContext<'db>) -> Type<'db>, emit_diagnostics: bool, ) -> bool { let db = self.db(); - let ensure_assignable_to = |attr_ty| -> bool { - let assignable = value_ty.is_assignable_to(db, attr_ty); - if !assignable && emit_diagnostics { - report_invalid_attribute_assignment( - &self.context, - target.into(), - attr_ty, - value_ty, - attribute, - ); - } - assignable + let mut first_tcx = None; + + // A wrapper over `infer_value_ty` that allows inferring the value type multiple times + // during attribute resolution. + let pure_infer_value_ty = infer_value_ty; + let mut infer_value_ty = |builder: &mut Self, tcx: TypeContext<'db>| -> Type<'db> { + // Overwrite the previously inferred value, preferring later inferences, which are + // likely more precise. Note that we still ensure each inference is assignable to + // its declared type, so this mainly affects the IDE hover type. + let prev_multi_inference_state = mem::replace( + &mut builder.multi_inference_state, + MultiInferenceState::Overwrite, + ); + + // If we are inferring the argument multiple times, silence diagnostics to avoid duplicated warnings. 
+ let was_in_multi_inference = if let Some(first_tcx) = first_tcx { + // The first time we infer an argument during multi-inference must be without type context, + // to avoid leaking diagnostics for bidirectional inference attempts. + debug_assert_eq!(first_tcx, TypeContext::default()); + + builder.context.set_multi_inference(true) + } else { + builder.context.is_in_multi_inference() + }; + + let value_ty = pure_infer_value_ty(builder, tcx); + + // Reset the multi-inference state. + first_tcx.get_or_insert(tcx); + builder.multi_inference_state = prev_multi_inference_state; + builder.context.set_multi_inference(was_in_multi_inference); + + value_ty }; + // This closure should only be called if `value_ty` was inferred with `attr_ty` as type context. + let ensure_assignable_to = + |builder: &Self, value_ty: Type<'db>, attr_ty: Type<'db>| -> bool { + let assignable = value_ty.is_assignable_to(db, attr_ty); + if !assignable && emit_diagnostics { + report_invalid_attribute_assignment( + &builder.context, + target.into(), + attr_ty, + value_ty, + attribute, + ); + } + assignable + }; + // Return true (and emit a diagnostic) if this is an invalid assignment to a `Final` attribute. - let invalid_assignment_to_final = |qualifiers: TypeQualifiers| -> bool { + let invalid_assignment_to_final = |builder: &Self, qualifiers: TypeQualifiers| -> bool { if qualifiers.contains(TypeQualifiers::FINAL) { if emit_diagnostics { - if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { + if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) + { builder.into_diagnostic(format_args!( "Cannot assign to final attribute `{attribute}` \ on type `{}`", @@ -3607,8 +3651,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { match object_ty { Type::Union(union) => { + // TODO: We could perform multi-inference here with each element of the union as type context. 
+ let value_ty = infer_value_ty(self, TypeContext::default()); + if union.elements(self.db()).iter().all(|elem| { - self.validate_attribute_assignment(target, *elem, attribute, value_ty, false) + self.validate_attribute_assignment( + target, + *elem, + attribute, + &|_, _| value_ty, + false, + ) }) { true } else { @@ -3631,9 +3684,18 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } Type::Intersection(intersection) => { + // TODO: We could perform multi-inference here with each element of the union as type context. + let value_ty = infer_value_ty(self, TypeContext::default()); + // TODO: Handle negative intersection elements if intersection.positive(db).iter().any(|elem| { - self.validate_attribute_assignment(target, *elem, attribute, value_ty, false) + self.validate_attribute_assignment( + target, + *elem, + attribute, + &|_, _| value_ty, + false, + ) }) { true } else { @@ -3657,12 +3719,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target, alias.value_type(self.db()), attribute, - value_ty, + pure_infer_value_ty, emit_diagnostics, ), // Super instances do not allow attribute assignment Type::NominalInstance(instance) if instance.has_known_class(db, KnownClass::Super) => { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { builder.into_diagnostic(format_args!( @@ -3674,6 +3738,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } Type::BoundSuper(_) => { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&INVALID_ASSIGNMENT, target) { builder.into_diagnostic(format_args!( @@ -3685,7 +3751,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } - Type::Dynamic(..) | Type::Never => true, + Type::Dynamic(..) | Type::Never => { + infer_value_ty(self, TypeContext::default()); + true + } Type::NominalInstance(..) 
| Type::ProtocolInstance(_) @@ -3710,6 +3779,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::AlwaysFalsy | Type::TypeIs(_) | Type::TypedDict(_) => { + // TODO: We could use the annotated parameter type of `__setattr__` as type context here. + // However, we would still have to perform the first inference without type context. + let value_ty = infer_value_ty(self, TypeContext::default()); + // First, try to call the `__setattr__` dunder method. If this is present/defined, overrides // assigning the attributed by the normal mechanism. let setattr_dunder_call_result = object_ty.try_call_dunder_with_policy( @@ -3811,7 +3884,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { place: Place::Defined(meta_attr_ty, _, meta_attr_boundness), qualifiers, } => { - if invalid_assignment_to_final(qualifiers) { + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -3819,6 +3892,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { if let Place::Defined(meta_dunder_set, _, _) = meta_attr_ty.class_member(db, "__set__".into()).place { + // TODO: We could use the annotated parameter type of `__set__` as + // type context here. 
let dunder_set_result = meta_dunder_set.try_call( db, &CallArguments::positional([ @@ -3844,7 +3919,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { dunder_set_result.is_ok() } else { - ensure_assignable_to(meta_attr_ty) + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(meta_attr_ty)), + ); + + ensure_assignable_to(self, value_ty, meta_attr_ty) }; let assignable_to_instance_attribute = if meta_attr_boundness @@ -3857,12 +3937,16 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } = object_ty.instance_member(db, attribute) { - if invalid_assignment_to_final(qualifiers) { + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(instance_attr_ty)), + ); + if invalid_assignment_to_final(self, qualifiers) { return false; } ( - ensure_assignable_to(instance_attr_ty), + ensure_assignable_to(self, value_ty, instance_attr_ty), instance_attr_boundness, ) } else { @@ -3896,7 +3980,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { qualifiers, } = object_ty.instance_member(db, attribute) { - if invalid_assignment_to_final(qualifiers) { + let value_ty = infer_value_ty( + self, + TypeContext::new(Some(instance_attr_ty)), + ); + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -3909,7 +3997,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } - ensure_assignable_to(instance_attr_ty) + ensure_assignable_to(self, value_ty, instance_attr_ty) } else { if emit_diagnostics { if let Some(builder) = @@ -3937,13 +4025,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { place: Place::Defined(meta_attr_ty, _, meta_attr_boundness), qualifiers, } => { - if invalid_assignment_to_final(qualifiers) { + // We may have to perform multi-inference if the meta attribute is possibly unbound. + // However, we are required to perform the first inference without type context. 
+ let value_ty = infer_value_ty(self, TypeContext::default()); + + if invalid_assignment_to_final(self, qualifiers) { return false; } let assignable_to_meta_attr = if let Place::Defined(meta_dunder_set, _, _) = meta_attr_ty.class_member(db, "__set__".into()).place { + // TODO: We could use the annotated parameter type of `__set__` as + // type context here. let dunder_set_result = meta_dunder_set.try_call( db, &CallArguments::positional([meta_attr_ty, object_ty, value_ty]), @@ -3963,7 +4057,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { dunder_set_result.is_ok() } else { - ensure_assignable_to(meta_attr_ty) + let value_ty = + infer_value_ty(self, TypeContext::new(Some(meta_attr_ty))); + ensure_assignable_to(self, value_ty, meta_attr_ty) }; let assignable_to_class_attr = if meta_attr_boundness @@ -3976,7 +4072,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .expect("called on Type::ClassLiteral or Type::SubclassOf") .place { - (ensure_assignable_to(class_attr_ty), class_attr_boundness) + let value_ty = + infer_value_ty(self, TypeContext::new(Some(class_attr_ty))); + ( + ensure_assignable_to(self, value_ty, class_attr_ty), + class_attr_boundness, + ) } else { (true, Definedness::PossiblyUndefined) }; @@ -4008,7 +4109,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .find_name_in_mro(db, attribute) .expect("called on Type::ClassLiteral or Type::SubclassOf") { - if invalid_assignment_to_final(qualifiers) { + let value_ty = + infer_value_ty(self, TypeContext::new(Some(class_attr_ty))); + if invalid_assignment_to_final(self, qualifiers) { return false; } @@ -4021,8 +4124,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } - ensure_assignable_to(class_attr_ty) + ensure_assignable_to(self, value_ty, class_attr_ty) } else { + infer_value_ty(self, TypeContext::default()); + let attribute_is_bound_on_instance = object_ty.to_instance(self.db()).is_some_and(|instance| { !instance @@ -4064,6 +4169,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 
'ast> { Type::ModuleLiteral(module) => { if let Place::Defined(attr_ty, _, _) = module.static_member(db, attribute).place { + let value_ty = infer_value_ty(self, TypeContext::new(Some(attr_ty))); + let assignable = value_ty.is_assignable_to(db, attr_ty); if assignable { true @@ -4080,6 +4187,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { false } } else { + infer_value_ty(self, TypeContext::default()); + if emit_diagnostics { if let Some(builder) = self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target) @@ -4098,22 +4207,35 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } + #[expect(clippy::type_complexity)] fn infer_target_impl( &mut self, target: &ast::Expr, value: &ast::Expr, - assigned_ty: Option>, + infer_assigned_ty: Option<&dyn Fn(&mut Self, TypeContext<'db>) -> Type<'db>>, ) { match target { - ast::Expr::Name(name) => self.infer_definition(name), + ast::Expr::Name(name) => { + if let Some(infer_assigned_ty) = infer_assigned_ty { + infer_assigned_ty(self, TypeContext::default()); + } + + self.infer_definition(name); + } ast::Expr::List(ast::ExprList { elts, .. }) | ast::Expr::Tuple(ast::ExprTuple { elts, .. }) => { + let assigned_ty = infer_assigned_ty.map(|f| f(self, TypeContext::default())); + if let Some(tuple_spec) = assigned_ty.and_then(|ty| ty.tuple_instance_spec(self.db())) { - let mut assigned_tys = tuple_spec.all_elements(); - for element in elts { - self.infer_target_impl(element, value, assigned_tys.next().copied()); + let assigned_tys = tuple_spec.all_elements().copied().collect::>(); + + for (i, element) in elts.iter().enumerate() { + match assigned_tys.get(i).copied() { + None => self.infer_target_impl(element, value, None), + Some(ty) => self.infer_target_impl(element, value, Some(&|_, _| ty)), + } } } else { for element in elts { @@ -4129,29 +4251,39 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .. 
}, ) => { - self.store_expression_type(target, assigned_ty.unwrap_or(Type::unknown())); - let object_ty = self.infer_expression(object, TypeContext::default()); - if let Some(assigned_ty) = assigned_ty { + if let Some(infer_assigned_ty) = infer_assigned_ty { + let infer_assigned_ty = &|builder: &mut Self, tcx| { + let assigned_ty = infer_assigned_ty(builder, tcx); + builder.store_expression_type(target, assigned_ty); + assigned_ty + }; + self.validate_attribute_assignment( attr_expr, object_ty, attr.id(), - assigned_ty, + infer_assigned_ty, true, ); } } ast::Expr::Subscript(subscript_expr) => { + let assigned_ty = infer_assigned_ty.map(|f| f(self, TypeContext::default())); self.store_expression_type(target, assigned_ty.unwrap_or(Type::unknown())); if let Some(assigned_ty) = assigned_ty { self.validate_subscript_assignment(subscript_expr, value, assigned_ty); } } + + // TODO: Remove this once we handle all possible assignment targets. _ => { - // TODO: Remove this once we handle all possible assignment targets. + if let Some(infer_assigned_ty) = infer_assigned_ty { + infer_assigned_ty(self, TypeContext::default()); + } + self.infer_expression(target, TypeContext::default()); } } @@ -4836,12 +4968,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { is_async: _, } = for_statement; - self.infer_target(target, iter, |builder| { + self.infer_target(target, iter, |builder, tcx| { // TODO: `infer_for_statement_definition` reports a diagnostic if `iter_ty` isn't iterable // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `for a.x in not_iterable: ... 
builder - .infer_standalone_expression(iter, TypeContext::default()) + .infer_standalone_expression(iter, tcx) .iterate(builder.db()) .homogeneous_element_type(builder.db()) }); @@ -5863,6 +5995,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { assert_eq!(previous, None); } + MultiInferenceState::Overwrite => { + self.expressions.insert(expression.into(), ty); + } + MultiInferenceState::Intersect => { self.expressions .entry(expression.into()) @@ -6430,7 +6566,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { is_async: _, } = comprehension; - self.infer_target(target, iter, |builder| { + self.infer_target(target, iter, |builder, tcx| { // TODO: `infer_comprehension_definition` reports a diagnostic if `iter_ty` isn't iterable // but only if the target is a name. We should report a diagnostic here if the target isn't a name: // `[... for a.x in not_iterable] @@ -6438,11 +6574,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { infer_same_file_expression_type( builder.db(), builder.index.expression(iter), - TypeContext::default(), + tcx, builder.module(), ) } else { - builder.infer_standalone_expression(iter, TypeContext::default()) + builder.infer_standalone_expression(iter, tcx) } .iterate(builder.db()) .homogeneous_element_type(builder.db()) @@ -10153,16 +10289,16 @@ enum MultiInferenceState { #[default] Panic, + /// Overwrite the previously inferred value. + Overwrite, + /// Store the intersection of all types inferred for the expression. 
Intersect, } impl MultiInferenceState { - fn is_panic(self) -> bool { - match self { - MultiInferenceState::Panic => true, - MultiInferenceState::Intersect => false, - } + const fn is_panic(self) -> bool { + matches!(self, MultiInferenceState::Panic) } } From 69b4c29924c75047655868c9895d8018d11e34e3 Mon Sep 17 00:00:00 2001 From: Luca Chiodini Date: Fri, 31 Oct 2025 16:59:11 +0100 Subject: [PATCH 018/180] Consistently wrap tokens in parser diagnostics in `backticks` instead of 'quotes' (#21163) The parser currently uses single quotes to wrap tokens. This is inconsistent with the rest of ruff/ty, which use backticks. For example, see the inconsistent diagnostics produced in this simple example: https://play.ty.dev/0a9d6eab-6599-4a1d-8e40-032091f7f50f Consistently wrapping tokens in backticks produces uniform diagnostics. Following the style decision of #723, in #2889 some quotes were already switched into backticks. This is also in line with Rust's guide on diagnostics (https://rustc-dev-guide.rust-lang.org/diagnostics.html#diagnostic-structure): > When code or an identifier must appear in a message or label, it should be surrounded with backticks --- ...essage__grouped__tests__syntax_errors.snap | 2 +- ...ules__pycodestyle__tests__E231_E23.py.snap | 92 ++++----- ...tyle__tests__E301_E30_syntax_error.py.snap | 8 +- ...tyle__tests__E302_E30_syntax_error.py.snap | 8 +- ...tyle__tests__E303_E30_syntax_error.py.snap | 8 +- ...tyle__tests__E305_E30_syntax_error.py.snap | 8 +- ...tyle__tests__E306_E30_syntax_error.py.snap | 8 +- crates/ruff_python_parser/src/error.rs | 10 +- crates/ruff_python_parser/src/token.rs | 174 +++++++++--------- ...id_syntax@assert_invalid_test_expr.py.snap | 2 +- ..._syntax@assign_stmt_keyword_target.py.snap | 4 +- ...alid_syntax@async_unexpected_token.py.snap | 10 +- ...tax@aug_assign_stmt_invalid_target.py.snap | 2 +- ...class_def_unclosed_type_param_list.py.snap | 2 +- ...ntax@comma_separated_missing_comma.py.snap | 2 +- 
...ted_missing_comma_between_elements.py.snap | 2 +- ...prehension_missing_for_after_async.py.snap | 4 +- ...yntax@decorator_missing_expression.py.snap | 2 +- ...d_syntax@decorator_missing_newline.py.snap | 6 +- ...essions__arguments__double_starred.py.snap | 2 +- ...ressions__arguments__missing_comma.py.snap | 2 +- ...expressions__arguments__unclosed_0.py.snap | 2 +- ...expressions__arguments__unclosed_1.py.snap | 2 +- ...expressions__arguments__unclosed_2.py.snap | 2 +- ...xpressions__compare__invalid_order.py.snap | 2 +- ...tax@expressions__dict__double_star.py.snap | 4 +- ...s__dict__double_star_comprehension.py.snap | 8 +- ...ons__dict__missing_closing_brace_0.py.snap | 6 +- ...ons__dict__missing_closing_brace_2.py.snap | 2 +- ...ressions__dict__named_expression_0.py.snap | 2 +- ...ressions__dict__named_expression_1.py.snap | 8 +- ..._syntax@expressions__dict__recover.py.snap | 2 +- ...sions__lambda_duplicate_parameters.py.snap | 2 +- ...s__list__missing_closing_bracket_3.py.snap | 2 +- ..._syntax@expressions__list__recover.py.snap | 2 +- ...sions__named__missing_expression_2.py.snap | 6 +- ...ressions__parenthesized__generator.py.snap | 4 +- ...nthesized__missing_closing_paren_3.py.snap | 2 +- ...@expressions__parenthesized__tuple.py.snap | 8 +- ..._parenthesized__tuple_starred_expr.py.snap | 2 +- ...set__missing_closing_curly_brace_3.py.snap | 2 +- ...d_syntax@expressions__set__recover.py.snap | 2 +- ...sions__subscript__unclosed_slice_1.py.snap | 6 +- ...pressions__yield__named_expression.py.snap | 2 +- ..._string_lambda_without_parentheses.py.snap | 2 +- ...id_syntax@f_string_unclosed_lbrace.py.snap | 4 +- ...ing_unclosed_lbrace_in_format_spec.py.snap | 4 +- ..._syntax@for_stmt_invalid_iter_expr.py.snap | 2 +- ...lid_syntax@for_stmt_invalid_target.py.snap | 2 +- ...syntax@for_stmt_missing_in_keyword.py.snap | 4 +- ...lid_syntax@for_stmt_missing_target.py.snap | 4 +- ...id_syntax@from_import_dotted_names.py.snap | 8 +- 
...id_syntax@from_import_missing_rpar.py.snap | 4 +- ...nction_def_unclosed_parameter_list.py.snap | 4 +- ...ction_def_unclosed_type_param_list.py.snap | 2 +- ..._syntax@if_stmt_elif_missing_colon.py.snap | 2 +- ...valid_syntax@if_stmt_missing_colon.py.snap | 4 +- ...nvalid_syntax@match_expected_colon.py.snap | 2 +- ...@match_stmt_no_newline_before_case.py.snap | 2 +- ...ntax@multiple_clauses_on_same_line.py.snap | 24 +-- .../invalid_syntax@named_expr_slice.py.snap | 4 +- ...@nested_quote_in_format_spec_py312.py.snap | 2 +- ...nvalid_syntax@node_range_with_gaps.py.snap | 8 +- ...ntax@param_with_invalid_annotation.py.snap | 2 +- ...rams_expected_after_star_separator.py.snap | 10 +- ...@params_kwarg_after_star_separator.py.snap | 2 +- ...ax@params_var_keyword_with_default.py.snap | 6 +- ...params_var_positional_with_default.py.snap | 6 +- .../invalid_syntax@pos_only_py37.py.snap | 2 +- ...nvalid_syntax@re_lex_logical_token.py.snap | 24 +-- ...yntax@re_lex_logical_token_mac_eol.py.snap | 2 +- ...x@re_lex_logical_token_windows_eol.py.snap | 2 +- ...x@re_lexing__fstring_format_spec_1.py.snap | 6 +- ...tax@re_lexing__line_continuation_1.py.snap | 2 +- ...ing__line_continuation_windows_eol.py.snap | 2 +- ...re_lexing__triple_quoted_fstring_1.py.snap | 2 +- ...re_lexing__triple_quoted_fstring_2.py.snap | 2 +- ...re_lexing__triple_quoted_fstring_3.py.snap | 4 +- ...atements__function_type_parameters.py.snap | 8 +- ...ents__if_extra_closing_parentheses.py.snap | 2 +- ...ax@statements__match__as_pattern_2.py.snap | 2 +- ...ax@statements__match__as_pattern_3.py.snap | 4 +- ...ax@statements__match__as_pattern_4.py.snap | 4 +- ...ts__match__invalid_mapping_pattern.py.snap | 6 +- ...tements__match__star_pattern_usage.py.snap | 2 +- ...s__with__ambiguous_lpar_with_items.py.snap | 32 ++-- ...__with__unparenthesized_with_items.py.snap | 2 +- ..._string_lambda_without_parentheses.py.snap | 2 +- ...id_syntax@t_string_unclosed_lbrace.py.snap | 4 +- 
...ing_unclosed_lbrace_in_format_spec.py.snap | 4 +- ...ntax@type_param_invalid_bound_expr.py.snap | 2 +- ...syntax@type_param_param_spec_bound.py.snap | 2 +- ...am_param_spec_invalid_default_expr.py.snap | 2 +- ...aram_type_var_invalid_default_expr.py.snap | 2 +- ...ax@type_param_type_var_tuple_bound.py.snap | 2 +- ...ype_var_tuple_invalid_default_expr.py.snap | 2 +- ...erminated_fstring_newline_recovery.py.snap | 2 +- ...yntax@while_stmt_invalid_test_expr.py.snap | 4 +- ...id_syntax@while_stmt_missing_colon.py.snap | 2 +- ..._items_parenthesized_missing_colon.py.snap | 2 +- ..._items_parenthesized_missing_comma.py.snap | 10 +- .../mdtest/comprehensions/invalid_syntax.md | 12 +- .../resources/mdtest/import/invalid_syntax.md | 2 +- 103 files changed, 359 insertions(+), 359 deletions(-) diff --git a/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap b/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap index 1d077b7321..f22a079523 100644 --- a/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap +++ b/crates/ruff_linter/src/message/snapshots/ruff_linter__message__grouped__tests__syntax_errors.snap @@ -4,4 +4,4 @@ expression: content --- syntax_errors.py: 1:15 invalid-syntax: Expected one or more symbol names after import - 3:12 invalid-syntax: Expected ')', found newline + 3:12 invalid-syntax: Expected `)`, found newline diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap index c210a6768b..d436a9826a 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E231_E23.py.snap @@ -1,7 +1,7 @@ --- 
source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:2:7 | 1 | #: E231 @@ -18,7 +18,7 @@ help: Add missing whitespace 4 | a[b1,:] 5 | #: E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:4:5 | 2 | a = (1,2) @@ -38,7 +38,7 @@ help: Add missing whitespace 6 | a = [{'a':''}] 7 | #: Okay -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:6:10 | 4 | a[b1,:] @@ -58,7 +58,7 @@ help: Add missing whitespace 8 | a = (4,) 9 | b = (5, ) -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:19:10 | 17 | def foo() -> None: @@ -77,7 +77,7 @@ help: Add missing whitespace 21 | 22 | #: Okay -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:29:20 | 27 | mdtypes_template = { @@ -96,7 +96,7 @@ help: Add missing whitespace 31 | 32 | # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:33:6 | 32 | # E231 @@ -115,7 +115,7 @@ help: Add missing whitespace 35 | # Okay because it's hard to differentiate between the usages of a colon in a f-string 36 | f"{a:=1}" -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:47:37 | 46 | #: E231 @@ -134,7 +134,7 @@ help: Add missing whitespace 49 | #: Okay 50 | a = (1,) -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:60:13 | 58 | results = { @@ -154,7 +154,7 @@ help: Add missing whitespace 62 | results_in_tuple = ( 63 | { -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:65:17 | 63 | { @@ -174,7 +174,7 @@ help: Add missing whitespace 67 | ) 68 | results_in_list = [ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:71:17 | 69 | { @@ -194,7 +194,7 @@ help: Add missing whitespace 73 | ] 74 | results_in_list_first = 
[ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:76:17 | 74 | results_in_list_first = [ @@ -214,7 +214,7 @@ help: Add missing whitespace 78 | ] 79 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:82:13 | 80 | x = [ @@ -234,7 +234,7 @@ help: Add missing whitespace 84 | "k3":[2], # E231 85 | "k4": [2], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:84:13 | 82 | "k1":[2], # E231 @@ -254,7 +254,7 @@ help: Add missing whitespace 86 | "k5": [2], 87 | "k6": [1, 2, 3, 4,5,6,7] # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:26 | 85 | "k4": [2], @@ -274,7 +274,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:28 | 85 | "k4": [2], @@ -294,7 +294,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:87:30 | 85 | "k4": [2], @@ -314,7 +314,7 @@ help: Add missing whitespace 89 | { 90 | "k1": [ -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:92:21 | 90 | "k1": [ @@ -334,7 +334,7 @@ help: Add missing whitespace 94 | { 95 | "kb": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:92:24 | 90 | "k1": [ @@ -354,7 +354,7 @@ help: Add missing whitespace 94 | { 95 | "kb": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:95:25 | 93 | }, @@ -374,7 +374,7 @@ help: Add missing whitespace 97 | { 98 | "ka":[2, 3], # E231 -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:98:21 | 96 | }, @@ -394,7 +394,7 @@ help: Add missing whitespace 100 | "kc": [2, 3], # Ok 101 | "kd": [2,3], # E231 -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after 
`,` --> E23.py:101:25 | 99 | "kb": [2, 3], # Ok @@ -414,7 +414,7 @@ help: Add missing whitespace 103 | }, 104 | ] -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:102:21 | 100 | "kc": [2, 3], # Ok @@ -434,7 +434,7 @@ help: Add missing whitespace 104 | ] 105 | } -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:102:24 | 100 | "kc": [2, 3], # Ok @@ -454,7 +454,7 @@ help: Add missing whitespace 104 | ] 105 | } -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:18 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -473,7 +473,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:40 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -492,7 +492,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:109:70 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -511,7 +511,7 @@ help: Add missing whitespace 111 | y:B = [[["foo", "bar"]]], 112 | z:object = "fooo", -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:110:6 | 108 | # Should be E231 errors on all of these type parameters and function parameters, but not on their (strange) defaults @@ -531,7 +531,7 @@ help: Add missing whitespace 112 | z:object = "fooo", 113 | ): -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:111:6 | 109 | def pep_696_bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]( @@ -551,7 +551,7 @@ help: Add 
missing whitespace 113 | ): 114 | pass -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:112:6 | 110 | x:A = "foo"[::-1], @@ -571,7 +571,7 @@ help: Add missing whitespace 114 | pass 115 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:18 | 114 | pass @@ -591,7 +591,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:40 | 114 | pass @@ -611,7 +611,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:116:70 | 114 | pass @@ -631,7 +631,7 @@ help: Add missing whitespace 118 | self, 119 | x:A = "foo"[::-1], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:29 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -650,7 +650,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:51 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -669,7 +669,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:117:81 | 116 | class PEP696Bad[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]: @@ -688,7 +688,7 @@ help: Add missing whitespace 119 | x:A = "foo"[::-1], 120 | y:B = [[["foo", "bar"]]], -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:119:10 | 117 | def pep_696_bad_method[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes]( @@ -708,7 +708,7 @@ help: Add missing whitespace 121 | z:object = "fooo", 122 | ): -E231 [*] Missing 
whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:120:10 | 118 | self, @@ -728,7 +728,7 @@ help: Add missing whitespace 122 | ): 123 | pass -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:121:10 | 119 | x:A = "foo"[::-1], @@ -748,7 +748,7 @@ help: Add missing whitespace 123 | pass 124 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:32 | 123 | pass @@ -768,7 +768,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:54 | 123 | pass @@ -788,7 +788,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:125:84 | 123 | pass @@ -808,7 +808,7 @@ help: Add missing whitespace 127 | pass 128 | -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:47 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -826,7 +826,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:69 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -844,7 +844,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:126:99 | 125 | class PEP696BadWithEmptyBases[A:object="foo"[::-1], B:object =[[["foo", "bar"]]], C:object= bytes](): @@ -862,7 +862,7 @@ help: Add missing whitespace 128 | 129 | # Should be no E231 errors on any of these: -E231 [*] Missing whitespace after ',' +E231 [*] Missing whitespace after `,` --> E23.py:147:6 | 146 | # E231 @@ -881,7 +881,7 @@ help: Add missing whitespace 149 | # Okay because it's hard 
to differentiate between the usages of a colon in a t-string 150 | t"{a:=1}" -E231 [*] Missing whitespace after ':' +E231 [*] Missing whitespace after `:` --> E23.py:161:37 | 160 | #: E231 diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap index 7cf04e5cc7..b8c6413c1d 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E301_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. @@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -32,7 +32,7 @@ E301 Expected 1 blank line, found 0 | help: Add missing blank line -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -41,7 +41,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap index e28bb8562d..76c3d31211 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap +++ 
b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E302_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. @@ -22,7 +22,7 @@ E302 Expected 2 blank lines, found 1 | help: Add missing blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -32,7 +32,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -41,7 +41,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap index c70c94baad..af23f16de9 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E303_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -21,7 +21,7 @@ E303 Too many blank lines (3) | help: Remove extraneous blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -31,7 +31,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -40,7 +40,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap index dd97fe9010..f72c198e1e 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E305_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -31,7 +31,7 @@ E305 Expected 2 blank lines after class or function definition, found (1) | help: Add missing blank line(s) -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -40,7 +40,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap index d3a6b15d4e..98d00f77af 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap +++ b/crates/ruff_linter/src/rules/pycodestyle/snapshots/ruff_linter__rules__pycodestyle__tests__E306_E30_syntax_error.py.snap @@ -1,7 +1,7 @@ --- source: crates/ruff_linter/src/rules/pycodestyle/mod.rs --- -invalid-syntax: Expected ']', found '(' +invalid-syntax: Expected `]`, found `(` --> E30_syntax_error.py:4:15 | 2 | # parenthesis. 
@@ -11,7 +11,7 @@ invalid-syntax: Expected ']', found '(' 5 | pass | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:13:18 | 12 | class Foo: @@ -21,7 +21,7 @@ invalid-syntax: Expected ')', found newline 15 | def method(): | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:18:11 | 16 | pass @@ -30,7 +30,7 @@ invalid-syntax: Expected ')', found newline | ^ | -invalid-syntax: Expected ')', found newline +invalid-syntax: Expected `)`, found newline --> E30_syntax_error.py:21:9 | 21 | def top( diff --git a/crates/ruff_python_parser/src/error.rs b/crates/ruff_python_parser/src/error.rs index 2c2baa8dd7..8b02546d3b 100644 --- a/crates/ruff_python_parser/src/error.rs +++ b/crates/ruff_python_parser/src/error.rs @@ -78,9 +78,9 @@ pub enum InterpolatedStringErrorType { impl std::fmt::Display for InterpolatedStringErrorType { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { - Self::UnclosedLbrace => write!(f, "expecting '}}'"), + Self::UnclosedLbrace => write!(f, "expecting `}}`"), Self::InvalidConversionFlag => write!(f, "invalid conversion character"), - Self::SingleRbrace => write!(f, "single '}}' is not allowed"), + Self::SingleRbrace => write!(f, "single `}}` is not allowed"), Self::UnterminatedString => write!(f, "unterminated string"), Self::UnterminatedTripleQuotedString => write!(f, "unterminated triple-quoted string"), Self::LambdaWithoutParentheses => { @@ -232,7 +232,7 @@ impl std::fmt::Display for ParseErrorType { ParseErrorType::UnexpectedTokenAfterAsync(kind) => { write!( f, - "Expected 'def', 'with' or 'for' to follow 'async', found {kind}", + "Expected `def`, `with` or `for` to follow `async`, found {kind}", ) } ParseErrorType::InvalidArgumentUnpackingOrder => { @@ -286,10 +286,10 @@ impl std::fmt::Display for ParseErrorType { f.write_str("Parameter without a default cannot follow a parameter with a 
default") } ParseErrorType::ExpectedKeywordParam => { - f.write_str("Expected one or more keyword parameter after '*' separator") + f.write_str("Expected one or more keyword parameter after `*` separator") } ParseErrorType::VarParameterWithDefault => { - f.write_str("Parameter with '*' or '**' cannot have default value") + f.write_str("Parameter with `*` or `**` cannot have default value") } ParseErrorType::InvalidStarPatternUsage => { f.write_str("Star pattern cannot be used here") diff --git a/crates/ruff_python_parser/src/token.rs b/crates/ruff_python_parser/src/token.rs index 18b7648c4c..a5790a9597 100644 --- a/crates/ruff_python_parser/src/token.rs +++ b/crates/ruff_python_parser/src/token.rs @@ -635,93 +635,93 @@ impl fmt::Display for TokenKind { TokenKind::TStringEnd => "TStringEnd", TokenKind::IpyEscapeCommand => "IPython escape command", TokenKind::Comment => "comment", - TokenKind::Question => "'?'", - TokenKind::Exclamation => "'!'", - TokenKind::Lpar => "'('", - TokenKind::Rpar => "')'", - TokenKind::Lsqb => "'['", - TokenKind::Rsqb => "']'", - TokenKind::Lbrace => "'{'", - TokenKind::Rbrace => "'}'", - TokenKind::Equal => "'='", - TokenKind::ColonEqual => "':='", - TokenKind::Dot => "'.'", - TokenKind::Colon => "':'", - TokenKind::Semi => "';'", - TokenKind::Comma => "','", - TokenKind::Rarrow => "'->'", - TokenKind::Plus => "'+'", - TokenKind::Minus => "'-'", - TokenKind::Star => "'*'", - TokenKind::DoubleStar => "'**'", - TokenKind::Slash => "'/'", - TokenKind::DoubleSlash => "'//'", - TokenKind::Percent => "'%'", - TokenKind::Vbar => "'|'", - TokenKind::Amper => "'&'", - TokenKind::CircumFlex => "'^'", - TokenKind::LeftShift => "'<<'", - TokenKind::RightShift => "'>>'", - TokenKind::Tilde => "'~'", - TokenKind::At => "'@'", - TokenKind::Less => "'<'", - TokenKind::Greater => "'>'", - TokenKind::EqEqual => "'=='", - TokenKind::NotEqual => "'!='", - TokenKind::LessEqual => "'<='", - TokenKind::GreaterEqual => "'>='", - TokenKind::PlusEqual => "'+='", 
- TokenKind::MinusEqual => "'-='", - TokenKind::StarEqual => "'*='", - TokenKind::DoubleStarEqual => "'**='", - TokenKind::SlashEqual => "'/='", - TokenKind::DoubleSlashEqual => "'//='", - TokenKind::PercentEqual => "'%='", - TokenKind::VbarEqual => "'|='", - TokenKind::AmperEqual => "'&='", - TokenKind::CircumflexEqual => "'^='", - TokenKind::LeftShiftEqual => "'<<='", - TokenKind::RightShiftEqual => "'>>='", - TokenKind::AtEqual => "'@='", - TokenKind::Ellipsis => "'...'", - TokenKind::False => "'False'", - TokenKind::None => "'None'", - TokenKind::True => "'True'", - TokenKind::And => "'and'", - TokenKind::As => "'as'", - TokenKind::Assert => "'assert'", - TokenKind::Async => "'async'", - TokenKind::Await => "'await'", - TokenKind::Break => "'break'", - TokenKind::Class => "'class'", - TokenKind::Continue => "'continue'", - TokenKind::Def => "'def'", - TokenKind::Del => "'del'", - TokenKind::Elif => "'elif'", - TokenKind::Else => "'else'", - TokenKind::Except => "'except'", - TokenKind::Finally => "'finally'", - TokenKind::For => "'for'", - TokenKind::From => "'from'", - TokenKind::Global => "'global'", - TokenKind::If => "'if'", - TokenKind::Import => "'import'", - TokenKind::In => "'in'", - TokenKind::Is => "'is'", - TokenKind::Lambda => "'lambda'", - TokenKind::Nonlocal => "'nonlocal'", - TokenKind::Not => "'not'", - TokenKind::Or => "'or'", - TokenKind::Pass => "'pass'", - TokenKind::Raise => "'raise'", - TokenKind::Return => "'return'", - TokenKind::Try => "'try'", - TokenKind::While => "'while'", - TokenKind::Match => "'match'", - TokenKind::Type => "'type'", - TokenKind::Case => "'case'", - TokenKind::With => "'with'", - TokenKind::Yield => "'yield'", + TokenKind::Question => "`?`", + TokenKind::Exclamation => "`!`", + TokenKind::Lpar => "`(`", + TokenKind::Rpar => "`)`", + TokenKind::Lsqb => "`[`", + TokenKind::Rsqb => "`]`", + TokenKind::Lbrace => "`{`", + TokenKind::Rbrace => "`}`", + TokenKind::Equal => "`=`", + TokenKind::ColonEqual => "`:=`", + 
TokenKind::Dot => "`.`", + TokenKind::Colon => "`:`", + TokenKind::Semi => "`;`", + TokenKind::Comma => "`,`", + TokenKind::Rarrow => "`->`", + TokenKind::Plus => "`+`", + TokenKind::Minus => "`-`", + TokenKind::Star => "`*`", + TokenKind::DoubleStar => "`**`", + TokenKind::Slash => "`/`", + TokenKind::DoubleSlash => "`//`", + TokenKind::Percent => "`%`", + TokenKind::Vbar => "`|`", + TokenKind::Amper => "`&`", + TokenKind::CircumFlex => "`^`", + TokenKind::LeftShift => "`<<`", + TokenKind::RightShift => "`>>`", + TokenKind::Tilde => "`~`", + TokenKind::At => "`@`", + TokenKind::Less => "`<`", + TokenKind::Greater => "`>`", + TokenKind::EqEqual => "`==`", + TokenKind::NotEqual => "`!=`", + TokenKind::LessEqual => "`<=`", + TokenKind::GreaterEqual => "`>=`", + TokenKind::PlusEqual => "`+=`", + TokenKind::MinusEqual => "`-=`", + TokenKind::StarEqual => "`*=`", + TokenKind::DoubleStarEqual => "`**=`", + TokenKind::SlashEqual => "`/=`", + TokenKind::DoubleSlashEqual => "`//=`", + TokenKind::PercentEqual => "`%=`", + TokenKind::VbarEqual => "`|=`", + TokenKind::AmperEqual => "`&=`", + TokenKind::CircumflexEqual => "`^=`", + TokenKind::LeftShiftEqual => "`<<=`", + TokenKind::RightShiftEqual => "`>>=`", + TokenKind::AtEqual => "`@=`", + TokenKind::Ellipsis => "`...`", + TokenKind::False => "`False`", + TokenKind::None => "`None`", + TokenKind::True => "`True`", + TokenKind::And => "`and`", + TokenKind::As => "`as`", + TokenKind::Assert => "`assert`", + TokenKind::Async => "`async`", + TokenKind::Await => "`await`", + TokenKind::Break => "`break`", + TokenKind::Class => "`class`", + TokenKind::Continue => "`continue`", + TokenKind::Def => "`def`", + TokenKind::Del => "`del`", + TokenKind::Elif => "`elif`", + TokenKind::Else => "`else`", + TokenKind::Except => "`except`", + TokenKind::Finally => "`finally`", + TokenKind::For => "`for`", + TokenKind::From => "`from`", + TokenKind::Global => "`global`", + TokenKind::If => "`if`", + TokenKind::Import => "`import`", + 
TokenKind::In => "`in`", + TokenKind::Is => "`is`", + TokenKind::Lambda => "`lambda`", + TokenKind::Nonlocal => "`nonlocal`", + TokenKind::Not => "`not`", + TokenKind::Or => "`or`", + TokenKind::Pass => "`pass`", + TokenKind::Raise => "`raise`", + TokenKind::Return => "`return`", + TokenKind::Try => "`try`", + TokenKind::While => "`while`", + TokenKind::Match => "`match`", + TokenKind::Type => "`type`", + TokenKind::Case => "`case`", + TokenKind::With => "`with`", + TokenKind::Yield => "`yield`", }; f.write_str(value) } diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap index 1a843b29c9..87c0dcf672 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assert_invalid_test_expr.py.snap @@ -131,7 +131,7 @@ Module( | 1 | assert *x 2 | assert assert x - | ^^^^^^ Syntax Error: Expected an identifier, but found a keyword 'assert' that cannot be used here + | ^^^^^^ Syntax Error: Expected an identifier, but found a keyword `assert` that cannot be used here 3 | assert yield x 4 | assert x := 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap index 6264d907b5..e59c71cea8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@assign_stmt_keyword_target.py.snap @@ -148,7 +148,7 @@ Module( | 1 | a = pass = c - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 2 | a + b 3 | a = b = pass = c | @@ -158,6 +158,6 @@ Module( 1 | 
a = pass = c 2 | a + b 3 | a = b = pass = c - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 4 | a + b | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap index 2dd2bddfc4..dc62f1b446 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@async_unexpected_token.py.snap @@ -181,7 +181,7 @@ Module( | 1 | async class Foo: ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'class' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `class` 2 | async while test: ... 3 | async x = 1 | @@ -190,7 +190,7 @@ Module( | 1 | async class Foo: ... 2 | async while test: ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'while' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `while` 3 | async x = 1 4 | async async def foo(): ... | @@ -200,7 +200,7 @@ Module( 1 | async class Foo: ... 2 | async while test: ... 3 | async x = 1 - | ^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found name + | ^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found name 4 | async async def foo(): ... 5 | async match test: | @@ -210,7 +210,7 @@ Module( 2 | async while test: ... 3 | async x = 1 4 | async async def foo(): ... - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'async' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `async` 5 | async match test: 6 | case _: ... | @@ -220,6 +220,6 @@ Module( 3 | async x = 1 4 | async async def foo(): ... 
5 | async match test: - | ^^^^^ Syntax Error: Expected 'def', 'with' or 'for' to follow 'async', found 'match' + | ^^^^^ Syntax Error: Expected `def`, `with` or `for` to follow `async`, found `match` 6 | case _: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap index 0d1311ca25..dbe201539e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@aug_assign_stmt_invalid_target.py.snap @@ -245,7 +245,7 @@ Module( 3 | *x += 1 4 | pass += 1 5 | x += pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here 6 | (x + y) += 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap index 3246bdb0ce..515534ed26 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@class_def_unclosed_type_param_list.py.snap @@ -121,7 +121,7 @@ Module( | 1 | class Foo[T1, *T2(a, b): - | ^ Syntax Error: Expected ']', found '(' + | ^ Syntax Error: Expected `]`, found `(` 2 | pass 3 | x = 10 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap index bf33de094f..84c88e8d1f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma.py.snap @@ -68,7 +68,7 @@ Module( | 1 | call(**x := 1) - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap index 5f39e94515..4da8a91dea 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comma_separated_missing_comma_between_elements.py.snap @@ -61,5 +61,5 @@ Module( | 1 | # The comma between the first two elements is expected in `parse_list_expression`. 2 | [0, 1 2] - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap index aee9bf7056..a31a055919 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@comprehension_missing_for_after_async.py.snap @@ -77,7 +77,7 @@ Module( | 1 | (async) - | ^^^^^ Syntax Error: Expected an identifier, but found a keyword 'async' that cannot be used here + | ^^^^^ Syntax Error: Expected an identifier, but found a keyword `async` that cannot be used here 2 | (x async x in iter) | @@ -85,5 +85,5 @@ Module( | 1 | (async) 2 | (x async x in iter) - | ^ Syntax Error: Expected 'for', found name + | ^ Syntax Error: Expected `for`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap index 2c5bbd3a03..dd7225493f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_expression.py.snap @@ -169,7 +169,7 @@ Module( | 1 | @def foo(): ... - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 2 | @ 3 | def foo(): ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap index 948fc24fe2..ea573c4cde 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@decorator_missing_newline.py.snap @@ -161,7 +161,7 @@ Module( | 1 | @x def foo(): ... - | ^^^ Syntax Error: Expected newline, found 'def' + | ^^^ Syntax Error: Expected newline, found `def` 2 | @x async def foo(): ... 3 | @x class Foo: ... | @@ -170,7 +170,7 @@ Module( | 1 | @x def foo(): ... 2 | @x async def foo(): ... - | ^^^^^ Syntax Error: Expected newline, found 'async' + | ^^^^^ Syntax Error: Expected newline, found `async` 3 | @x class Foo: ... | @@ -179,5 +179,5 @@ Module( 1 | @x def foo(): ... 2 | @x async def foo(): ... 3 | @x class Foo: ... 
- | ^^^^^ Syntax Error: Expected newline, found 'class' + | ^^^^^ Syntax Error: Expected newline, found `class` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap index e3f633b879..8656ee03e8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__double_starred.py.snap @@ -238,7 +238,7 @@ Module( 3 | call(***x) 4 | 5 | call(**x := 1) - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap index 37e891b89a..0f781f1e53 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__missing_comma.py.snap @@ -61,5 +61,5 @@ Module( | 1 | call(x y) - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap index 655f45ed24..cc6fba138b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_0.py.snap @@ -76,7 +76,7 @@ Module( | 1 | call( - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap index 99e0e4fbcd..cdb11a8ebc 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_1.py.snap @@ -85,7 +85,7 @@ Module( | 1 | call(x - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap index 2b4270bf2c..e28cecdd9f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__arguments__unclosed_2.py.snap @@ -85,7 +85,7 @@ Module( | 1 | call(x, - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap index 419cc7854b..ccc649ea7c 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__compare__invalid_order.py.snap @@ -175,7 +175,7 @@ Module( | 6 | # Same here as well, `not` without `in` is considered to be a unary operator 7 | x not is y - | ^^ Syntax Error: Expected an identifier, but found a keyword 'is' that cannot be used here + | ^^ Syntax Error: Expected an identifier, but found a keyword `is` 
that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap index a579afac89..64fb0233e4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star.py.snap @@ -544,7 +544,7 @@ Module( 2 | # the ones which are higher than that. 3 | 4 | {**x := 1} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 5 | {a: 1, **x if True else y} 6 | {**lambda x: x, b: 2} | @@ -554,7 +554,7 @@ Module( 2 | # the ones which are higher than that. 3 | 4 | {**x := 1} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 5 | {a: 1, **x if True else y} 6 | {**lambda x: x, b: 2} | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap index 9c0cde63d3..e5e7f7e3ee 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__double_star_comprehension.py.snap @@ -134,7 +134,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^^^ Syntax Error: Expected ':', found 'for' + | ^^^ Syntax Error: Expected `:`, found `for` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -144,7 +144,7 @@ Module( 2 | # it's actually a comprehension. 
3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -154,7 +154,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | @@ -164,7 +164,7 @@ Module( 2 | # it's actually a comprehension. 3 | 4 | {**x: y for x, y in data} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 5 | 6 | # TODO(dhruvmanila): This test case fails because there's no way to represent `**y` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap index 99d310bc87..31bd7feb9f 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_0.py.snap @@ -86,7 +86,7 @@ Module( 1 | {x: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 4 | pass | @@ -95,7 +95,7 @@ Module( 1 | {x: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected ',', found name + | ^^^ Syntax Error: Expected `,`, found name 4 | pass | @@ -103,7 +103,7 @@ Module( | 3 | def foo(): 4 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap index a5a08be0be..a54264becd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__missing_closing_brace_2.py.snap @@ -85,7 +85,7 @@ Module( | 1 | {x: 1, - | ^ Syntax Error: Expected '}', found newline + | ^ Syntax Error: Expected `}`, found newline 2 | 3 | def foo(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap index 5db7a61381..824f3261f7 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_0.py.snap @@ -149,7 +149,7 @@ Module( 1 | # Unparenthesized named expression not allowed in key 2 | 3 | {x := 1: y, z := 2: a} - | ^^ Syntax Error: Expected ':', found ':=' + | ^^ Syntax Error: Expected `:`, found `:=` 4 | 5 | x + y | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap index 58509cc935..ccf8ead1b9 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__named_expression_1.py.snap @@ -145,7 +145,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ 
Syntax Error: Expected `,`, found `:=` 4 | 5 | x + y | @@ -155,7 +155,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 4 | 5 | x + y | @@ -165,7 +165,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 4 | 5 | x + y | @@ -175,7 +175,7 @@ Module( 1 | # Unparenthesized named expression not allowed in value 2 | 3 | {x: y := 1, z: a := 2} - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 4 | 5 | x + y | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap index c4c4f242c8..b1ad5d8255 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__dict__recover.py.snap @@ -504,7 +504,7 @@ Module( | 9 | # Missing comma 10 | {1: 2 3: 4} - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 11 | 12 | # No value | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap index 986a07de03..39ddc3c4c5 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__lambda_duplicate_parameters.py.snap @@ -338,7 +338,7 @@ Module( 7 | lambda a, *a: 1 8 | 9 | lambda a, *, **a: 1 - | ^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^ Syntax 
Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap index 4f21ca4ddf..1870af8aa0 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__missing_closing_bracket_3.py.snap @@ -85,7 +85,7 @@ Module( 2 | # token starts a statement. 3 | 4 | [1, 2 - | ^ Syntax Error: Expected ']', found newline + | ^ Syntax Error: Expected `]`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap index 3b1ba32aac..3fa2a32578 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__list__recover.py.snap @@ -305,7 +305,7 @@ Module( | 9 | # Missing comma 10 | [1 2] - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 11 | 12 | # Dictionary element in a list | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap index 884fb234b2..057477b761 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__named__missing_expression_2.py.snap @@ -84,7 +84,7 @@ Module( 3 | (x := 4 | 5 | def foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 
'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 6 | pass | @@ -93,7 +93,7 @@ Module( 3 | (x := 4 | 5 | def foo(): - | ^^^ Syntax Error: Expected ')', found name + | ^^^ Syntax Error: Expected `)`, found name 6 | pass | @@ -101,7 +101,7 @@ Module( | 5 | def foo(): 6 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap index 3e8a85dc97..7ebe57ede2 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__generator.py.snap @@ -142,14 +142,14 @@ Module( | 1 | (*x for x in y) 2 | (x := 1, for x in y) - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` | | 1 | (*x for x in y) 2 | (x := 1, for x in y) - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap index b98aae283e..24ae8dd4bb 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__missing_closing_paren_3.py.snap @@ -86,7 +86,7 @@ Module( 2 | # token starts a statement. 
3 | 4 | (1, 2 - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap index 768381c483..7670ed0edd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple.py.snap @@ -315,7 +315,7 @@ Module( | 9 | # Missing comma 10 | (1 2) - | ^ Syntax Error: Expected ')', found int + | ^ Syntax Error: Expected `)`, found int 11 | 12 | # Dictionary element in a list | @@ -343,7 +343,7 @@ Module( | 12 | # Dictionary element in a list 13 | (1: 2) - | ^ Syntax Error: Expected ')', found ':' + | ^ Syntax Error: Expected `)`, found `:` 14 | 15 | # Missing expression | @@ -390,7 +390,7 @@ Module( 16 | (1, x + ) 17 | 18 | (1; 2) - | ^ Syntax Error: Expected ')', found ';' + | ^ Syntax Error: Expected `)`, found `;` 19 | 20 | # Unparenthesized named expression is not allowed | @@ -420,5 +420,5 @@ Module( | 20 | # Unparenthesized named expression is not allowed 21 | x, y := 2, z - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap index da92fa1991..05cd9dbaca 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__parenthesized__tuple_starred_expr.py.snap @@ -1542,5 +1542,5 @@ Module( 18 | *x if True else y, z, *x if True else y 19 | *lambda x: x, 
z, *lambda x: x 20 | *x := 2, z, *x := 2 - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap index 311eaae530..0be8d06138 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__missing_closing_curly_brace_3.py.snap @@ -84,7 +84,7 @@ Module( 2 | # token starts a statement. 3 | 4 | {1, 2 - | ^ Syntax Error: Expected '}', found newline + | ^ Syntax Error: Expected `}`, found newline 5 | 6 | def foo(): 7 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap index b489b1c64f..74e95fe8c7 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__set__recover.py.snap @@ -302,7 +302,7 @@ Module( | 11 | # Missing comma 12 | {1 2} - | ^ Syntax Error: Expected ',', found int + | ^ Syntax Error: Expected `,`, found int 13 | 14 | # Dictionary element in a list | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap index d3e57ddfc0..05de28c275 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__subscript__unclosed_slice_1.py.snap @@ -95,7 +95,7 @@ Module( 1 | x[:: 2 | 3 | def 
foo(): - | ^^^ Syntax Error: Expected an identifier, but found a keyword 'def' that cannot be used here + | ^^^ Syntax Error: Expected an identifier, but found a keyword `def` that cannot be used here 4 | pass | @@ -104,7 +104,7 @@ Module( 1 | x[:: 2 | 3 | def foo(): - | ^^^ Syntax Error: Expected ']', found name + | ^^^ Syntax Error: Expected `]`, found name 4 | pass | @@ -112,7 +112,7 @@ Module( | 3 | def foo(): 4 | pass - | ^^^^ Syntax Error: Expected an identifier, but found a keyword 'pass' that cannot be used here + | ^^^^ Syntax Error: Expected an identifier, but found a keyword `pass` that cannot be used here | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap index 5feebcc55f..49a4e5362b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@expressions__yield__named_expression.py.snap @@ -125,5 +125,5 @@ Module( 2 | yield x := 1 3 | 4 | yield 1, x := 2, 3 - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap index b7da154352..2f5d767448 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_lambda_without_parentheses.py.snap @@ -117,7 +117,7 @@ Module( | 1 | f"{lambda x: x}" - | ^^ Syntax Error: f-string: expecting '}' + | ^^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap index c8b75ce3f9..004ae87faa 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace.py.snap @@ -267,7 +267,7 @@ Module( | 1 | f"{" 2 | f"{foo!r" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 3 | f"{foo=" 4 | f"{" | @@ -277,7 +277,7 @@ Module( 1 | f"{" 2 | f"{foo!r" 3 | f"{foo=" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 4 | f"{" 5 | f"""{""" | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap index cf843119c2..ac1d7c98f4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@f_string_unclosed_lbrace_in_format_spec.py.snap @@ -146,7 +146,7 @@ Module( | 1 | f"hello {x:" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 2 | f"hello {x:.3f" | @@ -154,5 +154,5 @@ Module( | 1 | f"hello {x:" 2 | f"hello {x:.3f" - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap index 907c07e8ce..ab00df4afe 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_iter_expr.py.snap @@ -192,7 +192,7 @@ Module( 1 | for x in *a and b: ... 2 | for x in yield a: ... 3 | for target in x := 1: ... 
- | ^^ Syntax Error: Expected ':', found ':=' + | ^^ Syntax Error: Expected `:`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap index 04caa94916..88050de12e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_invalid_target.py.snap @@ -498,7 +498,7 @@ Module( 4 | for *x | y in z: ... 5 | for await x in z: ... 6 | for yield x in y: ... - | ^ Syntax Error: Expected 'in', found ':' + | ^ Syntax Error: Expected `in`, found `:` 7 | for [x, 1, y, *["a"]] in z: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap index 8052c314b5..a2bf0f699d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_in_keyword.py.snap @@ -94,7 +94,7 @@ Module( | 1 | for a b: ... - | ^ Syntax Error: Expected 'in', found name + | ^ Syntax Error: Expected `in`, found name 2 | for a: ... | @@ -102,5 +102,5 @@ Module( | 1 | for a b: ... 2 | for a: ... - | ^ Syntax Error: Expected 'in', found ':' + | ^ Syntax Error: Expected `in`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap index 84d8b4f8cd..7742223cd0 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@for_stmt_missing_target.py.snap @@ -56,11 +56,11 @@ Module( | 1 | for in x: ... 
- | ^^ Syntax Error: Expected an identifier, but found a keyword 'in' that cannot be used here + | ^^ Syntax Error: Expected an identifier, but found a keyword `in` that cannot be used here | | 1 | for in x: ... - | ^ Syntax Error: Expected 'in', found name + | ^ Syntax Error: Expected `in`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap index 2520cfbe49..a0fbe287f6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_dotted_names.py.snap @@ -166,7 +166,7 @@ Module( | 1 | from x import a. - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g | @@ -175,7 +175,7 @@ Module( | 1 | from x import a. 2 | from x import a.b - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` 3 | from x import a, b.c, d, e.f, g | @@ -184,7 +184,7 @@ Module( 1 | from x import a. 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g - | ^ Syntax Error: Expected ',', found '.' + | ^ Syntax Error: Expected `,`, found `.` | @@ -192,5 +192,5 @@ Module( 1 | from x import a. 2 | from x import a.b 3 | from x import a, b.c, d, e.f, g - | ^ Syntax Error: Expected ',', found '.' 
+ | ^ Syntax Error: Expected `,`, found `.` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap index f53eb5aeff..d1792e0e09 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@from_import_missing_rpar.py.snap @@ -152,7 +152,7 @@ Module( | 1 | from x import (a, b - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 1 + 1 3 | from x import (a, b, 4 | 2 + 2 @@ -163,6 +163,6 @@ Module( 1 | from x import (a, b 2 | 1 + 1 3 | from x import (a, b, - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 4 | 2 + 2 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap index 9028296eeb..9efb6d3fac 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_parameter_list.py.snap @@ -234,7 +234,7 @@ Module( | 1 | def foo(a: int, b: - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | def foo(): 3 | return 42 4 | def foo(a: int, b: str @@ -254,7 +254,7 @@ Module( 3 | return 42 4 | def foo(a: int, b: str 5 | x = 10 - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap index fa71509d1f..dd2412c32a 100644 --- 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@function_def_unclosed_type_param_list.py.snap @@ -163,7 +163,7 @@ Module( | 1 | def foo[T1, *T2(a, b): - | ^ Syntax Error: Expected ']', found '(' + | ^ Syntax Error: Expected `]`, found `(` 2 | return a + b 3 | x = 10 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap index d8ac7c86be..4238e23e7a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_elif_missing_colon.py.snap @@ -79,7 +79,7 @@ Module( 1 | if x: 2 | pass 3 | elif y - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 4 | pass 5 | else: 6 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap index 8092bc7d7c..5dc30d6ce6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@if_stmt_missing_colon.py.snap @@ -82,7 +82,7 @@ Module( | 1 | if x - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 2 | if x 3 | pass 4 | a = 1 @@ -101,7 +101,7 @@ Module( | 1 | if x 2 | if x - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 3 | pass 4 | a = 1 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap index f352512262..da778df7a1 100644 --- 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_expected_colon.py.snap @@ -80,6 +80,6 @@ Module( | 1 | match [1, 2] - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 2 | case _: ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap index 324f3480ff..7888bcd48e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@match_stmt_no_newline_before_case.py.snap @@ -61,7 +61,7 @@ Module( | 1 | match foo: case _: ... - | ^^^^ Syntax Error: Expected newline, found 'case' + | ^^^^ Syntax Error: Expected newline, found `case` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap index 0fb6c83f46..be571b2cd2 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@multiple_clauses_on_same_line.py.snap @@ -326,7 +326,7 @@ Module( | 1 | if True: pass elif False: pass else: pass - | ^^^^ Syntax Error: Expected newline, found 'elif' + | ^^^^ Syntax Error: Expected newline, found `elif` 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass | @@ -334,7 +334,7 @@ Module( | 1 | if True: pass elif False: pass else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass | @@ -343,7 +343,7 @@ Module( | 1 | if True: pass elif 
False: pass else: pass 2 | if True: pass; elif False: pass; else: pass - | ^^^^ Syntax Error: Expected newline, found 'elif' + | ^^^^ Syntax Error: Expected newline, found `elif` 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass | @@ -352,7 +352,7 @@ Module( | 1 | if True: pass elif False: pass else: pass 2 | if True: pass; elif False: pass; else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass | @@ -362,7 +362,7 @@ Module( 1 | if True: pass elif False: pass else: pass 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass | @@ -372,7 +372,7 @@ Module( 2 | if True: pass; elif False: pass; else: pass 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -382,7 +382,7 @@ Module( 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^^^ Syntax Error: Expected newline, found 'except' + | ^^^^^^ Syntax Error: Expected newline, found `except` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -391,7 +391,7 @@ Module( 3 | for x in iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -400,7 +400,7 @@ Module( 3 | for x in 
iter: break else: pass 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass - | ^^^^^^^ Syntax Error: Expected newline, found 'finally' + | ^^^^^^^ Syntax Error: Expected newline, found `finally` 6 | try: pass; except exc: pass; else: pass; finally: pass | @@ -409,7 +409,7 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^^^ Syntax Error: Expected newline, found 'except' + | ^^^^^^ Syntax Error: Expected newline, found `except` | @@ -417,7 +417,7 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^ Syntax Error: Expected newline, found 'else' + | ^^^^ Syntax Error: Expected newline, found `else` | @@ -425,5 +425,5 @@ Module( 4 | for x in iter: break; else: pass 5 | try: pass except exc: pass else: pass finally: pass 6 | try: pass; except exc: pass; else: pass; finally: pass - | ^^^^^^^ Syntax Error: Expected newline, found 'finally' + | ^^^^^^^ Syntax Error: Expected newline, found `finally` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap index d521c935f4..ab0bcdf9ca 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@named_expr_slice.py.snap @@ -238,7 +238,7 @@ Module( 1 | # even after 3.9, an unparenthesized named expression is not allowed in a slice 2 | lst[x:=1:-1] 3 | lst[1:x:=1] - | ^^ Syntax Error: Expected ']', found ':=' + | ^^ Syntax Error: Expected `]`, found `:=` 4 | lst[1:3:x:=1] | @@ -265,7 +265,7 @@ Module( 2 | lst[x:=1:-1] 3 | lst[1:x:=1] 4 | lst[1:3:x:=1] - | ^^ Syntax Error: Expected ']', found ':=' + | ^^ Syntax Error: Expected `]`, 
found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap index c39b322387..7eec20f80b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@nested_quote_in_format_spec_py312.py.snap @@ -88,5 +88,5 @@ Module( | 1 | # parse_options: {"target-version": "3.12"} 2 | f"{1:""}" # this is a ParseError on all versions - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap index c3a8cdca25..361fe1288b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@node_range_with_gaps.py.snap @@ -106,7 +106,7 @@ Module( | 1 | def foo # comment - | ^ Syntax Error: Expected '(', found newline + | ^ Syntax Error: Expected `(`, found newline 2 | def bar(): ... 3 | def baz | @@ -115,7 +115,7 @@ Module( | 1 | def foo # comment 2 | def bar(): ... - | ^^^ Syntax Error: Expected ')', found 'def' + | ^^^ Syntax Error: Expected `)`, found `def` 3 | def baz | @@ -124,12 +124,12 @@ Module( 1 | def foo # comment 2 | def bar(): ... 3 | def baz - | ^ Syntax Error: Expected '(', found newline + | ^ Syntax Error: Expected `(`, found newline | | 2 | def bar(): ... 
3 | def baz - | ^ Syntax Error: Expected ')', found end of file + | ^ Syntax Error: Expected `)`, found end of file | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap index 149cc7b4ce..d6b1a5944e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@param_with_invalid_annotation.py.snap @@ -255,7 +255,7 @@ Module( 1 | def foo(arg: *int): ... 2 | def foo(arg: yield int): ... 3 | def foo(arg: x := int): ... - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap index b1d8bdaa92..f343b562c6 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_expected_after_star_separator.py.snap @@ -251,7 +251,7 @@ Module( | 1 | def foo(*): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 2 | def foo(*,): ... 3 | def foo(a, *): ... | @@ -260,7 +260,7 @@ Module( | 1 | def foo(*): ... 2 | def foo(*,): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 3 | def foo(a, *): ... 4 | def foo(a, *,): ... | @@ -270,7 +270,7 @@ Module( 1 | def foo(*): ... 2 | def foo(*,): ... 3 | def foo(a, *): ... 
- | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 4 | def foo(a, *,): ... 5 | def foo(*, **kwargs): ... | @@ -280,7 +280,7 @@ Module( 2 | def foo(*,): ... 3 | def foo(a, *): ... 4 | def foo(a, *,): ... - | ^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^ Syntax Error: Expected one or more keyword parameter after `*` separator 5 | def foo(*, **kwargs): ... | @@ -289,5 +289,5 @@ Module( 3 | def foo(a, *): ... 4 | def foo(a, *,): ... 5 | def foo(*, **kwargs): ... - | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap index 324b7246ea..589d8905af 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_kwarg_after_star_separator.py.snap @@ -67,5 +67,5 @@ Module( | 1 | def foo(*, **kwargs): ... - | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after '*' separator + | ^^^^^^^^ Syntax Error: Expected one or more keyword parameter after `*` separator | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap index 3a8644b3a8..87adc006a3 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_keyword_with_default.py.snap @@ -165,19 +165,19 @@ Module( | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... 
- | ^ Syntax Error: Parameter with '*' or '**' cannot have default value + | ^ Syntax Error: Parameter with `*` or `**` cannot have default value | | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... - | ^ Syntax Error: Expected ')', found '{' + | ^ Syntax Error: Expected `)`, found `{` | | 1 | def foo(a, **kwargs={'b': 1, 'c': 2}): ... - | ^ Syntax Error: Expected newline, found ')' + | ^ Syntax Error: Expected newline, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap index 1e84abfa40..e965d4a0b4 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@params_var_positional_with_default.py.snap @@ -117,19 +117,19 @@ Module( | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Parameter with '*' or '**' cannot have default value + | ^ Syntax Error: Parameter with `*` or `**` cannot have default value | | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Expected ')', found '(' + | ^ Syntax Error: Expected `)`, found `(` | | 1 | def foo(a, *args=(1, 2)): ... - | ^ Syntax Error: Expected newline, found ')' + | ^ Syntax Error: Expected newline, found `)` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap index f6650bb5d9..5328fcf7dd 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@pos_only_py37.py.snap @@ -298,7 +298,7 @@ Module( 3 | def foo(a, /, b, /): ... 4 | def foo(a, *args, /, b): ... 5 | def foo(a, //): ... 
- | ^^ Syntax Error: Expected ',', found '//' + | ^^ Syntax Error: Expected `,`, found `//` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap index 45a5b27c78..61f3230855 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap @@ -785,7 +785,7 @@ Module( | 1 | # No indentation before the function definition 2 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 3 | def bar(): 4 | pass | @@ -803,7 +803,7 @@ Module( | 7 | # Indented function definition 8 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 9 | def bar(): 10 | pass | @@ -812,7 +812,7 @@ Module( | 13 | # There are multiple non-logical newlines (blank lines) in the `if` body 14 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 15 | 16 | 17 | def bar(): @@ -822,7 +822,7 @@ Module( | 21 | # There are trailing whitespaces in the blank line inside the `if` body 22 | if call(foo - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 23 | 24 | def bar(): 25 | pass @@ -832,7 +832,7 @@ Module( | 28 | # The lexer is nested with multiple levels of parentheses 29 | if call(foo, [a, b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 30 | def bar(): 31 | pass | @@ -841,7 +841,7 @@ Module( | 34 | # The outer parenthesis is closed but the inner bracket isn't 35 | if call(foo, [a, b) - | ^ Syntax Error: Expected ']', found ')' + | ^ Syntax Error: Expected `]`, found `)` 36 | def bar(): 37 | pass | @@ -850,7 +850,7 @@ Module( | 34 | # The outer parenthesis is closed but the 
inner bracket isn't 35 | if call(foo, [a, b) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 36 | def bar(): 37 | pass | @@ -860,7 +860,7 @@ Module( 41 | # test is to make sure it emits a `NonLogicalNewline` token after `b`. 42 | if call(foo, [a, 43 | b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 44 | ) 45 | def bar(): 46 | pass @@ -871,7 +871,7 @@ Module( 42 | if call(foo, [a, 43 | b 44 | ) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 45 | def bar(): 46 | pass | @@ -890,7 +890,7 @@ Module( 49 | # F-strings uses normal list parsing, so test those as well 50 | if call(f"hello {x 51 | def bar(): - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` 52 | pass | @@ -923,7 +923,7 @@ Module( | 55 | if call(f"hello 56 | def bar(): - | ^^^^ Syntax Error: Expected ',', found indent + | ^^^^ Syntax Error: Expected `,`, found indent 57 | pass | @@ -931,7 +931,7 @@ Module( | 55 | if call(f"hello 56 | def bar(): - | ^^^ Syntax Error: Expected ')', found 'def' + | ^^^ Syntax Error: Expected `)`, found `def` 57 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap index d9082066b8..5567459c70 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap @@ -113,5 +113,5 @@ Module( | 1 | if call(foo, [a, b def bar(): pass - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap index c3b23f4fbd..ae03fee095 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap @@ -113,7 +113,7 @@ Module( | 1 | if call(foo, [a, b - | ^ Syntax Error: Expected ']', found NonLogicalNewline + | ^ Syntax Error: Expected `]`, found NonLogicalNewline 2 | def bar(): 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap index 1e2ad65fcf..d836318aea 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__fstring_format_spec_1.py.snap @@ -399,7 +399,7 @@ Module( | 5 | f'middle {'string':\ 6 | 'format spec'} - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 7 | 8 | f'middle {'string':\\ | @@ -445,7 +445,7 @@ Module( 6 | 'format spec'} 7 | 8 | f'middle {'string':\\ - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 9 | 'format spec'} 10 | 11 | f'middle {'string':\\\ @@ -492,7 +492,7 @@ Module( | 11 | f'middle {'string':\\\ 12 | 'format spec'} - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap index 54c66a5216..21f4465c4e 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_1.py.snap @@ -110,7 +110,7 @@ Module( | 1 | call(a, b, \\\ 2 | - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 3 | def bar(): 4 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap index f1ae6a18c9..cde70bca42 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__line_continuation_windows_eol.py.snap @@ -93,7 +93,7 @@ Module( | 1 | call(a, b, # comment \ - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 2 | 3 | def bar(): 4 | pass diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap index 1a9af6dacc..f58010350b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_1.py.snap @@ -83,7 +83,7 @@ Module( | 5 | f"""hello {x # comment 6 | y = 1 - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap index 50bb114c7e..7a5f85ab4a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_2.py.snap @@ -80,5 +80,5 @@ Module( | 5 | f'''{foo:.3f 6 | ''' - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap index 174ebceee4..3c611d4fbe 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__triple_quoted_fstring_3.py.snap @@ -110,7 +110,7 @@ Module( | 5 | if call(f'''{x:.3f 6 | ''' - | ^^^ Syntax Error: f-string: expecting '}' + | ^^^ Syntax Error: f-string: expecting `}` 7 | pass | @@ -118,6 +118,6 @@ Module( | 5 | if call(f'''{x:.3f 6 | ''' - | ^ Syntax Error: Expected ')', found newline + | ^ Syntax Error: Expected `)`, found newline 7 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap index 9618f4200b..18b90424fa 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__function_type_parameters.py.snap @@ -375,7 +375,7 @@ Module( 9 | # on following lines. 10 | 11 | def keyword[A, await](): ... - | ^^^^^ Syntax Error: Expected an identifier, but found a keyword 'await' that cannot be used here + | ^^^^^ Syntax Error: Expected an identifier, but found a keyword `await` that cannot be used here 12 | 13 | def not_a_type_param[A, |, B](): ... | @@ -385,7 +385,7 @@ Module( 11 | def keyword[A, await](): ... 12 | 13 | def not_a_type_param[A, |, B](): ... 
- | ^ Syntax Error: Expected ',', found '|' + | ^ Syntax Error: Expected `,`, found `|` 14 | 15 | def multiple_commas[A,,B](): ... | @@ -433,7 +433,7 @@ Module( 17 | def multiple_trailing_commas[A,,](): ... 18 | 19 | def multiple_commas_and_recovery[A,,100](): ... - | ^^^ Syntax Error: Expected ']', found int + | ^^^ Syntax Error: Expected `]`, found int | @@ -441,7 +441,7 @@ Module( 17 | def multiple_trailing_commas[A,,](): ... 18 | 19 | def multiple_commas_and_recovery[A,,100](): ... - | ^ Syntax Error: Expected newline, found ']' + | ^ Syntax Error: Expected newline, found `]` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap index 886c21590c..780f943a96 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__if_extra_closing_parentheses.py.snap @@ -40,7 +40,7 @@ Module( | 1 | # FIXME(micha): This creates two syntax errors instead of just one (and overlapping ones) 2 | if True)): - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap index 34e3500f26..4bd787208c 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_2.py.snap @@ -111,7 +111,7 @@ Module( 2 | # This `as` pattern is unparenthesied so the parser never takes the path 3 | # where it might be confused as a complex literal pattern. 
4 | case x as y + 1j: - | ^ Syntax Error: Expected ':', found '+' + | ^ Syntax Error: Expected `:`, found `+` 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap index 07cd1a64e6..ad4dbb0ec8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_3.py.snap @@ -113,7 +113,7 @@ Module( 2 | # Not in the mapping start token set, so the list parsing bails 3 | # v 4 | case {(x as y): 1}: - | ^ Syntax Error: Expected '}', found '(' + | ^ Syntax Error: Expected `}`, found `(` 5 | pass | @@ -131,7 +131,7 @@ Module( 2 | # Not in the mapping start token set, so the list parsing bails 3 | # v 4 | case {(x as y): 1}: - | ^ Syntax Error: Expected newline, found '}' + | ^ Syntax Error: Expected newline, found `}` 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap index 771706eaed..fde6679f01 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__as_pattern_4.py.snap @@ -114,7 +114,7 @@ Module( 2 | # This `as` pattern is unparenthesized so the parser never takes the path 3 | # where it might be confused as a mapping key pattern. 4 | case {x as y: 1}: - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 5 | pass | @@ -123,6 +123,6 @@ Module( 2 | # This `as` pattern is unparenthesized so the parser never takes the path 3 | # where it might be confused as a mapping key pattern. 
4 | case {x as y: 1}: - | ^ Syntax Error: Expected ',', found name + | ^ Syntax Error: Expected `,`, found name 5 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap index 5592524488..1fbaa9df86 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__invalid_mapping_pattern.py.snap @@ -540,7 +540,7 @@ Module( 1 | # Starred expression is not allowed as a mapping pattern key 2 | match subject: 3 | case {*key}: - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 4 | pass 5 | case {*key: 1}: | @@ -570,7 +570,7 @@ Module( 5 | case {*key: 1}: 6 | pass 7 | case {*key 1}: - | ^ Syntax Error: Expected ':', found int + | ^ Syntax Error: Expected `:`, found int 8 | pass 9 | case {*key, None: 1}: | @@ -589,7 +589,7 @@ Module( 7 | case {*key 1}: 8 | pass 9 | case {*key, None: 1}: - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 10 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap index 364d382ff2..d87687110b 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__match__star_pattern_usage.py.snap @@ -580,7 +580,7 @@ Module( 15 | case Foo(x=*_): 16 | pass 17 | case {*_}: - | ^ Syntax Error: Expected ':', found '}' + | ^ Syntax Error: Expected `:`, found `}` 18 | pass 19 | case {*_: 1}: | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap index 9d872e920e..61ff19b25d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__ambiguous_lpar_with_items.py.snap @@ -1580,7 +1580,7 @@ Module( | 4 | with (item1, item2),: ... 5 | with (item1, item2), as f: ... - | ^^ Syntax Error: Expected ',', found 'as' + | ^^ Syntax Error: Expected `,`, found `as` 6 | with (item1, item2), item3,: ... 7 | with (*item): ... | @@ -1640,7 +1640,7 @@ Module( 9 | with (item := 10 as f): ... 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... - | ^ Syntax Error: Expected ')', found ',' + | ^ Syntax Error: Expected `)`, found `,` 12 | with (item, x for x in range(10)): ... | @@ -1649,7 +1649,7 @@ Module( 9 | with (item := 10 as f): ... 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 12 | with (item, x for x in range(10)): ... | @@ -1658,7 +1658,7 @@ Module( 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... 12 | with (item, x for x in range(10)): ... - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` 13 | 14 | # Make sure the parser doesn't report the same error twice | @@ -1668,7 +1668,7 @@ Module( 10 | with (item1, item2 := 10 as f): ... 11 | with (x for x in range(10), item): ... 12 | with (item, x for x in range(10)): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 13 | 14 | # Make sure the parser doesn't report the same error twice | @@ -1707,7 +1707,7 @@ Module( 15 | with ((*item)): ... 
16 | 17 | with (*x for x in iter, item): ... - | ^ Syntax Error: Expected ')', found ',' + | ^ Syntax Error: Expected `)`, found `,` 18 | with (item1, *x for x in iter, item2): ... 19 | with (x as f, *y): ... | @@ -1717,7 +1717,7 @@ Module( 15 | with ((*item)): ... 16 | 17 | with (*x for x in iter, item): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 18 | with (item1, *x for x in iter, item2): ... 19 | with (x as f, *y): ... | @@ -1726,7 +1726,7 @@ Module( | 17 | with (*x for x in iter, item): ... 18 | with (item1, *x for x in iter, item2): ... - | ^^^ Syntax Error: Expected ')', found 'for' + | ^^^ Syntax Error: Expected `)`, found `for` 19 | with (x as f, *y): ... 20 | with (*x, y as f): ... | @@ -1735,7 +1735,7 @@ Module( | 17 | with (*x for x in iter, item): ... 18 | with (item1, *x for x in iter, item2): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 19 | with (x as f, *y): ... 20 | with (*x, y as f): ... | @@ -1804,7 +1804,7 @@ Module( 22 | with (x, yield y, z): ... 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 25 | with (x for x in iter as y): ... | @@ -1813,7 +1813,7 @@ Module( 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... 25 | with (x for x in iter as y): ... - | ^^ Syntax Error: Expected ')', found 'as' + | ^^ Syntax Error: Expected `)`, found `as` 26 | 27 | # The inner `(...)` is parsed as parenthesized expression | @@ -1823,7 +1823,7 @@ Module( 23 | with (x, yield from y): ... 24 | with (x as f, y) as f: ... 25 | with (x for x in iter as y): ... - | ^ Syntax Error: Expected ',', found ')' + | ^ Syntax Error: Expected `,`, found `)` 26 | 27 | # The inner `(...)` is parsed as parenthesized expression | @@ -1832,7 +1832,7 @@ Module( | 27 | # The inner `(...)` is parsed as parenthesized expression 28 | with ((item as f)): ... 
- | ^^ Syntax Error: Expected ')', found 'as' + | ^^ Syntax Error: Expected `)`, found `as` 29 | 30 | with (item as f), x: ... | @@ -1841,7 +1841,7 @@ Module( | 27 | # The inner `(...)` is parsed as parenthesized expression 28 | with ((item as f)): ... - | ^ Syntax Error: Expected ':', found ')' + | ^ Syntax Error: Expected `:`, found `)` 29 | 30 | with (item as f), x: ... | @@ -1860,7 +1860,7 @@ Module( 28 | with ((item as f)): ... 29 | 30 | with (item as f), x: ... - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 31 | with (item as f1) as f2: ... 32 | with (item1 as f, item2 := 0): ... | @@ -1869,7 +1869,7 @@ Module( | 30 | with (item as f), x: ... 31 | with (item as f1) as f2: ... - | ^^ Syntax Error: Expected ':', found 'as' + | ^^ Syntax Error: Expected `:`, found `as` 32 | with (item1 as f, item2 := 0): ... | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap index 7e2d808bc6..daebbd0a96 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@statements__with__unparenthesized_with_items.py.snap @@ -401,5 +401,5 @@ Module( 7 | with *item1, item2 as f: pass 8 | with item1 as f, *item2: pass 9 | with item := 0 as f: pass - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap index 0d23f0c0d2..0de7376e01 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap +++ 
b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_lambda_without_parentheses.py.snap @@ -118,7 +118,7 @@ Module( | 1 | # parse_options: {"target-version": "3.14"} 2 | t"{lambda x: x}" - | ^^ Syntax Error: t-string: expecting '}' + | ^^ Syntax Error: t-string: expecting `}` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap index 4ff0a7d78f..f39f719d1d 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace.py.snap @@ -259,7 +259,7 @@ Module( 1 | # parse_options: {"target-version": "3.14"} 2 | t"{" 3 | t"{foo!r" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 4 | t"{foo=" 5 | t"{" | @@ -269,7 +269,7 @@ Module( 2 | t"{" 3 | t"{foo!r" 4 | t"{foo=" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 5 | t"{" 6 | t"""{""" | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap index bc20f6172c..9789ed8922 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@t_string_unclosed_lbrace_in_format_spec.py.snap @@ -143,7 +143,7 @@ Module( | 1 | # parse_options: {"target-version": "3.14"} 2 | t"hello {x:" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` 3 | t"hello {x:.3f" | @@ -152,5 +152,5 @@ Module( 1 | # parse_options: {"target-version": "3.14"} 2 | t"hello {x:" 3 | t"hello {x:.3f" - | ^ Syntax Error: t-string: expecting '}' + | ^ Syntax Error: t-string: expecting `}` | diff 
--git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap index 321704cd05..7876c6b6ec 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_invalid_bound_expr.py.snap @@ -288,7 +288,7 @@ Module( 2 | type X[T: yield x] = int 3 | type X[T: yield from x] = int 4 | type X[T: x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap index de9da4848d..351a141b60 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_bound.py.snap @@ -88,7 +88,7 @@ Module( | 1 | type X[**T: int] = int - | ^ Syntax Error: Expected ']', found ':' + | ^ Syntax Error: Expected `]`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap index dad7c709de..0bcbd3cc52 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_param_spec_invalid_default_expr.py.snap @@ -343,7 +343,7 @@ Module( 2 | type X[**P = yield x] = int 3 | type X[**P = yield from x] = int 4 | type X[**P = x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 5 | type X[**P = *int] = int | diff --git 
a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap index 2831009c20..c3e38b8e9a 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_invalid_default_expr.py.snap @@ -417,7 +417,7 @@ Module( 3 | type X[T = (yield x)] = int 4 | type X[T = yield from x] = int 5 | type X[T = x := int] = int - | ^^ Syntax Error: Expected ',', found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` 6 | type X[T: int = *int] = int | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap index e1693e1722..ae228c0e30 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_bound.py.snap @@ -88,7 +88,7 @@ Module( | 1 | type X[*T: int] = int - | ^ Syntax Error: Expected ']', found ':' + | ^ Syntax Error: Expected `]`, found `:` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap index 9b2d1c6de9..4aff137a73 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@type_param_type_var_tuple_invalid_default_expr.py.snap @@ -361,7 +361,7 @@ Module( 3 | type X[*Ts = yield x] = int 4 | type X[*Ts = yield from x] = int 5 | type X[*Ts = x := int] = int - | ^^ Syntax Error: Expected ',', 
found ':=' + | ^^ Syntax Error: Expected `,`, found `:=` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap index 0595f124f2..7ed7f32534 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@unterminated_fstring_newline_recovery.py.snap @@ -376,7 +376,7 @@ Module( 2 | 1 + 1 3 | f"hello {x 4 | 2 + 2 - | ^ Syntax Error: f-string: expecting '}' + | ^ Syntax Error: f-string: expecting `}` 5 | f"hello {x: 6 | 3 + 3 | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap index 0af23c288c..acdd7532ad 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_invalid_test_expr.py.snap @@ -201,7 +201,7 @@ Module( 1 | while *x: ... 2 | while yield x: ... 3 | while a, b: ... - | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` 4 | while a := 1, b: ... | @@ -210,5 +210,5 @@ Module( 2 | while yield x: ... 3 | while a, b: ... 4 | while a := 1, b: ... 
- | ^ Syntax Error: Expected ':', found ',' + | ^ Syntax Error: Expected `:`, found `,` | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap index 67fb75a824..9ad6e62132 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@while_stmt_missing_colon.py.snap @@ -63,6 +63,6 @@ Module( 1 | while ( 2 | a < 30 # comment 3 | ) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 4 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap index 668c7c2c08..817368aa22 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_colon.py.snap @@ -62,6 +62,6 @@ Module( | 1 | # `)` followed by a newline 2 | with (item1, item2) - | ^ Syntax Error: Expected ':', found newline + | ^ Syntax Error: Expected `:`, found newline 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap index ea060453a9..0c4b117dcf 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@with_items_parenthesized_missing_comma.py.snap @@ -338,7 +338,7 @@ Module( | 1 | with (item1 item2): ... 
- | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... | @@ -347,7 +347,7 @@ Module( | 1 | with (item1 item2): ... 2 | with (item1 as f1 item2): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... | @@ -357,7 +357,7 @@ Module( 1 | with (item1 item2): ... 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 4 | with (item1, item2 as f1 item3, item4): ... 5 | with (item1, item2: ... | @@ -367,7 +367,7 @@ Module( 2 | with (item1 as f1 item2): ... 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... - | ^^^^^ Syntax Error: Expected ',', found name + | ^^^^^ Syntax Error: Expected `,`, found name 5 | with (item1, item2: ... | @@ -376,5 +376,5 @@ Module( 3 | with (item1, item2 item3, item4): ... 4 | with (item1, item2 as f1 item3, item4): ... 5 | with (item1, item2: ... - | ^ Syntax Error: Expected ')', found ':' + | ^ Syntax Error: Expected `)`, found `:` | diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md index f16ec0505d..fd2a6ef0cf 100644 --- a/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/comprehensions/invalid_syntax.md @@ -1,20 +1,20 @@ # Comprehensions with invalid syntax ```py -# Missing 'in' keyword. +# Missing `in` keyword. 
# It's reasonably clear here what they *meant* to write, # so we'll still infer the correct type: -# error: [invalid-syntax] "Expected 'in', found name" +# error: [invalid-syntax] "Expected `in`, found name" # revealed: int [reveal_type(a) for a range(3)] # Missing iteration variable -# error: [invalid-syntax] "Expected an identifier, but found a keyword 'in' that cannot be used here" -# error: [invalid-syntax] "Expected 'in', found name" +# error: [invalid-syntax] "Expected an identifier, but found a keyword `in` that cannot be used here" +# error: [invalid-syntax] "Expected `in`, found name" # error: [unresolved-reference] # revealed: Unknown [reveal_type(b) for in range(3)] @@ -27,9 +27,9 @@ [reveal_type(c) for c in] -# Missing 'in' keyword and missing iterable +# Missing `in` keyword and missing iterable -# error: [invalid-syntax] "Expected 'in', found ']'" +# error: [invalid-syntax] "Expected `in`, found `]`" # revealed: Unknown [reveal_type(d) for d] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md index 6b7423f86d..3b8ef67e72 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/import/invalid_syntax.md @@ -14,7 +14,7 @@ TODO: This is correctly flagged as an error, but we could clean up the diagnosti ```py # TODO: No second diagnostic -# error: [invalid-syntax] "Expected ',', found '.'" +# error: [invalid-syntax] "Expected `,`, found `.`" # error: [unresolved-import] "Module `a` has no member `c`" from a import b.c From 9664474c5172c996a0fe5b9053e14a345629e166 Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Fri, 31 Oct 2025 12:06:47 -0400 Subject: [PATCH 019/180] [ty] rollback preferring declared type on invalid TypedDict creation (#21169) ## Summary Discussion with @ibraheemdev clarified that https://github.com/astral-sh/ruff/pull/21168 was incorrect. 
In a case of failed inference of a dict literal as a `TypedDict`, we should store the context-less inferred type of the dict literal as the type of the dict literal expression itself; the fallback to declared type should happen at the level of the overall assignment definition. The reason the latter isn't working yet is because currently we (wrongly) consider a homogeneous dict type as assignable to a `TypedDict`, so we don't actually consider the assignment itself as failed. So the "bug" I observed (and tried to fix) will naturally be fixed by implementing TypedDict assignability rules. Rollback https://github.com/astral-sh/ruff/pull/21168 except for the tests, and modify the tests to include TODOs as needed. ## Test Plan Updated mdtests. --- .../resources/mdtest/typed_dict.md | 9 ++++++--- .../ty_python_semantic/src/types/infer/builder.rs | 10 ++++++---- crates/ty_python_semantic/src/types/typed_dict.rs | 15 +++++++++++---- 3 files changed, 23 insertions(+), 11 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index 8be6de4ef3..14142020a2 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -99,7 +99,8 @@ eve1a: Person = {"name": b"Eve", "age": None} # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`" eve1b = Person(name=b"Eve", age=None) -reveal_type(eve1a) # revealed: Person +# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts) +reveal_type(eve1a) # revealed: dict[Unknown | str, Unknown | bytes | None] reveal_type(eve1b) # revealed: Person # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" @@ -107,7 +108,8 @@ eve2a: Person = {"age": 22} # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor" eve2b = 
Person(age=22) -reveal_type(eve2a) # revealed: Person +# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts) +reveal_type(eve2a) # revealed: dict[Unknown | str, Unknown | int] reveal_type(eve2b) # revealed: Person # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" @@ -115,7 +117,8 @@ eve3a: Person = {"name": "Eve", "age": 25, "extra": True} # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra"" eve3b = Person(name="Eve", age=25, extra=True) -reveal_type(eve3a) # revealed: Person +# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts) +reveal_type(eve3a) # revealed: dict[Unknown | str, Unknown | str | int] reveal_type(eve3b) # revealed: Person ``` diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index f58f093a44..7094fdea07 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -6239,9 +6239,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { && let Some(typed_dict) = tcx .filter_union(self.db(), Type::is_typed_dict) .as_typed_dict() + && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict) { - self.infer_typed_dict_expression(dict, typed_dict); - return Type::TypedDict(typed_dict); + return ty; } // Avoid false positives for the functional `TypedDict` form, which is currently @@ -6266,7 +6266,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { &mut self, dict: &ast::ExprDict, typed_dict: TypedDictType<'db>, - ) { + ) -> Option> { let ast::ExprDict { range: _, node_index: _, @@ -6289,7 +6289,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { validate_typed_dict_dict_literal(&self.context, typed_dict, dict, dict.into(), |expr| { self.expression_type(expr) - }); + }) + .ok() + .map(|_| Type::TypedDict(typed_dict)) } // Infer the type of a collection literal expression. 
diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs index 632d2a2933..e29b836d8a 100644 --- a/crates/ty_python_semantic/src/types/typed_dict.rs +++ b/crates/ty_python_semantic/src/types/typed_dict.rs @@ -389,7 +389,7 @@ fn validate_from_keywords<'db, 'ast>( provided_keys } -/// Validates a `TypedDict` dictionary literal assignment, emitting any needed diagnostics. +/// Validates a `TypedDict` dictionary literal assignment, /// e.g. `person: Person = {"name": "Alice", "age": 30}` pub(super) fn validate_typed_dict_dict_literal<'db>( context: &InferContext<'db, '_>, @@ -397,7 +397,8 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( dict_expr: &ast::ExprDict, error_node: AnyNodeRef, expression_type_fn: impl Fn(&ast::Expr) -> Type<'db>, -) { +) -> Result, OrderSet<&'db str>> { + let mut valid = true; let mut provided_keys = OrderSet::new(); // Validate each key-value pair in the dictionary literal @@ -410,7 +411,7 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( let value_type = expression_type_fn(&item.value); - validate_typed_dict_key_assignment( + valid &= validate_typed_dict_key_assignment( context, typed_dict, key_str, @@ -423,5 +424,11 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( } } - validate_typed_dict_required_keys(context, typed_dict, &provided_keys, error_node); + valid &= validate_typed_dict_required_keys(context, typed_dict, &provided_keys, error_node); + + if valid { + Ok(provided_keys) + } else { + Err(provided_keys) + } } From ff3a6a8fbd5af1b3e9b42b4c5adb9a8954968de9 Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Fri, 31 Oct 2025 12:41:14 -0400 Subject: [PATCH 020/180] [ty] Support type context of union attribute assignments (#21170) ## Summary Turns out this is easy to implement. Resolves https://github.com/astral-sh/ty/issues/1375. 
--- .../resources/mdtest/bidirectional.md | 20 ++++++++++++++++++- .../src/types/infer/builder.rs | 14 +++++++------ 2 files changed, 27 insertions(+), 7 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/bidirectional.md b/crates/ty_python_semantic/resources/mdtest/bidirectional.md index 1cc3dba162..3fee0513ed 100644 --- a/crates/ty_python_semantic/resources/mdtest/bidirectional.md +++ b/crates/ty_python_semantic/resources/mdtest/bidirectional.md @@ -200,7 +200,7 @@ def f() -> list[Literal[1]]: return [1] ``` -## Instance attribute +## Instance attributes ```toml [environment] @@ -235,6 +235,24 @@ def _(flag: bool): C.x = lst(1) ``` +For union targets, each element of the union is considered as a separate type context: + +```py +from typing import Literal + +class X: + x: list[int | str] + +class Y: + x: list[int | None] + +def lst[T](x: T) -> list[T]: + return [x] + +def _(xy: X | Y): + xy.x = lst(1) +``` + ## Class constructor parameters ```toml diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 7094fdea07..f6055c0a0e 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -3574,7 +3574,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target: &ast::ExprAttribute, object_ty: Type<'db>, attribute: &str, - infer_value_ty: &dyn Fn(&mut Self, TypeContext<'db>) -> Type<'db>, + infer_value_ty: &mut dyn FnMut(&mut Self, TypeContext<'db>) -> Type<'db>, emit_diagnostics: bool, ) -> bool { let db = self.db(); @@ -3651,7 +3651,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { match object_ty { Type::Union(union) => { - // TODO: We could perform multi-inference here with each element of the union as type context. + // First infer the value without type context, and then again for each union element. 
let value_ty = infer_value_ty(self, TypeContext::default()); if union.elements(self.db()).iter().all(|elem| { @@ -3659,7 +3659,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target, *elem, attribute, - &|_, _| value_ty, + // Note that `infer_value_ty` silences diagnostics after the first inference. + &mut infer_value_ty, false, ) }) { @@ -3684,7 +3685,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } Type::Intersection(intersection) => { - // TODO: We could perform multi-inference here with each element of the union as type context. + // First infer the value without type context, and then again for each union element. let value_ty = infer_value_ty(self, TypeContext::default()); // TODO: Handle negative intersection elements @@ -3693,7 +3694,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target, *elem, attribute, - &|_, _| value_ty, + // Note that `infer_value_ty` silences diagnostics after the first inference. + &mut infer_value_ty, false, ) }) { @@ -4254,7 +4256,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let object_ty = self.infer_expression(object, TypeContext::default()); if let Some(infer_assigned_ty) = infer_assigned_ty { - let infer_assigned_ty = &|builder: &mut Self, tcx| { + let infer_assigned_ty = &mut |builder: &mut Self, tcx| { let assigned_ty = infer_assigned_ty(builder, tcx); builder.store_expression_type(target, assigned_ty); assigned_ty From 1734ddfb3e6393b0cd45b2b1d3f170cc102b2fcf Mon Sep 17 00:00:00 2001 From: David Peter Date: Fri, 31 Oct 2025 17:48:34 +0100 Subject: [PATCH 021/180] [ty] Do not promote literals in contravariant positions of generic specializations (#21171) ## Summary closes https://github.com/astral-sh/ty/issues/1284 supersedes https://github.com/astral-sh/ruff/pull/20950 by @ibraheemdev ## Test Plan New regression test --- .../resources/mdtest/literal_promotion.md | 36 +++++++++++++++++++ .../ty_python_semantic/src/types/generics.rs | 9 +++-- .../ty_python_semantic/src/types/variance.rs | 7 
++++ 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/literal_promotion.md b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md index 726ca59d20..f13d3229ee 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal_promotion.md +++ b/crates/ty_python_semantic/resources/mdtest/literal_promotion.md @@ -1,5 +1,10 @@ # Literal promotion +```toml +[environment] +python-version = "3.12" +``` + There are certain places where we promote literals to their common supertype: ```py @@ -30,3 +35,34 @@ def double_negation(callback: Callable[[Callable[[Literal[1]], None]], None]): reveal_type([callback]) # revealed: list[Unknown | (((int, /) -> None, /) -> None)] ``` + +Literal promotion should also not apply recursively to type arguments in contravariant/invariant +position: + +```py +class Bivariant[T]: + pass + +class Covariant[T]: + def pop(self) -> T: + raise NotImplementedError + +class Contravariant[T]: + def push(self, value: T) -> None: + pass + +class Invariant[T]: + x: T + +def _( + bivariant: Bivariant[Literal[1]], + covariant: Covariant[Literal[1]], + contravariant: Contravariant[Literal[1]], + invariant: Invariant[Literal[1]], +): + reveal_type([bivariant]) # revealed: list[Unknown | Bivariant[int]] + reveal_type([covariant]) # revealed: list[Unknown | Covariant[int]] + + reveal_type([contravariant]) # revealed: list[Unknown | Contravariant[Literal[1]]] + reveal_type([invariant]) # revealed: list[Unknown | Invariant[Literal[1]]] +``` diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 8485931ff2..98f7cb736f 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -969,10 +969,15 @@ impl<'db> Specialization<'db> { let types: Box<[_]> = self .types(db) .iter() + .zip(self.generic_context(db).variables(db)) .enumerate() - .map(|(i, ty)| { + .map(|(i, (ty, typevar))| { 
let tcx = TypeContext::new(tcx.get(i).copied()); - ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + if typevar.variance(db).is_covariant() { + ty.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + } else { + ty.apply_type_mapping_impl(db, &type_mapping.flip(), tcx, visitor) + } }) .collect(); diff --git a/crates/ty_python_semantic/src/types/variance.rs b/crates/ty_python_semantic/src/types/variance.rs index fb9c87d062..5ec1d5a8ff 100644 --- a/crates/ty_python_semantic/src/types/variance.rs +++ b/crates/ty_python_semantic/src/types/variance.rs @@ -85,6 +85,13 @@ impl TypeVarVariance { TypeVarVariance::Bivariant => TypeVarVariance::Bivariant, } } + + pub(crate) const fn is_covariant(self) -> bool { + matches!( + self, + TypeVarVariance::Covariant | TypeVarVariance::Bivariant + ) + } } impl std::iter::FromIterator for TypeVarVariance { From 827d8ae5d4b67c33c567a855236b4d058d366b20 Mon Sep 17 00:00:00 2001 From: Brent Westbrook <36778786+ntBre@users.noreply.github.com> Date: Fri, 31 Oct 2025 14:53:40 -0400 Subject: [PATCH 022/180] Allow newlines after function headers without docstrings (#21110) Summary -- This is a first step toward fixing #9745. 
After reviewing our open issues and several Black issues and PRs, I personally found the function case the most compelling, especially with very long argument lists: ```py def func( self, arg1: int, arg2: bool, arg3: bool, arg4: float, arg5: bool, ) -> tuple[...]: if arg2 and arg3: raise ValueError ``` or many annotations: ```py def function( self, data: torch.Tensor | tuple[torch.Tensor, ...], other_argument: int ) -> torch.Tensor | tuple[torch.Tensor, ...]: do_something(data) return something ``` I think docstrings help the situation substantially both because syntax highlighting will usually give a very clear separation between the annotations and the docstring and because we already allow a blank line _after_ the docstring: ```py def function( self, data: torch.Tensor | tuple[torch.Tensor, ...], other_argument: int ) -> torch.Tensor | tuple[torch.Tensor, ...]: """ A function doing something. And a longer description of the things it does. """ do_something(data) return something ``` There are still other comments on #9745, such as [this one] with 9 upvotes, where users specifically request blank lines in all block types, or at least including conditionals and loops. I'm sympathetic to that case as well, even if personally I don't find an [example] like this: ```py if blah: # Do some stuff that is logically related data = get_data() # Do some different stuff that is logically related results = calculate_results() return results ``` to be much more readable than: ```py if blah: # Do some stuff that is logically related data = get_data() # Do some different stuff that is logically related results = calculate_results() return results ``` I'm probably just used to the latter from the formatters I've used, but I do prefer it. I also think that functions are the least susceptible to the accidental introduction of a newline after refactoring described in Micha's [comment] on #8893. 
I actually considered further restricting this change to functions with multiline headers. I don't think very short functions like: ```py def foo(): return 1 ``` benefit nearly as much from the allowed newline, but I just went with any function without a docstring for now. I guess a marginal case like: ```py def foo(a_long_parameter: ALongType, b_long_parameter: BLongType) -> CLongType: return 1 ``` might be a good argument for not restricting it. I caused a couple of syntax errors before adding special handling for the ellipsis-only case, so I suspect that there are some other interesting edge cases that may need to be handled better. Test Plan -- Existing tests, plus a few simple new ones. As noted above, I suspect that we may need a few more for edge cases I haven't considered. [this one]: https://github.com/astral-sh/ruff/issues/9745#issuecomment-2876771400 [example]: https://github.com/psf/black/issues/902#issuecomment-1562154809 [comment]: https://github.com/astral-sh/ruff/issues/8893#issuecomment-1867259744 --- .../resources/test/fixtures/ruff/newlines.py | 93 +++++++ .../fixtures/ruff/range_formatting/indent.py | 6 + crates/ruff_python_formatter/src/preview.rs | 7 + .../src/statement/clause.rs | 13 +- .../src/statement/suite.rs | 46 +++- .../tests/snapshots/format@newlines.py.snap | 254 +++++++++++++++++- .../format@range_formatting__indent.py.snap | 69 +++++ 7 files changed, 460 insertions(+), 28 deletions(-) diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py index 2afbd18229..18c810ead8 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py @@ -335,3 +335,96 @@ def overload4(): # trailing comment def overload4(a: int): ... 
+ + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + + return 1 + +def preserved2(): + + pass + +def preserved3(): + + def inner(): ... + +def preserved4(): + + def inner(): + print("with a body") + return 1 + + return 2 + +def preserved5(): + + ... + # trailing comment prevents collapsing the stub + + +def preserved6(): + + # Comment + + return 1 + + +def preserved7(): + + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + + ... + + +# But we still discard these newlines: +def removed1(): + + "Docstring" + + return 1 + + +def removed2(): + + ... + + +def removed3(): + + ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + + + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py index 1fb1522aa0..e10ffe55ee 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/range_formatting/indent.py @@ -61,3 +61,9 @@ def test6 (): print("Format" ) print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format" ) + print(3 + 4) + print("Format to fix indentation" ) diff --git a/crates/ruff_python_formatter/src/preview.rs b/crates/ruff_python_formatter/src/preview.rs index b6479ab1b4..5455fa9a12 100644 --- a/crates/ruff_python_formatter/src/preview.rs +++ b/crates/ruff_python_formatter/src/preview.rs @@ -36,3 +36,10 @@ pub(crate) const fn is_remove_parens_around_except_types_enabled( ) 
-> bool { context.is_preview() } + +/// Returns `true` if the +/// [`allow_newline_after_block_open`](https://github.com/astral-sh/ruff/pull/21110) preview style +/// is enabled. +pub(crate) const fn is_allow_newline_after_block_open_enabled(context: &PyFormatContext) -> bool { + context.is_preview() +} diff --git a/crates/ruff_python_formatter/src/statement/clause.rs b/crates/ruff_python_formatter/src/statement/clause.rs index a5c172f4f8..1554c30d0f 100644 --- a/crates/ruff_python_formatter/src/statement/clause.rs +++ b/crates/ruff_python_formatter/src/statement/clause.rs @@ -8,7 +8,7 @@ use ruff_python_trivia::{SimpleToken, SimpleTokenKind, SimpleTokenizer}; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::comments::{SourceComment, leading_alternate_branch_comments, trailing_comments}; -use crate::statement::suite::{SuiteKind, contains_only_an_ellipsis}; +use crate::statement::suite::{SuiteKind, as_only_an_ellipsis}; use crate::verbatim::write_suppressed_clause_header; use crate::{has_skip_comment, prelude::*}; @@ -449,17 +449,10 @@ impl Format> for FormatClauseBody<'_> { || matches!(self.kind, SuiteKind::Function | SuiteKind::Class); if should_collapse_stub - && contains_only_an_ellipsis(self.body, f.context().comments()) + && let Some(ellipsis) = as_only_an_ellipsis(self.body, f.context().comments()) && self.trailing_comments.is_empty() { - write!( - f, - [ - space(), - self.body.format().with_options(self.kind), - hard_line_break() - ] - ) + write!(f, [space(), ellipsis.format(), hard_line_break()]) } else { write!( f, diff --git a/crates/ruff_python_formatter/src/statement/suite.rs b/crates/ruff_python_formatter/src/statement/suite.rs index 4071b4ba1f..9ed32beb76 100644 --- a/crates/ruff_python_formatter/src/statement/suite.rs +++ b/crates/ruff_python_formatter/src/statement/suite.rs @@ -13,7 +13,9 @@ use crate::comments::{ use crate::context::{NodeLevel, TopLevelStatementPosition, WithIndentLevel, WithNodeLevel}; use 
crate::other::string_literal::StringLiteralKind; use crate::prelude::*; -use crate::preview::is_blank_line_before_decorated_class_in_stub_enabled; +use crate::preview::{ + is_allow_newline_after_block_open_enabled, is_blank_line_before_decorated_class_in_stub_enabled, +}; use crate::statement::stmt_expr::FormatStmtExpr; use crate::verbatim::{ suppressed_node, write_suppressed_statements_starting_with_leading_comment, @@ -169,6 +171,22 @@ impl FormatRule> for FormatSuite { false, ) } else { + // Allow an empty line after a function header in preview, if the function has no + // docstring and no initial comment. + let allow_newline_after_block_open = + is_allow_newline_after_block_open_enabled(f.context()) + && matches!(self.kind, SuiteKind::Function) + && matches!(first, SuiteChildStatement::Other(_)); + + let start = comments + .leading(first) + .first() + .map_or_else(|| first.start(), Ranged::start); + + if allow_newline_after_block_open && lines_before(start, f.context().source()) > 1 { + empty_line().fmt(f)?; + } + first.fmt(f)?; let empty_line_after_docstring = if matches!(first, SuiteChildStatement::Docstring(_)) @@ -218,7 +236,7 @@ impl FormatRule> for FormatSuite { )?; } else { // Preserve empty lines after a stub implementation but don't insert a new one if there isn't any present in the source. - // This is useful when having multiple function overloads that should be grouped to getter by omitting new lines between them. + // This is useful when having multiple function overloads that should be grouped together by omitting new lines between them. let is_preceding_stub_function_without_empty_line = following .is_function_def_stmt() && preceding @@ -728,17 +746,21 @@ fn stub_suite_can_omit_empty_line(preceding: &Stmt, following: &Stmt, f: &PyForm /// Returns `true` if a function or class body contains only an ellipsis with no comments. 
pub(crate) fn contains_only_an_ellipsis(body: &[Stmt], comments: &Comments) -> bool { - match body { - [Stmt::Expr(ast::StmtExpr { value, .. })] => { - let [node] = body else { - return false; - }; - value.is_ellipsis_literal_expr() - && !comments.has_leading(node) - && !comments.has_trailing_own_line(node) - } - _ => false, + as_only_an_ellipsis(body, comments).is_some() +} + +/// Returns `Some(Stmt::Ellipsis)` if a function or class body contains only an ellipsis with no +/// comments. +pub(crate) fn as_only_an_ellipsis<'a>(body: &'a [Stmt], comments: &Comments) -> Option<&'a Stmt> { + if let [node @ Stmt::Expr(ast::StmtExpr { value, .. })] = body + && value.is_ellipsis_literal_expr() + && !comments.has_leading(node) + && !comments.has_trailing_own_line(node) + { + return Some(node); } + + None } /// Returns `true` if a [`Stmt`] is a class or function definition. diff --git a/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap index 84bd4283c4..260de915fc 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@newlines.py.snap @@ -1,7 +1,6 @@ --- source: crates/ruff_python_formatter/tests/fixtures.rs input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/newlines.py -snapshot_kind: text --- ## Input ```python @@ -342,6 +341,99 @@ def overload4(): # trailing comment def overload4(a: int): ... + + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + + return 1 + +def preserved2(): + + pass + +def preserved3(): + + def inner(): ... + +def preserved4(): + + def inner(): + print("with a body") + return 1 + + return 2 + +def preserved5(): + + ... 
+ # trailing comment prevents collapsing the stub + + +def preserved6(): + + # Comment + + return 1 + + +def preserved7(): + + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + + ... + + +# But we still discard these newlines: +def removed1(): + + "Docstring" + + return 1 + + +def removed2(): + + ... + + +def removed3(): + + ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + + + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 ``` ## Output @@ -732,6 +824,88 @@ def overload4(): def overload4(a: int): ... + + +# In preview, we preserve these newlines at the start of functions: +def preserved1(): + return 1 + + +def preserved2(): + pass + + +def preserved3(): + def inner(): ... + + +def preserved4(): + def inner(): + print("with a body") + return 1 + + return 2 + + +def preserved5(): + ... + # trailing comment prevents collapsing the stub + + +def preserved6(): + # Comment + + return 1 + + +def preserved7(): + # comment + # another line + # and a third + + return 0 + + +def preserved8(): # this also prevents collapsing the stub + ... + + +# But we still discard these newlines: +def removed1(): + "Docstring" + + return 1 + + +def removed2(): ... + + +def removed3(): ... # trailing same-line comment does not prevent collapsing the stub + + +# And we discard empty lines after the first: +def partially_preserved1(): + return 1 + + +# We only preserve blank lines, not add new ones +def untouched1(): + # comment + + return 0 + + +def untouched2(): + # comment + return 0 + + +def untouched3(): + # comment + # another line + # and a third + + return 0 ``` @@ -739,7 +913,15 @@ def overload4(a: int): ... 
```diff --- Stable +++ Preview -@@ -277,6 +277,7 @@ +@@ -253,6 +253,7 @@ + + + def fakehttp(): ++ + class FakeHTTPConnection: + if mock_close: + +@@ -277,6 +278,7 @@ def a(): return 1 @@ -747,7 +929,7 @@ def overload4(a: int): ... else: pass -@@ -293,6 +294,7 @@ +@@ -293,6 +295,7 @@ def a(): return 1 @@ -755,7 +937,7 @@ def overload4(a: int): ... case 1: def a(): -@@ -303,6 +305,7 @@ +@@ -303,6 +306,7 @@ def a(): return 1 @@ -763,7 +945,7 @@ def overload4(a: int): ... except RuntimeError: def a(): -@@ -313,6 +316,7 @@ +@@ -313,6 +317,7 @@ def a(): return 1 @@ -771,7 +953,7 @@ def overload4(a: int): ... finally: def a(): -@@ -323,18 +327,22 @@ +@@ -323,18 +328,22 @@ def a(): return 1 @@ -794,4 +976,64 @@ def overload4(a: int): ... finally: def a(): +@@ -388,18 +397,22 @@ + + # In preview, we preserve these newlines at the start of functions: + def preserved1(): ++ + return 1 + + + def preserved2(): ++ + pass + + + def preserved3(): ++ + def inner(): ... + + + def preserved4(): ++ + def inner(): + print("with a body") + return 1 +@@ -408,17 +421,20 @@ + + + def preserved5(): ++ + ... + # trailing comment prevents collapsing the stub + + + def preserved6(): ++ + # Comment + + return 1 + + + def preserved7(): ++ + # comment + # another line + # and a third +@@ -427,6 +443,7 @@ + + + def preserved8(): # this also prevents collapsing the stub ++ + ... 
+ + +@@ -445,6 +462,7 @@ + + # And we discard empty lines after the first: + def partially_preserved1(): ++ + return 1 + + ``` diff --git a/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap index 1609cf657e..213c843da1 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@range_formatting__indent.py.snap @@ -67,6 +67,12 @@ def test6 (): print("Format" ) print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format" ) + print(3 + 4) + print("Format to fix indentation" ) ``` ## Outputs @@ -146,6 +152,27 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation" ) + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation" ) +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation" ) ``` @@ -225,6 +252,27 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation") + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation") +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation") ``` @@ -304,4 +352,25 @@ def test6 (): print("Format") print(3 + 4) print("Format to fix indentation") + + +def test7 (): + print("Format") + print(3 + 4) + print("Format to fix indentation") +``` + + +#### Preview changes +```diff +--- Stable ++++ Preview +@@ -55,6 +55,7 @@ + + + def test6 (): ++ + print("Format") + print(3 + 4) + print("Format to fix indentation") ``` From 6337e22f0c158767ae13b2ed5744424bc2291b20 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Fri, 31 Oct 2025 21:00:04 +0100 Subject: [PATCH 023/180] 
[ty] Smaller refactors to server API in prep for notebook support (#21095) --- crates/ruff_db/src/system/path.rs | 13 +- crates/ty_server/src/document.rs | 90 +++-- crates/ty_server/src/document/notebook.rs | 100 ++++-- .../ty_server/src/document/text_document.rs | 15 +- crates/ty_server/src/lib.rs | 2 +- crates/ty_server/src/server/api.rs | 23 +- .../ty_server/src/server/api/diagnostics.rs | 52 +-- .../server/api/notifications/did_change.rs | 23 +- .../notifications/did_change_watched_files.rs | 17 +- .../src/server/api/notifications/did_close.rs | 28 +- .../api/notifications/did_close_notebook.rs | 22 +- .../src/server/api/notifications/did_open.rs | 27 +- .../api/notifications/did_open_notebook.rs | 23 +- .../src/server/api/requests/completion.rs | 2 +- .../src/server/api/requests/doc_highlights.rs | 2 +- .../server/api/requests/document_symbols.rs | 2 +- .../server/api/requests/execute_command.rs | 2 +- .../server/api/requests/goto_declaration.rs | 2 +- .../server/api/requests/goto_definition.rs | 2 +- .../server/api/requests/goto_references.rs | 2 +- .../api/requests/goto_type_definition.rs | 2 +- .../src/server/api/requests/hover.rs | 2 +- .../src/server/api/requests/inlay_hints.rs | 2 +- .../src/server/api/requests/prepare_rename.rs | 2 +- .../src/server/api/requests/rename.rs | 2 +- .../server/api/requests/selection_range.rs | 2 +- .../server/api/requests/semantic_tokens.rs | 2 +- .../api/requests/semantic_tokens_range.rs | 2 +- .../src/server/api/requests/signature_help.rs | 2 +- .../api/requests/workspace_diagnostic.rs | 31 +- crates/ty_server/src/session.rs | 230 +++++++++---- crates/ty_server/src/session/index.rs | 314 +++++++----------- crates/ty_server/src/system.rs | 74 +---- 33 files changed, 570 insertions(+), 546 deletions(-) diff --git a/crates/ruff_db/src/system/path.rs b/crates/ruff_db/src/system/path.rs index 71a92fb4c8..a387ae54f6 100644 --- a/crates/ruff_db/src/system/path.rs +++ b/crates/ruff_db/src/system/path.rs @@ -723,10 +723,11 @@ impl 
ruff_cache::CacheKey for SystemPathBuf { /// A slice of a virtual path on [`System`](super::System) (akin to [`str`]). #[repr(transparent)] +#[derive(Eq, PartialEq, Hash, PartialOrd, Ord)] pub struct SystemVirtualPath(str); impl SystemVirtualPath { - pub fn new(path: &str) -> &SystemVirtualPath { + pub const fn new(path: &str) -> &SystemVirtualPath { // SAFETY: SystemVirtualPath is marked as #[repr(transparent)] so the conversion from a // *const str to a *const SystemVirtualPath is valid. unsafe { &*(path as *const str as *const SystemVirtualPath) } @@ -767,8 +768,8 @@ pub struct SystemVirtualPathBuf(String); impl SystemVirtualPathBuf { #[inline] - pub fn as_path(&self) -> &SystemVirtualPath { - SystemVirtualPath::new(&self.0) + pub const fn as_path(&self) -> &SystemVirtualPath { + SystemVirtualPath::new(self.0.as_str()) } } @@ -852,6 +853,12 @@ impl ruff_cache::CacheKey for SystemVirtualPathBuf { } } +impl Borrow for SystemVirtualPathBuf { + fn borrow(&self) -> &SystemVirtualPath { + self.as_path() + } +} + /// Deduplicates identical paths and removes nested paths. /// /// # Examples diff --git a/crates/ty_server/src/document.rs b/crates/ty_server/src/document.rs index fff51d2f49..e2c582475b 100644 --- a/crates/ty_server/src/document.rs +++ b/crates/ty_server/src/document.rs @@ -11,6 +11,7 @@ use lsp_types::{PositionEncodingKind, Url}; use crate::system::AnySystemPath; pub use notebook::NotebookDocument; pub(crate) use range::{FileRangeExt, PositionExt, RangeExt, TextSizeExt, ToRangeExt}; +use ruff_db::system::{SystemPathBuf, SystemVirtualPath}; pub(crate) use text_document::DocumentVersion; pub use text_document::TextDocument; @@ -41,39 +42,75 @@ impl From for ruff_source_file::PositionEncoding { /// A unique document ID, derived from a URL passed as part of an LSP request. /// This document ID can point to either be a standalone Python file, a full notebook, or a cell within a notebook. 
-#[derive(Clone, Debug)] -pub(crate) enum DocumentKey { - Notebook(AnySystemPath), - NotebookCell { - cell_url: Url, - notebook_path: AnySystemPath, - }, - Text(AnySystemPath), +/// +/// The `DocumentKey` is very similar to `AnySystemPath`. The important distinction is that +/// ty doesn't know about individual notebook cells, instead, ty operates on full notebook documents. +/// ty also doesn't support resolving settings per cell, instead, settings are resolved per file or notebook. +/// +/// Thus, the motivation of `DocumentKey` is to prevent accidental use of Cell keys for operations +/// that expect to work on a file path level. That's what [`DocumentHandle::to_file_path`] +/// is for, it returns a file path for any document, taking into account that these methods should +/// return the notebook for cell documents and notebooks. +#[derive(Clone, Debug, Hash, PartialEq, Eq)] +pub(super) enum DocumentKey { + /// A URI using the `file` schema and maps to a valid path. + File(SystemPathBuf), + + /// Any other URI. + /// + /// Used for Notebook-cells, URI's with non-`file` schemes, or invalid `file` URI's. + Opaque(String), } impl DocumentKey { - /// Returns the file path associated with the key. - pub(crate) fn path(&self) -> &AnySystemPath { - match self { - DocumentKey::Notebook(path) | DocumentKey::Text(path) => path, - DocumentKey::NotebookCell { notebook_path, .. } => notebook_path, + /// Converts the given [`Url`] to an [`DocumentKey`]. + /// + /// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`] unless + /// the url isn't a valid file path. + /// + /// In all other cases, the URL is kept as an opaque identifier ([`Self::Opaque`]). 
+ pub(crate) fn from_url(url: &Url) -> Self { + if url.scheme() == "file" { + if let Ok(path) = url.to_file_path() { + Self::File(SystemPathBuf::from_path_buf(path).expect("URL to be valid UTF-8")) + } else { + tracing::warn!( + "Treating `file:` url `{url}` as opaque URL as it isn't a valid file path" + ); + Self::Opaque(url.to_string()) + } + } else { + Self::Opaque(url.to_string()) } } - pub(crate) fn from_path(path: AnySystemPath) -> Self { - // For text documents, we assume it's a text document unless it's a notebook file. - match path.extension() { - Some("ipynb") => Self::Notebook(path), - _ => Self::Text(path), + pub(crate) fn as_opaque(&self) -> Option<&str> { + match self { + Self::Opaque(uri) => Some(uri), + Self::File(_) => None, } } - /// Returns the URL for this document key. For notebook cells, returns the cell URL. - /// For other document types, converts the path to a URL. - pub(crate) fn to_url(&self) -> Option { + /// Returns the corresponding [`AnySystemPath`] for this document key. + /// + /// Note, calling this method on a `DocumentKey::Opaque` representing a cell document + /// will return a `SystemVirtualPath` corresponding to the cell URI but not the notebook file path. + /// That's most likely not what you want. + pub(super) fn to_file_path(&self) -> AnySystemPath { match self { - DocumentKey::NotebookCell { cell_url, .. 
} => Some(cell_url.clone()), - DocumentKey::Notebook(path) | DocumentKey::Text(path) => path.to_url(), + Self::File(path) => AnySystemPath::System(path.clone()), + Self::Opaque(uri) => { + AnySystemPath::SystemVirtual(SystemVirtualPath::new(uri).to_path_buf()) + } + } + } +} + +impl From for DocumentKey { + fn from(value: AnySystemPath) -> Self { + match value { + AnySystemPath::System(system_path) => Self::File(system_path), + AnySystemPath::SystemVirtual(virtual_path) => Self::Opaque(virtual_path.to_string()), } } } @@ -81,11 +118,8 @@ impl DocumentKey { impl std::fmt::Display for DocumentKey { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::NotebookCell { cell_url, .. } => cell_url.fmt(f), - Self::Notebook(path) | Self::Text(path) => match path { - AnySystemPath::System(system_path) => system_path.fmt(f), - AnySystemPath::SystemVirtual(virtual_path) => virtual_path.fmt(f), - }, + Self::File(path) => path.fmt(f), + Self::Opaque(uri) => uri.fmt(f), } } } diff --git a/crates/ty_server/src/document/notebook.rs b/crates/ty_server/src/document/notebook.rs index 2616cffd70..d1e07648e2 100644 --- a/crates/ty_server/src/document/notebook.rs +++ b/crates/ty_server/src/document/notebook.rs @@ -3,9 +3,8 @@ use lsp_types::NotebookCellKind; use ruff_notebook::CellMetadata; use rustc_hash::{FxBuildHasher, FxHashMap}; -use crate::{PositionEncoding, TextDocument}; - use super::DocumentVersion; +use crate::{PositionEncoding, TextDocument}; pub(super) type CellId = usize; @@ -13,16 +12,25 @@ pub(super) type CellId = usize; /// contents are internally represented by [`TextDocument`]s. #[derive(Clone, Debug)] pub struct NotebookDocument { + url: lsp_types::Url, cells: Vec, metadata: ruff_notebook::RawNotebookMetadata, version: DocumentVersion, // Used to quickly find the index of a cell for a given URL. 
- cell_index: FxHashMap, + cell_index: FxHashMap, } /// A single cell within a notebook, which has text contents represented as a `TextDocument`. #[derive(Clone, Debug)] struct NotebookCell { + /// The URL uniquely identifying the cell. + /// + /// > Cell text documents have a URI, but servers should not rely on any + /// > format for this URI, since it is up to the client on how it will + /// > create these URIs. The URIs must be unique across ALL notebook + /// > cells and can therefore be used to uniquely identify a notebook cell + /// > or the cell’s text document. + /// > url: lsp_types::Url, kind: NotebookCellKind, document: TextDocument, @@ -30,32 +38,45 @@ struct NotebookCell { impl NotebookDocument { pub fn new( - version: DocumentVersion, + url: lsp_types::Url, + notebook_version: DocumentVersion, cells: Vec, metadata: serde_json::Map, cell_documents: Vec, ) -> crate::Result { - let mut cell_contents: FxHashMap<_, _> = cell_documents - .into_iter() - .map(|document| (document.uri, document.text)) - .collect(); + let mut cells: Vec<_> = cells.into_iter().map(NotebookCell::empty).collect(); - let cells: Vec<_> = cells - .into_iter() - .map(|cell| { - let contents = cell_contents.remove(&cell.document).unwrap_or_default(); - NotebookCell::new(cell, contents, version) - }) - .collect(); + let cell_index = Self::make_cell_index(&cells); + + for cell_document in cell_documents { + let index = cell_index + .get(cell_document.uri.as_str()) + .copied() + .ok_or_else(|| { + anyhow::anyhow!( + "Received content for cell `{}` that isn't present in the metadata", + cell_document.uri + ) + })?; + + cells[index].document = + TextDocument::new(cell_document.uri, cell_document.text, cell_document.version) + .with_language_id(&cell_document.language_id); + } Ok(Self { - version, - cell_index: Self::make_cell_index(cells.as_slice()), - metadata: serde_json::from_value(serde_json::Value::Object(metadata))?, + url, + version: notebook_version, + cell_index, cells, + metadata: 
serde_json::from_value(serde_json::Value::Object(metadata))?, }) } + pub(crate) fn url(&self) -> &lsp_types::Url { + &self.url + } + /// Generates a pseudo-representation of a notebook that lacks per-cell metadata and contextual information /// but should still work with Ruff's linter. pub fn make_ruff_notebook(&self) -> ruff_notebook::Notebook { @@ -127,7 +148,7 @@ impl NotebookDocument { // First, delete the cells and remove them from the index. if delete > 0 { for cell in self.cells.drain(start..start + delete) { - self.cell_index.remove(&cell.url); + self.cell_index.remove(cell.url.as_str()); deleted_cells.insert(cell.url, cell.document); } } @@ -150,7 +171,7 @@ impl NotebookDocument { // Third, register the new cells in the index and update existing ones that came // after the insertion. for (index, cell) in self.cells.iter().enumerate().skip(start) { - self.cell_index.insert(cell.url.clone(), index); + self.cell_index.insert(cell.url.to_string(), index); } // Finally, update the text document that represents the cell with the actual @@ -158,8 +179,9 @@ impl NotebookDocument { // `cell_index` are updated before we start applying the changes to the cells. 
if let Some(did_open) = structure.did_open { for cell_text_document in did_open { - if let Some(cell) = self.cell_by_uri_mut(&cell_text_document.uri) { + if let Some(cell) = self.cell_by_uri_mut(cell_text_document.uri.as_str()) { cell.document = TextDocument::new( + cell_text_document.uri, cell_text_document.text, cell_text_document.version, ); @@ -170,7 +192,7 @@ impl NotebookDocument { if let Some(cell_data) = data { for cell in cell_data { - if let Some(existing_cell) = self.cell_by_uri_mut(&cell.document) { + if let Some(existing_cell) = self.cell_by_uri_mut(cell.document.as_str()) { existing_cell.kind = cell.kind; } } @@ -178,7 +200,7 @@ impl NotebookDocument { if let Some(content_changes) = text_content { for content_change in content_changes { - if let Some(cell) = self.cell_by_uri_mut(&content_change.document.uri) { + if let Some(cell) = self.cell_by_uri_mut(content_change.document.uri.as_str()) { cell.document .apply_changes(content_change.changes, version, encoding); } @@ -204,7 +226,8 @@ impl NotebookDocument { } /// Get the text document representing the contents of a cell by the cell URI. - pub(crate) fn cell_document_by_uri(&self, uri: &lsp_types::Url) -> Option<&TextDocument> { + #[expect(unused)] + pub(crate) fn cell_document_by_uri(&self, uri: &str) -> Option<&TextDocument> { self.cells .get(*self.cell_index.get(uri)?) .map(|cell| &cell.document) @@ -215,29 +238,41 @@ impl NotebookDocument { self.cells.iter().map(|cell| &cell.url) } - fn cell_by_uri_mut(&mut self, uri: &lsp_types::Url) -> Option<&mut NotebookCell> { + fn cell_by_uri_mut(&mut self, uri: &str) -> Option<&mut NotebookCell> { self.cells.get_mut(*self.cell_index.get(uri)?) 
} - fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap { + fn make_cell_index(cells: &[NotebookCell]) -> FxHashMap { let mut index = FxHashMap::with_capacity_and_hasher(cells.len(), FxBuildHasher); for (i, cell) in cells.iter().enumerate() { - index.insert(cell.url.clone(), i); + index.insert(cell.url.to_string(), i); } index } } impl NotebookCell { + pub(crate) fn empty(cell: lsp_types::NotebookCell) -> Self { + Self { + kind: cell.kind, + document: TextDocument::new( + cell.document.clone(), + String::new(), + DocumentVersion::default(), + ), + url: cell.document, + } + } + pub(crate) fn new( cell: lsp_types::NotebookCell, contents: String, version: DocumentVersion, ) -> Self { Self { + document: TextDocument::new(cell.document.clone(), contents, version), url: cell.document, kind: cell.kind, - document: TextDocument::new(contents, version), } } } @@ -294,7 +329,14 @@ mod tests { } } - NotebookDocument::new(0, cells, serde_json::Map::default(), cell_documents).unwrap() + NotebookDocument::new( + lsp_types::Url::parse("file://test.ipynb").unwrap(), + 0, + cells, + serde_json::Map::default(), + cell_documents, + ) + .unwrap() } /// This test case checks that for a notebook with three code cells, when the client sends a diff --git a/crates/ty_server/src/document/text_document.rs b/crates/ty_server/src/document/text_document.rs index e5d00ff0cf..9898dd670b 100644 --- a/crates/ty_server/src/document/text_document.rs +++ b/crates/ty_server/src/document/text_document.rs @@ -1,4 +1,4 @@ -use lsp_types::TextDocumentContentChangeEvent; +use lsp_types::{TextDocumentContentChangeEvent, Url}; use ruff_source_file::LineIndex; use crate::PositionEncoding; @@ -11,6 +11,9 @@ pub(crate) type DocumentVersion = i32; /// with changes made by the user, including unsaved changes. #[derive(Debug, Clone)] pub struct TextDocument { + /// The URL as sent by the client + url: Url, + /// The string contents of the document. contents: String, /// A computed line index for the document. 
This should always reflect @@ -40,9 +43,10 @@ impl From<&str> for LanguageId { } impl TextDocument { - pub fn new(contents: String, version: DocumentVersion) -> Self { + pub fn new(url: Url, contents: String, version: DocumentVersion) -> Self { let index = LineIndex::from_source_text(&contents); Self { + url, contents, index, version, @@ -60,6 +64,10 @@ impl TextDocument { self.contents } + pub(crate) fn url(&self) -> &Url { + &self.url + } + pub fn contents(&self) -> &str { &self.contents } @@ -154,11 +162,12 @@ impl TextDocument { #[cfg(test)] mod tests { use crate::{PositionEncoding, TextDocument}; - use lsp_types::{Position, TextDocumentContentChangeEvent}; + use lsp_types::{Position, TextDocumentContentChangeEvent, Url}; #[test] fn redo_edit() { let mut document = TextDocument::new( + Url::parse("file:///test").unwrap(), r#"""" 测试comment 一些测试内容 diff --git a/crates/ty_server/src/lib.rs b/crates/ty_server/src/lib.rs index a56a95cb38..374c8421cf 100644 --- a/crates/ty_server/src/lib.rs +++ b/crates/ty_server/src/lib.rs @@ -8,7 +8,7 @@ pub use crate::logging::{LogLevel, init_logging}; pub use crate::server::{PartialWorkspaceProgress, PartialWorkspaceProgressParams, Server}; pub use crate::session::{ClientOptions, DiagnosticMode}; pub use document::{NotebookDocument, PositionEncoding, TextDocument}; -pub(crate) use session::{DocumentQuery, Session}; +pub(crate) use session::Session; mod capabilities; mod document; diff --git a/crates/ty_server/src/server/api.rs b/crates/ty_server/src/server/api.rs index 6fd1cde43a..a56866791b 100644 --- a/crates/ty_server/src/server/api.rs +++ b/crates/ty_server/src/server/api.rs @@ -1,6 +1,5 @@ use crate::server::schedule::Task; use crate::session::Session; -use crate::system::AnySystemPath; use anyhow::anyhow; use lsp_server as server; use lsp_server::RequestId; @@ -208,7 +207,7 @@ where // SAFETY: The `snapshot` is safe to move across the unwind boundary because it is not used // after unwinding. 
- let snapshot = AssertUnwindSafe(session.take_session_snapshot()); + let snapshot = AssertUnwindSafe(session.snapshot_session()); Box::new(move |client| { let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered(); @@ -253,10 +252,10 @@ where .cancellation_token(&id) .expect("request should have been tested for cancellation before scheduling"); - let url = R::document_url(¶ms).into_owned(); + let url = R::document_url(¶ms); - let Ok(path) = AnySystemPath::try_from_url(&url) else { - let reason = format!("URL `{url}` isn't a valid system path"); + let Ok(document) = session.snapshot_document(&url) else { + let reason = format!("Document {url} is not open in the session"); tracing::warn!( "Ignoring request id={id} method={} because {reason}", R::METHOD @@ -274,8 +273,8 @@ where }); }; + let path = document.to_file_path(); let db = session.project_db(&path).clone(); - let snapshot = session.take_document_snapshot(url); Box::new(move |client| { let _span = tracing::debug_span!("request", %id, method = R::METHOD).entered(); @@ -294,7 +293,7 @@ where } if let Err(error) = ruff_db::panic::catch_unwind(|| { - R::handle_request(&id, &db, snapshot, client, params); + R::handle_request(&id, &db, document, client, params); }) { panic_response::(&id, client, &error, retry); } @@ -371,7 +370,15 @@ where let (id, params) = cast_notification::(req)?; Ok(Task::background(schedule, move |session: &Session| { let url = N::document_url(¶ms); - let snapshot = session.take_document_snapshot((*url).clone()); + let Ok(snapshot) = session.snapshot_document(&url) else { + let reason = format!("Document {url} is not open in the session"); + tracing::warn!( + "Ignoring notification id={id} method={} because {reason}", + N::METHOD + ); + return Box::new(|_| {}); + }; + Box::new(move |client| { let _span = tracing::debug_span!("notification", method = N::METHOD).entered(); diff --git a/crates/ty_server/src/server/api/diagnostics.rs 
b/crates/ty_server/src/server/api/diagnostics.rs index d43b176a9b..7680dc1bad 100644 --- a/crates/ty_server/src/server/api/diagnostics.rs +++ b/crates/ty_server/src/server/api/diagnostics.rs @@ -13,16 +13,16 @@ use ruff_db::source::{line_index, source_text}; use ruff_db::system::SystemPathBuf; use ty_project::{Db, ProjectDatabase}; -use crate::document::{DocumentKey, FileRangeExt, ToRangeExt}; +use crate::document::{FileRangeExt, ToRangeExt}; use crate::session::DocumentSnapshot; use crate::session::client::Client; use crate::system::{AnySystemPath, file_to_url}; -use crate::{DocumentQuery, PositionEncoding, Session}; +use crate::{NotebookDocument, PositionEncoding, Session}; pub(super) struct Diagnostics<'a> { items: Vec, encoding: PositionEncoding, - document: &'a DocumentQuery, + notebook: Option<&'a NotebookDocument>, } impl Diagnostics<'_> { @@ -53,7 +53,7 @@ impl Diagnostics<'_> { } pub(super) fn to_lsp_diagnostics(&self, db: &ProjectDatabase) -> LspDiagnostics { - if let Some(notebook) = self.document.as_notebook() { + if let Some(notebook) = self.notebook { let mut cell_diagnostics: FxHashMap> = FxHashMap::default(); // Populates all relevant URLs with an empty diagnostic list. This ensures that documents @@ -115,23 +115,18 @@ impl LspDiagnostics { } } -/// Clears the diagnostics for the document identified by `key`. +/// Clears the diagnostics for the document identified by `uri`. /// /// This is done by notifying the client with an empty list of diagnostics for the document. /// For notebook cells, this clears diagnostics for the specific cell. /// For other document types, this clears diagnostics for the main document. 
-pub(super) fn clear_diagnostics(session: &Session, key: &DocumentKey, client: &Client) { +pub(super) fn clear_diagnostics(session: &Session, uri: &lsp_types::Url, client: &Client) { if session.client_capabilities().supports_pull_diagnostics() { return; } - let Some(uri) = key.to_url() else { - // If we can't convert to URL, we can't clear diagnostics - return; - }; - client.send_notification::(PublishDiagnosticsParams { - uri, + uri: uri.clone(), diagnostics: vec![], version: None, }); @@ -143,18 +138,12 @@ pub(super) fn clear_diagnostics(session: &Session, key: &DocumentKey, client: &C /// This function is a no-op if the client supports pull diagnostics. /// /// [publish diagnostics notification]: https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_publishDiagnostics -pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: &Client) { +pub(super) fn publish_diagnostics(session: &Session, url: &lsp_types::Url, client: &Client) { if session.client_capabilities().supports_pull_diagnostics() { return; } - let Some(url) = key.to_url() else { - return; - }; - - let snapshot = session.take_document_snapshot(url.clone()); - - let document = match snapshot.document() { + let snapshot = match session.snapshot_document(url) { Ok(document) => document, Err(err) => { tracing::debug!("Failed to resolve document for URL `{}`: {}", url, err); @@ -162,7 +151,7 @@ pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: } }; - let db = session.project_db(key.path()); + let db = session.project_db(&snapshot.to_file_path()); let Some(diagnostics) = compute_diagnostics(db, &snapshot) else { return; @@ -173,13 +162,13 @@ pub(super) fn publish_diagnostics(session: &Session, key: &DocumentKey, client: client.send_notification::(PublishDiagnosticsParams { uri, diagnostics, - version: Some(document.version()), + version: Some(snapshot.document().version()), }); }; match 
diagnostics.to_lsp_diagnostics(db) { LspDiagnostics::TextDocument(diagnostics) => { - publish_diagnostics_notification(url, diagnostics); + publish_diagnostics_notification(url.clone(), diagnostics); } LspDiagnostics::NotebookDocument(cell_diagnostics) => { for (cell_url, diagnostics) in cell_diagnostics { @@ -264,16 +253,11 @@ pub(super) fn compute_diagnostics<'a>( db: &ProjectDatabase, snapshot: &'a DocumentSnapshot, ) -> Option> { - let document = match snapshot.document() { - Ok(document) => document, - Err(err) => { - tracing::info!("Failed to resolve document for snapshot: {}", err); - return None; - } - }; - - let Some(file) = document.file(db) else { - tracing::info!("No file found for snapshot for `{}`", document.file_path()); + let Some(file) = snapshot.to_file(db) else { + tracing::info!( + "No file found for snapshot for `{}`", + snapshot.to_file_path() + ); return None; }; @@ -282,7 +266,7 @@ pub(super) fn compute_diagnostics<'a>( Some(Diagnostics { items: diagnostics, encoding: snapshot.encoding(), - document, + notebook: snapshot.notebook(), }) } diff --git a/crates/ty_server/src/server/api/notifications/did_change.rs b/crates/ty_server/src/server/api/notifications/did_change.rs index 68f6f883e0..3cb52c3daa 100644 --- a/crates/ty_server/src/server/api/notifications/did_change.rs +++ b/crates/ty_server/src/server/api/notifications/did_change.rs @@ -28,19 +28,16 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler { content_changes, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .update_text_document(&key, content_changes, version) + let document = session + .document_handle(&uri) .with_failure_code(ErrorCode::InternalError)?; - let changes = match key.path() { + document + .update_text_document(session, content_changes, version) + .with_failure_code(ErrorCode::InternalError)?; + + let 
path = document.to_file_path(); + let changes = match &*path { AnySystemPath::System(system_path) => { vec![ChangeEvent::file_content_changed(system_path.clone())] } @@ -49,9 +46,9 @@ impl SyncNotificationHandler for DidChangeTextDocumentHandler { } }; - session.apply_changes(key.path(), changes); + session.apply_changes(&path, changes); - publish_diagnostics(session, &key, client); + publish_diagnostics(session, document.url(), client); Ok(()) } diff --git a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs index 21285f461f..ce55100dee 100644 --- a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs +++ b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs @@ -1,3 +1,4 @@ +use crate::document::DocumentKey; use crate::server::Result; use crate::server::api::diagnostics::{publish_diagnostics, publish_settings_diagnostics}; use crate::server::api::traits::{NotificationHandler, SyncNotificationHandler}; @@ -25,16 +26,8 @@ impl SyncNotificationHandler for DidChangeWatchedFiles { let mut events_by_db: FxHashMap<_, Vec> = FxHashMap::default(); for change in params.changes { - let path = match AnySystemPath::try_from_url(&change.uri) { - Ok(path) => path, - Err(err) => { - tracing::warn!( - "Failed to convert URI '{}` to system path: {err:?}", - change.uri - ); - continue; - } - }; + let key = DocumentKey::from_url(&change.uri); + let path = key.to_file_path(); let system_path = match path { AnySystemPath::System(system) => system, @@ -99,8 +92,8 @@ impl SyncNotificationHandler for DidChangeWatchedFiles { |_, ()| {}, ); } else { - for key in session.text_document_keys() { - publish_diagnostics(session, &key, client); + for key in session.text_document_handles() { + publish_diagnostics(session, key.url(), client); } } // TODO: always publish diagnostics for notebook files (since they don't use pull diagnostics) diff --git 
a/crates/ty_server/src/server/api/notifications/did_close.rs b/crates/ty_server/src/server/api/notifications/did_close.rs index 60097df67b..5c5747ee05 100644 --- a/crates/ty_server/src/server/api/notifications/did_close.rs +++ b/crates/ty_server/src/server/api/notifications/did_close.rs @@ -27,22 +27,20 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler { text_document: TextDocumentIdentifier { uri }, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .close_document(&key) + let document = session + .document_handle(&uri) .with_failure_code(ErrorCode::InternalError)?; - let path = key.path(); - let db = session.project_db_mut(path); + let path = document.to_file_path().into_owned(); + let url = document.url().clone(); - match path { + document + .close(session) + .with_failure_code(ErrorCode::InternalError)?; + + let db = session.project_db_mut(&path); + + match &path { AnySystemPath::System(system_path) => { if let Some(file) = db.files().try_system(db, system_path) { db.project().close_file(db, file); @@ -65,7 +63,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler { .diagnostic_mode() .is_open_files_only() { - clear_diagnostics(session, &key, client); + clear_diagnostics(session, &url, client); } } AnySystemPath::SystemVirtual(virtual_path) => { @@ -78,7 +76,7 @@ impl SyncNotificationHandler for DidCloseTextDocumentHandler { // Always clear diagnostics for virtual files, as they don't really exist on disk // which means closing them is like deleting the file. 
- clear_diagnostics(session, &key, client); + clear_diagnostics(session, &url, client); } } diff --git a/crates/ty_server/src/server/api/notifications/did_close_notebook.rs b/crates/ty_server/src/server/api/notifications/did_close_notebook.rs index f934f6832e..9b03651496 100644 --- a/crates/ty_server/src/server/api/notifications/did_close_notebook.rs +++ b/crates/ty_server/src/server/api/notifications/did_close_notebook.rs @@ -26,21 +26,19 @@ impl SyncNotificationHandler for DidCloseNotebookHandler { .. } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; - - session - .close_document(&key) + let document = session + .document_handle(&uri) .with_failure_code(lsp_server::ErrorCode::InternalError)?; - if let AnySystemPath::SystemVirtual(virtual_path) = key.path() { + let path = document.to_file_path().into_owned(); + + document + .close(session) + .with_failure_code(lsp_server::ErrorCode::InternalError)?; + + if let AnySystemPath::SystemVirtual(virtual_path) = &path { session.apply_changes( - key.path(), + &path, vec![ChangeEvent::DeletedVirtual(virtual_path.clone())], ); } diff --git a/crates/ty_server/src/server/api/notifications/did_open.rs b/crates/ty_server/src/server/api/notifications/did_open.rs index 5647bb2781..b2561e9c6c 100644 --- a/crates/ty_server/src/server/api/notifications/did_open.rs +++ b/crates/ty_server/src/server/api/notifications/did_open.rs @@ -35,30 +35,23 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler { }, } = params; - let key = match session.key_from_url(uri) { - Ok(key) => key, - Err(uri) => { - tracing::debug!("Failed to create document key from URI: {}", uri); - return Ok(()); - } - }; + let document = session.open_text_document( + TextDocument::new(uri, text, version).with_language_id(&language_id), + ); - let document = TextDocument::new(text, version).with_language_id(&language_id); - 
session.open_text_document(key.path(), document); - - let path = key.path(); + let path = document.to_file_path(); // This is a "maybe" because the `File` might've not been interned yet i.e., the // `try_system` call will return `None` which doesn't mean that the file is new, it's just // that the server didn't need the file yet. let is_maybe_new_system_file = path.as_system().is_some_and(|system_path| { - let db = session.project_db(path); + let db = session.project_db(&path); db.files() .try_system(db, system_path) .is_none_or(|file| !file.exists(db)) }); - match path { + match &*path { AnySystemPath::System(system_path) => { let event = if is_maybe_new_system_file { ChangeEvent::Created { @@ -68,22 +61,22 @@ impl SyncNotificationHandler for DidOpenTextDocumentHandler { } else { ChangeEvent::Opened(system_path.clone()) }; - session.apply_changes(path, vec![event]); + session.apply_changes(&path, vec![event]); - let db = session.project_db_mut(path); + let db = session.project_db_mut(&path); match system_path_to_file(db, system_path) { Ok(file) => db.project().open_file(db, file), Err(err) => tracing::warn!("Failed to open file {system_path}: {err}"), } } AnySystemPath::SystemVirtual(virtual_path) => { - let db = session.project_db_mut(path); + let db = session.project_db_mut(&path); let virtual_file = db.files().virtual_file(db, virtual_path); db.project().open_file(db, virtual_file.file()); } } - publish_diagnostics(session, &key, client); + publish_diagnostics(session, document.url(), client); Ok(()) } diff --git a/crates/ty_server/src/server/api/notifications/did_open_notebook.rs b/crates/ty_server/src/server/api/notifications/did_open_notebook.rs index 201add9587..b61f2aeef6 100644 --- a/crates/ty_server/src/server/api/notifications/did_open_notebook.rs +++ b/crates/ty_server/src/server/api/notifications/did_open_notebook.rs @@ -25,20 +25,27 @@ impl SyncNotificationHandler for DidOpenNotebookHandler { _client: &Client, params: DidOpenNotebookDocumentParams, ) 
-> Result<()> { - let Ok(path) = AnySystemPath::try_from_url(¶ms.notebook_document.uri) else { - return Ok(()); - }; + let lsp_types::NotebookDocument { + version, + cells, + metadata, + uri: notebook_uri, + .. + } = params.notebook_document; let notebook = NotebookDocument::new( - params.notebook_document.version, - params.notebook_document.cells, - params.notebook_document.metadata.unwrap_or_default(), + notebook_uri, + version, + cells, + metadata.unwrap_or_default(), params.cell_text_documents, ) .with_failure_code(ErrorCode::InternalError)?; - session.open_notebook_document(&path, notebook); - match &path { + let document = session.open_notebook_document(notebook); + let path = document.to_file_path(); + + match &*path { AnySystemPath::System(system_path) => { session.apply_changes(&path, vec![ChangeEvent::Opened(system_path.clone())]); } diff --git a/crates/ty_server/src/server/api/requests/completion.rs b/crates/ty_server/src/server/api/requests/completion.rs index a3e7d91f94..bf712c5efb 100644 --- a/crates/ty_server/src/server/api/requests/completion.rs +++ b/crates/ty_server/src/server/api/requests/completion.rs @@ -45,7 +45,7 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/doc_highlights.rs b/crates/ty_server/src/server/api/requests/doc_highlights.rs index 9750bdc190..b5b6d0d9ab 100644 --- a/crates/ty_server/src/server/api/requests/doc_highlights.rs +++ b/crates/ty_server/src/server/api/requests/doc_highlights.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/document_symbols.rs 
b/crates/ty_server/src/server/api/requests/document_symbols.rs index 46c4c3eb2e..ea5ee312c6 100644 --- a/crates/ty_server/src/server/api/requests/document_symbols.rs +++ b/crates/ty_server/src/server/api/requests/document_symbols.rs @@ -39,7 +39,7 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/execute_command.rs b/crates/ty_server/src/server/api/requests/execute_command.rs index 8a2fc52fd1..a51ece8598 100644 --- a/crates/ty_server/src/server/api/requests/execute_command.rs +++ b/crates/ty_server/src/server/api/requests/execute_command.rs @@ -52,7 +52,7 @@ fn debug_information(session: &Session) -> crate::Result { writeln!( buffer, "Open text documents: {}", - session.text_document_keys().count() + session.text_document_handles().count() )?; writeln!(buffer)?; diff --git a/crates/ty_server/src/server/api/requests/goto_declaration.rs b/crates/ty_server/src/server/api/requests/goto_declaration.rs index 07444746f7..1c16a74bc5 100644 --- a/crates/ty_server/src/server/api/requests/goto_declaration.rs +++ b/crates/ty_server/src/server/api/requests/goto_declaration.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for GotoDeclarationRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/goto_definition.rs b/crates/ty_server/src/server/api/requests/goto_definition.rs index 793ae54bf1..bc33411778 100644 --- a/crates/ty_server/src/server/api/requests/goto_definition.rs +++ b/crates/ty_server/src/server/api/requests/goto_definition.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for GotoDefinitionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = 
snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/goto_references.rs b/crates/ty_server/src/server/api/requests/goto_references.rs index 129afcecdc..3afaf28b14 100644 --- a/crates/ty_server/src/server/api/requests/goto_references.rs +++ b/crates/ty_server/src/server/api/requests/goto_references.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for ReferencesRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/goto_type_definition.rs b/crates/ty_server/src/server/api/requests/goto_type_definition.rs index 5695c5a6ab..379defa344 100644 --- a/crates/ty_server/src/server/api/requests/goto_type_definition.rs +++ b/crates/ty_server/src/server/api/requests/goto_type_definition.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/hover.rs b/crates/ty_server/src/server/api/requests/hover.rs index be81eca472..cc8f8e0dab 100644 --- a/crates/ty_server/src/server/api/requests/hover.rs +++ b/crates/ty_server/src/server/api/requests/hover.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/inlay_hints.rs b/crates/ty_server/src/server/api/requests/inlay_hints.rs index ec8464fc6b..21eb1d09b6 100644 --- a/crates/ty_server/src/server/api/requests/inlay_hints.rs +++ b/crates/ty_server/src/server/api/requests/inlay_hints.rs @@ -36,7 +36,7 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler { return Ok(None); } - let Some(file) 
= snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/prepare_rename.rs b/crates/ty_server/src/server/api/requests/prepare_rename.rs index 7f11961bee..a12541729d 100644 --- a/crates/ty_server/src/server/api/requests/prepare_rename.rs +++ b/crates/ty_server/src/server/api/requests/prepare_rename.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for PrepareRenameRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/rename.rs b/crates/ty_server/src/server/api/requests/rename.rs index 117891ebba..d434cb733e 100644 --- a/crates/ty_server/src/server/api/requests/rename.rs +++ b/crates/ty_server/src/server/api/requests/rename.rs @@ -38,7 +38,7 @@ impl BackgroundDocumentRequestHandler for RenameRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/selection_range.rs b/crates/ty_server/src/server/api/requests/selection_range.rs index 684b230cd3..516ea6aeda 100644 --- a/crates/ty_server/src/server/api/requests/selection_range.rs +++ b/crates/ty_server/src/server/api/requests/selection_range.rs @@ -37,7 +37,7 @@ impl BackgroundDocumentRequestHandler for SelectionRangeRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens.rs b/crates/ty_server/src/server/api/requests/semantic_tokens.rs index 58f245d4ae..adc6142189 100644 --- a/crates/ty_server/src/server/api/requests/semantic_tokens.rs +++ b/crates/ty_server/src/server/api/requests/semantic_tokens.rs @@ -33,7 +33,7 @@ impl BackgroundDocumentRequestHandler for 
SemanticTokensRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs index 6112405249..03193b32a6 100644 --- a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs +++ b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs @@ -35,7 +35,7 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRangeRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/signature_help.rs b/crates/ty_server/src/server/api/requests/signature_help.rs index e9b9f160b6..f9b20cccd9 100644 --- a/crates/ty_server/src/server/api/requests/signature_help.rs +++ b/crates/ty_server/src/server/api/requests/signature_help.rs @@ -39,7 +39,7 @@ impl BackgroundDocumentRequestHandler for SignatureHelpRequestHandler { return Ok(None); } - let Some(file) = snapshot.file(db) else { + let Some(file) = snapshot.to_file(db) else { return Ok(None); }; diff --git a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs index c990d4f4af..2d37436116 100644 --- a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs +++ b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs @@ -1,4 +1,5 @@ use crate::PositionEncoding; +use crate::document::DocumentKey; use crate::server::api::diagnostics::{Diagnostics, to_lsp_diagnostic}; use crate::server::api::traits::{ BackgroundRequestHandler, RequestHandler, RetriableRequestHandler, @@ -8,7 +9,7 @@ use crate::server::{Action, Result}; use crate::session::client::Client; use crate::session::index::Index; use crate::session::{SessionSnapshot, 
SuspendedWorkspaceDiagnosticRequest}; -use crate::system::{AnySystemPath, file_to_url}; +use crate::system::file_to_url; use lsp_server::RequestId; use lsp_types::request::WorkspaceDiagnosticRequest; use lsp_types::{ @@ -317,7 +318,7 @@ struct ResponseWriter<'a> { // It's important that we use `AnySystemPath` over `Url` here because // `file_to_url` isn't guaranteed to return the exact same URL as the one provided // by the client. - previous_result_ids: FxHashMap, + previous_result_ids: FxHashMap, } impl<'a> ResponseWriter<'a> { @@ -346,12 +347,7 @@ impl<'a> ResponseWriter<'a> { let previous_result_ids = previous_result_ids .into_iter() - .filter_map(|prev| { - Some(( - AnySystemPath::try_from_url(&prev.uri).ok()?, - (prev.uri, prev.value), - )) - }) + .map(|prev| (DocumentKey::from_url(&prev.uri), (prev.uri, prev.value))) .collect(); Self { @@ -367,20 +363,16 @@ impl<'a> ResponseWriter<'a> { tracing::debug!("Failed to convert file path to URL at {}", file.path(db)); return; }; - + let key = DocumentKey::from_url(&url); let version = self .index - .key_from_url(url.clone()) - .ok() - .and_then(|key| self.index.make_document_ref(key).ok()) - .map(|doc| i64::from(doc.version())); + .document_handle(&url) + .map(|doc| i64::from(doc.version())) + .ok(); let result_id = Diagnostics::result_id_from_hash(diagnostics); - let previous_result_id = AnySystemPath::try_from_url(&url) - .ok() - .and_then(|path| self.previous_result_ids.remove(&path)) - .map(|(_url, id)| id); + let previous_result_id = self.previous_result_ids.remove(&key).map(|(_url, id)| id); let report = match result_id { Some(new_id) if Some(&new_id) == previous_result_id.as_ref() => { @@ -444,13 +436,12 @@ impl<'a> ResponseWriter<'a> { // Handle files that had diagnostics in previous request but no longer have any // Any remaining entries in previous_results are files that were fixed - for (previous_url, previous_result_id) in self.previous_result_ids.into_values() { + for (key, (previous_url, 
previous_result_id)) in self.previous_result_ids { // This file had diagnostics before but doesn't now, so we need to report it as having no diagnostics let version = self .index - .key_from_url(previous_url.clone()) + .document(&key) .ok() - .and_then(|key| self.index.make_document_ref(key).ok()) .map(|doc| i64::from(doc.version())); let new_result_id = Diagnostics::result_id_from_hash(&[]); diff --git a/crates/ty_server/src/session.rs b/crates/ty_server/src/session.rs index 24ad0ef55e..c5daec77e3 100644 --- a/crates/ty_server/src/session.rs +++ b/crates/ty_server/src/session.rs @@ -1,7 +1,7 @@ //! Data model, state management, and configuration resolution. use anyhow::{Context, anyhow}; -use index::DocumentQueryError; +use index::DocumentError; use lsp_server::{Message, RequestId}; use lsp_types::notification::{DidChangeWatchedFiles, Exit, Notification}; use lsp_types::request::{ @@ -15,8 +15,9 @@ use lsp_types::{ }; use options::GlobalOptions; use ruff_db::Db; -use ruff_db::files::File; +use ruff_db::files::{File, system_path_to_file}; use ruff_db::system::{System, SystemPath, SystemPathBuf}; +use std::borrow::Cow; use std::collections::{BTreeMap, HashSet, VecDeque}; use std::ops::{Deref, DerefMut}; use std::panic::RefUnwindSafe; @@ -26,7 +27,6 @@ use ty_project::metadata::Options; use ty_project::watch::ChangeEvent; use ty_project::{ChangeResult, CheckMode, Db as _, ProjectDatabase, ProjectMetadata}; -pub(crate) use self::index::DocumentQuery; pub(crate) use self::options::InitializationOptions; pub use self::options::{ClientOptions, DiagnosticMode}; pub(crate) use self::settings::{GlobalSettings, WorkspaceSettings}; @@ -439,13 +439,6 @@ impl Session { self.projects.values_mut().chain(default_project) } - /// Returns the [`DocumentKey`] for the given URL. - /// - /// Refer to [`Index::key_from_url`] for more details. 
- pub(crate) fn key_from_url(&self, url: Url) -> Result { - self.index().key_from_url(url) - } - pub(crate) fn initialize_workspaces( &mut self, workspace_settings: Vec<(Url, ClientOptions)>, @@ -819,25 +812,34 @@ impl Session { } /// Creates a document snapshot with the URL referencing the document to snapshot. - pub(crate) fn take_document_snapshot(&self, url: Url) -> DocumentSnapshot { - let key = self - .key_from_url(url) - .map_err(DocumentQueryError::InvalidUrl); - DocumentSnapshot { + pub(crate) fn snapshot_document(&self, url: &Url) -> Result { + let index = self.index(); + let document_handle = index.document_handle(url)?; + + let notebook = if let Some(notebook_path) = &document_handle.notebook_path { + index + .notebook_arc(&DocumentKey::from(notebook_path.clone())) + .ok() + } else { + None + }; + + Ok(DocumentSnapshot { resolved_client_capabilities: self.resolved_client_capabilities, global_settings: self.global_settings.clone(), - workspace_settings: key - .as_ref() - .ok() - .and_then(|key| self.workspaces.settings_for_path(key.path().as_system()?)) + workspace_settings: document_handle + .to_file_path() + .as_system() + .and_then(|path| self.workspaces.settings_for_path(path)) .unwrap_or_else(|| Arc::new(WorkspaceSettings::default())), position_encoding: self.position_encoding, - document_query_result: key.and_then(|key| self.index().make_document_ref(key)), - } + document: document_handle, + notebook, + }) } /// Creates a snapshot of the current state of the [`Session`]. - pub(crate) fn take_session_snapshot(&self) -> SessionSnapshot { + pub(crate) fn snapshot_session(&self) -> SessionSnapshot { SessionSnapshot { projects: self .projects @@ -855,56 +857,49 @@ impl Session { } /// Iterates over the document keys for all open text documents. 
- pub(super) fn text_document_keys(&self) -> impl Iterator + '_ { + pub(super) fn text_document_handles(&self) -> impl Iterator + '_ { self.index() - .text_document_paths() - .map(|path| DocumentKey::Text(path.clone())) + .text_documents() + .map(|(key, document)| DocumentHandle { + key: key.clone(), + url: document.url().clone(), + version: document.version(), + // TODO: Set notebook path if text document is part of a notebook + notebook_path: None, + }) + } + + /// Returns a handle to the document specified by its URL. + /// + /// # Errors + /// + /// If the document is not found. + pub(crate) fn document_handle( + &self, + url: &lsp_types::Url, + ) -> Result { + self.index().document_handle(url) } /// Registers a notebook document at the provided `path`. /// If a document is already open here, it will be overwritten. - pub(crate) fn open_notebook_document( - &mut self, - path: &AnySystemPath, - document: NotebookDocument, - ) { - self.index_mut().open_notebook_document(path, document); + /// + /// Returns a handle to the opened document. + pub(crate) fn open_notebook_document(&mut self, document: NotebookDocument) -> DocumentHandle { + let handle = self.index_mut().open_notebook_document(document); self.bump_revision(); + handle } /// Registers a text document at the provided `path`. /// If a document is already open here, it will be overwritten. - pub(crate) fn open_text_document(&mut self, path: &AnySystemPath, document: TextDocument) { - self.index_mut().open_text_document(path, document); - self.bump_revision(); - } - - /// Updates a text document at the associated `key`. /// - /// The document key must point to a text document, or this will throw an error. 
- pub(crate) fn update_text_document( - &mut self, - key: &DocumentKey, - content_changes: Vec, - new_version: DocumentVersion, - ) -> crate::Result<()> { - let position_encoding = self.position_encoding; - self.index_mut().update_text_document( - key, - content_changes, - new_version, - position_encoding, - )?; - self.bump_revision(); - Ok(()) - } + /// Returns a handle to the opened document. + pub(crate) fn open_text_document(&mut self, document: TextDocument) -> DocumentHandle { + let handle = self.index_mut().open_text_document(document); - /// De-registers a document, specified by its key. - /// Calling this multiple times for the same document is a logic error. - pub(crate) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> { - self.index_mut().close_document(key)?; self.bump_revision(); - Ok(()) + handle } /// Returns a reference to the index. @@ -1003,7 +998,8 @@ pub(crate) struct DocumentSnapshot { global_settings: Arc, workspace_settings: Arc, position_encoding: PositionEncoding, - document_query_result: Result, + document: DocumentHandle, + notebook: Option>, } impl DocumentSnapshot { @@ -1028,27 +1024,28 @@ impl DocumentSnapshot { } /// Returns the result of the document query for this snapshot. 
- pub(crate) fn document(&self) -> Result<&DocumentQuery, &DocumentQueryError> { - self.document_query_result.as_ref() + pub(crate) fn document(&self) -> &DocumentHandle { + &self.document } - pub(crate) fn file(&self, db: &dyn Db) -> Option { - let document = match self.document() { - Ok(document) => document, - Err(err) => { - tracing::debug!("Failed to resolve file: {}", err); - return None; - } - }; - let file = document.file(db); + pub(crate) fn notebook(&self) -> Option<&NotebookDocument> { + self.notebook.as_deref() + } + + pub(crate) fn to_file(&self, db: &dyn Db) -> Option { + let file = self.document.to_file(db); if file.is_none() { tracing::debug!( - "Failed to resolve file: file not found for path `{}`", - document.file_path() + "Failed to resolve file: file not found for `{}`", + self.document.url() ); } file } + + pub(crate) fn to_file_path(&self) -> Cow<'_, AnySystemPath> { + self.document.to_file_path() + } } /// An immutable snapshot of the current state of [`Session`]. @@ -1320,3 +1317,90 @@ impl SuspendedWorkspaceDiagnosticRequest { None } } + +/// A handle to a document stored within [`Index`]. +/// +/// Allows identifying the document within the index but it also carries the URL used by the +/// client to reference the document as well as the version of the document. +/// +/// It also exposes methods to get the file-path of the corresponding ty-file. +#[derive(Clone, Debug)] +pub(crate) struct DocumentHandle { + /// The key that uniquely identifies this document in the index. + key: DocumentKey, + url: lsp_types::Url, + /// The path to the enclosing notebook file if this document is a notebook or a notebook cell. + notebook_path: Option, + version: DocumentVersion, +} + +impl DocumentHandle { + pub(crate) const fn version(&self) -> DocumentVersion { + self.version + } + + /// The URL as used by the client to reference this document. 
+    pub(crate) fn url(&self) -> &lsp_types::Url { + &self.url + } + + /// The path to the enclosing file for this document. + /// + /// This is the path corresponding to the URL, except for notebook cells where the + /// path corresponds to the notebook file. + pub(crate) fn to_file_path(&self) -> Cow<'_, AnySystemPath> { + if let Some(path) = self.notebook_path.as_ref() { + Cow::Borrowed(path) + } else { + Cow::Owned(self.key.to_file_path()) + } + } + + /// Returns the salsa interned [`File`] for the document selected by this query. + /// + /// It returns [`None`] for the following cases: + /// - For virtual file, if it's not yet opened + /// - For regular file, if it does not exist or is a directory + pub(crate) fn to_file(&self, db: &dyn Db) -> Option { + match &*self.to_file_path() { + AnySystemPath::System(path) => system_path_to_file(db, path).ok(), + AnySystemPath::SystemVirtual(virtual_path) => db + .files() + .try_virtual_file(virtual_path) + .map(|virtual_file| virtual_file.file()), + } + } + + pub(crate) fn update_text_document( + &self, + session: &mut Session, + content_changes: Vec, + new_version: DocumentVersion, + ) -> crate::Result<()> { + let position_encoding = session.position_encoding(); + let mut index = session.index_mut(); + + let document_mut = index.document_mut(&self.key)?; + + let Some(document) = document_mut.as_text_mut() else { + anyhow::bail!("Text document path does not point to a text document"); + }; + + if content_changes.is_empty() { + document.update_version(new_version); + return Ok(()); + } + + document.apply_changes(content_changes, new_version, position_encoding); + + Ok(()) + } + + /// De-registers a document, specified by its key. + /// Calling this multiple times for the same document is a logic error. 
+ pub(crate) fn close(self, session: &mut Session) -> crate::Result<()> { + session.index_mut().close_document(&self.key)?; + session.bump_revision(); + Ok(()) + } +} diff --git a/crates/ty_server/src/session/index.rs b/crates/ty_server/src/session/index.rs index 89d310f2ab..95cc515a35 100644 --- a/crates/ty_server/src/session/index.rs +++ b/crates/ty_server/src/session/index.rs @@ -1,24 +1,24 @@ use std::sync::Arc; -use lsp_types::Url; -use ruff_db::Db; -use ruff_db::files::{File, system_path_to_file}; -use rustc_hash::FxHashMap; - +use crate::document::DocumentKey; +use crate::session::DocumentHandle; use crate::{ PositionEncoding, TextDocument, - document::{DocumentKey, DocumentVersion, NotebookDocument}, + document::{DocumentVersion, NotebookDocument}, system::AnySystemPath, }; +use ruff_db::system::SystemVirtualPath; +use rustc_hash::FxHashMap; + /// Stores and tracks all open documents in a session, along with their associated settings. #[derive(Debug)] pub(crate) struct Index { /// Maps all document file paths to the associated document controller - documents: FxHashMap, + documents: FxHashMap, /// Maps opaque cell URLs to a notebook path (document) - notebook_cells: FxHashMap, + notebook_cells: FxHashMap, } impl Index { @@ -29,68 +29,55 @@ impl Index { } } - pub(super) fn text_document_paths(&self) -> impl Iterator + '_ { - self.documents - .iter() - .filter_map(|(path, doc)| doc.as_text().and(Some(path))) + pub(super) fn text_documents( + &self, + ) -> impl Iterator + '_ { + self.documents.iter().filter_map(|(key, doc)| { + let text_document = doc.as_text()?; + Some((key, text_document)) + }) + } + + pub(crate) fn document_handle( + &self, + url: &lsp_types::Url, + ) -> Result { + let key = DocumentKey::from_url(url); + let Some(document) = self.documents.get(&key) else { + return Err(DocumentError::NotFound(key)); + }; + + if let Some(path) = key.as_opaque() { + if let Some(notebook_path) = self.notebook_cells.get(path) { + return Ok(DocumentHandle { + 
key: key.clone(), + notebook_path: Some(notebook_path.clone()), + url: url.clone(), + version: document.version(), + }); + } + } + + Ok(DocumentHandle { + key: key.clone(), + notebook_path: None, + url: url.clone(), + version: document.version(), + }) } #[expect(dead_code)] - pub(super) fn notebook_document_paths(&self) -> impl Iterator + '_ { + pub(super) fn notebook_document_keys(&self) -> impl Iterator + '_ { self.documents .iter() .filter(|(_, doc)| doc.as_notebook().is_some()) - .map(|(path, _)| path) - } - - pub(super) fn update_text_document( - &mut self, - key: &DocumentKey, - content_changes: Vec, - new_version: DocumentVersion, - encoding: PositionEncoding, - ) -> crate::Result<()> { - let controller = self.document_controller_for_key(key)?; - let Some(document) = controller.as_text_mut() else { - anyhow::bail!("Text document path does not point to a text document"); - }; - - if content_changes.is_empty() { - document.update_version(new_version); - return Ok(()); - } - - document.apply_changes(content_changes, new_version, encoding); - - Ok(()) - } - - /// Returns the [`DocumentKey`] corresponding to the given URL. - /// - /// It returns [`Err`] with the original URL if it cannot be converted to a [`AnySystemPath`]. - pub(crate) fn key_from_url(&self, url: Url) -> Result { - if let Some(notebook_path) = self.notebook_cells.get(&url) { - Ok(DocumentKey::NotebookCell { - cell_url: url, - notebook_path: notebook_path.clone(), - }) - } else { - let path = AnySystemPath::try_from_url(&url).map_err(|()| url)?; - if path - .extension() - .is_some_and(|ext| ext.eq_ignore_ascii_case("ipynb")) - { - Ok(DocumentKey::Notebook(path)) - } else { - Ok(DocumentKey::Text(path)) - } - } + .map(|(key, _)| key) } #[expect(dead_code)] pub(super) fn update_notebook_document( &mut self, - key: &DocumentKey, + notebook_key: &DocumentKey, cells: Option, metadata: Option>, new_version: DocumentVersion, @@ -102,17 +89,16 @@ impl Index { .. 
}) = cells.as_ref().and_then(|cells| cells.structure.as_ref()) { - let notebook_path = key.path().clone(); - for opened_cell in did_open { + let cell_path = SystemVirtualPath::new(opened_cell.uri.as_str()); self.notebook_cells - .insert(opened_cell.uri.clone(), notebook_path.clone()); + .insert(cell_path.to_string(), notebook_key.to_file_path()); } // deleted notebook cells are closed via textDocument/didClose - we don't close them here. } - let controller = self.document_controller_for_key(key)?; - let Some(notebook) = controller.as_notebook_mut() else { + let document = self.document_mut(notebook_key)?; + let Some(notebook) = document.as_notebook_mut() else { anyhow::bail!("Notebook document path does not point to a notebook document"); }; @@ -123,44 +109,64 @@ impl Index { /// Create a document reference corresponding to the given document key. /// /// Returns an error if the document is not found or if the path cannot be converted to a URL. - pub(crate) fn make_document_ref( + pub(crate) fn document(&self, key: &DocumentKey) -> Result<&Document, DocumentError> { + let Some(document) = self.documents.get(key) else { + return Err(DocumentError::NotFound(key.clone())); + }; + + Ok(document) + } + + pub(crate) fn notebook_arc( &self, - key: DocumentKey, - ) -> Result { - let path = key.path(); - let Some(controller) = self.documents.get(path) else { - return Err(DocumentQueryError::NotFound(key)); + key: &DocumentKey, + ) -> Result, DocumentError> { + let Some(document) = self.documents.get(key) else { + return Err(DocumentError::NotFound(key.clone())); }; - // TODO: The `to_url` conversion shouldn't be an error because the paths themselves are - // constructed from the URLs but the `Index` APIs don't maintain this invariant. 
- let (cell_url, file_path) = match key { - DocumentKey::NotebookCell { - cell_url, - notebook_path, - } => (Some(cell_url), notebook_path), - DocumentKey::Notebook(path) | DocumentKey::Text(path) => (None, path), - }; - Ok(controller.make_ref(cell_url, file_path)) + + if let Document::Notebook(notebook) = document { + Ok(notebook.clone()) + } else { + Err(DocumentError::NotFound(key.clone())) + } } - pub(super) fn open_text_document(&mut self, path: &AnySystemPath, document: TextDocument) { - self.documents - .insert(path.clone(), DocumentController::new_text(document)); + pub(super) fn open_text_document(&mut self, document: TextDocument) -> DocumentHandle { + let key = DocumentKey::from_url(document.url()); + + // TODO: Fix file path for notebook cells + let handle = DocumentHandle { + key: key.clone(), + notebook_path: None, + url: document.url().clone(), + version: document.version(), + }; + + self.documents.insert(key, Document::new_text(document)); + + handle } - pub(super) fn open_notebook_document( - &mut self, - notebook_path: &AnySystemPath, - document: NotebookDocument, - ) { + pub(super) fn open_notebook_document(&mut self, document: NotebookDocument) -> DocumentHandle { + let notebook_key = DocumentKey::from_url(document.url()); + let url = document.url().clone(); + let version = document.version(); + for cell_url in document.cell_urls() { self.notebook_cells - .insert(cell_url.clone(), notebook_path.clone()); + .insert(cell_url.to_string(), notebook_key.to_file_path()); + } + + self.documents + .insert(notebook_key.clone(), Document::new_notebook(document)); + + DocumentHandle { + notebook_path: Some(notebook_key.to_file_path()), + key: notebook_key, + url, + version, } - self.documents.insert( - notebook_path.clone(), - DocumentController::new_notebook(document), - ); } pub(super) fn close_document(&mut self, key: &DocumentKey) -> crate::Result<()> { @@ -169,27 +175,23 @@ impl Index { // is requested to be `closed` by VS Code after the notebook gets 
updated. // This is not documented in the LSP specification explicitly, and this assumption // may need revisiting in the future as we support more editors with notebook support. - if let DocumentKey::NotebookCell { cell_url, .. } = key { - if self.notebook_cells.remove(cell_url).is_none() { - tracing::warn!("Tried to remove a notebook cell that does not exist: {cell_url}"); - } - return Ok(()); + if let DocumentKey::Opaque(uri) = key { + self.notebook_cells.remove(uri); } - let path = key.path(); - let Some(_) = self.documents.remove(path) else { + let Some(_) = self.documents.remove(key) else { anyhow::bail!("tried to close document that didn't exist at {key}") }; + Ok(()) } - fn document_controller_for_key( + pub(super) fn document_mut( &mut self, key: &DocumentKey, - ) -> crate::Result<&mut DocumentController> { - let path = key.path(); - let Some(controller) = self.documents.get_mut(path) else { - anyhow::bail!("Document controller not available at `{key}`"); + ) -> Result<&mut Document, DocumentError> { + let Some(controller) = self.documents.get_mut(key) else { + return Err(DocumentError::NotFound(key.clone())); }; Ok(controller) } @@ -197,31 +199,24 @@ impl Index { /// A mutable handler to an underlying document. 
#[derive(Debug)] -enum DocumentController { +pub(crate) enum Document { Text(Arc), Notebook(Arc), } -impl DocumentController { - fn new_text(document: TextDocument) -> Self { +impl Document { + pub(super) fn new_text(document: TextDocument) -> Self { Self::Text(Arc::new(document)) } - fn new_notebook(document: NotebookDocument) -> Self { + pub(super) fn new_notebook(document: NotebookDocument) -> Self { Self::Notebook(Arc::new(document)) } - fn make_ref(&self, cell_url: Option, file_path: AnySystemPath) -> DocumentQuery { - match &self { - Self::Notebook(notebook) => DocumentQuery::Notebook { - cell_url, - file_path, - notebook: notebook.clone(), - }, - Self::Text(document) => DocumentQuery::Text { - file_path, - document: document.clone(), - }, + pub(crate) fn version(&self) -> DocumentVersion { + match self { + Self::Text(document) => document.version(), + Self::Notebook(notebook) => notebook.version(), } } @@ -254,85 +249,8 @@ impl DocumentController { } } -/// A read-only query to an open document. -/// -/// This query can 'select' a text document, full notebook, or a specific notebook cell. -/// It also includes document settings. -#[derive(Debug, Clone)] -pub(crate) enum DocumentQuery { - Text { - file_path: AnySystemPath, - document: Arc, - }, - Notebook { - /// The selected notebook cell, if it exists. - cell_url: Option, - /// The path to the notebook. - file_path: AnySystemPath, - notebook: Arc, - }, -} - -impl DocumentQuery { - /// Attempts to access the underlying notebook document that this query is selecting. - pub(crate) fn as_notebook(&self) -> Option<&NotebookDocument> { - match self { - Self::Notebook { notebook, .. } => Some(notebook), - Self::Text { .. } => None, - } - } - - /// Get the version of document selected by this query. - pub(crate) fn version(&self) -> DocumentVersion { - match self { - Self::Text { document, .. } => document.version(), - Self::Notebook { notebook, .. 
} => notebook.version(), - } - } - - /// Get the system path for the document selected by this query. - pub(crate) fn file_path(&self) -> &AnySystemPath { - match self { - Self::Text { file_path, .. } | Self::Notebook { file_path, .. } => file_path, - } - } - - /// Attempt to access the single inner text document selected by the query. - /// If this query is selecting an entire notebook document, this will return `None`. - #[expect(dead_code)] - pub(crate) fn as_single_document(&self) -> Option<&TextDocument> { - match self { - Self::Text { document, .. } => Some(document), - Self::Notebook { - notebook, - cell_url: cell_uri, - .. - } => cell_uri - .as_ref() - .and_then(|cell_uri| notebook.cell_document_by_uri(cell_uri)), - } - } - - /// Returns the salsa interned [`File`] for the document selected by this query. - /// - /// It returns [`None`] for the following cases: - /// - For virtual file, if it's not yet opened - /// - For regular file, if it does not exists or is a directory - pub(crate) fn file(&self, db: &dyn Db) -> Option { - match self.file_path() { - AnySystemPath::System(path) => system_path_to_file(db, path).ok(), - AnySystemPath::SystemVirtual(virtual_path) => db - .files() - .try_virtual_file(virtual_path) - .map(|virtual_file| virtual_file.file()), - } - } -} - #[derive(Debug, Clone, thiserror::Error)] -pub(crate) enum DocumentQueryError { - #[error("invalid URL: {0}")] - InvalidUrl(Url), +pub(crate) enum DocumentError { #[error("document not found for key: {0}")] NotFound(DocumentKey), } diff --git a/crates/ty_server/src/system.rs b/crates/ty_server/src/system.rs index 323e4a6846..17b9bcbde6 100644 --- a/crates/ty_server/src/system.rs +++ b/crates/ty_server/src/system.rs @@ -4,6 +4,8 @@ use std::fmt::Display; use std::panic::RefUnwindSafe; use std::sync::Arc; +use crate::document::DocumentKey; +use crate::session::index::{Document, Index}; use lsp_types::Url; use ruff_db::file_revision::FileRevision; use ruff_db::files::{File, FilePath}; @@ -16,10 
+18,6 @@ use ruff_notebook::{Notebook, NotebookError}; use ty_ide::cached_vendored_path; use ty_python_semantic::Db; -use crate::DocumentQuery; -use crate::document::DocumentKey; -use crate::session::index::Index; - /// Returns a [`Url`] for the given [`File`]. pub(crate) fn file_to_url(db: &dyn Db, file: File) -> Option { match file.path(db) { @@ -41,26 +39,6 @@ pub(crate) enum AnySystemPath { } impl AnySystemPath { - /// Converts the given [`Url`] to an [`AnySystemPath`]. - /// - /// If the URL scheme is `file`, then the path is converted to a [`SystemPathBuf`]. Otherwise, the - /// URL is converted to a [`SystemVirtualPathBuf`]. - /// - /// This fails in the following cases: - /// * The URL cannot be converted to a file path (refer to [`Url::to_file_path`]). - /// * If the URL is not a valid UTF-8 string. - pub(crate) fn try_from_url(url: &Url) -> std::result::Result { - if url.scheme() == "file" { - Ok(AnySystemPath::System( - SystemPathBuf::from_path_buf(url.to_file_path()?).map_err(|_| ())?, - )) - } else { - Ok(AnySystemPath::SystemVirtual( - SystemVirtualPath::new(url.as_str()).to_path_buf(), - )) - } - } - pub(crate) const fn as_system(&self) -> Option<&SystemPathBuf> { match self { AnySystemPath::System(system_path_buf) => Some(system_path_buf), @@ -68,21 +46,11 @@ impl AnySystemPath { } } - /// Returns the extension of the path, if any. - pub(crate) fn extension(&self) -> Option<&str> { + #[expect(unused)] + pub(crate) const fn as_virtual(&self) -> Option<&SystemVirtualPath> { match self { - AnySystemPath::System(system_path) => system_path.extension(), - AnySystemPath::SystemVirtual(virtual_path) => virtual_path.extension(), - } - } - - /// Converts the path to a URL. 
- pub(crate) fn to_url(&self) -> Option { - match self { - AnySystemPath::System(system_path) => { - Url::from_file_path(system_path.as_std_path()).ok() - } - AnySystemPath::SystemVirtual(virtual_path) => Url::parse(virtual_path.as_str()).ok(), + AnySystemPath::SystemVirtual(path) => Some(path.as_path()), + AnySystemPath::System(_) => None, } } } @@ -144,21 +112,17 @@ impl LSPSystem { self.index.as_ref().unwrap() } - fn make_document_ref(&self, path: AnySystemPath) -> Option { + fn make_document_ref(&self, path: AnySystemPath) -> Option<&Document> { let index = self.index(); - let key = DocumentKey::from_path(path); - index.make_document_ref(key).ok() + index.document(&DocumentKey::from(path)).ok() } - fn system_path_to_document_ref(&self, path: &SystemPath) -> Option { + fn system_path_to_document_ref(&self, path: &SystemPath) -> Option<&Document> { let any_path = AnySystemPath::System(path.to_path_buf()); self.make_document_ref(any_path) } - fn system_virtual_path_to_document_ref( - &self, - path: &SystemVirtualPath, - ) -> Option { + fn system_virtual_path_to_document_ref(&self, path: &SystemVirtualPath) -> Option<&Document> { let any_path = AnySystemPath::SystemVirtual(path.to_path_buf()); self.make_document_ref(any_path) } @@ -170,7 +134,7 @@ impl System for LSPSystem { if let Some(document) = document { Ok(Metadata::new( - document_revision(&document), + document_revision(document), None, FileType::File, )) @@ -191,7 +155,7 @@ impl System for LSPSystem { let document = self.system_path_to_document_ref(path); match document { - Some(DocumentQuery::Text { document, .. }) => Ok(document.contents().to_string()), + Some(Document::Text(document)) => Ok(document.contents().to_string()), _ => self.native_system.read_to_string(path), } } @@ -200,10 +164,8 @@ impl System for LSPSystem { let document = self.system_path_to_document_ref(path); match document { - Some(DocumentQuery::Text { document, .. 
}) => { - Notebook::from_source_code(document.contents()) - } - Some(DocumentQuery::Notebook { notebook, .. }) => Ok(notebook.make_ruff_notebook()), + Some(Document::Text(document)) => Notebook::from_source_code(document.contents()), + Some(Document::Notebook(notebook)) => Ok(notebook.make_ruff_notebook()), None => self.native_system.read_to_notebook(path), } } @@ -213,7 +175,7 @@ impl System for LSPSystem { .system_virtual_path_to_document_ref(path) .ok_or_else(|| virtual_path_not_found(path))?; - if let DocumentQuery::Text { document, .. } = &document { + if let Document::Text(document) = &document { Ok(document.contents().to_string()) } else { Err(not_a_text_document(path)) @@ -229,8 +191,8 @@ impl System for LSPSystem { .ok_or_else(|| virtual_path_not_found(path))?; match document { - DocumentQuery::Text { document, .. } => Notebook::from_source_code(document.contents()), - DocumentQuery::Notebook { notebook, .. } => Ok(notebook.make_ruff_notebook()), + Document::Text(document) => Notebook::from_source_code(document.contents()), + Document::Notebook(notebook) => Ok(notebook.make_ruff_notebook()), } } @@ -307,7 +269,7 @@ fn virtual_path_not_found(path: impl Display) -> std::io::Error { } /// Helper function to get the [`FileRevision`] of the given document. -fn document_revision(document: &DocumentQuery) -> FileRevision { +fn document_revision(document: &Document) -> FileRevision { // The file revision is just an opaque number which doesn't have any significant meaning other // than that the file has changed if the revisions are different. 
#[expect(clippy::cast_sign_loss)] From a32d5b8dc416c719bbc7217f4e1563237d510e9a Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Fri, 31 Oct 2025 16:51:11 -0400 Subject: [PATCH 024/180] [ty] Improve exhaustiveness analysis for type variables with bounds or constraints (#21172) --- .../mdtest/exhaustiveness_checking.md | 52 +++++++++++++++ .../ty_python_semantic/src/types/builder.rs | 64 ++++++++++--------- 2 files changed, 85 insertions(+), 31 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md index 29b267024b..4379498f2d 100644 --- a/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md +++ b/crates/ty_python_semantic/resources/mdtest/exhaustiveness_checking.md @@ -417,3 +417,55 @@ class Answer(Enum): case Answer.NO: return False ``` + +## Exhaustiveness checking for type variables with bounds or constraints + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import assert_never, Literal + +def f[T: bool](x: T) -> T: + match x: + case True: + return x + case False: + return x + case _: + reveal_type(x) # revealed: Never + assert_never(x) + +def g[T: Literal["foo", "bar"]](x: T) -> T: + match x: + case "foo": + return x + case "bar": + return x + case _: + reveal_type(x) # revealed: Never + assert_never(x) + +def h[T: int | str](x: T) -> T: + if isinstance(x, int): + return x + elif isinstance(x, str): + return x + else: + reveal_type(x) # revealed: Never + assert_never(x) + +def i[T: (int, str)](x: T) -> T: + match x: + case int(): + pass + case str(): + pass + case _: + reveal_type(x) # revealed: Never + assert_never(x) + + return x +``` diff --git a/crates/ty_python_semantic/src/types/builder.rs b/crates/ty_python_semantic/src/types/builder.rs index 6b555b6fdb..11017d1571 100644 --- a/crates/ty_python_semantic/src/types/builder.rs +++ b/crates/ty_python_semantic/src/types/builder.rs @@ -781,37 +781,6 @@ 
impl<'db> IntersectionBuilder<'db> { seen_aliases, ) } - Type::EnumLiteral(enum_literal) => { - let enum_class = enum_literal.enum_class(self.db); - let metadata = - enum_metadata(self.db, enum_class).expect("Class of enum literal is an enum"); - - let enum_members_in_negative_part = self - .intersections - .iter() - .flat_map(|intersection| &intersection.negative) - .filter_map(|ty| ty.as_enum_literal()) - .filter(|lit| lit.enum_class(self.db) == enum_class) - .map(|lit| lit.name(self.db)) - .chain(std::iter::once(enum_literal.name(self.db))) - .collect::>(); - - let all_members_are_in_negative_part = metadata - .members - .keys() - .all(|name| enum_members_in_negative_part.contains(name)); - - if all_members_are_in_negative_part { - for inner in &mut self.intersections { - inner.add_negative(self.db, enum_literal.enum_class_instance(self.db)); - } - } else { - for inner in &mut self.intersections { - inner.add_negative(self.db, ty); - } - } - self - } _ => { for inner in &mut self.intersections { inner.add_negative(self.db, ty); @@ -1177,6 +1146,39 @@ impl<'db> InnerIntersectionBuilder<'db> { fn build(mut self, db: &'db dyn Db) -> Type<'db> { self.simplify_constrained_typevars(db); + + // If any typevars are in `self.positive`, speculatively solve all bounded type variables + // to their upper bound and all constrained type variables to the union of their constraints. + // If that speculative intersection simplifies to `Never`, this intersection must also simplify + // to `Never`. 
+ if self.positive.iter().any(|ty| ty.is_type_var()) { + let mut speculative = IntersectionBuilder::new(db); + for pos in &self.positive { + match pos { + Type::TypeVar(type_var) => { + match type_var.typevar(db).bound_or_constraints(db) { + Some(TypeVarBoundOrConstraints::UpperBound(bound)) => { + speculative = speculative.add_positive(bound); + } + Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { + speculative = speculative.add_positive(Type::Union(constraints)); + } + // TypeVars without a bound or constraint implicitly have `object` as their + // upper bound, and it is always a no-op to add `object` to an intersection. + None => {} + } + } + _ => speculative = speculative.add_positive(*pos), + } + } + for neg in &self.negative { + speculative = speculative.add_negative(*neg); + } + if speculative.build().is_never() { + return Type::Never; + } + } + match (self.positive.len(), self.negative.len()) { (0, 0) => Type::object(), (1, 0) => self.positive[0], From 521217bb904a03542f06306c462379a6bcbb54fc Mon Sep 17 00:00:00 2001 From: Gautham Venkataraman <26820345+gauthsvenkat@users.noreply.github.com> Date: Fri, 31 Oct 2025 22:47:01 +0100 Subject: [PATCH 025/180] [ruff]: Make `ruff analyze graph` work with jupyter notebooks (#21161) Co-authored-by: Gautham Venkataraman Co-authored-by: Micha Reiser --- crates/ruff/src/commands/analyze_graph.rs | 38 +++++-- crates/ruff/tests/analyze_graph.rs | 130 ++++++++++++++++++++++ crates/ruff_graph/src/lib.rs | 10 +- 3 files changed, 164 insertions(+), 14 deletions(-) diff --git a/crates/ruff/src/commands/analyze_graph.rs b/crates/ruff/src/commands/analyze_graph.rs index ffd7cc2d15..d4085e8ed0 100644 --- a/crates/ruff/src/commands/analyze_graph.rs +++ b/crates/ruff/src/commands/analyze_graph.rs @@ -7,6 +7,7 @@ use path_absolutize::CWD; use ruff_db::system::{SystemPath, SystemPathBuf}; use ruff_graph::{Direction, ImportMap, ModuleDb, ModuleImports}; use ruff_linter::package::PackageRoot; +use 
ruff_linter::source_kind::SourceKind; use ruff_linter::{warn_user, warn_user_once}; use ruff_python_ast::{PySourceType, SourceType}; use ruff_workspace::resolver::{ResolvedFile, match_exclusion, python_files_in_path}; @@ -127,10 +128,6 @@ pub(crate) fn analyze_graph( }, Some(language) => PySourceType::from(language), }; - if matches!(source_type, PySourceType::Ipynb) { - debug!("Ignoring Jupyter notebook: {}", path.display()); - continue; - } // Convert to system paths. let Ok(package) = package.map(SystemPathBuf::from_path_buf).transpose() else { @@ -147,13 +144,34 @@ pub(crate) fn analyze_graph( let root = root.clone(); let result = inner_result.clone(); scope.spawn(move |_| { + // Extract source code (handles both .py and .ipynb files) + let source_kind = match SourceKind::from_path(path.as_std_path(), source_type) { + Ok(Some(source_kind)) => source_kind, + Ok(None) => { + debug!("Skipping non-Python notebook: {path}"); + return; + } + Err(err) => { + warn!("Failed to read source for {path}: {err}"); + return; + } + }; + + let source_code = source_kind.source_code(); + // Identify any imports via static analysis. 
- let mut imports = - ModuleImports::detect(&db, &path, package.as_deref(), string_imports) - .unwrap_or_else(|err| { - warn!("Failed to generate import map for {path}: {err}"); - ModuleImports::default() - }); + let mut imports = ModuleImports::detect( + &db, + source_code, + source_type, + &path, + package.as_deref(), + string_imports, + ) + .unwrap_or_else(|err| { + warn!("Failed to generate import map for {path}: {err}"); + ModuleImports::default() + }); debug!("Discovered {} imports for {}", imports.len(), path); diff --git a/crates/ruff/tests/analyze_graph.rs b/crates/ruff/tests/analyze_graph.rs index 2c300029ea..993ebf3b59 100644 --- a/crates/ruff/tests/analyze_graph.rs +++ b/crates/ruff/tests/analyze_graph.rs @@ -653,3 +653,133 @@ fn venv() -> Result<()> { Ok(()) } + +#[test] +fn notebook_basic() -> Result<()> { + let tempdir = TempDir::new()?; + let root = ChildPath::new(tempdir.path()); + + root.child("ruff").child("__init__.py").write_str("")?; + root.child("ruff") + .child("a.py") + .write_str(indoc::indoc! {r#" + def helper(): + pass + "#})?; + + // Create a basic notebook with a simple import + root.child("notebook.ipynb").write_str(indoc::indoc! 
{r#" + { + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ruff.a import helper" + ] + } + ], + "metadata": { + "language_info": { + "name": "python", + "version": "3.12.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 + } + "#})?; + + insta::with_settings!({ + filters => INSTA_FILTERS.to_vec(), + }, { + assert_cmd_snapshot!(command().current_dir(&root), @r###" + success: true + exit_code: 0 + ----- stdout ----- + { + "notebook.ipynb": [ + "ruff/a.py" + ], + "ruff/__init__.py": [], + "ruff/a.py": [] + } + + ----- stderr ----- + "###); + }); + + Ok(()) +} + +#[test] +fn notebook_with_magic() -> Result<()> { + let tempdir = TempDir::new()?; + let root = ChildPath::new(tempdir.path()); + + root.child("ruff").child("__init__.py").write_str("")?; + root.child("ruff") + .child("a.py") + .write_str(indoc::indoc! {r#" + def helper(): + pass + "#})?; + + // Create a notebook with IPython magic commands and imports + root.child("notebook.ipynb").write_str(indoc::indoc! 
{r#" + { + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_ext autoreload\n", + "%autoreload 2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from ruff.a import helper" + ] + } + ], + "metadata": { + "language_info": { + "name": "python", + "version": "3.12.0" + } + }, + "nbformat": 4, + "nbformat_minor": 5 + } + "#})?; + + insta::with_settings!({ + filters => INSTA_FILTERS.to_vec(), + }, { + assert_cmd_snapshot!(command().current_dir(&root), @r###" + success: true + exit_code: 0 + ----- stdout ----- + { + "notebook.ipynb": [ + "ruff/a.py" + ], + "ruff/__init__.py": [], + "ruff/a.py": [] + } + + ----- stderr ----- + "###); + }); + + Ok(()) +} diff --git a/crates/ruff_graph/src/lib.rs b/crates/ruff_graph/src/lib.rs index eaf307018d..377f1e89e9 100644 --- a/crates/ruff_graph/src/lib.rs +++ b/crates/ruff_graph/src/lib.rs @@ -3,8 +3,9 @@ use std::collections::{BTreeMap, BTreeSet}; use anyhow::Result; use ruff_db::system::{SystemPath, SystemPathBuf}; +use ruff_python_ast::PySourceType; use ruff_python_ast::helpers::to_module_path; -use ruff_python_parser::{Mode, ParseOptions, parse}; +use ruff_python_parser::{ParseOptions, parse}; use crate::collector::Collector; pub use crate::db::ModuleDb; @@ -24,13 +25,14 @@ impl ModuleImports { /// Detect the [`ModuleImports`] for a given Python file. pub fn detect( db: &ModuleDb, + source: &str, + source_type: PySourceType, path: &SystemPath, package: Option<&SystemPath>, string_imports: StringImports, ) -> Result { - // Read and parse the source code. - let source = std::fs::read_to_string(path)?; - let parsed = parse(&source, ParseOptions::from(Mode::Module))?; + // Parse the source code. 
+ let parsed = parse(source, ParseOptions::from(source_type))?; let module_path = package.and_then(|package| to_module_path(package.as_std_path(), path.as_std_path())); From a151f9746d202647d1ce63a39fe357175395e182 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 31 Oct 2025 21:03:40 -0400 Subject: [PATCH 026/180] [ty] Sync vendored typeshed stubs (#21178) Close and reopen this PR to trigger CI --------- Co-authored-by: typeshedbot <> --- crates/ty_vendored/vendor/typeshed/README.md | 12 +-- .../vendor/typeshed/source_commit.txt | 2 +- .../vendor/typeshed/stdlib/builtins.pyi | 4 + .../vendor/typeshed/stdlib/cmath.pyi | 2 +- .../vendor/typeshed/stdlib/contextlib.pyi | 21 ++++-- .../vendor/typeshed/stdlib/enum.pyi | 2 + .../vendor/typeshed/stdlib/os/__init__.pyi | 3 + .../vendor/typeshed/stdlib/sys/__init__.pyi | 24 ++++++ .../vendor/typeshed/stdlib/sysconfig.pyi | 8 +- .../typeshed/stdlib/tkinter/__init__.pyi | 75 ++++++++----------- .../vendor/typeshed/stdlib/turtle.pyi | 8 +- .../vendor/typeshed/stdlib/zlib.pyi | 4 +- 12 files changed, 97 insertions(+), 68 deletions(-) diff --git a/crates/ty_vendored/vendor/typeshed/README.md b/crates/ty_vendored/vendor/typeshed/README.md index 1467aa20b4..d295b56bc0 100644 --- a/crates/ty_vendored/vendor/typeshed/README.md +++ b/crates/ty_vendored/vendor/typeshed/README.md @@ -7,10 +7,10 @@ ## About Typeshed contains external type annotations for the Python standard library -and Python builtins, as well as third party packages as contributed by +and Python builtins, as well as third-party packages that are contributed by people external to those projects. -This data can e.g. be used for static analysis, type checking, type inference, +This data can, e.g., be used for static analysis, type checking, type inference, and autocompletion. For information on how to use typeshed, read below. Information for @@ -29,8 +29,8 @@ If you're just using a type checker (e.g. 
[mypy](https://github.com/python/mypy/ [pyright](https://github.com/microsoft/pyright), or PyCharm's built-in type checker), as opposed to developing it, you don't need to interact with the typeshed repo at -all: a copy of standard library part of typeshed is bundled with type checkers. -And type stubs for third party packages and modules you are using can +all: a copy of the standard library part of typeshed is bundled with type checkers. +And type stubs for third-party packages and modules you are using can be installed from PyPI. For example, if you are using `html5lib` and `requests`, you can install the type stubs using @@ -70,7 +70,7 @@ package you're using, each with its own tradeoffs: type checking due to changes in the stubs. Another risk of this strategy is that stubs often lag behind - the package being stubbed. You might want to force the package being stubbed + the package that is being stubbed. You might want to force the package being stubbed to a certain minimum version because it fixes a critical bug, but if correspondingly updated stubs have not been released, your type checking results may not be fully accurate. @@ -119,6 +119,6 @@ a review of your type annotations or stubs outside of typeshed, head over to [our discussion forum](https://github.com/python/typing/discussions). For less formal discussion, try the typing chat room on [gitter.im](https://gitter.im/python/typing). Some typeshed maintainers -are almost always present; feel free to find us there and we're happy +are almost always present; feel free to find us there, and we're happy to chat. Substantive technical discussion will be directed to the issue tracker. 
diff --git a/crates/ty_vendored/vendor/typeshed/source_commit.txt b/crates/ty_vendored/vendor/typeshed/source_commit.txt index 54a8607d25..d0fd6efd8e 100644 --- a/crates/ty_vendored/vendor/typeshed/source_commit.txt +++ b/crates/ty_vendored/vendor/typeshed/source_commit.txt @@ -1 +1 @@ -d6f4a0f7102b1400a21742cf9b7ea93614e2b6ec +bf7214784877c52638844c065360d4814fae4c65 diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi index bcacb3857b..4859bbe675 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/builtins.pyi @@ -4525,6 +4525,10 @@ class BaseException: def __setstate__(self, state: dict[str, Any] | None, /) -> None: ... def with_traceback(self, tb: TracebackType | None, /) -> Self: """Set self.__traceback__ to tb and return self.""" + # Necessary for security-focused static analyzers (e.g, pysa) + # See https://github.com/python/typeshed/pull/14900 + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... # noqa: Y029 if sys.version_info >= (3, 11): # only present after add_note() is called __notes__: list[str] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi index 575f2bf95d..659595046b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/cmath.pyi @@ -67,7 +67,7 @@ def isinf(z: _C, /) -> bool: def isnan(z: _C, /) -> bool: """Checks if the real or imaginary part of z not a number (NaN).""" -def log(x: _C, base: _C = ..., /) -> complex: +def log(z: _C, base: _C = ..., /) -> complex: """log(z[, base]) -> the logarithm of z to the given base. If the base is not specified, returns the natural logarithm (base e) of z. 
diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi index 2b05511c33..85baf55925 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/contextlib.pyi @@ -6,7 +6,7 @@ from _typeshed import FileDescriptorOrPath, Unused from abc import ABC, abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator from types import TracebackType -from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only +from typing import Any, Generic, Protocol, TypeVar, overload, runtime_checkable, type_check_only from typing_extensions import ParamSpec, Self, TypeAlias __all__ = [ @@ -32,7 +32,6 @@ if sys.version_info >= (3, 11): _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) -_T_io = TypeVar("_T_io", bound=IO[str] | None) _ExitT_co = TypeVar("_ExitT_co", covariant=True, bound=bool | None, default=bool | None) _F = TypeVar("_F", bound=Callable[..., Any]) _G_co = TypeVar("_G_co", bound=Generator[Any, Any, Any] | AsyncGenerator[Any, Any], covariant=True) @@ -275,13 +274,23 @@ class suppress(AbstractContextManager[None, bool]): self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> bool: ... -class _RedirectStream(AbstractContextManager[_T_io, None]): - def __init__(self, new_target: _T_io) -> None: ... +# This is trying to describe what is needed for (most?) uses +# of `redirect_stdout` and `redirect_stderr`. +# https://github.com/python/typeshed/issues/14903 +@type_check_only +class _SupportsRedirect(Protocol): + def write(self, s: str, /) -> int: ... + def flush(self) -> None: ... 
+ +_SupportsRedirectT = TypeVar("_SupportsRedirectT", bound=_SupportsRedirect | None) + +class _RedirectStream(AbstractContextManager[_SupportsRedirectT, None]): + def __init__(self, new_target: _SupportsRedirectT) -> None: ... def __exit__( self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> None: ... -class redirect_stdout(_RedirectStream[_T_io]): +class redirect_stdout(_RedirectStream[_SupportsRedirectT]): """Context manager for temporarily redirecting stdout to another file. # How to send help() to stderr @@ -294,7 +303,7 @@ class redirect_stdout(_RedirectStream[_T_io]): help(pow) """ -class redirect_stderr(_RedirectStream[_T_io]): +class redirect_stderr(_RedirectStream[_SupportsRedirectT]): """Context manager for temporarily redirecting stderr to another file.""" class _BaseExitStack(Generic[_ExitT_co]): diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi index b9933de380..825340e75b 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/enum.pyi @@ -623,6 +623,8 @@ if sys.version_info >= (3, 11): the module is the last module in case of a multi-module name """ + def show_flag_values(value: int) -> list[int]: ... + if sys.version_info >= (3, 12): # The body of the class is the same, but the base classes are different. class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi index 88f6a919a1..1ea3e4ea80 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/os/__init__.pyi @@ -752,6 +752,9 @@ environ: _Environ[str] if sys.platform != "win32": environb: _Environ[bytes] +if sys.version_info >= (3, 14): + def reload_environ() -> None: ... 
+ if sys.version_info >= (3, 11) or sys.platform != "win32": EX_OK: Final[int] diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi index 21514c7609..0ecc8e2693 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sys/__init__.pyi @@ -578,6 +578,21 @@ def _getframe(depth: int = 0, /) -> FrameType: only. """ +# documented -- see https://docs.python.org/3/library/sys.html#sys._current_exceptions +if sys.version_info >= (3, 12): + def _current_exceptions() -> dict[int, BaseException | None]: + """Return a dict mapping each thread's identifier to its current raised exception. + + This function should be used for specialized purposes only. + """ + +else: + def _current_exceptions() -> dict[int, OptExcInfo]: + """Return a dict mapping each thread's identifier to its current raised exception. + + This function should be used for specialized purposes only. + """ + if sys.version_info >= (3, 12): def _getframemodulename(depth: int = 0) -> str | None: """Return the name of the module for a calling frame. @@ -627,6 +642,9 @@ def exit(status: _ExitCode = None, /) -> NoReturn: exit status will be one (i.e., failure). """ +if sys.platform == "android": # noqa: Y008 + def getandroidapilevel() -> int: ... + def getallocatedblocks() -> int: """Return the number of memory blocks currently allocated.""" @@ -949,3 +967,9 @@ if sys.version_info >= (3, 14): script (str|bytes): The path to a file containing the Python code to be executed. """ + + def _is_immortal(op: object, /) -> bool: + """Return True if the given object is "immortal" per PEP 683. + + This function should be used for specialized purposes only. 
+ """ diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi index 8cdd3b1b2f..1dfb9f3cfe 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/sysconfig.pyi @@ -2,7 +2,7 @@ import sys from typing import IO, Any, Literal, overload -from typing_extensions import deprecated +from typing_extensions import LiteralString, deprecated __all__ = [ "get_config_h_filename", @@ -47,8 +47,10 @@ def get_scheme_names() -> tuple[str, ...]: """Return a tuple containing the schemes names.""" if sys.version_info >= (3, 10): - def get_default_scheme() -> str: ... - def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> str: ... + def get_default_scheme() -> LiteralString: ... + def get_preferred_scheme(key: Literal["prefix", "home", "user"]) -> LiteralString: ... + # Documented -- see https://docs.python.org/3/library/sysconfig.html#sysconfig._get_preferred_schemes + def _get_preferred_schemes() -> dict[Literal["prefix", "home", "user"], LiteralString]: ... def get_path_names() -> tuple[str, ...]: """Return a tuple containing the paths names.""" diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi index 1f31c1fbb4..1d8e299023 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/tkinter/__init__.pyi @@ -1721,17 +1721,22 @@ class Wm: if sys.platform == "darwin": @overload def wm_attributes(self, option: Literal["-modified"], /) -> bool: - """Return or sets platform specific attributes. + """This subcommand returns or sets platform specific attributes - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. 
- When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. """ @overload @@ -1803,20 +1808,7 @@ class Wm: def wm_attributes(self, option: Literal["topmost"], /) -> bool: ... if sys.platform == "darwin": @overload - def wm_attributes(self, option: Literal["modified"], /) -> bool: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. - """ - + def wm_attributes(self, option: Literal["modified"], /) -> bool: ... @overload def wm_attributes(self, option: Literal["notify"], /) -> bool: ... 
@overload @@ -1876,17 +1868,22 @@ class Wm: if sys.platform == "darwin": @overload def wm_attributes(self, option: Literal["-modified"], value: bool, /) -> Literal[""]: - """Return or sets platform specific attributes. + """This subcommand returns or sets platform specific attributes - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. + The first form returns a list of the platform specific flags and + their values. The second form returns the value for the specific + option. The third form sets one or more of the values. The values + are as follows: - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. + On Windows, -disabled gets or sets whether the window is in a + disabled state. -toolwindow gets or sets the style of the window + to toolwindow (as defined in the MSDN). -topmost gets or sets + whether this is a topmost window (displays above all other + windows). + + On Macintosh, XXXXX + + On Unix, there are currently no special attribute values. """ @overload @@ -1950,19 +1947,7 @@ class Wm: titlepath: str = ..., topmost: bool = ..., transparent: bool = ..., - ) -> None: - """Return or sets platform specific attributes. - - When called with a single argument return_python_dict=True, - return a dict of the platform specific attributes and their values. - When called without arguments or with a single argument - return_python_dict=False, return a tuple containing intermixed - attribute names with the minus prefix and their values. - - When called with a single string value, return the value for the - specific option. When called with keyword arguments, set the - corresponding attributes. 
- """ + ) -> None: ... elif sys.platform == "win32": @overload def wm_attributes( diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi index b0e7c1bf29..61cd2e44b5 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/turtle.pyi @@ -669,7 +669,7 @@ class TurtleScreen(TurtleScreenBase): ['arrow', 'blank', 'circle', ... , 'turtle'] """ - def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: + def onclick(self, fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-click event on canvas. Arguments: @@ -2540,7 +2540,7 @@ def getshapes() -> list[str]: ['arrow', 'blank', 'circle', ... , 'turtle'] """ -def onclick(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def onclick(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-click event on this turtle on canvas. Arguments: @@ -3960,7 +3960,7 @@ def getturtle() -> Turtle: getpen = getturtle -def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-button-release event on this turtle on canvas. Arguments: @@ -3983,7 +3983,7 @@ def onrelease(fun: Callable[[float, float], object], btn: int = 1, add: Any | No transparent. """ -def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: Any | None = None) -> None: +def ondrag(fun: Callable[[float, float], object], btn: int = 1, add: bool | None = None) -> None: """Bind fun to mouse-move event on this turtle on canvas. 
Arguments: diff --git a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi index 97d70804a3..a8231f62ee 100644 --- a/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi +++ b/crates/ty_vendored/vendor/typeshed/stdlib/zlib.pyi @@ -41,8 +41,8 @@ Z_RLE: Final = 3 Z_SYNC_FLUSH: Final = 2 Z_TREES: Final = 6 -if sys.version_info >= (3, 14) and sys.platform == "win32": - # Available when zlib was built with zlib-ng, usually only on Windows +if sys.version_info >= (3, 14): + # Available when zlib was built with zlib-ng ZLIBNG_VERSION: Final[str] class error(Exception): ... From 921f409ee8fe1a166294a13cad3fccd0db870c7b Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Sat, 1 Nov 2025 02:50:58 +0100 Subject: [PATCH 027/180] Update Rust toolchain to 1.91 (#21179) --- crates/ruff/src/commands/format.rs | 2 +- crates/ruff_dev/src/generate_options.rs | 2 +- crates/ruff_dev/src/generate_ty_options.rs | 2 +- crates/ruff_formatter/src/builders.rs | 2 +- .../rules/unconventional_import_alias.rs | 2 +- .../src/rules/flake8_pytest_style/types.rs | 24 +++++-------------- .../src/rules/flake8_quotes/settings.rs | 8 ++----- .../rules/reimplemented_builtin.rs | 4 ++-- .../flake8_tidy_imports/rules/banned_api.rs | 6 ++--- .../ruff_linter/src/rules/isort/settings.rs | 8 ++----- .../pycodestyle/rules/literal_comparisons.rs | 2 +- .../src/rules/pydocstyle/rules/capitalized.rs | 2 +- .../src/rules/refurb/rules/bit_count.rs | 2 +- crates/ruff_python_formatter/src/lib.rs | 2 +- .../ty_python_semantic/src/python_platform.rs | 2 +- .../src/semantic_index/use_def.rs | 2 +- rust-toolchain.toml | 2 +- 17 files changed, 27 insertions(+), 47 deletions(-) diff --git a/crates/ruff/src/commands/format.rs b/crates/ruff/src/commands/format.rs index 1f79e59339..0e245efa8c 100644 --- a/crates/ruff/src/commands/format.rs +++ b/crates/ruff/src/commands/format.rs @@ -370,7 +370,7 @@ pub(crate) fn format_source( let line_index = 
LineIndex::from_source_text(unformatted); let byte_range = range.to_text_range(unformatted, &line_index); format_range(unformatted, byte_range, options).map(|formatted_range| { - let mut formatted = unformatted.to_string(); + let mut formatted = unformatted.clone(); formatted.replace_range( std::ops::Range::::from(formatted_range.source_range()), formatted_range.as_code(), diff --git a/crates/ruff_dev/src/generate_options.rs b/crates/ruff_dev/src/generate_options.rs index 8b8579d730..49a898d6fe 100644 --- a/crates/ruff_dev/src/generate_options.rs +++ b/crates/ruff_dev/src/generate_options.rs @@ -62,7 +62,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec) { generate_set( output, Set::Named { - name: set_name.to_string(), + name: set_name.clone(), set: *sub_set, }, parents, diff --git a/crates/ruff_dev/src/generate_ty_options.rs b/crates/ruff_dev/src/generate_ty_options.rs index af7794a0b2..4e4ab0a949 100644 --- a/crates/ruff_dev/src/generate_ty_options.rs +++ b/crates/ruff_dev/src/generate_ty_options.rs @@ -104,7 +104,7 @@ fn generate_set(output: &mut String, set: Set, parents: &mut Vec) { generate_set( output, Set::Named { - name: set_name.to_string(), + name: set_name.clone(), set: *sub_set, }, parents, diff --git a/crates/ruff_formatter/src/builders.rs b/crates/ruff_formatter/src/builders.rs index 14da643355..ab60103d99 100644 --- a/crates/ruff_formatter/src/builders.rs +++ b/crates/ruff_formatter/src/builders.rs @@ -1006,7 +1006,7 @@ impl std::fmt::Debug for Align<'_, Context> { /// Block indents indent a block of code, such as in a function body, and therefore insert a line /// break before and after the content. /// -/// Doesn't create an indentation if the passed in content is [`FormatElement.is_empty`]. +/// Doesn't create an indentation if the passed in content is empty. 
/// /// # Examples /// diff --git a/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs b/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs index e0684056dc..6827e99b93 100644 --- a/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs +++ b/crates/ruff_linter/src/rules/flake8_import_conventions/rules/unconventional_import_alias.rs @@ -78,7 +78,7 @@ pub(crate) fn unconventional_import_alias( let mut diagnostic = checker.report_diagnostic( UnconventionalImportAlias { name: qualified_name, - asname: expected_alias.to_string(), + asname: expected_alias.clone(), }, binding.range(), ); diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs index 0de6758635..bd57ad080c 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/types.rs @@ -6,21 +6,17 @@ use ruff_macros::CacheKey; #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeNameType { #[serde(rename = "csv")] Csv, #[serde(rename = "tuple")] + #[default] Tuple, #[serde(rename = "list")] List, } -impl Default for ParametrizeNameType { - fn default() -> Self { - Self::Tuple - } -} - impl Display for ParametrizeNameType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { @@ -33,19 +29,15 @@ impl Display for ParametrizeNameType { #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeValuesType { #[serde(rename = "tuple")] Tuple, #[serde(rename = "list")] + #[default] List, } -impl Default for ParametrizeValuesType { - fn default() -> Self { - Self::List - } -} - impl Display for 
ParametrizeValuesType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { @@ -57,19 +49,15 @@ impl Display for ParametrizeValuesType { #[derive(Clone, Copy, Debug, CacheKey, PartialEq, Eq, Serialize, Deserialize)] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum ParametrizeValuesRowType { #[serde(rename = "tuple")] + #[default] Tuple, #[serde(rename = "list")] List, } -impl Default for ParametrizeValuesRowType { - fn default() -> Self { - Self::Tuple - } -} - impl Display for ParametrizeValuesRowType { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { diff --git a/crates/ruff_linter/src/rules/flake8_quotes/settings.rs b/crates/ruff_linter/src/rules/flake8_quotes/settings.rs index b241e70b49..fe5129d6e3 100644 --- a/crates/ruff_linter/src/rules/flake8_quotes/settings.rs +++ b/crates/ruff_linter/src/rules/flake8_quotes/settings.rs @@ -9,19 +9,15 @@ use ruff_macros::CacheKey; #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum Quote { /// Use double quotes. + #[default] Double, /// Use single quotes. 
Single, } -impl Default for Quote { - fn default() -> Self { - Self::Double - } -} - impl From for Quote { fn from(value: ruff_python_ast::str::Quote) -> Self { match value { diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs index 9c216311ed..4c858fb799 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/reimplemented_builtin.rs @@ -116,7 +116,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) { let mut diagnostic = checker.report_diagnostic( ReimplementedBuiltin { - replacement: contents.to_string(), + replacement: contents.clone(), }, TextRange::new(stmt.start(), terminal.stmt.end()), ); @@ -212,7 +212,7 @@ pub(crate) fn convert_for_loop_to_any_all(checker: &Checker, stmt: &Stmt) { let mut diagnostic = checker.report_diagnostic( ReimplementedBuiltin { - replacement: contents.to_string(), + replacement: contents.clone(), }, TextRange::new(stmt.start(), terminal.stmt.end()), ); diff --git a/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs b/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs index 6ada015222..6379304d5c 100644 --- a/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs +++ b/crates/ruff_linter/src/rules/flake8_tidy_imports/rules/banned_api.rs @@ -47,7 +47,7 @@ pub(crate) fn banned_api(checker: &Checker, policy: &NameMatchPolicy, checker.report_diagnostic( BannedApi { name: banned_module, - message: reason.msg.to_string(), + message: reason.msg.clone(), }, node.range(), ); @@ -74,8 +74,8 @@ pub(crate) fn banned_attribute_access(checker: &Checker, expr: &Expr) { { checker.report_diagnostic( BannedApi { - name: banned_path.to_string(), - message: ban.msg.to_string(), + name: banned_path.clone(), + message: ban.msg.clone(), }, expr.range(), ); diff --git 
a/crates/ruff_linter/src/rules/isort/settings.rs b/crates/ruff_linter/src/rules/isort/settings.rs index 05a4dddf08..cab9ab35ed 100644 --- a/crates/ruff_linter/src/rules/isort/settings.rs +++ b/crates/ruff_linter/src/rules/isort/settings.rs @@ -20,21 +20,17 @@ use super::categorize::ImportSection; #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, CacheKey)] #[serde(deny_unknown_fields, rename_all = "kebab-case")] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] +#[derive(Default)] pub enum RelativeImportsOrder { /// Place "closer" imports (fewer `.` characters, most local) before /// "further" imports (more `.` characters, least local). ClosestToFurthest, /// Place "further" imports (more `.` characters, least local) imports /// before "closer" imports (fewer `.` characters, most local). + #[default] FurthestToClosest, } -impl Default for RelativeImportsOrder { - fn default() -> Self { - Self::FurthestToClosest - } -} - impl Display for RelativeImportsOrder { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { match self { diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs index 6ae6cea817..a68e492846 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs @@ -427,7 +427,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) for diagnostic in &mut diagnostics { diagnostic.set_fix(Fix::unsafe_edit(Edit::range_replacement( - content.to_string(), + content.clone(), compare.range(), ))); } diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs index 32cfa89406..23faabc2ec 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/capitalized.rs @@ -94,7 
+94,7 @@ pub(crate) fn capitalized(checker: &Checker, docstring: &Docstring) { let mut diagnostic = checker.report_diagnostic( FirstWordUncapitalized { first_word: first_word.to_string(), - capitalized_word: capitalized_word.to_string(), + capitalized_word: capitalized_word.clone(), }, docstring.range(), ); diff --git a/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs b/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs index b86c6e9d9e..0690ca5449 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/bit_count.rs @@ -188,7 +188,7 @@ pub(crate) fn bit_count(checker: &Checker, call: &ExprCall) { let mut diagnostic = checker.report_diagnostic( BitCount { existing: SourceCodeSnippet::from_str(literal_text), - replacement: SourceCodeSnippet::new(replacement.to_string()), + replacement: SourceCodeSnippet::new(replacement.clone()), }, call.range(), ); diff --git a/crates/ruff_python_formatter/src/lib.rs b/crates/ruff_python_formatter/src/lib.rs index e6b2f9e7b8..bf68598e13 100644 --- a/crates/ruff_python_formatter/src/lib.rs +++ b/crates/ruff_python_formatter/src/lib.rs @@ -334,7 +334,7 @@ class A: ... 
let options = PyFormatOptions::from_source_type(source_type); let printed = format_range(&source, TextRange::new(start, end), options).unwrap(); - let mut formatted = source.to_string(); + let mut formatted = source.clone(); formatted.replace_range( std::ops::Range::::from(printed.source_range()), printed.as_code(), diff --git a/crates/ty_python_semantic/src/python_platform.rs b/crates/ty_python_semantic/src/python_platform.rs index b21424ee33..04f7fa3598 100644 --- a/crates/ty_python_semantic/src/python_platform.rs +++ b/crates/ty_python_semantic/src/python_platform.rs @@ -24,7 +24,7 @@ impl From for PythonPlatform { fn from(platform: String) -> Self { match platform.as_str() { "all" => PythonPlatform::All, - _ => PythonPlatform::Identifier(platform.to_string()), + _ => PythonPlatform::Identifier(platform.clone()), } } } diff --git a/crates/ty_python_semantic/src/semantic_index/use_def.rs b/crates/ty_python_semantic/src/semantic_index/use_def.rs index 39f3a1a8ec..dcca102b87 100644 --- a/crates/ty_python_semantic/src/semantic_index/use_def.rs +++ b/crates/ty_python_semantic/src/semantic_index/use_def.rs @@ -233,7 +233,7 @@ //! have two live bindings of `x`: `x = 3` and `x = 4`. //! //! Another piece of information that the `UseDefMap` needs to provide are reachability constraints. -//! See [`reachability_constraints.rs`] for more details, in particular how they apply to bindings. +//! See `reachability_constraints.rs` for more details, in particular how they apply to bindings. //! //! The [`UseDefMapBuilder`] itself just exposes methods for taking a snapshot, resetting to a //! snapshot, and merging a snapshot into the current state. 
The logic using these methods lives in diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 73328e053b..1a35d66439 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,2 +1,2 @@ [toolchain] -channel = "1.90" +channel = "1.91" From 17c7b3cde1bef999be944c0b924c2021fa696cb8 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Sat, 1 Nov 2025 03:26:38 +0100 Subject: [PATCH 028/180] Bump MSRV to Rust 1.89 (#21180) --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 935196f6a5..d12718ea12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,7 +5,7 @@ resolver = "2" [workspace.package] # Please update rustfmt.toml when bumping the Rust edition edition = "2024" -rust-version = "1.88" +rust-version = "1.89" homepage = "https://docs.astral.sh/ruff" documentation = "https://docs.astral.sh/ruff" repository = "https://github.com/astral-sh/ruff" From bff32a41dc440b30764e9414767794e01d25c265 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 1 Nov 2025 22:06:03 -0400 Subject: [PATCH 029/180] [ty] Increase timeout-minutes to 10 for py-fuzzer job (#21196) --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 806949d81e..5661ff48b7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -277,8 +277,8 @@ jobs: run: cargo test -p ty_python_semantic --test mdtest || true - name: "Run tests" run: cargo insta test --all-features --unreferenced reject --test-runner nextest - # Dogfood ty on py-fuzzer - - run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer + - name: Dogfood ty on py-fuzzer + run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer # Check for broken links in the documentation. 
- run: cargo doc --all --no-deps env: @@ -649,7 +649,7 @@ jobs: - determine_changes # Only runs on pull requests, since that is the only we way we can find the base version for comparison. if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} - timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 5 || 20 }} + timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 10 || 20 }} steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: From de1a6fb8ad0fd6b72b9956f7c9c8061e4fc1f413 Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Sun, 2 Nov 2025 13:01:06 +0000 Subject: [PATCH 030/180] Clean up definition completions docs and tests (#21183) ## Summary @BurntSushi provided some feedback in #21146 so i address it here. --- crates/ty_ide/src/completion.rs | 34 ++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 273a148ef3..7ca15362b0 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -830,16 +830,14 @@ fn find_typed_text( Some(source[last.range()].to_string()) } -/// Whether the given offset within the parsed module is within -/// a comment or not. +/// Whether the last token is within a comment or not. fn is_in_comment(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| t.kind().is_comment()) } -/// Returns true when the cursor at `offset` is positioned within -/// a string token (regular, f-string, t-string, etc). +/// Whether the last token is positioned within a string token (regular, f-string, t-string, etc). 
/// -/// Note that this will return `false` when positioned within an +/// Note that this will return `false` when the last token is positioned within an /// interpolation block in an f-string or a t-string. fn is_in_string(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| { @@ -850,9 +848,7 @@ fn is_in_string(tokens: &[Token]) -> bool { }) } -/// If the tokens end with `class f` or `def f` we return true. -/// If the tokens end with `class` or `def`, we return false. -/// This is fine because we don't provide completions anyway. +/// Returns true when the tokens indicate that the definition of a new name is being introduced at the end. fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { let is_definition_keyword = |token: &Token| { if matches!( @@ -4088,11 +4084,13 @@ def f[T](x: T): fn no_completions_in_function_def_name() { let builder = completion_test_builder( "\ +foo = 1 + def f ", ); - assert!(builder.auto_import().build().completions().is_empty()); + assert!(builder.build().completions().is_empty()); } #[test] @@ -4104,18 +4102,20 @@ def ); // This is okay because the ide will not request completions when the cursor is in this position. - assert!(!builder.auto_import().build().completions().is_empty()); + assert!(!builder.build().completions().is_empty()); } #[test] fn no_completions_in_class_def_name() { let builder = completion_test_builder( "\ +foo = 1 + class f ", ); - assert!(builder.auto_import().build().completions().is_empty()); + assert!(builder.build().completions().is_empty()); } #[test] @@ -4127,29 +4127,33 @@ class ); // This is okay because the ide will not request completions when the cursor is in this position. 
- assert!(!builder.auto_import().build().completions().is_empty()); + assert!(!builder.build().completions().is_empty()); } #[test] fn no_completions_in_type_def_name() { let builder = completion_test_builder( "\ +foo = 1 + type f = int ", ); - assert!(builder.auto_import().build().completions().is_empty()); + assert!(builder.build().completions().is_empty()); } #[test] fn no_completions_in_maybe_type_def_name() { let builder = completion_test_builder( "\ +foo = 1 + type f ", ); - assert!(builder.auto_import().build().completions().is_empty()); + assert!(builder.build().completions().is_empty()); } #[test] @@ -4161,7 +4165,7 @@ type ); // This is okay because the ide will not request completions when the cursor is in this position. - assert!(!builder.auto_import().build().completions().is_empty()); + assert!(!builder.build().completions().is_empty()); } /// A way to create a simple single-file (named `main.py`) completion test From 73107a083c5f74c7da1a2e85349df78f0a73c3b1 Mon Sep 17 00:00:00 2001 From: David Peter Date: Sun, 2 Nov 2025 14:35:33 +0100 Subject: [PATCH 031/180] [ty] Type inference for comprehensions (#20962) ## Summary Adds type inference for list/dict/set comprehensions, including bidirectional inference: ```py reveal_type({k: v for k, v in [("a", 1), ("b", 2)]}) # dict[Unknown | str, Unknown | int] squares: list[int | None] = [x for x in range(10)] reveal_type(squares) # list[int | None] ``` ## Ecosystem impact I did spot check the changes and most of them seem like known limitations or true positives. Without proper bidirectional inference, we saw a lot of false positives. 
## Test Plan New Markdown tests --- .../corpus/88_regression_pr_20962.py | 18 +++ .../resources/mdtest/comprehensions/basic.md | 89 ++++++++++++++ .../mdtest/literal/collections/dictionary.md | 2 +- .../mdtest/literal/collections/list.md | 2 +- .../mdtest/literal/collections/set.md | 2 +- .../pr_20962_comprehension_panics.md | 50 ++++++++ .../ty_python_semantic/src/types/function.rs | 10 +- .../src/types/infer/builder.rs | 115 ++++++++++++++---- .../types/infer/builder/type_expression.rs | 6 +- 9 files changed, 266 insertions(+), 28 deletions(-) create mode 100644 crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py create mode 100644 crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md diff --git a/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py b/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py new file mode 100644 index 0000000000..d0b9f706ce --- /dev/null +++ b/crates/ty_python_semantic/resources/corpus/88_regression_pr_20962.py @@ -0,0 +1,18 @@ +name_1 +{0: 0 for unique_name_0 in unique_name_1 if name_1} + + +@[name_2 for unique_name_2 in name_2] +def name_2(): + pass + + +def name_2(): + pass + + +match 0: + case name_2(): + pass + case []: + name_1 = 0 diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md index bdd9ec435c..254ac03d73 100644 --- a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md +++ b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md @@ -103,3 +103,92 @@ async def _(): # revealed: Unknown [reveal_type(x) async for x in range(3)] ``` + +## Comprehension expression types + +The type of the comprehension expression itself should reflect the inferred element type: + +```py +from typing import TypedDict, Literal + +# revealed: list[int | Unknown] +reveal_type([x for x in range(10)]) + +# revealed: set[int | Unknown] 
+reveal_type({x for x in range(10)}) + +# revealed: dict[int | Unknown, str | Unknown] +reveal_type({x: str(x) for x in range(10)}) + +# revealed: list[tuple[int, Unknown | str] | Unknown] +reveal_type([(x, y) for x in range(5) for y in ["a", "b", "c"]]) + +squares: list[int | None] = [x**2 for x in range(10)] +reveal_type(squares) # revealed: list[int | None] +``` + +Inference for comprehensions takes the type context into account: + +```py +# Without type context: +reveal_type([x for x in [1, 2, 3]]) # revealed: list[Unknown | int] +reveal_type({x: "a" for x in [1, 2, 3]}) # revealed: dict[Unknown | int, str | Unknown] +reveal_type({str(x): x for x in [1, 2, 3]}) # revealed: dict[str | Unknown, Unknown | int] +reveal_type({x for x in [1, 2, 3]}) # revealed: set[Unknown | int] + +# With type context: +xs: list[int] = [x for x in [1, 2, 3]] +reveal_type(xs) # revealed: list[int] + +ys: dict[int, str] = {x: str(x) for x in [1, 2, 3]} +reveal_type(ys) # revealed: dict[int, str] + +zs: set[int] = {x for x in [1, 2, 3]} +``` + +This also works for nested comprehensions: + +```py +table = [[(x, y) for x in range(3)] for y in range(3)] +reveal_type(table) # revealed: list[list[tuple[int, int] | Unknown] | Unknown] + +table_with_content: list[list[tuple[int, int, str | None]]] = [[(x, y, None) for x in range(3)] for y in range(3)] +reveal_type(table_with_content) # revealed: list[list[tuple[int, int, str | None]]] +``` + +The type context is propagated down into the comprehension: + +```py +class Person(TypedDict): + name: str + +persons: list[Person] = [{"name": n} for n in ["Alice", "Bob"]] +reveal_type(persons) # revealed: list[Person] + +# TODO: This should be an error +invalid: list[Person] = [{"misspelled": n} for n in ["Alice", "Bob"]] +``` + +We promote literals to avoid overly-precise types in invariant positions: + +```py +reveal_type([x for x in ("a", "b", "c")]) # revealed: list[str | Unknown] +reveal_type({x for x in (1, 2, 3)}) # revealed: set[int | Unknown] 
+reveal_type({k: 0 for k in ("a", "b", "c")}) # revealed: dict[str | Unknown, int | Unknown] +``` + +Type context can prevent this promotion from happening: + +```py +list_of_literals: list[Literal["a", "b", "c"]] = [x for x in ("a", "b", "c")] +reveal_type(list_of_literals) # revealed: list[Literal["a", "b", "c"]] + +dict_with_literal_keys: dict[Literal["a", "b", "c"], int] = {k: 0 for k in ("a", "b", "c")} +reveal_type(dict_with_literal_keys) # revealed: dict[Literal["a", "b", "c"], int] + +dict_with_literal_values: dict[str, Literal[1, 2, 3]] = {str(k): k for k in (1, 2, 3)} +reveal_type(dict_with_literal_values) # revealed: dict[str, Literal[1, 2, 3]] + +set_with_literals: set[Literal[1, 2, 3]] = {k for k in (1, 2, 3)} +reveal_type(set_with_literals) # revealed: set[Literal[1, 2, 3]] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md index 7e1acf4efb..ad5829da1f 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/dictionary.md @@ -51,6 +51,6 @@ reveal_type({"a": 1, "b": (1, 2), "c": (1, 2, 3)}) ## Dict comprehensions ```py -# revealed: dict[@Todo(dict comprehension key type), @Todo(dict comprehension value type)] +# revealed: dict[int | Unknown, int | Unknown] reveal_type({x: y for x, y in enumerate(range(42))}) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md index 15f385fa88..325caba10d 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/list.md @@ -41,5 +41,5 @@ reveal_type([1, (1, 2), (1, 2, 3)]) ## List comprehensions ```py -reveal_type([x for x in range(42)]) # revealed: list[@Todo(list comprehension element type)] 
+reveal_type([x for x in range(42)]) # revealed: list[int | Unknown] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md b/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md index 6c6855e40e..d80112ee84 100644 --- a/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md +++ b/crates/ty_python_semantic/resources/mdtest/literal/collections/set.md @@ -35,5 +35,5 @@ reveal_type({1, (1, 2), (1, 2, 3)}) ## Set comprehensions ```py -reveal_type({x for x in range(42)}) # revealed: set[@Todo(set comprehension element type)] +reveal_type({x for x in range(42)}) # revealed: set[int | Unknown] ``` diff --git a/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md b/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md new file mode 100644 index 0000000000..b011d95e8c --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md @@ -0,0 +1,50 @@ +# Documentation of two fuzzer panics involving comprehensions + +Type inference for comprehensions was added in . It +added two new fuzzer panics that are documented here for regression testing. 
+ +## Too many cycle iterations in `place_by_id` + + + +```py +name_5(name_3) +[0 for unique_name_0 in unique_name_1 for unique_name_2 in name_3] + +@{name_3 for unique_name_3 in unique_name_4} +class name_4[**name_3](0, name_2=name_5): + pass + +try: + name_0 = name_4 +except* 0: + pass +else: + match unique_name_12: + case 0: + from name_2 import name_3 + case name_0(): + + @name_4 + def name_3(): + pass + +(name_3 := 0) + +@name_3 +async def name_5(): + pass +``` + +## Too many cycle iterations in `infer_definition_types` + + + +```py +for name_1 in { + {{0: name_4 for unique_name_0 in unique_name_1}: 0 for unique_name_2 in unique_name_3 if name_4}: 0 + for unique_name_4 in name_1 + for name_4 in name_1 +}: + pass +``` diff --git a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 0f5797ae7a..6244b0a85a 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -534,6 +534,14 @@ pub struct FunctionLiteral<'db> { // The Salsa heap is tracked separately. 
impl get_size2::GetSize for FunctionLiteral<'_> {} +fn overloads_and_implementation_cycle_initial<'db>( + _db: &'db dyn Db, + _id: salsa::Id, + _self: FunctionLiteral<'db>, +) -> (Box<[OverloadLiteral<'db>]>, Option>) { + (Box::new([]), None) +} + #[salsa::tracked] impl<'db> FunctionLiteral<'db> { fn name(self, db: &'db dyn Db) -> &'db ast::name::Name { @@ -576,7 +584,7 @@ impl<'db> FunctionLiteral<'db> { self.last_definition(db).spans(db) } - #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] + #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size, cycle_initial=overloads_and_implementation_cycle_initial)] fn overloads_and_implementation( self, db: &'db dyn Db, diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index f6055c0a0e..ad0a103319 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -5943,9 +5943,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ast::Expr::Set(set) => self.infer_set_expression(set, tcx), ast::Expr::Dict(dict) => self.infer_dict_expression(dict, tcx), ast::Expr::Generator(generator) => self.infer_generator_expression(generator), - ast::Expr::ListComp(listcomp) => self.infer_list_comprehension_expression(listcomp), - ast::Expr::DictComp(dictcomp) => self.infer_dict_comprehension_expression(dictcomp), - ast::Expr::SetComp(setcomp) => self.infer_set_comprehension_expression(setcomp), + ast::Expr::ListComp(listcomp) => { + self.infer_list_comprehension_expression(listcomp, tcx) + } + ast::Expr::DictComp(dictcomp) => { + self.infer_dict_comprehension_expression(dictcomp, tcx) + } + ast::Expr::SetComp(setcomp) => self.infer_set_comprehension_expression(setcomp, tcx), ast::Expr::Name(name) => self.infer_name_expression(name), ast::Expr::Attribute(attribute) => self.infer_attribute_expression(attribute), ast::Expr::UnaryOp(unary_op) => 
self.infer_unary_expression(unary_op), @@ -6450,52 +6454,121 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ) } - fn infer_list_comprehension_expression(&mut self, listcomp: &ast::ExprListComp) -> Type<'db> { + /// Return a specialization of the collection class (list, dict, set) based on the type context and the inferred + /// element / key-value types from the comprehension expression. + fn infer_comprehension_specialization( + &self, + collection_class: KnownClass, + inferred_element_types: &[Type<'db>], + tcx: TypeContext<'db>, + ) -> Type<'db> { + // Remove any union elements of that are unrelated to the collection type. + let tcx = tcx.map(|annotation| { + annotation.filter_disjoint_elements( + self.db(), + collection_class.to_instance(self.db()), + InferableTypeVars::None, + ) + }); + + if let Some(annotated_element_types) = tcx + .known_specialization(self.db(), collection_class) + .map(|specialization| specialization.types(self.db())) + && annotated_element_types + .iter() + .zip(inferred_element_types.iter()) + .all(|(annotated, inferred)| inferred.is_assignable_to(self.db(), *annotated)) + { + collection_class + .to_specialized_instance(self.db(), annotated_element_types.iter().copied()) + } else { + collection_class.to_specialized_instance( + self.db(), + inferred_element_types.iter().map(|ty| { + UnionType::from_elements( + self.db(), + [ + ty.promote_literals(self.db(), TypeContext::default()), + Type::unknown(), + ], + ) + }), + ) + } + } + + fn infer_list_comprehension_expression( + &mut self, + listcomp: &ast::ExprListComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprListComp { range: _, node_index: _, - elt: _, + elt, generators, } = listcomp; self.infer_first_comprehension_iter(generators); - KnownClass::List - .to_specialized_instance(self.db(), [todo_type!("list comprehension element type")]) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::ListComprehension(listcomp)); + let scope = 
scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let element_type = inference.expression_type(elt.as_ref()); + + self.infer_comprehension_specialization(KnownClass::List, &[element_type], tcx) } - fn infer_dict_comprehension_expression(&mut self, dictcomp: &ast::ExprDictComp) -> Type<'db> { + fn infer_dict_comprehension_expression( + &mut self, + dictcomp: &ast::ExprDictComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprDictComp { range: _, node_index: _, - key: _, - value: _, + key, + value, generators, } = dictcomp; self.infer_first_comprehension_iter(generators); - KnownClass::Dict.to_specialized_instance( - self.db(), - [ - todo_type!("dict comprehension key type"), - todo_type!("dict comprehension value type"), - ], - ) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::DictComprehension(dictcomp)); + let scope = scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let key_type = inference.expression_type(key.as_ref()); + let value_type = inference.expression_type(value.as_ref()); + + self.infer_comprehension_specialization(KnownClass::Dict, &[key_type, value_type], tcx) } - fn infer_set_comprehension_expression(&mut self, setcomp: &ast::ExprSetComp) -> Type<'db> { + fn infer_set_comprehension_expression( + &mut self, + setcomp: &ast::ExprSetComp, + tcx: TypeContext<'db>, + ) -> Type<'db> { let ast::ExprSetComp { range: _, node_index: _, - elt: _, + elt, generators, } = setcomp; self.infer_first_comprehension_iter(generators); - KnownClass::Set - .to_specialized_instance(self.db(), [todo_type!("set comprehension element type")]) + let scope_id = self + .index + .node_scope(NodeWithScopeRef::SetComprehension(setcomp)); + let scope = scope_id.to_scope_id(self.db(), self.file()); + let inference = infer_scope_types(self.db(), scope); + let element_type = inference.expression_type(elt.as_ref()); + + 
self.infer_comprehension_specialization(KnownClass::Set, &[element_type], tcx) } fn infer_generator_expression_scope(&mut self, generator: &ast::ExprGenerator) { diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index 3c7bdb5464..0d72548e49 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -346,7 +346,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::DictComp(dictcomp) => { - self.infer_dict_comprehension_expression(dictcomp); + self.infer_dict_comprehension_expression(dictcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("Dict comprehensions are not allowed in type expressions"), @@ -355,7 +355,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::ListComp(listcomp) => { - self.infer_list_comprehension_expression(listcomp); + self.infer_list_comprehension_expression(listcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("List comprehensions are not allowed in type expressions"), @@ -364,7 +364,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } ast::Expr::SetComp(setcomp) => { - self.infer_set_comprehension_expression(setcomp); + self.infer_set_comprehension_expression(setcomp, TypeContext::default()); self.report_invalid_type_expression( expression, format_args!("Set comprehensions are not allowed in type expressions"), From 6c3d6124c88fe8d7dcde9e8aa16f9e4193ed780b Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Sun, 2 Nov 2025 15:58:36 +0100 Subject: [PATCH 032/180] [ty] Fix range filtering for tokens starting at the end of the requested range (#21193) Co-authored-by: David Peter --- crates/ty_ide/src/semantic_tokens.rs | 44 +++++++++++++++++++++++----- 1 file changed, 36 insertions(+), 8 deletions(-) diff --git a/crates/ty_ide/src/semantic_tokens.rs 
b/crates/ty_ide/src/semantic_tokens.rs index 12e5e6581b..ca736acd4c 100644 --- a/crates/ty_ide/src/semantic_tokens.rs +++ b/crates/ty_ide/src/semantic_tokens.rs @@ -11,7 +11,7 @@ use ruff_python_ast::{ AnyNodeRef, BytesLiteral, Expr, FString, InterpolatedStringElement, Stmt, StringLiteral, TypeParam, }; -use ruff_text_size::{Ranged, TextLen, TextRange}; +use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use std::ops::Deref; use ty_python_semantic::{ HasType, SemanticModel, semantic_index::definition::DefinitionKind, types::Type, @@ -226,7 +226,12 @@ impl<'db> SemanticTokenVisitor<'db> { let range = ranged.range(); // Only emit tokens that intersect with the range filter, if one is specified if let Some(range_filter) = self.range_filter { - if range.intersect(range_filter).is_none() { + // Only include ranges that have a non-empty overlap. Adjacent ranges + // should be excluded. + if range + .intersect(range_filter) + .is_none_or(TextRange::is_empty) + { return; } } @@ -446,11 +451,11 @@ impl<'db> SemanticTokenVisitor<'db> { let name_start = name.start(); // Split the dotted name and calculate positions for each part - let mut current_offset = ruff_text_size::TextSize::default(); + let mut current_offset = TextSize::default(); for part in name_str.split('.') { if !part.is_empty() { self.add_token( - ruff_text_size::TextRange::at(name_start + current_offset, part.text_len()), + TextRange::at(name_start + current_offset, part.text_len()), token_type, SemanticTokenModifier::empty(), ); @@ -926,6 +931,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { mod tests { use super::*; use crate::tests::cursor_test; + use insta::assert_snapshot; /// Helper function to get semantic tokens for full file (for testing) @@ -1231,10 +1237,7 @@ def function2(): // Get the range that covers only the second function // Hardcoded offsets: function2 starts at position 42, source ends at position 108 - let range = ruff_text_size::TextRange::new( - 
ruff_text_size::TextSize::from(42u32), - ruff_text_size::TextSize::from(108u32), - ); + let range = TextRange::new(TextSize::from(42u32), TextSize::from(108u32)); let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range)); @@ -1278,6 +1281,31 @@ def function2(): } } + /// When a token starts right at where the requested range ends, + /// don't include it in the semantic tokens. + #[test] + fn test_semantic_tokens_range_excludes_boundary_tokens() { + let test = cursor_test( + " +x = 1 +y = 2 +z = 3 +", + ); + + // Range [6..13) starts where "1" ends and ends where "z" starts. + // Expected: only "y" @ 7..8 and "2" @ 11..12 (non-empty overlap with target range). + // Not included: "1" @ 5..6 and "z" @ 13..14 (adjacent, but not overlapping at offsets 6 and 13). + let range = TextRange::new(TextSize::from(6), TextSize::from(13)); + + let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range)); + + assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &range_tokens), @r#" + "y" @ 7..8: Variable + "2" @ 11..12: Number + "#); + } + #[test] fn test_dotted_module_names() { let test = cursor_test( From 566d1d649768234ad281a4334da81925a3c7dde5 Mon Sep 17 00:00:00 2001 From: David Peter Date: Sun, 2 Nov 2025 17:33:31 +0100 Subject: [PATCH 033/180] [ty] Update to the latest version of the conformance suite (#21205) ## Summary There have been some larger-scale updates to the conformance suite since we introduced our CI job, so it seems sensible to bump the version of the conformance suite to the latest state. ## Test plan This is a bit awkward to test. Here is the diff of running ty on the conformance suite before and after this bump. I filtered out line/column information (`sed -re 's/\.py:[0-9]+:[0-9]+:/.py/'`) to avoid spurious changes from content that has simply been moved around. 
```diff 1,2c1 < fatal[panic] Panicked at /home/shark/.cargo/git/checkouts/salsa-e6f3bb7c2a062968/cdd0b85/src/function/execute.rs:419:17 when checking `/home/shark/typing/conformance/tests/aliases_typealiastype.py`: `infer_definition_types(Id(1a99c)): execute: too many cycle iterations` < src/type_checker.py error[unresolved-import] Cannot resolve imported module `tqdm` --- > fatal[panic] Panicked at /home/shark/.cargo/git/checkouts/salsa-e6f3bb7c2a062968/cdd0b85/src/function/execute.rs:419:17 when checking `/home/shark/typing/conformance/tests/aliases_typealiastype.py`: `infer_definition_types(Id(6e4c)): execute: too many cycle iterations` 205,206d203 < tests/constructors_call_metaclass.py error[type-assertion-failure] Argument does not have asserted type `Never` < tests/constructors_call_metaclass.py error[missing-argument] No argument provided for required parameter `x` of function `__new__` 268a266,273 > tests/dataclasses_match_args.py error[type-assertion-failure] Argument does not have asserted type `tuple[Literal["x"]]` > tests/dataclasses_match_args.py error[unresolved-attribute] Class `DC1` has no attribute `__match_args__` > tests/dataclasses_match_args.py error[type-assertion-failure] Argument does not have asserted type `tuple[Literal["x"]]` > tests/dataclasses_match_args.py error[unresolved-attribute] Class `DC2` has no attribute `__match_args__` > tests/dataclasses_match_args.py error[type-assertion-failure] Argument does not have asserted type `tuple[Literal["x"]]` > tests/dataclasses_match_args.py error[unresolved-attribute] Class `DC3` has no attribute `__match_args__` > tests/dataclasses_match_args.py error[unresolved-attribute] Class `DC4` has no attribute `__match_args__` > tests/dataclasses_match_args.py error[type-assertion-failure] Argument does not have asserted type `tuple[()]` 339a345 > tests/directives_assert_type.py error[type-assertion-failure] Argument does not have asserted type `Any` 424a431 > tests/generics_defaults.py 
error[type-assertion-failure] Argument does not have asserted type `Any` 520a528,529 > tests/generics_syntax_infer_variance.py error[invalid-return-type] Function always implicitly returns `None`, which is not assignable to return type `T@ShouldBeCovariant2 | Sequence[T@ShouldBeCovariant2]` > tests/generics_syntax_infer_variance.py error[invalid-return-type] Function always implicitly returns `None`, which is not assignable to return type `int` 711a721 > tests/namedtuples_define_class.py error[too-many-positional-arguments] Too many positional arguments: expected 3, got 4 795d804 < tests/protocols_explicit.py error[invalid-attribute-access] Cannot assign to ClassVar `cm1` from an instance of type `Self@__init__` 822,823d830 < tests/qualifiers_annotated.py error[invalid-syntax] named expression cannot be used within a type annotation < tests/qualifiers_annotated.py error[invalid-syntax] await expression cannot be used within a type annotation 922a930,953 > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `Movie`: Unknown key "novel_adaptation" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `Movie`: Unknown key "year" > tests/typeddicts_extra_items.py error[type-assertion-failure] Argument does not have asserted type `bool` > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `Movie`: Unknown key "novel_adaptation" > tests/typeddicts_extra_items.py error[invalid-argument-type] Invalid argument to key "year" with declared type `int` on TypedDict `InheritedMovie`: value of type `None` > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `InheritedMovie`: Unknown key "other_extra_key" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `MovieEI`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `MovieExtraInt`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] 
Invalid key for TypedDict `MovieExtraStr`: Unknown key "description" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `MovieExtraInt`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `NonClosedMovie`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `ExtraMovie`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `ExtraMovie`: Unknown key "language" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `ClosedMovie`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `MovieExtraStr`: Unknown key "summary" > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `MovieExtraInt`: Unknown key "year" > tests/typeddicts_extra_items.py error[invalid-assignment] Object of type `dict[Unknown | str, Unknown | str | int]` is not assignable to `Mapping[str, int]` > tests/typeddicts_extra_items.py error[type-assertion-failure] Argument does not have asserted type `list[tuple[str, int | str]]` > tests/typeddicts_extra_items.py error[type-assertion-failure] Argument does not have asserted type `list[int | str]` > tests/typeddicts_extra_items.py error[unresolved-attribute] Object of type `IntDict` has no attribute `clear` > tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `IntDictWithNum`: Unknown key "bar" - did you mean "num"? > tests/typeddicts_extra_items.py error[type-assertion-failure] Argument does not have asserted type `tuple[str, int]` > tests/typeddicts_extra_items.py error[invalid-key] Cannot access `IntDictWithNum` with a key of type `str`. Only string literals are allowed as keys on TypedDicts. 
> tests/typeddicts_extra_items.py error[invalid-key] Invalid key for TypedDict `IntDictWithNum` of type `str` 950c981 < Found 949 diagnostics --- > Found 980 diagnostics ``` --- .github/workflows/typing_conformance.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/typing_conformance.yaml b/.github/workflows/typing_conformance.yaml index ed23e6c084..aa99f6dd72 100644 --- a/.github/workflows/typing_conformance.yaml +++ b/.github/workflows/typing_conformance.yaml @@ -24,7 +24,7 @@ env: CARGO_TERM_COLOR: always RUSTUP_MAX_RETRIES: 10 RUST_BACKTRACE: 1 - CONFORMANCE_SUITE_COMMIT: d4f39b27a4a47aac8b6d4019e1b0b5b3156fabdc + CONFORMANCE_SUITE_COMMIT: 9f6d8ced7cd1c8d92687a4e9c96d7716452e471e jobs: typing_conformance: From c32234cf0d8cb10fbab23060c9c65a20417f6a85 Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Sun, 2 Nov 2025 12:39:55 -0500 Subject: [PATCH 034/180] [ty] support subscripting typing.Literal with a type alias (#21207) Fixes https://github.com/astral-sh/ty/issues/1368 ## Summary Add support for patterns like this, where a type alias to a literal type (or union of literal types) is used to subscript `typing.Literal`: ```py type MyAlias = Literal[1] def _(x: Literal[MyAlias]): ... ``` This shows up in the ecosystem report for PEP 613 type alias support. One interesting case is an alias to `bool` or an enum type. `bool` is an equivalent type to `Literal[True, False]`, which is a union of literal types. Similarly an enum type `E` is also equivalent to a union of its member literal types. Since (for explicit type aliases) we infer the RHS directly as a type expression, this makes it difficult for us to distinguish between `bool` and `Literal[True, False]`, so we allow either one (or an alias to either one) to appear inside `Literal`, where other type checkers allow only the latter.
I think for implicit type aliases it may be simpler to support only types derived from actually subscripting `typing.Literal`, though, so I didn't make a TODO-comment commitment here. ## Test Plan Added mdtests, including TODO-filled tests for PEP 613 and implicit type aliases. ### Conformance suite All changes here are positive -- we now emit errors on lines that should be errors. This is a side effect of the new implementation, not the primary purpose of this PR, but it's still a positive change. ### Ecosystem Eliminates one ecosystem false positive, where a PEP 695 type alias for a union of literal types is used to subscript `typing.Literal`. --- .../resources/mdtest/annotations/literal.md | 204 ++++++++++++++++++ .../mdtest/assignment/annotations.md | 2 +- crates/ty_python_semantic/src/types.rs | 17 ++ .../types/infer/builder/type_expression.rs | 52 +++-- 4 files changed, 251 insertions(+), 24 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md index 3bd9e54c85..897be97e77 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md @@ -39,6 +39,8 @@ def f(): reveal_type(a7) # revealed: None reveal_type(a8) # revealed: Literal[1] reveal_type(b1) # revealed: Literal[Color.RED] + # TODO should be `Literal[MissingT.MISSING]` + reveal_type(b2) # revealed: @Todo(functional `Enum` syntax) # error: [invalid-type-form] invalid1: Literal[3 + 4] @@ -66,6 +68,208 @@ a_list: list[int] = [1, 2, 3] invalid6: Literal[a_list[0]] ``` +## Parameterizing with a type alias + +`typing.Literal` can also be parameterized with a type alias for any literal type or union of +literal types. 
+ +### PEP 695 type alias + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Literal +from enum import Enum + +import mod + +class E(Enum): + A = 1 + B = 2 + +type SingleInt = Literal[1] +type SingleStr = Literal["foo"] +type SingleBytes = Literal[b"bar"] +type SingleBool = Literal[True] +type SingleNone = Literal[None] +type SingleEnum = Literal[E.A] +type UnionLiterals = Literal[1, "foo", b"bar", True, None, E.A] +# We support this because it is an equivalent type to the following union of literals, but maybe +# we should not, because it doesn't use `Literal` form? Other type checkers do not. +type AnEnum1 = E +type AnEnum2 = Literal[E.A, E.B] +# Similarly, we support this because it is equivalent to `Literal[True, False]`. +type Bool1 = bool +type Bool2 = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + an_enum1: Literal[AnEnum1], + an_enum2: Literal[AnEnum2], + bool1: Literal[Bool1], + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], + single_int_other_module: Literal[mod.SingleInt], +): + reveal_type(single_int) # revealed: Literal[1] + reveal_type(single_str) # revealed: Literal["foo"] + reveal_type(single_bytes) # revealed: Literal[b"bar"] + reveal_type(single_bool) # revealed: Literal[True] + reveal_type(single_none) # revealed: None + reveal_type(single_enum) # revealed: Literal[E.A] + reveal_type(union_literals) # revealed: Literal[1, "foo", b"bar", True, E.A] | None + reveal_type(an_enum1) # revealed: E + reveal_type(an_enum2) # revealed: E + reveal_type(bool1) # revealed: bool + reveal_type(bool2) # revealed: bool + reveal_type(multiple) # revealed: Literal[1, "foo", E.A] + reveal_type(single_int_other_module) # revealed: Literal[2] +``` + +`mod.py`: + 
+```py +from typing import Literal + +type SingleInt = Literal[2] +``` + +### PEP 613 type alias + +```py +from typing import Literal, TypeAlias +from enum import Enum + +class E(Enum): + A = 1 + B = 2 + +SingleInt: TypeAlias = Literal[1] +SingleStr: TypeAlias = Literal["foo"] +SingleBytes: TypeAlias = Literal[b"bar"] +SingleBool: TypeAlias = Literal[True] +SingleNone: TypeAlias = Literal[None] +SingleEnum: TypeAlias = Literal[E.A] +UnionLiterals: TypeAlias = Literal[1, "foo", b"bar", True, None, E.A] +AnEnum1: TypeAlias = E +AnEnum2: TypeAlias = Literal[E.A, E.B] +Bool1: TypeAlias = bool +Bool2: TypeAlias = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + # Could also not error + an_enum1: Literal[AnEnum1], # error: [invalid-type-form] + an_enum2: Literal[AnEnum2], + # Could also not error + bool1: Literal[Bool1], # error: [invalid-type-form] + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], +): + # TODO should be `Literal[1]` + reveal_type(single_int) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal["foo"]` + reveal_type(single_str) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[b"bar"]` + reveal_type(single_bytes) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[True]` + reveal_type(single_bool) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `None` + reveal_type(single_none) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[E.A]` + reveal_type(single_enum) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[1, "foo", b"bar", True, E.A] | None` + reveal_type(union_literals) # revealed: 
@Todo(Inference of subscript on special form) + # Could also be `E` + reveal_type(an_enum1) # revealed: Unknown + # TODO should be `E` + reveal_type(an_enum2) # revealed: @Todo(Inference of subscript on special form) + # Could also be `bool` + reveal_type(bool1) # revealed: Unknown + # TODO should be `bool` + reveal_type(bool2) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[1, "foo", E.A]` + reveal_type(multiple) # revealed: @Todo(Inference of subscript on special form) +``` + +### Implicit type alias + +```py +from typing import Literal +from enum import Enum + +class E(Enum): + A = 1 + B = 2 + +SingleInt = Literal[1] +SingleStr = Literal["foo"] +SingleBytes = Literal[b"bar"] +SingleBool = Literal[True] +SingleNone = Literal[None] +SingleEnum = Literal[E.A] +UnionLiterals = Literal[1, "foo", b"bar", True, None, E.A] +# For implicit type aliases, we may not want to support this. It's simpler not to, and no other +# type checker does. +AnEnum1 = E +AnEnum2 = Literal[E.A, E.B] +# For implicit type aliases, we may not want to support this. 
+Bool1 = bool +Bool2 = Literal[True, False] + +def _( + single_int: Literal[SingleInt], + single_str: Literal[SingleStr], + single_bytes: Literal[SingleBytes], + single_bool: Literal[SingleBool], + single_none: Literal[SingleNone], + single_enum: Literal[SingleEnum], + union_literals: Literal[UnionLiterals], + an_enum1: Literal[AnEnum1], # error: [invalid-type-form] + an_enum2: Literal[AnEnum2], + bool1: Literal[Bool1], # error: [invalid-type-form] + bool2: Literal[Bool2], + multiple: Literal[SingleInt, SingleStr, SingleEnum], +): + # TODO should be `Literal[1]` + reveal_type(single_int) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal["foo"]` + reveal_type(single_str) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[b"bar"]` + reveal_type(single_bytes) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[True]` + reveal_type(single_bool) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `None` + reveal_type(single_none) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[E.A]` + reveal_type(single_enum) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[1, "foo", b"bar", True, E.A] | None` + reveal_type(union_literals) # revealed: @Todo(Inference of subscript on special form) + reveal_type(an_enum1) # revealed: Unknown + # TODO should be `E` + reveal_type(an_enum2) # revealed: @Todo(Inference of subscript on special form) + reveal_type(bool1) # revealed: Unknown + # TODO should be `bool` + reveal_type(bool2) # revealed: @Todo(Inference of subscript on special form) + # TODO should be `Literal[1, "foo", E.A]` + reveal_type(multiple) # revealed: @Todo(Inference of subscript on special form) +``` + ## Shortening unions of literals When a Literal is parameterized with more than one value, it’s treated as exactly to equivalent to diff --git 
a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md index adf0de358d..ceb588d7fe 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md @@ -259,7 +259,7 @@ class Color(Enum): RED = "red" f: dict[list[Literal[1]], list[Literal[Color.RED]]] = {[1]: [Color.RED, Color.RED]} -reveal_type(f) # revealed: dict[list[Literal[1]], list[Literal[Color.RED]]] +reveal_type(f) # revealed: dict[list[Literal[1]], list[Color]] class X[T]: def __init__(self, value: T): ... diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index be2fb264d8..b20f332999 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1153,6 +1153,23 @@ impl<'db> Type<'db> { matches!(self, Type::FunctionLiteral(..)) } + /// Detects types which are valid to appear inside a `Literal[…]` type annotation. 
+ pub(crate) fn is_literal_or_union_of_literals(&self, db: &'db dyn Db) -> bool { + match self { + Type::Union(union) => union + .elements(db) + .iter() + .all(|ty| ty.is_literal_or_union_of_literals(db)), + Type::StringLiteral(_) + | Type::BytesLiteral(_) + | Type::IntLiteral(_) + | Type::BooleanLiteral(_) + | Type::EnumLiteral(_) => true, + Type::NominalInstance(_) => self.is_none(db) || self.is_bool(db) || self.is_enum(db), + _ => false, + } + } + pub(crate) fn is_union_of_single_valued(&self, db: &'db dyn Db) -> bool { self.as_union().is_some_and(|union| { union.elements(db).iter().all(|ty| { diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index 0d72548e49..50d22fac10 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -6,7 +6,6 @@ use crate::types::diagnostic::{ self, INVALID_TYPE_FORM, NON_SUBSCRIPTABLE, report_invalid_argument_number_to_special_form, report_invalid_arguments_to_annotated, report_invalid_arguments_to_callable, }; -use crate::types::enums::is_enum_class; use crate::types::signatures::Signature; use crate::types::string_annotation::parse_string_annotation; use crate::types::tuple::{TupleSpecBuilder, TupleType}; @@ -1369,7 +1368,6 @@ impl<'db> TypeInferenceBuilder<'db, '_> { parameters: &'param ast::Expr, ) -> Result, Vec<&'param ast::Expr>> { Ok(match parameters { - // TODO handle type aliases ast::Expr::Subscript(ast::ExprSubscript { value, slice, .. 
}) => { let value_ty = self.infer_expression(value, TypeContext::default()); if matches!(value_ty, Type::SpecialForm(SpecialFormType::Literal)) { @@ -1421,27 +1419,6 @@ impl<'db> TypeInferenceBuilder<'db, '_> { literal @ ast::Expr::NumberLiteral(number) if number.value.is_int() => { self.infer_expression(literal, TypeContext::default()) } - // For enum values - ast::Expr::Attribute(ast::ExprAttribute { value, attr, .. }) => { - let value_ty = self.infer_expression(value, TypeContext::default()); - - if is_enum_class(self.db(), value_ty) { - let ty = value_ty - .member(self.db(), &attr.id) - .place - .ignore_possibly_undefined() - .unwrap_or(Type::unknown()); - self.store_expression_type(parameters, ty); - ty - } else { - self.store_expression_type(parameters, Type::unknown()); - if value_ty.is_todo() { - value_ty - } else { - return Err(vec![parameters]); - } - } - } // for negative and positive numbers ast::Expr::UnaryOp(u) if matches!(u.op, ast::UnaryOp::USub | ast::UnaryOp::UAdd) @@ -1451,6 +1428,35 @@ impl<'db> TypeInferenceBuilder<'db, '_> { self.store_expression_type(parameters, ty); ty } + // enum members and aliases to literal types + ast::Expr::Name(_) | ast::Expr::Attribute(_) => { + let subscript_ty = self.infer_expression(parameters, TypeContext::default()); + // TODO handle implicit type aliases also + match subscript_ty { + // type aliases to literal types + Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) => { + let value_ty = type_alias.value_type(self.db()); + if value_ty.is_literal_or_union_of_literals(self.db()) { + return Ok(value_ty); + } + } + // `Literal[SomeEnum.Member]` + Type::EnumLiteral(_) => { + return Ok(subscript_ty); + } + // `Literal[SingletonEnum.Member]`, where `SingletonEnum.Member` simplifies to + // just `SingletonEnum`. + Type::NominalInstance(_) if subscript_ty.is_enum(self.db()) => { + return Ok(subscript_ty); + } + // suppress false positives for e.g. 
members of functional-syntax enums + Type::Dynamic(DynamicType::Todo(_)) => { + return Ok(subscript_ty); + } + _ => {} + } + return Err(vec![parameters]); + } _ => { self.infer_expression(parameters, TypeContext::default()); return Err(vec![parameters]); From 0454a72674e3f55fe081b94e7e60e72090b62489 Mon Sep 17 00:00:00 2001 From: Carl Meyer Date: Sun, 2 Nov 2025 18:21:54 -0500 Subject: [PATCH 035/180] [ty] don't union in default type for annotated parameters (#21208) --- .../resources/mdtest/function/parameters.md | 15 +++---- .../resources/mdtest/ty_extensions.md | 2 +- .../src/types/infer/builder.rs | 44 ++++++------------- 3 files changed, 21 insertions(+), 40 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/function/parameters.md b/crates/ty_python_semantic/resources/mdtest/function/parameters.md index eb7316fe91..c629f26565 100644 --- a/crates/ty_python_semantic/resources/mdtest/function/parameters.md +++ b/crates/ty_python_semantic/resources/mdtest/function/parameters.md @@ -1,12 +1,9 @@ # Function parameter types Within a function scope, the declared type of each parameter is its annotated type (or Unknown if -not annotated). The initial inferred type is the union of the declared type with the type of the -default value expression (if any). If both are fully static types, this union should simplify to the -annotated type (since the default value type must be assignable to the annotated type, and for fully -static types this means subtype-of, which simplifies in unions). But if the annotated type is -Unknown or another non-fully-static type, the default value type may still be relevant as lower -bound. +not annotated). The initial inferred type is the annotated type of the parameter, if any. If there +is no annotation, it is the union of `Unknown` with the type of the default value expression (if +any). 
The variadic parameter is a variadic tuple of its annotated type; the variadic-keywords parameter is a dictionary from strings to its annotated type. @@ -41,13 +38,13 @@ def g(*args, **kwargs): ## Annotation is present but not a fully static type -The default value type should be a lower bound on the inferred type. +If there is an annotation, we respect it fully and don't union in the default value type. ```py from typing import Any def f(x: Any = 1): - reveal_type(x) # revealed: Any | Literal[1] + reveal_type(x) # revealed: Any ``` ## Default value type must be assignable to annotated type @@ -64,7 +61,7 @@ def f(x: int = "foo"): from typing import Any def g(x: Any = "foo"): - reveal_type(x) # revealed: Any | Literal["foo"] + reveal_type(x) # revealed: Any ``` ## Stub functions diff --git a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md index ba88851015..22d92b54af 100644 --- a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md +++ b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md @@ -99,7 +99,7 @@ static_assert(is_assignable_to(int, Unknown)) def explicit_unknown(x: Unknown, y: tuple[str, Unknown], z: Unknown = 1) -> None: reveal_type(x) # revealed: Unknown reveal_type(y) # revealed: tuple[str, Unknown] - reveal_type(z) # revealed: Unknown | Literal[1] + reveal_type(z) # revealed: Unknown ``` `Unknown` can be subclassed, just like `Any`: diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index ad0a103319..b74ff75404 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -2423,15 +2423,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { /// /// The declared type is the annotated type, if any, or `Unknown`. /// - /// The inferred type is the annotated type, unioned with the type of the default value, if - /// any. 
If both types are fully static, this union is a no-op (it should simplify to just the - /// annotated type.) But in a case like `f(x=None)` with no annotated type, we want to infer - /// the type `Unknown | None` for `x`, not just `Unknown`, so that we can error on usage of `x` - /// that would not be valid for `None`. - /// - /// If the default-value type is not assignable to the declared (annotated) type, we ignore the - /// default-value type and just infer the annotated type; this is the same way we handle - /// assignments, and allows an explicit annotation to override a bad inference. + /// The inferred type is the annotated type, if any. If there is no annotation, it is the union + /// of `Unknown` and the type of the default value, if any. /// /// Parameter definitions are odd in that they define a symbol in the function-body scope, so /// the Definition belongs to the function body scope, but the expressions (annotation and @@ -2460,23 +2453,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .map(|default| self.file_expression_type(default)); if let Some(annotation) = parameter.annotation.as_ref() { let declared_ty = self.file_expression_type(annotation); - let declared_and_inferred_ty = if let Some(default_ty) = default_ty { - if default_ty.is_assignable_to(self.db(), declared_ty) { - DeclaredAndInferredType::MightBeDifferent { - declared_ty: TypeAndQualifiers::declared(declared_ty), - inferred_ty: UnionType::from_elements(self.db(), [declared_ty, default_ty]), - } - } else if (self.in_stub() - || self.in_function_overload_or_abstractmethod() - || self - .class_context_of_current_method() - .is_some_and(|class| class.is_protocol(self.db()))) - && default - .as_ref() - .is_some_and(|d| d.is_ellipsis_literal_expr()) + if let Some(default_ty) = default_ty { + if !default_ty.is_assignable_to(self.db(), declared_ty) + && !((self.in_stub() + || self.in_function_overload_or_abstractmethod() + || self + .class_context_of_current_method() + 
.is_some_and(|class| class.is_protocol(self.db()))) + && default + .as_ref() + .is_some_and(|d| d.is_ellipsis_literal_expr())) { - DeclaredAndInferredType::are_the_same_type(declared_ty) - } else { if let Some(builder) = self .context .report_lint(&INVALID_PARAMETER_DEFAULT, parameter_with_default) @@ -2488,15 +2475,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { declared_ty.display(self.db()) )); } - DeclaredAndInferredType::are_the_same_type(declared_ty) } - } else { - DeclaredAndInferredType::are_the_same_type(declared_ty) - }; + } self.add_declaration_with_binding( parameter.into(), definition, - &declared_and_inferred_ty, + &DeclaredAndInferredType::are_the_same_type(declared_ty), ); } else { let ty = if let Some(default_ty) = default_ty { From 02f2dba28e312d0297e8658e97c53a64c403e497 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:45:20 +0000 Subject: [PATCH 036/180] Update Rust crate indoc to v2.0.7 (#21219) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index af119dab7e..410dba03fe 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1574,9 +1574,12 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.6" +version = "2.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] [[package]] name = "inotify" From f14631e1ccf3862d5360c376382b04f2cedbed63 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:46:05 +0000 Subject: [PATCH 037/180] Update Rust crate indicatif to v0.18.2 (#21218) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 70 ++++++++++++++++++++---------------------------------- 1 file changed, 26 insertions(+), 44 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 410dba03fe..0c0402be2b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -45,7 +45,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4" dependencies = [ "anstyle", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -106,7 +106,7 @@ dependencies = [ "anstyle-lossy", "anstyle-parse", "html-escape", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -687,7 +687,7 @@ dependencies = [ "encode_unicode", "libc", "once_cell", - "unicode-width 0.2.1", + "unicode-width", "windows-sys 0.61.0", ] @@ -1007,7 +1007,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.61.0", + "windows-sys 0.59.0", ] [[package]] @@ -1093,7 +1093,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -1250,7 +1250,7 @@ version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -1560,13 +1560,13 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.18.0" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a646d946d06bedbbc4cac4c218acf4bbf2d87757a784857025f4d447e4e1cd" +checksum = "ade6dfcba0dfb62ad59e59e7241ec8912af34fd29e0e743e3db992bd278e8b65" dependencies = [ "console 0.16.1", "portable-atomic", - "unicode-width 0.2.1", + "unicode-width", "unit-prefix", "vt100", "web-time", @@ -2324,7 +2324,7 @@ checksum = 
"31095ca1f396e3de32745f42b20deef7bc09077f918b085307e8eab6ddd8fb9c" dependencies = [ "once_cell", "serde", - "unicode-width 0.2.1", + "unicode-width", "unscanny", "version-ranges", ] @@ -2345,7 +2345,7 @@ dependencies = [ "serde", "smallvec", "thiserror 1.0.69", - "unicode-width 0.2.1", + "unicode-width", "url", "urlencoding", "version-ranges", @@ -2899,7 +2899,7 @@ dependencies = [ "snapbox", "toml", "tryfn", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -3050,7 +3050,7 @@ dependencies = [ "serde", "static_assertions", "tracing", - "unicode-width 0.2.1", + "unicode-width", ] [[package]] @@ -3140,7 +3140,7 @@ dependencies = [ "toml", "typed-arena", "unicode-normalization", - "unicode-width 0.2.1", + "unicode-width", "unicode_names2", "url", ] @@ -3538,7 +3538,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -3934,7 +3934,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -4657,12 +4657,6 @@ dependencies = [ "tinyvec", ] -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - [[package]] name = "unicode-width" version = "0.2.1" @@ -4798,25 +4792,13 @@ checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" [[package]] name = "vt100" -version = "0.15.2" +version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de" +checksum = "054ff75fb8fa83e609e685106df4faeffdf3a735d3c74ebce97ec557d5d36fd9" dependencies = [ "itoa", - "log", - "unicode-width 0.1.14", - "vte 0.11.1", -] - -[[package]] -name = "vte" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197" -dependencies = [ - "arrayvec", - "utf8parse", - "vte_generate_state_changes", + "unicode-width", + "vte 0.15.0", ] [[package]] @@ -4829,13 +4811,13 @@ dependencies = [ ] [[package]] -name = "vte_generate_state_changes" -version = "0.1.2" +name = "vte" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e" +checksum = "a5924018406ce0063cd67f8e008104968b74b563ee1b85dde3ed1f7cb87d3dbd" dependencies = [ - "proc-macro2", - "quote", + "arrayvec", + "memchr", ] [[package]] @@ -5014,7 +4996,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] From 41fe4d7f8c2dc519b7b05d433b6eeea4e21a8ba8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:47:12 +0000 Subject: [PATCH 038/180] Update Rust crate ignore to v0.4.25 (#21217) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c0402be2b..483b873fe5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1286,9 +1286,9 @@ checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "globset" -version = "0.4.17" +version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab69130804d941f8075cfd713bf8848a2c3b3f201a9457a11e6f87e1ab62305" +checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3" dependencies = [ "aho-corasick", "bstr", @@ -1513,9 +1513,9 @@ dependencies = [ [[package]] name = "ignore" -version = "0.4.24" +version = "0.4.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "81776e6f9464432afcc28d03e52eb101c93b6f0566f52aef2427663e700f0403" +checksum = "d3d782a365a015e0f5c04902246139249abf769125006fbe7649e2ee88169b4a" dependencies = [ "crossbeam-deque", "globset", From b754abff1b4a2deb3bb899e60782a138925d8076 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:47:30 +0100 Subject: [PATCH 039/180] Update Rust crate aho-corasick to v1.1.4 (#21213) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [aho-corasick](https://redirect.github.com/BurntSushi/aho-corasick) | workspace.dependencies | patch | `1.1.3` -> `1.1.4` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
BurntSushi/aho-corasick (aho-corasick) ### [`v1.1.4`](https://redirect.github.com/BurntSushi/aho-corasick/compare/1.1.3...1.1.4) [Compare Source](https://redirect.github.com/BurntSushi/aho-corasick/compare/1.1.3...1.1.4)
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 483b873fe5..ef4d55d8b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10,9 +10,9 @@ checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" dependencies = [ "memchr", ] From 222c6fd49652b2f221f6a4a0ebc213b8c1689e44 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:48:02 +0000 Subject: [PATCH 040/180] Update Rust crate ctrlc to v3.5.1 (#21215) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 42 ++++++++++++++++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ef4d55d8b7..c624f57e47 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -287,6 +287,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "block2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5" +dependencies = [ + "objc2", +] + [[package]] name = "boxcar" version = "0.2.14" @@ -877,11 +886,11 @@ dependencies = [ [[package]] name = "ctrlc" -version = "3.5.0" +version = "3.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "881c5d0a13b2f1498e2306e82cbada78390e152d4b1378fb28a84f4dcd0dc4f3" +checksum = "73736a89c4aff73035ba2ed2e565061954da00d4970fc9ac25dcc85a2a20d790" dependencies = [ - "dispatch", + "dispatch2", "nix 0.30.1", "windows-sys 0.61.0", ] @@ -1011,10 +1020,16 @@ dependencies = [ ] [[package]] -name = "dispatch" -version = "0.2.0" +name = "dispatch2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.9.4", + "block2", + "libc", + "objc2", +] [[package]] name = "displaydoc" @@ -2176,6 +2191,21 @@ dependencies = [ "libc", ] +[[package]] +name = "objc2" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c2599ce0ec54857b29ce62166b0ed9b4f6f1a70ccc9a71165b6154caca8c05" +dependencies = [ + "objc2-encode", +] + +[[package]] +name = "objc2-encode" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef25abbcd74fb2609453eb695bd2f860d389e457f67dc17cafc8b8cbc89d0c33" + [[package]] name = "once_cell" version = "1.21.3" From 50b75cfcc68f28c470b085b62b11be7069fa8bb8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:49:44 +0000 Subject: [PATCH 042/180] Update cargo-bins/cargo-binstall action to v1.15.10 (#21212) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5661ff48b7..0fc4eeb444 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -438,7 +438,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - name: "Install cargo-binstall" - uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9 + uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10 - name: "Install cargo-fuzz" # Download the latest version from quick install and not the github releases because github releases only has MUSL targets. run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm @@ -698,7 +698,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: cargo-bins/cargo-binstall@afcf9780305558bcc9e4bc94b7589ab2bb8b6106 # v1.15.9 + - uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10 - run: cargo binstall --no-confirm cargo-shear - run: cargo shear From 80eeb1d64fc4cf7e09237ce8617df7850f8af92a Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 01:50:12 +0000 Subject: [PATCH 043/180] Update Rust crate clap to v4.5.51 (#21214) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c624f57e47..a51d68804d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -442,9 +442,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.50" +version = "4.5.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2cfd7bf8a6017ddaa4e32ffe7403d547790db06bd171c1c53926faab501623" +checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5" dependencies = [ "clap_builder", "clap_derive", @@ -452,9 +452,9 @@ dependencies = [ 
[[package]] name = "clap_builder" -version = "4.5.50" +version = "4.5.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4c05b9e80c5ccd3a7ef080ad7b6ba7d6fc00a985b8b157197075677c82c7a0" +checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a" dependencies = [ "anstream", "anstyle", From 73b9b8eb6b7c2f791b326b3ab8a1154f753ec224 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:15:56 +0000 Subject: [PATCH 044/180] Update Rust crate proc-macro2 to v1.0.103 (#21221) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a51d68804d..86f223bc84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2583,9 +2583,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.101" +version = "1.0.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" +checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" dependencies = [ "unicode-ident", ] From c0bd092fa9a856c272c379f1b3783f5e7502b7fb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:20:19 +0000 Subject: [PATCH 045/180] Update Rust crate snapbox to v0.6.23 (#21223) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 86f223bc84..11e23ab78a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -642,7 +642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] 
[[package]] @@ -651,7 +651,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1016,7 +1016,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -1698,7 +1698,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1762,7 +1762,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -3841,9 +3841,9 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "snapbox" -version = "0.6.22" +version = "0.6.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805d09a74586d9b17061e5be6ee5f8cc37e5982c349948114ffc5f68093fe5ec" +checksum = "96fa1ce81be900d083b30ec2d481e6658c2acfaa2cfc7be45ccc2cc1b820edb3" dependencies = [ "anstream", "anstyle", @@ -3861,9 +3861,9 @@ dependencies = [ [[package]] name = "snapbox-macros" -version = "0.3.10" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" +checksum = "3b750c344002d7cc69afb9da00ebd9b5c0f8ac2eb7d115d9d45d5b5f47718d74" dependencies = [ "anstream", ] From f477e11d26d9d096886346df07f2d5d14da3fe43 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:21:21 +0000 Subject: [PATCH 046/180] Update Rust crate thiserror to v2.0.17 (#21225) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 
deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 11e23ab78a..5287ab0e9a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -519,7 +519,7 @@ checksum = "85a8ab73a1c02b0c15597b22e09c7dc36e63b2f601f9d1e83ac0c3decd38b1ae" dependencies = [ "nix 0.29.0", "terminfo", - "thiserror 2.0.16", + "thiserror 2.0.17", "which", "windows-sys 0.59.0", ] @@ -1861,7 +1861,7 @@ dependencies = [ "paste", "peg", "regex", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] @@ -2394,7 +2394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21e0a3a33733faeaf8651dfee72dd0f388f0c8e5ad496a3478fa5a922f49cfa8" dependencies = [ "memchr", - "thiserror 2.0.16", + "thiserror 2.0.17", "ucd-trie", ] @@ -2600,7 +2600,7 @@ dependencies = [ "pep440_rs", "pep508_rs", "serde", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", ] @@ -2615,7 +2615,7 @@ dependencies = [ "newtype-uuid", "quick-xml", "strip-ansi-escapes", - "thiserror 2.0.16", + "thiserror 2.0.17", "uuid", ] @@ -2787,7 +2787,7 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ "getrandom 0.2.16", "libredox", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] @@ -2909,7 +2909,7 @@ dependencies = [ "strum", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "tikv-jemallocator", "toml", "tracing", @@ -3005,7 +3005,7 @@ dependencies = [ "serde_json", "similar", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "tracing-subscriber", "ty_static", @@ -3166,7 +3166,7 @@ dependencies = [ "strum_macros", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "typed-arena", "unicode-normalization", @@ -3209,7 +3209,7 @@ dependencies = [ "serde_json", "serde_with", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "uuid", ] @@ -3241,7 +3241,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.17", ] [[package]] @@ -3295,7 +3295,7 @@ dependencies = [ "similar", 
"smallvec", "static_assertions", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", ] @@ -3440,7 +3440,7 @@ dependencies = [ "serde", "serde_json", "shellexpand", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "tracing", "tracing-log", @@ -4054,11 +4054,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.16" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.16", + "thiserror-impl 2.0.17", ] [[package]] @@ -4074,9 +4074,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.16" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", @@ -4432,7 +4432,7 @@ dependencies = [ "schemars", "serde", "serde_json", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "tracing", "ty_combine", @@ -4489,7 +4489,7 @@ dependencies = [ "strum_macros", "tempfile", "test-case", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "ty_python_semantic", "ty_static", @@ -4523,7 +4523,7 @@ dependencies = [ "serde_json", "shellexpand", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "tracing", "tracing-subscriber", "ty_combine", @@ -4563,7 +4563,7 @@ dependencies = [ "serde", "smallvec", "tempfile", - "thiserror 2.0.16", + "thiserror 2.0.17", "toml", "tracing", "ty_python_semantic", From 3bef60f69a6249589bd17bbfea6b9587640c026d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:21:47 +0000 Subject: [PATCH 047/180] Update Rust crate toml to v0.9.8 (#21227) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5287ab0e9a..3ea34392f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1568,7 +1568,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.16.0", + "hashbrown 0.15.5", "serde", "serde_core", ] @@ -3747,9 +3747,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" dependencies = [ "serde_core", ] @@ -4158,9 +4158,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ "indexmap", "serde_core", @@ -4173,9 +4173,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" dependencies = [ "serde_core", ] @@ -4194,18 +4194,18 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" +checksum = 
"c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109" +checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2" [[package]] name = "tracing" From cb98175a363180eb6e7fd1ddb7cb9a0f5679bae8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:22:04 +0000 Subject: [PATCH 048/180] Update Rust crate syn to v2.0.108 (#21224) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3ea34392f1..04018a5b06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3928,9 +3928,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.106" +version = "2.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" +checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917" dependencies = [ "proc-macro2", "quote", From c596a78c08bfd0a5aed04eb9d3a2df5c66fb5386 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:27:40 +0000 Subject: [PATCH 049/180] Update Rust crate schemars to v1.0.5 (#21222) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 04018a5b06..df4540ff31 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3634,9 +3634,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +checksum = "1317c3bf3e7df961da95b0a56a172a02abead31276215a0497241a7624b487ce" dependencies = [ "dyn-clone", "ref-cast", @@ -3647,9 +3647,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80" +checksum = "5f760a6150d45dd66ec044983c124595ae76912e77ed0b44124cb3e415cce5d9" dependencies = [ "proc-macro2", "quote", From 770b4d12abcee57ff0d0b37e8e0dadefe29ddbbc Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:35:50 +0000 Subject: [PATCH 050/180] Update Rust crate tikv-jemallocator to v0.6.1 (#21226) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index df4540ff31..6bda5cfd9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4103,9 +4103,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd3c60906412afa9c2b5b5a48ca6a5abe5736aec9eb48ad05037a677e52e4e2d" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" dependencies = [ "cc", "libc", @@ -4113,9 +4113,9 @@ dependencies = [ [[package]] name = "tikv-jemallocator" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cec5ff18518d81584f477e9bfdf957f5bb0979b0bac3af4ca30b5b3ae2d2865" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" dependencies = [ "libc", "tikv-jemalloc-sys", From c11b00bea0aea32c6dadcf0d2f302ea0e698ffe8 Mon Sep 
17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 02:57:13 +0000 Subject: [PATCH 051/180] Update Rust crate wasm-bindgen-test to v0.3.55 (#21232) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 68 ++++++++++++++++++++++-------------------------------- 1 file changed, 27 insertions(+), 41 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6bda5cfd9d..2174b2788a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -642,7 +642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -651,7 +651,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1108,7 +1108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1698,7 +1698,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1762,7 +1762,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1809,9 +1809,9 @@ checksum = "a037eddb7d28de1d0fc42411f501b53b75838d313908078d6698d064f3029b24" [[package]] name = "js-sys" -version = "0.3.80" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e" +checksum = 
"b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -3568,7 +3568,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3964,7 +3964,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -4886,9 +4886,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", @@ -4897,25 +4897,11 @@ dependencies = [ "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - [[package]] name = "wasm-bindgen-futures" -version = "0.4.53" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0b221ff421256839509adbb55998214a70d829d3a28c69b4a6672e9d2a42f67" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -4926,9 +4912,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4936,31 +4922,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = 
"0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", "syn", - "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.103" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] [[package]] name = "wasm-bindgen-test" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aee0a0f5343de9221a0d233b04520ed8dc2e6728dce180b1dcd9288ec9d9fa3c" +checksum = "bfc379bfb624eb59050b509c13e77b4eb53150c350db69628141abce842f2373" dependencies = [ "js-sys", "minicov", @@ -4971,9 +4957,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-test-macro" -version = "0.3.53" +version = "0.3.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a369369e4360c2884c3168d22bded735c43cccae97bbc147586d4b480edd138d" +checksum = "085b2df989e1e6f9620c1311df6c996e83fe16f57792b272ce1e024ac16a90f1" dependencies = [ "proc-macro2", "quote", @@ -4982,9 +4968,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.80" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", @@ -5026,7 +5012,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" 
dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] From fc71c90de6f1019eb9a5889424b46ad854ec7adb Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:02:16 +0000 Subject: [PATCH 052/180] Update taiki-e/install-action action to v2.62.45 (#21233) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 16 ++++++++-------- .github/workflows/sync_typeshed.yaml | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0fc4eeb444..e537aaa11b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -256,11 +256,11 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-nextest - name: "Install cargo insta" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-insta - name: "Install uv" @@ -320,11 +320,11 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-nextest - name: "Install cargo insta" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-insta - name: "Install uv" @@ -353,7 +353,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - name: 
"Install cargo nextest" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-nextest - name: "Install uv" @@ -944,7 +944,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-codspeed @@ -982,7 +982,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-codspeed @@ -1020,7 +1020,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-codspeed diff --git a/.github/workflows/sync_typeshed.yaml b/.github/workflows/sync_typeshed.yaml index f7bb4c5426..18df7ecf94 100644 --- a/.github/workflows/sync_typeshed.yaml +++ b/.github/workflows/sync_typeshed.yaml @@ -207,12 +207,12 @@ jobs: uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" if: ${{ success() }} - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-nextest - name: "Install cargo insta" if: ${{ success() }} - uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21 + uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 with: tool: cargo-insta - name: Update snapshots From dc373e639e681c02c7e8aa1a3e25b225908ec451 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: 
Mon, 3 Nov 2025 03:04:20 +0000 Subject: [PATCH 053/180] Update CodSpeedHQ/action action to v4.3.1 (#21234) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e537aaa11b..f4f1fbffff 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -952,7 +952,7 @@ jobs: run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 with: mode: instrumentation run: cargo codspeed run @@ -990,7 +990,7 @@ jobs: run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 with: mode: instrumentation run: cargo codspeed run @@ -1028,7 +1028,7 @@ jobs: run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark - name: "Run benchmarks" - uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 + uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 env: # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't # appear to provide much useful insight for our walltime benchmarks right now From bb055273506497448741707b8da4a3e3b2212f9f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:06:16 +0000 Subject: [PATCH 054/180] Update dependency monaco-editor to ^0.54.0 (#21235) Co-authored-by: renovate[bot] 
<29139614+renovate[bot]@users.noreply.github.com> --- playground/package-lock.json | 37 ++++++++++++++++++++++++------------ playground/ruff/package.json | 2 +- playground/ty/package.json | 2 +- 3 files changed, 27 insertions(+), 14 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index c8ddfc60a9..908e07dadb 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -1789,12 +1789,6 @@ "@types/react": "^19.0.0" } }, - "node_modules/@types/trusted-types": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-1.0.6.tgz", - "integrity": "sha512-230RC8sFeHoT6sSUlRO6a8cAnclO06eeiq1QDfiv2FGCLWFvvERWgwIQD4FWqD9A69BN7Lzee4OXwoMVnnsWDw==", - "license": "MIT" - }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "8.38.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.38.0.tgz", @@ -2732,6 +2726,12 @@ "node": ">=0.10.0" } }, + "node_modules/dompurify": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.7.tgz", + "integrity": "sha512-VaTstWtsneJY8xzy7DekmYWEOZcmzIe3Qb3zPd4STve1OBTa+e+WmS1ITQec1fZYXI3HCsOZZiSMpG6oxoWMWQ==", + "license": "(MPL-2.0 OR Apache-2.0)" + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -4733,6 +4733,18 @@ "@jridgewell/sourcemap-codec": "^1.5.0" } }, + "node_modules/marked": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-14.0.0.tgz", + "integrity": "sha512-uIj4+faQ+MgHgwUW1l2PsPglZLOLOT1uErt06dAPtx2kjteLAkbsd/0FiYg/MGS+i7ZKLb7w2WClxHkzOOuryQ==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -4841,13 +4853,14 @@ } }, 
"node_modules/monaco-editor": { - "version": "0.53.0", - "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.53.0.tgz", - "integrity": "sha512-0WNThgC6CMWNXXBxTbaYYcunj08iB5rnx4/G56UOPeL9UVIUGGHA1GR0EWIh9Ebabj7NpCRawQ5b0hfN1jQmYQ==", + "version": "0.54.0", + "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.54.0.tgz", + "integrity": "sha512-hx45SEUoLatgWxHKCmlLJH81xBo0uXP4sRkESUpmDQevfi+e7K1VuiSprK6UpQ8u4zOcKNiH0pMvHvlMWA/4cw==", "license": "MIT", "peer": true, "dependencies": { - "@types/trusted-types": "^1.0.6" + "dompurify": "3.1.7", + "marked": "14.0.0" } }, "node_modules/ms": { @@ -6546,7 +6559,7 @@ "@monaco-editor/react": "^4.4.6", "classnames": "^2.3.2", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", @@ -6575,7 +6588,7 @@ "@monaco-editor/react": "^4.7.0", "classnames": "^2.5.1", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "pyodide": "^0.28.0", "react": "^19.0.0", "react-dom": "^19.0.0", diff --git a/playground/ruff/package.json b/playground/ruff/package.json index 50f87a38d1..abb46f73e9 100644 --- a/playground/ruff/package.json +++ b/playground/ruff/package.json @@ -18,7 +18,7 @@ "@monaco-editor/react": "^4.4.6", "classnames": "^2.3.2", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", diff --git a/playground/ty/package.json b/playground/ty/package.json index ad2b546980..f5b1fa8f0f 100644 --- a/playground/ty/package.json +++ b/playground/ty/package.json @@ -18,7 +18,7 @@ "@monaco-editor/react": "^4.7.0", "classnames": "^2.5.1", "lz-string": "^1.5.0", - "monaco-editor": "^0.53.0", + "monaco-editor": "^0.54.0", "pyodide": "^0.28.0", "react": "^19.0.0", "react-dom": "^19.0.0", From 666dd5fef128c0886b00ebfebe099146fa05c044 Mon Sep 17 00:00:00 2001 From: 
"renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:13:38 +0000 Subject: [PATCH 055/180] Update dependency ruff to v0.14.3 (#21239) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docs/requirements-insiders.txt | 2 +- docs/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements-insiders.txt b/docs/requirements-insiders.txt index 127d4bfaa2..5d0d47a756 100644 --- a/docs/requirements-insiders.txt +++ b/docs/requirements-insiders.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.13.3 +ruff==0.14.3 mkdocs==1.6.1 mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.git@39da7a5e761410349e9a1b8abf593b0cdd5453ff mkdocs-redirects==1.2.2 diff --git a/docs/requirements.txt b/docs/requirements.txt index 9742b48785..9ccce87029 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.13.3 +ruff==0.14.3 mkdocs==1.6.1 mkdocs-material==9.5.38 mkdocs-redirects==1.2.2 From 3493c9b67ae6c3698f13ec1aa9f9f2def782886e Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:17:38 +0000 Subject: [PATCH 056/180] Update dependency tomli to v2.3.0 (#21240) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- python/ruff-ecosystem/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/ruff-ecosystem/pyproject.toml b/python/ruff-ecosystem/pyproject.toml index f688f10094..dd5af5cfeb 100644 --- a/python/ruff-ecosystem/pyproject.toml +++ b/python/ruff-ecosystem/pyproject.toml @@ -6,7 +6,7 @@ build-backend = "hatchling.build" name = "ruff-ecosystem" version = "0.0.0" requires-python = ">=3.11" -dependencies = ["unidiff==0.7.5", "tomli_w==1.2.0", "tomli==2.2.1"] +dependencies = ["unidiff==0.7.5", "tomli_w==1.2.0", "tomli==2.3.0"] [project.scripts] ruff-ecosystem = 
"ruff_ecosystem.cli:entrypoint" From ade727ce662a51b2ce83aafd148ac47c0d9d240c Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:20:42 +0000 Subject: [PATCH 057/180] Update Rust crate csv to v1.4.0 (#21243) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2174b2788a..32ddac77b5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -865,14 +865,14 @@ dependencies = [ [[package]] name = "csv" -version = "1.3.1" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +checksum = "52cd9d68cf7efc6ddfaaee42e7288d3a99d613d4b50f76ce9827ae0c6e14f938" dependencies = [ "csv-core", "itoa", "ryu", - "serde", + "serde_core", ] [[package]] From 884c3b178e7e92c251e60db2fed17fcb1c85e665 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:21:03 +0000 Subject: [PATCH 058/180] Update Rust crate indexmap to v2.12.0 (#21244) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 32ddac77b5..dba454d334 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1563,12 +1563,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.11.4" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" +checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.0", "serde", "serde_core", ] From 61c1007137a2407fde8ebf917c871107d0c6f428 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" 
<29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:21:41 +0000 Subject: [PATCH 059/180] Update Rust crate bitflags to v2.10.0 (#21242) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 44 ++++++++++++++++++++++---------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dba454d334..8e465bfb40 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -240,7 +240,7 @@ version = "0.72.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cexpr", "clang-sys", "itertools 0.13.0", @@ -262,9 +262,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.4" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" [[package]] name = "bitvec" @@ -1025,7 +1025,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "block2", "libc", "objc2", @@ -1318,7 +1318,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "ignore", "walkdir", ] @@ -1602,7 +1602,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "inotify-sys", "libc", ] @@ -1900,7 +1900,7 @@ version = 
"0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "libc", "redox_syscall", ] @@ -2105,7 +2105,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -2117,7 +2117,7 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "cfg-if", "cfg_aliases", "libc", @@ -2145,7 +2145,7 @@ version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "fsevent-sys", "inotify", "kqueue", @@ -2776,7 +2776,7 @@ version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", ] [[package]] @@ -2864,7 +2864,7 @@ dependencies = [ "argfile", "assert_fs", "bincode", - "bitflags 2.9.4", + "bitflags 2.10.0", "cachedir", "clap", "clap_complete_command", @@ -3119,7 +3119,7 @@ version = "0.14.3" dependencies = [ "aho-corasick", "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "clap", "colored 3.0.0", "fern", @@ -3225,7 +3225,7 @@ name = "ruff_python_ast" version = "0.0.0" dependencies = [ "aho-corasick", - "bitflags 2.9.4", + "bitflags 2.10.0", "compact_str", "get-size2", "is-macro", @@ -3329,7 +3329,7 @@ dependencies = [ name = "ruff_python_literal" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "itertools 0.14.0", "ruff_python_ast", 
"unic-ucd-category", @@ -3340,7 +3340,7 @@ name = "ruff_python_parser" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "bstr", "compact_str", "get-size2", @@ -3365,7 +3365,7 @@ dependencies = [ name = "ruff_python_semantic" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "insta", "is-macro", "ruff_cache", @@ -3386,7 +3386,7 @@ dependencies = [ name = "ruff_python_stdlib" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "unicode-ident", ] @@ -3564,7 +3564,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "errno", "libc", "linux-raw-sys", @@ -4375,7 +4375,7 @@ dependencies = [ name = "ty_ide" version = "0.0.0" dependencies = [ - "bitflags 2.9.4", + "bitflags 2.10.0", "camino", "get-size2", "insta", @@ -4446,7 +4446,7 @@ name = "ty_python_semantic" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "bitvec", "camino", "colored 3.0.0", @@ -4502,7 +4502,7 @@ name = "ty_server" version = "0.0.0" dependencies = [ "anyhow", - "bitflags 2.9.4", + "bitflags 2.10.0", "crossbeam", "dunce", "insta", From fe95ff6b066afbdd44ba7c4605569c1b04e275a5 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:23:24 +0000 Subject: [PATCH 060/180] Update dependency pyodide to ^0.29.0 (#21236) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- playground/package-lock.json | 15 +++++++++++---- playground/ty/package.json | 2 +- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 908e07dadb..3de5b851bf 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -1747,6 +1747,12 @@ "vite": "^5.2.0 || ^6 || 
^7" } }, + "node_modules/@types/emscripten": { + "version": "1.41.5", + "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.41.5.tgz", + "integrity": "sha512-cMQm7pxu6BxtHyqJ7mQZ2kXWV5SLmugybFdHCBbJ5eHzOo6VhBckEgAT3//rP5FwPHNPeEiq4SmQ5ucBwsOo4Q==", + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -5278,11 +5284,12 @@ } }, "node_modules/pyodide": { - "version": "0.28.0", - "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.28.0.tgz", - "integrity": "sha512-QML/Gh8eu50q5zZKLNpW6rgS0XUdK+94OSL54AUSKV8eJAxgwZrMebqj+CyM0EbF3EUX8JFJU3ryaxBViHammQ==", + "version": "0.29.0", + "resolved": "https://registry.npmjs.org/pyodide/-/pyodide-0.29.0.tgz", + "integrity": "sha512-ObIvsTmcrxAWKg+FT1GjfSdDmQc5CabnYe/nn5BCuhr9BVVITeQ24DBdZuG5B2tIiAZ9YonBpnDB7cmHZyd2Rw==", "license": "MPL-2.0", "dependencies": { + "@types/emscripten": "^1.41.4", "ws": "^8.5.0" }, "engines": { @@ -6589,7 +6596,7 @@ "classnames": "^2.5.1", "lz-string": "^1.5.0", "monaco-editor": "^0.54.0", - "pyodide": "^0.28.0", + "pyodide": "^0.29.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", diff --git a/playground/ty/package.json b/playground/ty/package.json index f5b1fa8f0f..cf36dceece 100644 --- a/playground/ty/package.json +++ b/playground/ty/package.json @@ -19,7 +19,7 @@ "classnames": "^2.5.1", "lz-string": "^1.5.0", "monaco-editor": "^0.54.0", - "pyodide": "^0.28.0", + "pyodide": "^0.29.0", "react": "^19.0.0", "react-dom": "^19.0.0", "react-resizable-panels": "^3.0.0", From 31194d048df70f2a4e80cc47e0991ab6fd4eae49 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:31:24 +0000 Subject: [PATCH 061/180] Update Rust crate serde_with to v3.15.1 (#21247) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 11 +++++------ 1 file changed, 
5 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8e465bfb40..a4c079fd6b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3765,20 +3765,19 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.1" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c522100790450cf78eeac1507263d0a350d4d5b30df0c8e1fe051a10c22b376e" +checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" dependencies = [ - "serde", - "serde_derive", + "serde_core", "serde_with_macros", ] [[package]] name = "serde_with_macros" -version = "3.14.1" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327ada00f7d64abaac1e55a6911e90cf665aa051b9a561c7006c157f4633135e" +checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" dependencies = [ "darling", "proc-macro2", From 14fce1f788764e4ecb4897393eb5dbd7fb9a2c7f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:32:04 +0000 Subject: [PATCH 062/180] Update Rust crate regex to v1.12.2 (#21246) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a4c079fd6b..44533f19d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2812,9 +2812,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.11.3" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b5288124840bee7b386bc413c487869b360b2b4ec421ea56425128692f2a82c" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", From f97c38dd881aac85367f5b925e45e1646cc5047f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:32:49 +0000 Subject: [PATCH 063/180] Update 
Rust crate matchit to 0.9.0 (#21245) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 44533f19d7..929a838dca 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1108,7 +1108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -2019,9 +2019,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "matchit" -version = "0.8.6" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f926ade0c4e170215ae43342bf13b9310a437609c81f29f86c5df6657582ef9" +checksum = "9ea5f97102eb9e54ab99fb70bb175589073f554bdadfb74d9bd656482ea73e2a" [[package]] name = "memchr" @@ -3568,7 +3568,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -3963,7 +3963,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -5011,7 +5011,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d12718ea12..ed7fbf4fcb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -124,7 +124,7 @@ lsp-server = { version = "0.7.6" } lsp-types = { git = "https://github.com/astral-sh/lsp-types.git", rev = "3512a9f", features = [ "proposed", ] } -matchit = { version = "0.8.1" } +matchit = { version = "0.9.0" } memchr = { version = "2.7.1" } mimalloc = { version = "0.1.39" } natord = { version = "1.0.9" } From 
7dbfb56c3ded7f8bff552f1e984d503cae13456c Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:33:16 +0000 Subject: [PATCH 064/180] Update astral-sh/setup-uv action to v7 (#21250) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 26 ++++++++++---------- .github/workflows/daily_fuzz.yaml | 2 +- .github/workflows/mypy_primer.yaml | 4 +-- .github/workflows/publish-pypi.yml | 2 +- .github/workflows/sync_typeshed.yaml | 6 ++--- .github/workflows/ty-ecosystem-analyzer.yaml | 2 +- .github/workflows/ty-ecosystem-report.yaml | 2 +- 7 files changed, 22 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f4f1fbffff..3454de57af 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -264,7 +264,7 @@ jobs: with: tool: cargo-insta - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: ty mdtests (GitHub annotations) @@ -328,7 +328,7 @@ jobs: with: tool: cargo-insta - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: "Run tests" @@ -357,7 +357,7 @@ jobs: with: tool: cargo-nextest - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: "true" - name: "Run tests" @@ -458,7 +458,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # 
v7.1.2 - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 name: Download Ruff binary to test id: download-cached-binary @@ -494,7 +494,7 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup component add rustfmt # Run all code generation scripts, and verify that the current output is @@ -529,7 +529,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: python-version: ${{ env.PYTHON_VERSION }} activate-environment: true @@ -667,7 +667,7 @@ jobs: branch: ${{ github.event.pull_request.base.ref }} workflow: "ci.yaml" check_artifacts: true - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Fuzz env: FORCE_COLOR: 1 @@ -711,7 +711,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - name: "Install Rust toolchain" run: rustup show @@ -756,7 +756,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: 
Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 with: @@ -796,7 +796,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - name: Install uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: python-version: 3.13 activate-environment: true @@ -938,7 +938,7 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show @@ -976,7 +976,7 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show @@ -1014,7 +1014,7 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup show diff --git a/.github/workflows/daily_fuzz.yaml b/.github/workflows/daily_fuzz.yaml index c299e00fda..171f0c481a 100644 --- a/.github/workflows/daily_fuzz.yaml +++ b/.github/workflows/daily_fuzz.yaml @@ -34,7 +34,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" 
run: rustup show - name: "Install mold" diff --git a/.github/workflows/mypy_primer.yaml b/.github/workflows/mypy_primer.yaml index 672a038537..89028a2235 100644 --- a/.github/workflows/mypy_primer.yaml +++ b/.github/workflows/mypy_primer.yaml @@ -43,7 +43,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: @@ -85,7 +85,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index e5473f80a3..5bfeee7f5b 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -22,7 +22,7 @@ jobs: id-token: write steps: - name: "Install uv" - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 with: pattern: wheels-* diff --git a/.github/workflows/sync_typeshed.yaml b/.github/workflows/sync_typeshed.yaml index 18df7ecf94..3d84483c2b 100644 --- a/.github/workflows/sync_typeshed.yaml +++ b/.github/workflows/sync_typeshed.yaml @@ -77,7 +77,7 @@ jobs: run: | git config --global user.name typeshedbot git config --global user.email '<>' - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Sync typeshed stubs run: | rm -rf "ruff/${VENDORED_TYPESHED}" @@ -131,7 
+131,7 @@ jobs: with: persist-credentials: true ref: ${{ env.UPSTREAM_BRANCH}} - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Setup git run: | git config --global user.name typeshedbot @@ -170,7 +170,7 @@ jobs: with: persist-credentials: true ref: ${{ env.UPSTREAM_BRANCH}} - - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: Setup git run: | git config --global user.name typeshedbot diff --git a/.github/workflows/ty-ecosystem-analyzer.yaml b/.github/workflows/ty-ecosystem-analyzer.yaml index a59cc6c947..cd763c3db1 100644 --- a/.github/workflows/ty-ecosystem-analyzer.yaml +++ b/.github/workflows/ty-ecosystem-analyzer.yaml @@ -33,7 +33,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact diff --git a/.github/workflows/ty-ecosystem-report.yaml b/.github/workflows/ty-ecosystem-report.yaml index 30b3bc93ab..2078478505 100644 --- a/.github/workflows/ty-ecosystem-report.yaml +++ b/.github/workflows/ty-ecosystem-report.yaml @@ -29,7 +29,7 @@ jobs: persist-credentials: false - name: Install the latest version of uv - uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: enable-cache: true # zizmor: ignore[cache-poisoning] acceptable risk for CloudFlare pages artifact From 02879fa3777892c7cdbfc8ef518edda87e61c601 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:34:13 +0000 Subject: [PATCH 
065/180] Update actions/setup-node action to v6 (#21249) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 6 +++--- .github/workflows/publish-playground.yml | 2 +- .github/workflows/publish-ty-playground.yml | 2 +- .github/workflows/publish-wasm.yml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3454de57af..ce8c15f345 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -378,7 +378,7 @@ jobs: - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 cache: "npm" @@ -758,7 +758,7 @@ jobs: persist-credentials: false - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 - name: "Cache pre-commit" @@ -900,7 +900,7 @@ jobs: - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 cache: "npm" diff --git a/.github/workflows/publish-playground.yml b/.github/workflows/publish-playground.yml index 24bf4b4fef..8986a6d130 100644 --- a/.github/workflows/publish-playground.yml +++ b/.github/workflows/publish-playground.yml @@ -31,7 +31,7 @@ jobs: persist-credentials: false - name: "Install 
Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 package-manager-cache: false diff --git a/.github/workflows/publish-ty-playground.yml b/.github/workflows/publish-ty-playground.yml index f28086517c..a745e80794 100644 --- a/.github/workflows/publish-ty-playground.yml +++ b/.github/workflows/publish-ty-playground.yml @@ -35,7 +35,7 @@ jobs: persist-credentials: false - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 package-manager-cache: false diff --git a/.github/workflows/publish-wasm.yml b/.github/workflows/publish-wasm.yml index a51c888286..a0c6226406 100644 --- a/.github/workflows/publish-wasm.yml +++ b/.github/workflows/publish-wasm.yml @@ -45,7 +45,7 @@ jobs: jq '.name="@astral-sh/ruff-wasm-${{ matrix.target }}"' crates/ruff_wasm/pkg/package.json > /tmp/package.json mv /tmp/package.json crates/ruff_wasm/pkg - run: cp LICENSE crates/ruff_wasm/pkg # wasm-pack does not put the LICENSE file in the pkg - - uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0 + - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 registry-url: "https://registry.npmjs.org" From f947c23cd7ac079934ef7bd9e01b516906179ff7 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 03:41:20 +0000 Subject: [PATCH 066/180] Update Rust crate tempfile to v3.23.0 (#21248) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock 
index 929a838dca..bfe02a8697 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1016,7 +1016,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.61.0", + "windows-sys 0.59.0", ] [[package]] @@ -1108,7 +1108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -3568,7 +3568,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -3955,15 +3955,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.22.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] @@ -5011,7 +5011,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.61.0", + "windows-sys 0.52.0", ] [[package]] From 0dfd55babfc7ea1c980c05e516ac9e07075952e9 Mon Sep 17 00:00:00 2001 From: Brent Westbrook <36778786+ntBre@users.noreply.github.com> Date: Mon, 3 Nov 2025 08:38:34 -0500 Subject: [PATCH 067/180] Delete unused `AsciiCharSet` in `FURB156` (#21181) Summary -- This code has been unused since #14233 but not detected by clippy I guess. This should help to remove the temptation to use the set comparison again like I suggested in #21144. And we shouldn't do the set comparison because of #13802, which #14233 fixed. 
Test Plan -- Existing tests --- .../refurb/rules/hardcoded_string_charset.rs | 31 +------------------ 1 file changed, 1 insertion(+), 30 deletions(-) diff --git a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs index 151bdc3113..8194ac87d4 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/hardcoded_string_charset.rs @@ -62,40 +62,11 @@ pub(crate) fn hardcoded_string_charset_literal(checker: &Checker, expr: &ExprStr struct NamedCharset { name: &'static str, bytes: &'static [u8], - ascii_char_set: AsciiCharSet, -} - -/// Represents the set of ascii characters in form of a bitset. -#[derive(Debug, Copy, Clone, Eq, PartialEq)] -struct AsciiCharSet(u128); - -impl AsciiCharSet { - /// Creates the set of ascii characters from `bytes`. - /// Returns None if there is non-ascii byte. - const fn from_bytes(bytes: &[u8]) -> Option { - // TODO: simplify implementation, when const-traits are supported - // https://github.com/rust-lang/rust-project-goals/issues/106 - let mut bitset = 0; - let mut i = 0; - while i < bytes.len() { - if !bytes[i].is_ascii() { - return None; - } - bitset |= 1 << bytes[i]; - i += 1; - } - Some(Self(bitset)) - } } impl NamedCharset { const fn new(name: &'static str, bytes: &'static [u8]) -> Self { - Self { - name, - bytes, - // SAFETY: The named charset is guaranteed to have only ascii bytes. - ascii_char_set: AsciiCharSet::from_bytes(bytes).unwrap(), - } + Self { name, bytes } } } From e017b039df5f17102e7ccfd7820814e029563102 Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Mon, 3 Nov 2025 14:33:05 +0000 Subject: [PATCH 068/180] [ty] Favor in scope completions (#21194) ## Summary Resolves https://github.com/astral-sh/ty/issues/1464 We sort the completions before we add the unimported ones, meaning that imported completions show up before unimported ones. 
This is also spoken about in https://github.com/astral-sh/ty/issues/1274, and this is probably a duplicate of that. @AlexWaygood mentions this [here](https://github.com/astral-sh/ty/issues/1274#issuecomment-3345942698) too. ## Test Plan Add a test showing even if an unimported completion "should" (alphabetically before) come first, we favor the imported one. --- .../completion-evaluation-tasks.csv | 8 ++--- crates/ty_ide/src/completion.rs | 34 +++++++++++++++++-- 2 files changed, 35 insertions(+), 7 deletions(-) diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index 01b8ca4373..4bea881bf6 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -1,5 +1,5 @@ name,file,index,rank -auto-import-skips-current-module,main.py,0,4 +auto-import-skips-current-module,main.py,0,1 fstring-completions,main.py,0,1 higher-level-symbols-preferred,main.py,0, higher-level-symbols-preferred,main.py,1,1 @@ -10,17 +10,17 @@ import-deprioritizes-type_check_only,main.py,1,1 import-deprioritizes-type_check_only,main.py,2,1 import-deprioritizes-type_check_only,main.py,3,2 import-deprioritizes-type_check_only,main.py,4,3 -internal-typeshed-hidden,main.py,0,4 +internal-typeshed-hidden,main.py,0,5 none-completion,main.py,0,11 numpy-array,main.py,0, numpy-array,main.py,1,1 object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,1,1 raise-uses-base-exception,main.py,0,2 -scope-existing-over-new-import,main.py,0,474 +scope-existing-over-new-import,main.py,0,13 scope-prioritize-closer,main.py,0,2 scope-simple-long-identifier,main.py,0,1 tstring-completions,main.py,0,1 ty-extensions-lower-stdlib,main.py,0,8 type-var-typing-over-ast,main.py,0,3 -type-var-typing-over-ast,main.py,1,270 +type-var-typing-over-ast,main.py,1,277 diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 
7ca15362b0..5b84e199f0 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -883,9 +883,16 @@ fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { /// This has the effect of putting all dunder attributes after "normal" /// attributes, and all single-underscore attributes after dunder attributes. fn compare_suggestions(c1: &Completion, c2: &Completion) -> Ordering { - let (kind1, kind2) = (NameKind::classify(&c1.name), NameKind::classify(&c2.name)); + fn key<'a>(completion: &'a Completion) -> (bool, NameKind, bool, &'a Name) { + ( + completion.module_name.is_some(), + NameKind::classify(&completion.name), + completion.is_type_check_only, + &completion.name, + ) + } - (kind1, c1.is_type_check_only, &c1.name).cmp(&(kind2, c2.is_type_check_only, &c2.name)) + key(c1).cmp(&key(c2)) } #[cfg(test)] @@ -3440,8 +3447,8 @@ from os. .build() .snapshot(); assert_snapshot!(snapshot, @r" - AbraKadabra :: Unavailable :: package Kadabra :: Literal[1] :: Current module + AbraKadabra :: Unavailable :: package "); } @@ -4168,6 +4175,27 @@ type assert!(!builder.build().completions().is_empty()); } + #[test] + fn favour_symbols_currently_imported() { + let snapshot = CursorTest::builder() + .source("main.py", "long_nameb = 1\nlong_name") + .source("foo.py", "def long_namea(): ...") + .completion_test_builder() + .type_signatures() + .auto_import() + .module_names() + .filter(|c| c.name.contains("long_name")) + .build() + .snapshot(); + + // Even though long_namea is alphabetically before long_nameb, + // long_nameb is currently imported and should be preferred. + assert_snapshot!(snapshot, @r" + long_nameb :: Literal[1] :: Current module + long_namea :: Unavailable :: foo + "); + } + /// A way to create a simple single-file (named `main.py`) completion test /// builder. 
/// From 6ddfb51d71f511cfc35858d2d08b7e94cc5d1903 Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Mon, 3 Nov 2025 09:45:23 -0500 Subject: [PATCH 069/180] [`flake8-bugbear`] Mark fix as unsafe for non-NFKC attribute names (`B009`, `B010`) (#21131) --- .../test/fixtures/flake8_bugbear/B009_B010.py | 9 +++++ .../rules/getattr_with_constant.rs | 32 +++++++++++++++-- .../rules/setattr_with_constant.rs | 34 +++++++++++++++++-- ...ke8_bugbear__tests__B009_B009_B010.py.snap | 18 ++++++++++ ...ke8_bugbear__tests__B010_B009_B010.py.snap | 16 +++++++++ 5 files changed, 105 insertions(+), 4 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py index ce6e5c291e..3562a5a989 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B009_B010.py @@ -70,3 +70,12 @@ builtins.getattr(foo, "bar") # Regression test for: https://github.com/astral-sh/ruff/issues/18353 setattr(foo, "__debug__", 0) + +# Regression test for: https://github.com/astral-sh/ruff/issues/21126 +# Non-NFKC attribute names should be marked as unsafe. Python normalizes identifiers in +# attribute access (obj.attr) using NFKC, but does not normalize string +# arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to +# `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +# Example: the long s character "ſ" normalizes to "s" under NFKC. 
+getattr(foo, "ſ") +setattr(foo, "ſ", 1) diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs index 7905de14ca..9271d2b01a 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/getattr_with_constant.rs @@ -3,6 +3,7 @@ use ruff_python_ast::{self as ast, Expr}; use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private}; use ruff_source_file::LineRanges; use ruff_text_size::Ranged; +use unicode_normalization::UnicodeNormalization; use crate::checkers::ast::Checker; use crate::fix::edits::pad; @@ -29,6 +30,21 @@ use crate::{AlwaysFixableViolation, Edit, Fix}; /// obj.foo /// ``` /// +/// ## Fix safety +/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC) +/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax +/// (e.g., `obj.attr`), but does not normalize string arguments passed to `getattr`. Rewriting +/// `getattr(obj, "ſ")` to `obj.ſ` would be interpreted as `obj.s` at runtime, changing behavior. +/// +/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so: +/// ```python +/// # This accesses an attribute with the exact name "ſ" (if it exists) +/// value = getattr(obj, "ſ") +/// +/// # But this would normalize to "s" and access a different attribute +/// obj.ſ # This is interpreted as obj.s, not obj.ſ +/// ``` +/// /// ## References /// - [Python documentation: `getattr`](https://docs.python.org/3/library/functions.html#getattr) #[derive(ViolationMetadata)] @@ -69,8 +85,14 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, return; } + // Mark fixes as unsafe for non-NFKC attribute names. 
Python normalizes identifiers using NFKC, so using + // attribute syntax (e.g., `obj.attr`) would normalize the name and potentially change + // program behavior. + let attr_name = value.to_str(); + let is_unsafe = attr_name.nfkc().collect::() != attr_name; + let mut diagnostic = checker.report_diagnostic(GetAttrWithConstant, expr.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + let edit = Edit::range_replacement( pad( if matches!( obj, @@ -88,5 +110,11 @@ pub(crate) fn getattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, checker.locator(), ), expr.range(), - ))); + ); + let fix = if is_unsafe { + Fix::unsafe_edit(edit) + } else { + Fix::safe_edit(edit) + }; + diagnostic.set_fix(fix); } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs index d3ba5b953e..51fee45110 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/setattr_with_constant.rs @@ -4,6 +4,7 @@ use ruff_text_size::{Ranged, TextRange}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_codegen::Generator; use ruff_python_stdlib::identifiers::{is_identifier, is_mangled_private}; +use unicode_normalization::UnicodeNormalization; use crate::checkers::ast::Checker; use crate::{AlwaysFixableViolation, Edit, Fix}; @@ -28,6 +29,23 @@ use crate::{AlwaysFixableViolation, Edit, Fix}; /// obj.foo = 42 /// ``` /// +/// ## Fix safety +/// The fix is marked as unsafe for attribute names that are not in NFKC (Normalization Form KC) +/// normalization. Python normalizes identifiers using NFKC when using attribute access syntax +/// (e.g., `obj.attr = value`), but does not normalize string arguments passed to `setattr`. +/// Rewriting `setattr(obj, "ſ", 1)` to `obj.ſ = 1` would be interpreted as `obj.s = 1` at +/// runtime, changing behavior. 
+/// +/// For example, the long s character `"ſ"` normalizes to `"s"` under NFKC, so: +/// ```python +/// # This creates an attribute with the exact name "ſ" +/// setattr(obj, "ſ", 1) +/// getattr(obj, "ſ") # Returns 1 +/// +/// # But this would normalize to "s" and set a different attribute +/// obj.ſ = 1 # This is interpreted as obj.s = 1, not obj.ſ = 1 +/// ``` +/// /// ## References /// - [Python documentation: `setattr`](https://docs.python.org/3/library/functions.html#setattr) #[derive(ViolationMetadata)] @@ -89,6 +107,12 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, return; } + // Mark fixes as unsafe for non-NFKC attribute names. Python normalizes identifiers using NFKC, so using + // attribute syntax (e.g., `obj.attr = value`) would normalize the name and potentially change + // program behavior. + let attr_name = name.to_str(); + let is_unsafe = attr_name.nfkc().collect::() != attr_name; + // We can only replace a `setattr` call (which is an `Expr`) with an assignment // (which is a `Stmt`) if the `Expr` is already being used as a `Stmt` // (i.e., it's directly within an `Stmt::Expr`). 
@@ -100,10 +124,16 @@ pub(crate) fn setattr_with_constant(checker: &Checker, expr: &Expr, func: &Expr, { if expr == child.as_ref() { let mut diagnostic = checker.report_diagnostic(SetAttrWithConstant, expr.range()); - diagnostic.set_fix(Fix::safe_edit(Edit::range_replacement( + let edit = Edit::range_replacement( assignment(obj, name.to_str(), value, checker.generator()), expr.range(), - ))); + ); + let fix = if is_unsafe { + Fix::unsafe_edit(edit) + } else { + Fix::safe_edit(edit) + }; + diagnostic.set_fix(fix); } } } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap index ab05bd0966..febc145dd7 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B009_B009_B010.py.snap @@ -360,3 +360,21 @@ help: Replace `getattr` with attribute access 70 | 71 | # Regression test for: https://github.com/astral-sh/ruff/issues/18353 72 | setattr(foo, "__debug__", 0) + +B009 [*] Do not call `getattr` with a constant attribute value. It is not any safer than normal property access. + --> B009_B010.py:80:1 + | +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. +80 | getattr(foo, "ſ") + | ^^^^^^^^^^^^^^^^^ +81 | setattr(foo, "ſ", 1) + | +help: Replace `getattr` with attribute access +77 | # arguments passed to getattr/setattr. Rewriting `getattr(ns, "ſ")` to +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. 
+ - getattr(foo, "ſ") +80 + foo.ſ +81 | setattr(foo, "ſ", 1) +note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap index 87c2f01bfe..8dab4338a1 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B010_B009_B010.py.snap @@ -118,3 +118,19 @@ help: Replace `setattr` with assignment 56 | 57 | # Regression test for: https://github.com/astral-sh/ruff/issues/7455#issuecomment-1722458885 58 | assert getattr(func, '_rpc')is True + +B010 [*] Do not call `setattr` with a constant attribute value. It is not any safer than normal property access. + --> B009_B010.py:81:1 + | +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. +80 | getattr(foo, "ſ") +81 | setattr(foo, "ſ", 1) + | ^^^^^^^^^^^^^^^^^^^^ + | +help: Replace `setattr` with assignment +78 | # `ns.ſ` would be interpreted as `ns.s` at runtime, changing behavior. +79 | # Example: the long s character "ſ" normalizes to "s" under NFKC. 
+80 | getattr(foo, "ſ") + - setattr(foo, "ſ", 1) +81 + foo.ſ = 1 +note: This is an unsafe fix and may change runtime behavior From e2e83acd2f2929cb382b4f8eccd92b63f0d31944 Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Mon, 3 Nov 2025 14:49:58 +0000 Subject: [PATCH 070/180] [ty] Remove mentions of VS Code from server logs (#21155) Co-authored-by: Micha Reiser --- crates/ty_project/src/metadata/options.rs | 16 ++++----- crates/ty_project/src/metadata/value.rs | 21 ++++++----- crates/ty_python_semantic/src/diagnostic.rs | 4 +-- crates/ty_python_semantic/src/program.rs | 7 ++-- .../ty_python_semantic/src/site_packages.rs | 35 +++++++++++++------ crates/ty_server/src/session/options.rs | 4 +-- 6 files changed, 50 insertions(+), 37 deletions(-) diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs index bcd821c53d..1e498f6bf8 100644 --- a/crates/ty_project/src/metadata/options.rs +++ b/crates/ty_project/src/metadata/options.rs @@ -131,9 +131,7 @@ impl Options { ValueSource::File(path) => PythonVersionSource::ConfigFile( PythonVersionFileSource::new(path.clone(), ranged_version.range()), ), - ValueSource::PythonVSCodeExtension => { - PythonVersionSource::PythonVSCodeExtension - } + ValueSource::Editor => PythonVersionSource::Editor, }, }); @@ -153,7 +151,7 @@ impl Options { ValueSource::File(path) => { SysPrefixPathOrigin::ConfigFileSetting(path.clone(), python_path.range()) } - ValueSource::PythonVSCodeExtension => SysPrefixPathOrigin::PythonVSCodeExtension, + ValueSource::Editor => SysPrefixPathOrigin::Editor, }; Some(PythonEnvironment::new( @@ -819,8 +817,8 @@ impl Rules { ValueSource::File(_) => LintSource::File, ValueSource::Cli => LintSource::Cli, - ValueSource::PythonVSCodeExtension => { - unreachable!("Can't configure rules from the Python VSCode extension") + ValueSource::Editor => { + unreachable!("Can't configure rules from the user's editor") } }; if let Ok(severity) = Severity::try_from(**level) { @@ -957,7 
+955,7 @@ fn build_include_filter( SubDiagnosticSeverity::Info, "The pattern was specified on the CLI", )), - ValueSource::PythonVSCodeExtension => unreachable!("Can't configure includes from the Python VSCode extension"), + ValueSource::Editor => unreachable!("Can't configure includes from the user's editor"), } })?; } @@ -1040,8 +1038,8 @@ fn build_exclude_filter( SubDiagnosticSeverity::Info, "The pattern was specified on the CLI", )), - ValueSource::PythonVSCodeExtension => unreachable!( - "Can't configure excludes from the Python VSCode extension" + ValueSource::Editor => unreachable!( + "Can't configure excludes from the user's editor" ) } })?; diff --git a/crates/ty_project/src/metadata/value.rs b/crates/ty_project/src/metadata/value.rs index f1f08d718a..22d940df58 100644 --- a/crates/ty_project/src/metadata/value.rs +++ b/crates/ty_project/src/metadata/value.rs @@ -28,8 +28,11 @@ pub enum ValueSource { /// long argument (`--extra-paths`) or `--config key=value`. Cli, - /// The value comes from an LSP client configuration. 
- PythonVSCodeExtension, + /// The value comes from the user's editor, + /// while it's left open if specified as a setting + /// or if the value was auto-discovered by the editor + /// (e.g., the Python environment) + Editor, } impl ValueSource { @@ -37,7 +40,7 @@ impl ValueSource { match self { ValueSource::File(path) => Some(&**path), ValueSource::Cli => None, - ValueSource::PythonVSCodeExtension => None, + ValueSource::Editor => None, } } @@ -137,11 +140,7 @@ impl RangedValue { } pub fn python_extension(value: T) -> Self { - Self::with_range( - value, - ValueSource::PythonVSCodeExtension, - TextRange::default(), - ) + Self::with_range(value, ValueSource::Editor, TextRange::default()) } pub fn with_range(value: T, source: ValueSource, range: TextRange) -> Self { @@ -368,7 +367,7 @@ impl RelativePathBuf { } pub fn python_extension(path: impl AsRef) -> Self { - Self::new(path, ValueSource::PythonVSCodeExtension) + Self::new(path, ValueSource::Editor) } /// Returns the relative path as specified by the user. 
@@ -398,7 +397,7 @@ impl RelativePathBuf { pub fn absolute(&self, project_root: &SystemPath, system: &dyn System) -> SystemPathBuf { let relative_to = match &self.0.source { ValueSource::File(_) => project_root, - ValueSource::Cli | ValueSource::PythonVSCodeExtension => system.current_directory(), + ValueSource::Cli | ValueSource::Editor => system.current_directory(), }; SystemPath::absolute(&self.0, relative_to) @@ -454,7 +453,7 @@ impl RelativeGlobPattern { ) -> Result { let relative_to = match &self.0.source { ValueSource::File(_) => project_root, - ValueSource::Cli | ValueSource::PythonVSCodeExtension => system.current_directory(), + ValueSource::Cli | ValueSource::Editor => system.current_directory(), }; let pattern = PortableGlobPattern::parse(&self.0, kind)?; diff --git a/crates/ty_python_semantic/src/diagnostic.rs b/crates/ty_python_semantic/src/diagnostic.rs index 5936d0874d..f58c90191c 100644 --- a/crates/ty_python_semantic/src/diagnostic.rs +++ b/crates/ty_python_semantic/src/diagnostic.rs @@ -88,10 +88,10 @@ pub fn add_inferred_python_version_hint_to_diagnostic( or in a configuration file", ); } - crate::PythonVersionSource::PythonVSCodeExtension => { + crate::PythonVersionSource::Editor => { diagnostic.info(format_args!( "Python {version} was assumed when {action} \ - because it's the version of the selected Python interpreter in the VS Code Python extension", + because it's the version of the selected Python interpreter in your editor", )); } crate::PythonVersionSource::InstallationDirectoryLayout { diff --git a/crates/ty_python_semantic/src/program.rs b/crates/ty_python_semantic/src/program.rs index 8f06527951..1a977de985 100644 --- a/crates/ty_python_semantic/src/program.rs +++ b/crates/ty_python_semantic/src/program.rs @@ -113,8 +113,11 @@ pub enum PythonVersionSource { /// long argument (`--extra-paths`) or `--config key=value`. Cli, - /// The value comes from the Python VS Code extension (the selected interpreter). 
- PythonVSCodeExtension, + /// The value comes from the user's editor, + /// while it's left open if specified as a setting + /// or if the value was auto-discovered by the editor + /// (e.g., the Python environment) + Editor, /// We fell back to a default value because the value was not specified via the CLI or a config file. #[default] diff --git a/crates/ty_python_semantic/src/site_packages.rs b/crates/ty_python_semantic/src/site_packages.rs index 9062228363..c162dfc70a 100644 --- a/crates/ty_python_semantic/src/site_packages.rs +++ b/crates/ty_python_semantic/src/site_packages.rs @@ -111,6 +111,12 @@ impl SitePackagesPaths { } } +impl fmt::Display for SitePackagesPaths { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_list().entries(self.0.iter()).finish() + } +} + impl From<[SystemPathBuf; N]> for SitePackagesPaths { fn from(paths: [SystemPathBuf; N]) -> Self { Self(IndexSet::from(paths)) @@ -543,7 +549,7 @@ System site-packages will not be used for module resolution.", } tracing::debug!( - "Resolved site-packages directories for this virtual environment are: {site_packages_directories:?}" + "Resolved site-packages directories for this virtual environment are: {site_packages_directories}" ); Ok(site_packages_directories) } @@ -823,7 +829,7 @@ impl SystemEnvironment { )?; tracing::debug!( - "Resolved site-packages directories for this environment are: {site_packages_directories:?}" + "Resolved site-packages directories for this environment are: {site_packages_directories}" ); Ok(site_packages_directories) } @@ -1567,8 +1573,8 @@ pub enum SysPrefixPathOrigin { ConfigFileSetting(Arc, Option), /// The `sys.prefix` path came from a `--python` CLI flag PythonCliFlag, - /// The selected interpreter in the VS Code's Python extension. - PythonVSCodeExtension, + /// The selected interpreter in the user's editor. 
+ Editor, /// The `sys.prefix` path came from the `VIRTUAL_ENV` environment variable VirtualEnvVar, /// The `sys.prefix` path came from the `CONDA_PREFIX` environment variable @@ -1590,7 +1596,7 @@ impl SysPrefixPathOrigin { Self::LocalVenv | Self::VirtualEnvVar => true, Self::ConfigFileSetting(..) | Self::PythonCliFlag - | Self::PythonVSCodeExtension + | Self::Editor | Self::DerivedFromPyvenvCfg | Self::CondaPrefixVar => false, } @@ -1602,9 +1608,7 @@ impl SysPrefixPathOrigin { /// the `sys.prefix` directory, e.g. the `--python` CLI flag. pub(crate) const fn must_point_directly_to_sys_prefix(&self) -> bool { match self { - Self::PythonCliFlag | Self::ConfigFileSetting(..) | Self::PythonVSCodeExtension => { - false - } + Self::PythonCliFlag | Self::ConfigFileSetting(..) | Self::Editor => false, Self::VirtualEnvVar | Self::CondaPrefixVar | Self::DerivedFromPyvenvCfg @@ -1622,9 +1626,7 @@ impl std::fmt::Display for SysPrefixPathOrigin { Self::CondaPrefixVar => f.write_str("`CONDA_PREFIX` environment variable"), Self::DerivedFromPyvenvCfg => f.write_str("derived `sys.prefix` path"), Self::LocalVenv => f.write_str("local virtual environment"), - Self::PythonVSCodeExtension => { - f.write_str("selected interpreter in the VS Code Python extension") - } + Self::Editor => f.write_str("selected interpreter in your editor"), } } } @@ -2377,4 +2379,15 @@ mod tests { assert_eq!(&pyvenv_cfg[version.1], version.0); assert_eq!(parsed.implementation, PythonImplementation::PyPy); } + + #[test] + fn site_packages_paths_display() { + let paths = SitePackagesPaths::default(); + assert_eq!(paths.to_string(), "[]"); + + let mut paths = SitePackagesPaths::default(); + paths.insert(SystemPathBuf::from("/path/to/site/packages")); + + assert_eq!(paths.to_string(), r#"["/path/to/site/packages"]"#); + } } diff --git a/crates/ty_server/src/session/options.rs b/crates/ty_server/src/session/options.rs index c646c767de..982dccd484 100644 --- a/crates/ty_server/src/session/options.rs +++ 
b/crates/ty_server/src/session/options.rs @@ -213,7 +213,7 @@ impl WorkspaceOptions { if let Some(python) = &overrides.fallback_python { tracing::debug!( - "Using the Python environment selected in the VS Code Python extension \ + "Using the Python environment selected in your editor \ in case the configuration doesn't specify a Python environment: {python}", python = python.path() ); @@ -221,7 +221,7 @@ impl WorkspaceOptions { if let Some(version) = &overrides.fallback_python_version { tracing::debug!( - "Using the Python version selected in the VS Code Python extension: {version} \ + "Using the Python version selected in your editor: {version} \ in case the configuration doesn't specify a Python version", ); } From de9df1b326ad039d82903e44d5e090a1677ad910 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Mon, 3 Nov 2025 15:53:04 +0100 Subject: [PATCH 071/180] Revert "Update CodSpeedHQ/action action to v4.3.1" (#21252) --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ce8c15f345..7130f15b29 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -952,7 +952,7 @@ jobs: run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser - name: "Run benchmarks" - uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 + uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 with: mode: instrumentation run: cargo codspeed run @@ -990,7 +990,7 @@ jobs: run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty - name: "Run benchmarks" - uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 + uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 with: mode: instrumentation run: cargo codspeed run @@ -1028,7 +1028,7 @@ 
jobs: run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark - name: "Run benchmarks" - uses: CodSpeedHQ/action@4348f634fa7309fe23aac9502e88b999ec90a164 # v4.3.1 + uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1 env: # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't # appear to provide much useful insight for our walltime benchmarks right now From 1b2ed6a503ffad946796e4be1c7c55c7dc79f808 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Mon, 3 Nov 2025 16:00:30 +0100 Subject: [PATCH 072/180] [ty] Fix caching of imported modules in playground (#21251) --- playground/ty/src/Editor/SecondaryPanel.tsx | 68 ++++++++++----------- 1 file changed, 31 insertions(+), 37 deletions(-) diff --git a/playground/ty/src/Editor/SecondaryPanel.tsx b/playground/ty/src/Editor/SecondaryPanel.tsx index 7f68bb7153..a73cb57961 100644 --- a/playground/ty/src/Editor/SecondaryPanel.tsx +++ b/playground/ty/src/Editor/SecondaryPanel.tsx @@ -2,7 +2,7 @@ import MonacoEditor from "@monaco-editor/react"; import { AstralButton, Theme } from "shared"; import { ReadonlyFiles } from "../Playground"; import { Suspense, use, useState } from "react"; -import { loadPyodide, PyodideInterface } from "pyodide"; +import { loadPyodide } from "pyodide"; import classNames from "classnames"; export enum SecondaryTool { @@ -103,41 +103,12 @@ function Content({ } } -let pyodidePromise: Promise | null = null; - function Run({ files, theme }: { files: ReadonlyFiles; theme: Theme }) { - if (pyodidePromise == null) { - pyodidePromise = loadPyodide(); - } + const [runOutput, setRunOutput] = useState | null>(null); + const handleRun = () => { + const output = (async () => { + const pyodide = await loadPyodide(); - return ( - Loading} - > - - - ); -} - -function RunWithPyiodide({ - files, - pyodidePromise, - theme, -}: { - files: ReadonlyFiles; - theme: Theme; - pyodidePromise: Promise; -}) { - const pyodide = 
use(pyodidePromise); - - const [output, setOutput] = useState(null); - - if (output == null) { - const handleRun = () => { let combined_output = ""; const outputHandler = (output: string) => { @@ -179,14 +150,18 @@ function RunWithPyiodide({ filename: fileName, }); - setOutput(combined_output); + return combined_output; } catch (e) { - setOutput(`Failed to run Python script: ${e}`); + return `Failed to run Python script: ${e}`; } finally { globals.destroy(); dict.destroy(); } - }; + })(); + setRunOutput(output); + }; + + if (runOutput == null) { return (
); } + + return ( + Loading
} + > + + + ); +} + +function RunOutput({ + runOutput, + theme, +}: { + theme: Theme; + runOutput: Promise; +}) { + const output = use(runOutput); + return (
Date: Mon, 3 Nov 2025 16:35:42 +0100
Subject: [PATCH 073/180] [ty] Simplify semantic token tests (#21206)

---
 crates/ty_ide/src/inlay_hints.rs     |  19 +-
 crates/ty_ide/src/lib.rs             |  16 +-
 crates/ty_ide/src/semantic_tokens.rs | 755 ++++++++++++++-------------
 crates/ty_project/src/db.rs          |  27 +-
 crates/ty_project/src/lib.rs         |  16 +-
 5 files changed, 422 insertions(+), 411 deletions(-)

diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs
index 1e3f545aff..47859c39e3 100644
--- a/crates/ty_ide/src/inlay_hints.rs
+++ b/crates/ty_ide/src/inlay_hints.rs
@@ -302,7 +302,6 @@ mod tests {
 
     use insta::assert_snapshot;
     use ruff_db::{
-        Db as _,
         files::{File, system_path_to_file},
         source::source_text,
     };
@@ -311,9 +310,6 @@ mod tests {
 
     use ruff_db::system::{DbWithWritableSystem, SystemPathBuf};
     use ty_project::ProjectMetadata;
-    use ty_python_semantic::{
-        Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings,
-    };
 
     pub(super) fn inlay_hint_test(source: &str) -> InlayHintTest {
         const START: &str = "";
@@ -324,6 +320,8 @@ mod tests {
             SystemPathBuf::from("/"),
         ));
 
+        db.init_program().unwrap();
+
         let source = dedent(source);
 
         let start = source.find(START);
@@ -345,19 +343,6 @@ mod tests {
 
         let file = system_path_to_file(&db, "main.py").expect("newly written file to existing");
 
-        let search_paths = SearchPathSettings::new(vec![SystemPathBuf::from("/")])
-            .to_search_paths(db.system(), db.vendored())
-            .expect("Valid search path settings");
-
-        Program::from_settings(
-            &db,
-            ProgramSettings {
-                python_version: PythonVersionWithSource::default(),
-                python_platform: PythonPlatform::default(),
-                search_paths,
-            },
-        );
-
         InlayHintTest { db, file, range }
     }
 
diff --git a/crates/ty_ide/src/lib.rs b/crates/ty_ide/src/lib.rs
index 6a23302561..057d75b688 100644
--- a/crates/ty_ide/src/lib.rs
+++ b/crates/ty_ide/src/lib.rs
@@ -338,9 +338,6 @@ mod tests {
     use ruff_python_trivia::textwrap::dedent;
     use ruff_text_size::TextSize;
     use ty_project::ProjectMetadata;
-    use ty_python_semantic::{
-        Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings,
-    };
 
     /// A way to create a simple single-file (named `main.py`) cursor test.
     ///
@@ -417,18 +414,7 @@ mod tests {
                 SystemPathBuf::from("/"),
             ));
 
-            let search_paths = SearchPathSettings::new(vec![SystemPathBuf::from("/")])
-                .to_search_paths(db.system(), db.vendored())
-                .expect("Valid search path settings");
-
-            Program::from_settings(
-                &db,
-                ProgramSettings {
-                    python_version: PythonVersionWithSource::default(),
-                    python_platform: PythonPlatform::default(),
-                    search_paths,
-                },
-            );
+            db.init_program().unwrap();
 
             let mut cursor: Option = None;
             for &Source {
diff --git a/crates/ty_ide/src/semantic_tokens.rs b/crates/ty_ide/src/semantic_tokens.rs
index ca736acd4c..df1bb88b37 100644
--- a/crates/ty_ide/src/semantic_tokens.rs
+++ b/crates/ty_ide/src/semantic_tokens.rs
@@ -930,88 +930,48 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::tests::cursor_test;
 
     use insta::assert_snapshot;
-
-    /// Helper function to get semantic tokens for full file (for testing)
-    fn semantic_tokens_full_file(db: &dyn Db, file: File) -> SemanticTokens {
-        semantic_tokens(db, file, None)
-    }
-
-    /// Helper function to convert semantic tokens to a snapshot-friendly text format
-    fn semantic_tokens_to_snapshot(db: &dyn Db, file: File, tokens: &SemanticTokens) -> String {
-        use std::fmt::Write;
-        let source = ruff_db::source::source_text(db, file);
-        let mut result = String::new();
-
-        for token in tokens.iter() {
-            let token_text = &source[token.range()];
-            let modifiers_text = if token.modifiers.is_empty() {
-                String::new()
-            } else {
-                let mut mods = Vec::new();
-                if token.modifiers.contains(SemanticTokenModifier::DEFINITION) {
-                    mods.push("definition");
-                }
-                if token.modifiers.contains(SemanticTokenModifier::READONLY) {
-                    mods.push("readonly");
-                }
-                if token.modifiers.contains(SemanticTokenModifier::ASYNC) {
-                    mods.push("async");
-                }
-                format!(" [{}]", mods.join(", "))
-            };
-
-            writeln!(
-                result,
-                "{:?} @ {}..{}: {:?}{}",
-                token_text,
-                u32::from(token.range().start()),
-                u32::from(token.range().end()),
-                token.token_type,
-                modifiers_text
-            )
-            .unwrap();
-        }
-
-        result
-    }
+    use ruff_db::{
+        files::system_path_to_file,
+        system::{DbWithWritableSystem, SystemPath, SystemPathBuf},
+    };
+    use ty_project::ProjectMetadata;
 
     #[test]
     fn test_semantic_tokens_basic() {
-        let test = cursor_test("def foo(): pass");
+        let test = SemanticTokenTest::new("def foo(): pass");
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "foo" @ 4..7: Function [definition]
         "###);
     }
 
     #[test]
     fn test_semantic_tokens_class() {
-        let test = cursor_test("class MyClass: pass");
+        let test = SemanticTokenTest::new("class MyClass: pass");
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "MyClass" @ 6..13: Class [definition]
         "###);
     }
 
     #[test]
     fn test_semantic_tokens_variables() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 x = 42
-y = 'hello'
+y = 'hello'
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 1..2: Variable
         "42" @ 5..7: Number
         "y" @ 8..9: Variable
@@ -1021,16 +981,16 @@ y = 'hello'
 
     #[test]
     fn test_semantic_tokens_self_parameter() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
-    def method(self, x): pass
+    def method(self, x): pass
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "MyClass" @ 7..14: Class [definition]
         "method" @ 24..30: Method [definition]
         "self" @ 31..35: SelfParameter
@@ -1040,17 +1000,17 @@ class MyClass:
 
     #[test]
     fn test_semantic_tokens_cls_parameter() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     @classmethod
-    def method(cls, x): pass
+    def method(cls, x): pass
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "MyClass" @ 7..14: Class [definition]
         "classmethod" @ 21..32: Decorator
         "method" @ 41..47: Method [definition]
@@ -1061,17 +1021,17 @@ class MyClass:
 
     #[test]
     fn test_semantic_tokens_staticmethod_parameter() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     @staticmethod
-    def method(x, y): pass
+    def method(x, y): pass
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "MyClass" @ 7..14: Class [definition]
         "staticmethod" @ 21..33: Decorator
         "method" @ 42..48: Method [definition]
@@ -1082,19 +1042,19 @@ class MyClass:
 
     #[test]
     fn test_semantic_tokens_custom_self_cls_names() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     def method(instance, x): pass
     @classmethod
     def other(klass, y): pass
-    def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass
+    def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "MyClass" @ 7..14: Class [definition]
         "method" @ 24..30: Method [definition]
         "instance" @ 31..39: SelfParameter
@@ -1115,17 +1075,17 @@ class MyClass:
 
     #[test]
     fn test_semantic_tokens_modifiers() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     CONSTANT = 42
-    async def method(self): pass
+    async def method(self): pass
 ",
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "MyClass" @ 7..14: Class [definition]
         "CONSTANT" @ 20..28: Variable [readonly]
         "42" @ 31..33: Number
@@ -1136,7 +1096,7 @@ class MyClass:
 
     #[test]
     fn test_semantic_classification_vs_heuristic() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 import sys
 class MyClass:
@@ -1147,13 +1107,13 @@ def my_function():
 
 x = MyClass()
 y = my_function()
-z = sys.version
+z = sys.version
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "sys" @ 8..11: Namespace
         "MyClass" @ 18..25: Class [definition]
         "my_function" @ 41..52: Function [definition]
@@ -1170,17 +1130,17 @@ z = sys.version
 
     #[test]
     fn test_builtin_constants() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 x = True
 y = False
-z = None
+z = None
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 1..2: Variable
         "True" @ 5..9: BuiltinConstant
         "y" @ 10..11: Variable
@@ -1192,20 +1152,20 @@ z = None
 
     #[test]
     fn test_builtin_constants_in_expressions() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 def check(value):
     if value is None:
         return False
     return True
 
-result = check(None)
+result = check(None)
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "check" @ 5..10: Function [definition]
         "value" @ 11..16: Parameter
         "value" @ 26..31: Variable
@@ -1220,7 +1180,7 @@ result = check(None)
 
     #[test]
     fn test_semantic_tokens_range() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 def function1():
     x = 42
@@ -1229,46 +1189,46 @@ def function1():
 def function2():
     y = \"hello\"
     z = True
-    return y + z
+    return y + z
 ",
         );
 
-        let full_tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let full_tokens = test.highlight_file();
 
         // Get the range that covers only the second function
         // Hardcoded offsets: function2 starts at position 42, source ends at position 108
         let range = TextRange::new(TextSize::from(42u32), TextSize::from(108u32));
 
-        let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range));
+        let range_tokens = test.highlight_range(range);
 
         // Range-based tokens should have fewer tokens than full scan
         // (should exclude tokens from function1)
         assert!(range_tokens.len() < full_tokens.len());
 
         // Test both full tokens and range tokens with snapshots
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &full_tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&full_tokens), @r###"
         "function1" @ 5..14: Function [definition]
         "x" @ 22..23: Variable
         "42" @ 26..28: Number
         "x" @ 40..41: Variable
         "function2" @ 47..56: Function [definition]
         "y" @ 64..65: Variable
-        "/"hello/"" @ 68..75: String
+        "\"hello\"" @ 68..75: String
         "z" @ 80..81: Variable
         "True" @ 84..88: BuiltinConstant
         "y" @ 100..101: Variable
         "z" @ 104..105: Variable
-        "#);
+        "###);
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &range_tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&range_tokens), @r###"
         "function2" @ 47..56: Function [definition]
         "y" @ 64..65: Variable
-        "/"hello/"" @ 68..75: String
+        "\"hello\"" @ 68..75: String
         "z" @ 80..81: Variable
         "True" @ 84..88: BuiltinConstant
         "y" @ 100..101: Variable
         "z" @ 104..105: Variable
-        "#);
+        "###);
 
         // Verify that no tokens from range_tokens have ranges outside the requested range
         for token in range_tokens.iter() {
@@ -1285,11 +1245,11 @@ def function2():
     /// don't include it in the semantic tokens.
     #[test]
     fn test_semantic_tokens_range_excludes_boundary_tokens() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 x = 1
 y = 2
-z = 3
+z = 3
 ",
         );
 
@@ -1298,9 +1258,9 @@ z = 3
         // Not included: "1" @ 5..6 and "z" @ 13..14 (adjacent, but not overlapping at offsets 6 and 13).
         let range = TextRange::new(TextSize::from(6), TextSize::from(13));
 
-        let range_tokens = semantic_tokens(&test.db, test.cursor.file, Some(range));
+        let range_tokens = test.highlight_range(range);
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &range_tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&range_tokens), @r#"
         "y" @ 7..8: Variable
         "2" @ 11..12: Number
         "#);
@@ -1308,18 +1268,18 @@ z = 3
 
     #[test]
     fn test_dotted_module_names() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 import os.path
 import sys.version_info
 from urllib.parse import urlparse
-from collections.abc import Mapping
+from collections.abc import Mapping
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "os" @ 8..10: Namespace
         "path" @ 11..15: Namespace
         "sys" @ 23..26: Namespace
@@ -1335,7 +1295,7 @@ from collections.abc import Mapping
 
     #[test]
     fn test_module_type_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 import os
 import sys
@@ -1343,13 +1303,13 @@ from collections import defaultdict
 
 # os and sys should be classified as namespace/module types
 x = os
-y = sys
+y = sys
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "os" @ 8..10: Namespace
         "sys" @ 18..21: Namespace
         "collections" @ 27..38: Namespace
@@ -1363,18 +1323,18 @@ y = sys
 
     #[test]
     fn test_import_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 from os import path
 from collections import defaultdict, OrderedDict, Counter
 from typing import List, Dict, Optional
-from mymodule import CONSTANT, my_function, MyClass
+from mymodule import CONSTANT, my_function, MyClass
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "os" @ 6..8: Namespace
         "path" @ 16..20: Namespace
         "collections" @ 26..37: Namespace
@@ -1394,7 +1354,7 @@ from mymodule import CONSTANT, my_function, MyClass
 
     #[test]
     fn test_attribute_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 import os
 import sys
@@ -1403,10 +1363,10 @@ from typing import List
 
 class MyClass:
     CONSTANT = 42
-    
+
     def method(self):
         return \"hello\"
-    
+
     @property
     def prop(self):
         return self.CONSTANT
@@ -1419,13 +1379,13 @@ y = obj.method           # method should be method (bound method)
 z = obj.CONSTANT         # CONSTANT should be variable with readonly modifier
 w = obj.prop             # prop should be property
 v = MyClass.method       # method should be method (function)
-u = List.__name__        # __name__ should be variable
+u = List.__name__        # __name__ should be variable
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "os" @ 8..10: Namespace
         "sys" @ 18..21: Namespace
         "collections" @ 27..38: Namespace
@@ -1437,7 +1397,7 @@ u = List.__name__        # __name__ should be variable
         "42" @ 113..115: Number
         "method" @ 125..131: Method [definition]
         "self" @ 132..136: SelfParameter
-        "/"hello/"" @ 154..161: String
+        "\"hello\"" @ 154..161: String
         "property" @ 168..176: Decorator
         "prop" @ 185..189: Method [definition]
         "self" @ 190..194: SelfParameter
@@ -1463,29 +1423,29 @@ u = List.__name__        # __name__ should be variable
         "u" @ 596..597: Variable
         "List" @ 600..604: Variable
         "__name__" @ 605..613: Variable
-        "#);
+        "###);
     }
 
     #[test]
     fn test_attribute_fallback_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     some_attr = \"value\"
-    
+
 obj = MyClass()
 # Test attribute that might not have detailed semantic info
 x = obj.some_attr        # Should fall back to variable, not property
-y = obj.unknown_attr     # Should fall back to variable
+y = obj.unknown_attr     # Should fall back to variable
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "MyClass" @ 7..14: Class [definition]
         "some_attr" @ 20..29: Variable
-        "/"value/"" @ 32..39: String
+        "\"value\"" @ 32..39: String
         "obj" @ 41..44: Variable
         "MyClass" @ 47..54: Class
         "x" @ 117..118: Variable
@@ -1494,30 +1454,30 @@ y = obj.unknown_attr     # Should fall back to variable
         "y" @ 187..188: Variable
         "obj" @ 191..194: Variable
         "unknown_attr" @ 195..207: Variable
-        "#);
+        "###);
     }
 
     #[test]
     fn test_constant_name_detection() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     UPPER_CASE = 42
     lower_case = 24
     MixedCase = 12
     A = 1
-    
+
 obj = MyClass()
 x = obj.UPPER_CASE    # Should have readonly modifier
-y = obj.lower_case    # Should not have readonly modifier  
+y = obj.lower_case    # Should not have readonly modifier
 z = obj.MixedCase     # Should not have readonly modifier
-w = obj.A             # Should not have readonly modifier (length == 1)
+w = obj.A             # Should not have readonly modifier (length == 1)
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "MyClass" @ 7..14: Class [definition]
         "UPPER_CASE" @ 20..30: Variable [readonly]
         "42" @ 33..35: Number
@@ -1535,18 +1495,18 @@ w = obj.A             # Should not have readonly modifier (length == 1)
         "y" @ 156..157: Variable
         "obj" @ 160..163: Variable
         "lower_case" @ 164..174: Variable
-        "z" @ 216..217: Variable
-        "obj" @ 220..223: Variable
-        "MixedCase" @ 224..233: Variable
-        "w" @ 274..275: Variable
-        "obj" @ 278..281: Variable
-        "A" @ 282..283: Variable
-        "#);
+        "z" @ 214..215: Variable
+        "obj" @ 218..221: Variable
+        "MixedCase" @ 222..231: Variable
+        "w" @ 272..273: Variable
+        "obj" @ 276..279: Variable
+        "A" @ 280..281: Variable
+        "###);
     }
 
     #[test]
     fn test_type_annotations() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 from typing import List, Optional
 
@@ -1554,13 +1514,13 @@ def function_with_annotations(param1: int, param2: str) -> Optional[List[str]]:
     pass
 
 x: int = 42
-y: Optional[str] = None
+y: Optional[str] = None
 "#,
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "typing" @ 6..12: Namespace
         "List" @ 20..24: Variable
         "Optional" @ 26..34: Variable
@@ -1584,15 +1544,15 @@ y: Optional[str] = None
 
     #[test]
     fn test_debug_int_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
-x: int = 42
+x: int = 42
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r###"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 1..2: Variable
         "int" @ 4..7: Class
         "42" @ 10..12: Number
@@ -1601,18 +1561,18 @@ x: int = 42
 
     #[test]
     fn test_debug_user_defined_type_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 class MyClass:
     pass
 
-x: MyClass = MyClass()
+x: MyClass = MyClass()
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "MyClass" @ 7..14: Class [definition]
         "x" @ 26..27: Variable
         "MyClass" @ 29..36: Class
@@ -1622,7 +1582,7 @@ x: MyClass = MyClass()
 
     #[test]
     fn test_type_annotation_vs_variable_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 from typing import List, Optional
 
@@ -1634,16 +1594,16 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]:
     x: int = 42
     y: MyClass = MyClass()
     z: List[str] = [\"hello\"]
-    
+
     # Type annotations should be Class tokens:
     # int, MyClass, Optional, List, str
-    return None
+    return None
 ",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "typing" @ 6..12: Namespace
         "List" @ 20..24: Variable
         "Optional" @ 26..34: Variable
@@ -1665,14 +1625,14 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]:
         "z" @ 233..234: Variable
         "List" @ 236..240: Variable
         "str" @ 241..244: Class
-        "/"hello/"" @ 249..256: String
+        "\"hello\"" @ 249..256: String
         "None" @ 357..361: BuiltinConstant
-        "#);
+        "###);
     }
 
     #[test]
     fn test_protocol_types_in_annotations() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 from typing import Protocol
 
@@ -1681,12 +1641,12 @@ class MyProtocol(Protocol):
 
 def test_function(param: MyProtocol) -> None:
     pass
-",
+",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "typing" @ 6..12: Namespace
         "Protocol" @ 20..28: Variable
         "MyProtocol" @ 36..46: Class [definition]
@@ -1703,7 +1663,7 @@ def test_function(param: MyProtocol) -> None:
 
     #[test]
     fn test_protocol_type_annotation_vs_value_context() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 from typing import Protocol
 
@@ -1713,15 +1673,15 @@ class MyProtocol(Protocol):
 # Value context - MyProtocol is still a class literal, so should be Class
 my_protocol_var = MyProtocol
 
-# Type annotation context - should be Class  
+# Type annotation context - should be Class
 def test_function(param: MyProtocol) -> MyProtocol:
     return param
-",
+",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "typing" @ 6..12: Namespace
         "Protocol" @ 20..28: Variable
         "MyProtocol" @ 36..46: Class [definition]
@@ -1731,17 +1691,17 @@ def test_function(param: MyProtocol) -> MyProtocol:
         "int" @ 82..85: Class
         "my_protocol_var" @ 166..181: Class
         "MyProtocol" @ 184..194: Class
-        "test_function" @ 246..259: Function [definition]
-        "param" @ 260..265: Parameter
-        "MyProtocol" @ 267..277: Class
-        "MyProtocol" @ 282..292: Class
-        "param" @ 305..310: Parameter
-        "#);
+        "test_function" @ 244..257: Function [definition]
+        "param" @ 258..263: Parameter
+        "MyProtocol" @ 265..275: Class
+        "MyProtocol" @ 280..290: Class
+        "param" @ 303..308: Parameter
+        "###);
     }
 
     #[test]
     fn test_type_parameters_pep695() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 # Test Python 3.12 PEP 695 type parameter syntax
 
@@ -1749,7 +1709,7 @@ def test_function(param: MyProtocol) -> MyProtocol:
 def func[T](x: T) -> T:
     return x
 
-# Generic function with TypeVarTuple  
+# Generic function with TypeVarTuple
 def func_tuple[*Ts](args: tuple[*Ts]) -> tuple[*Ts]:
     return args
 
@@ -1764,10 +1724,10 @@ class Container[T, U]:
     def __init__(self, value1: T, value2: U):
         self.value1: T = value1
         self.value2: U = value2
-    
+
     def get_first(self) -> T:
         return self.value1
-    
+
     def get_second(self) -> U:
         return self.value2
 
@@ -1775,109 +1735,109 @@ class Container[T, U]:
 class BoundedContainer[T: int, U = str]:
     def process(self, x: T, y: U) -> tuple[T, U]:
         return (x, y)
-",
+",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "func" @ 87..91: Function [definition]
         "T" @ 92..93: TypeParameter [definition]
         "x" @ 95..96: Parameter
         "T" @ 98..99: TypeParameter
         "T" @ 104..105: TypeParameter
         "x" @ 118..119: Parameter
-        "func_tuple" @ 164..174: Function [definition]
-        "Ts" @ 176..178: TypeParameter [definition]
-        "args" @ 180..184: Parameter
-        "tuple" @ 186..191: Class
-        "Ts" @ 193..195: Variable
-        "tuple" @ 201..206: Class
-        "Ts" @ 208..210: Variable
-        "args" @ 224..228: Parameter
-        "func_paramspec" @ 268..282: Function [definition]
-        "P" @ 285..286: TypeParameter [definition]
-        "func" @ 288..292: Parameter
-        "Callable" @ 294..302: Variable
-        "P" @ 303..304: Variable
-        "int" @ 306..309: Class
-        "Callable" @ 315..323: Variable
-        "P" @ 324..325: Variable
-        "str" @ 327..330: Class
-        "wrapper" @ 341..348: Function [definition]
-        "args" @ 350..354: Parameter
-        "P" @ 356..357: Variable
-        "args" @ 358..362: Variable
-        "kwargs" @ 366..372: Parameter
-        "P" @ 374..375: Variable
-        "kwargs" @ 376..382: Variable
-        "str" @ 387..390: Class
-        "str" @ 407..410: Class
-        "func" @ 411..415: Variable
-        "args" @ 417..421: Parameter
-        "kwargs" @ 425..431: Parameter
-        "wrapper" @ 445..452: Function
-        "Container" @ 506..515: Class [definition]
-        "T" @ 516..517: TypeParameter [definition]
-        "U" @ 519..520: TypeParameter [definition]
-        "__init__" @ 531..539: Method [definition]
-        "self" @ 540..544: SelfParameter
-        "value1" @ 546..552: Parameter
-        "T" @ 554..555: TypeParameter
-        "value2" @ 557..563: Parameter
-        "U" @ 565..566: TypeParameter
-        "self" @ 577..581: TypeParameter
-        "value1" @ 582..588: Variable
-        "T" @ 590..591: TypeParameter
-        "value1" @ 594..600: Parameter
-        "self" @ 609..613: TypeParameter
-        "value2" @ 614..620: Variable
-        "U" @ 622..623: TypeParameter
-        "value2" @ 626..632: Parameter
-        "get_first" @ 642..651: Method [definition]
-        "self" @ 652..656: SelfParameter
-        "T" @ 661..662: TypeParameter
-        "self" @ 679..683: TypeParameter
-        "value1" @ 684..690: Variable
-        "get_second" @ 700..710: Method [definition]
-        "self" @ 711..715: SelfParameter
-        "U" @ 720..721: TypeParameter
-        "self" @ 738..742: TypeParameter
-        "value2" @ 743..749: Variable
-        "BoundedContainer" @ 798..814: Class [definition]
-        "T" @ 815..816: TypeParameter [definition]
-        "int" @ 818..821: Class
-        "U" @ 823..824: TypeParameter [definition]
-        "str" @ 827..830: Class
-        "process" @ 841..848: Method [definition]
-        "self" @ 849..853: SelfParameter
-        "x" @ 855..856: Parameter
-        "T" @ 858..859: TypeParameter
-        "y" @ 861..862: Parameter
-        "U" @ 864..865: TypeParameter
-        "tuple" @ 870..875: Class
-        "T" @ 876..877: TypeParameter
-        "U" @ 879..880: TypeParameter
-        "x" @ 899..900: Parameter
-        "y" @ 902..903: Parameter
-        "#);
+        "func_tuple" @ 162..172: Function [definition]
+        "Ts" @ 174..176: TypeParameter [definition]
+        "args" @ 178..182: Parameter
+        "tuple" @ 184..189: Class
+        "Ts" @ 191..193: Variable
+        "tuple" @ 199..204: Class
+        "Ts" @ 206..208: Variable
+        "args" @ 222..226: Parameter
+        "func_paramspec" @ 266..280: Function [definition]
+        "P" @ 283..284: TypeParameter [definition]
+        "func" @ 286..290: Parameter
+        "Callable" @ 292..300: Variable
+        "P" @ 301..302: Variable
+        "int" @ 304..307: Class
+        "Callable" @ 313..321: Variable
+        "P" @ 322..323: Variable
+        "str" @ 325..328: Class
+        "wrapper" @ 339..346: Function [definition]
+        "args" @ 348..352: Parameter
+        "P" @ 354..355: Variable
+        "args" @ 356..360: Variable
+        "kwargs" @ 364..370: Parameter
+        "P" @ 372..373: Variable
+        "kwargs" @ 374..380: Variable
+        "str" @ 385..388: Class
+        "str" @ 405..408: Class
+        "func" @ 409..413: Variable
+        "args" @ 415..419: Parameter
+        "kwargs" @ 423..429: Parameter
+        "wrapper" @ 443..450: Function
+        "Container" @ 504..513: Class [definition]
+        "T" @ 514..515: TypeParameter [definition]
+        "U" @ 517..518: TypeParameter [definition]
+        "__init__" @ 529..537: Method [definition]
+        "self" @ 538..542: SelfParameter
+        "value1" @ 544..550: Parameter
+        "T" @ 552..553: TypeParameter
+        "value2" @ 555..561: Parameter
+        "U" @ 563..564: TypeParameter
+        "self" @ 575..579: TypeParameter
+        "value1" @ 580..586: Variable
+        "T" @ 588..589: TypeParameter
+        "value1" @ 592..598: Parameter
+        "self" @ 607..611: TypeParameter
+        "value2" @ 612..618: Variable
+        "U" @ 620..621: TypeParameter
+        "value2" @ 624..630: Parameter
+        "get_first" @ 640..649: Method [definition]
+        "self" @ 650..654: SelfParameter
+        "T" @ 659..660: TypeParameter
+        "self" @ 677..681: TypeParameter
+        "value1" @ 682..688: Variable
+        "get_second" @ 698..708: Method [definition]
+        "self" @ 709..713: SelfParameter
+        "U" @ 718..719: TypeParameter
+        "self" @ 736..740: TypeParameter
+        "value2" @ 741..747: Variable
+        "BoundedContainer" @ 796..812: Class [definition]
+        "T" @ 813..814: TypeParameter [definition]
+        "int" @ 816..819: Class
+        "U" @ 821..822: TypeParameter [definition]
+        "str" @ 825..828: Class
+        "process" @ 839..846: Method [definition]
+        "self" @ 847..851: SelfParameter
+        "x" @ 853..854: Parameter
+        "T" @ 856..857: TypeParameter
+        "y" @ 859..860: Parameter
+        "U" @ 862..863: TypeParameter
+        "tuple" @ 868..873: Class
+        "T" @ 874..875: TypeParameter
+        "U" @ 877..878: TypeParameter
+        "x" @ 897..898: Parameter
+        "y" @ 900..901: Parameter
+        "###);
     }
 
     #[test]
     fn test_type_parameters_usage_in_function_body() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             "
 def generic_function[T](value: T) -> T:
     # Type parameter T should be recognized here too
     result: T = value
     temp = result  # This could potentially be T as well
     return result
-",
+",
         );
 
-        let tokens = semantic_tokens(&test.db, test.cursor.file, None);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "generic_function" @ 5..21: Function [definition]
         "T" @ 22..23: TypeParameter [definition]
         "value" @ 25..30: Parameter
@@ -1894,7 +1854,7 @@ def generic_function[T](value: T) -> T:
 
     #[test]
     fn test_decorator_classification() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 @staticmethod
 @property
@@ -1904,117 +1864,117 @@ def my_function():
 
 @dataclass
 class MyClass:
-    pass
+    pass
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "staticmethod" @ 2..14: Decorator
         "property" @ 16..24: Decorator
         "app" @ 26..29: Variable
         "route" @ 30..35: Variable
-        "/"/path/"" @ 36..43: String
+        "\"/path\"" @ 36..43: String
         "my_function" @ 49..60: Function [definition]
         "dataclass" @ 75..84: Decorator
         "MyClass" @ 91..98: Class [definition]
-        "#);
+        "###);
     }
 
     #[test]
     fn test_implicitly_concatenated_strings() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"x = "hello" "world"
-y = ("multi" 
-     "line" 
+y = ("multi"
+     "line"
      "string")
-z = 'single' "mixed" 'quotes'"#,
+z = 'single' "mixed" 'quotes'"#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 0..1: Variable
-        "/"hello/"" @ 4..11: String
-        "/"world/"" @ 12..19: String
+        "\"hello\"" @ 4..11: String
+        "\"world\"" @ 12..19: String
         "y" @ 20..21: Variable
-        "/"multi/"" @ 25..32: String
-        "/"line/"" @ 39..45: String
-        "/"string/"" @ 52..60: String
-        "z" @ 62..63: Variable
-        "'single'" @ 66..74: String
-        "/"mixed/"" @ 75..82: String
-        "'quotes'" @ 83..91: String
-        "#);
+        "\"multi\"" @ 25..32: String
+        "\"line\"" @ 38..44: String
+        "\"string\"" @ 50..58: String
+        "z" @ 60..61: Variable
+        "'single'" @ 64..72: String
+        "\"mixed\"" @ 73..80: String
+        "'quotes'" @ 81..89: String
+        "###);
     }
 
     #[test]
     fn test_bytes_literals() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"x = b"hello" b"world"
-y = (b"multi" 
-     b"line" 
+y = (b"multi"
+     b"line"
      b"bytes")
-z = b'single' b"mixed" b'quotes'"#,
+z = b'single' b"mixed" b'quotes'"#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 0..1: Variable
-        "b/"hello/"" @ 4..12: String
-        "b/"world/"" @ 13..21: String
+        "b\"hello\"" @ 4..12: String
+        "b\"world\"" @ 13..21: String
         "y" @ 22..23: Variable
-        "b/"multi/"" @ 27..35: String
-        "b/"line/"" @ 42..49: String
-        "b/"bytes/"" @ 56..64: String
-        "z" @ 66..67: Variable
-        "b'single'" @ 70..79: String
-        "b/"mixed/"" @ 80..88: String
-        "b'quotes'" @ 89..98: String
-        "#);
+        "b\"multi\"" @ 27..35: String
+        "b\"line\"" @ 41..48: String
+        "b\"bytes\"" @ 54..62: String
+        "z" @ 64..65: Variable
+        "b'single'" @ 68..77: String
+        "b\"mixed\"" @ 78..86: String
+        "b'quotes'" @ 87..96: String
+        "###);
     }
 
     #[test]
     fn test_mixed_string_and_bytes_literals() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"# Test mixed string and bytes literals
 string_concat = "hello" "world"
 bytes_concat = b"hello" b"world"
 mixed_quotes_str = 'single' "double" 'single'
 mixed_quotes_bytes = b'single' b"double" b'single'
 regular_string = "just a string"
-regular_bytes = b"just bytes""#,
+regular_bytes = b"just bytes""#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "string_concat" @ 39..52: Variable
-        "/"hello/"" @ 55..62: String
-        "/"world/"" @ 63..70: String
+        "\"hello\"" @ 55..62: String
+        "\"world\"" @ 63..70: String
         "bytes_concat" @ 71..83: Variable
-        "b/"hello/"" @ 86..94: String
-        "b/"world/"" @ 95..103: String
+        "b\"hello\"" @ 86..94: String
+        "b\"world\"" @ 95..103: String
         "mixed_quotes_str" @ 104..120: Variable
         "'single'" @ 123..131: String
-        "/"double/"" @ 132..140: String
+        "\"double\"" @ 132..140: String
         "'single'" @ 141..149: String
         "mixed_quotes_bytes" @ 150..168: Variable
         "b'single'" @ 171..180: String
-        "b/"double/"" @ 181..190: String
+        "b\"double\"" @ 181..190: String
         "b'single'" @ 191..200: String
         "regular_string" @ 201..215: Variable
-        "/"just a string/"" @ 218..233: String
+        "\"just a string\"" @ 218..233: String
         "regular_bytes" @ 234..247: Variable
-        "b/"just bytes/"" @ 250..263: String
-        "#);
+        "b\"just bytes\"" @ 250..263: String
+        "###);
     }
 
     #[test]
     fn test_fstring_with_mixed_literals() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 # Test f-strings with various literal types
 name = "Alice"
@@ -2028,17 +1988,17 @@ result = f"Hello {name}! Value: {value}, Data: {data!r}"
 mixed = f"prefix" + b"suffix"
 
 # Complex f-string with nested expressions
-complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}"
+complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}"
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "name" @ 45..49: Variable
-        "/"Alice/"" @ 52..59: String
+        "\"Alice\"" @ 52..59: String
         "data" @ 60..64: Variable
-        "b/"hello/"" @ 67..75: String
+        "b\"hello\"" @ 67..75: String
         "value" @ 76..81: Variable
         "42" @ 84..86: Number
         "result" @ 153..159: Variable
@@ -2050,7 +2010,7 @@ complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}"
+
+    return inner
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "x" @ 1..2: Variable
-        "/"global_value/"" @ 5..19: String
+        "\"global_value\"" @ 5..19: String
         "y" @ 20..21: Variable
-        "/"another_global/"" @ 24..40: String
+        "\"another_global\"" @ 24..40: String
         "outer" @ 46..51: Function [definition]
         "x" @ 59..60: Variable
-        "/"outer_value/"" @ 63..76: String
+        "\"outer_value\"" @ 63..76: String
         "z" @ 81..82: Variable
-        "/"outer_local/"" @ 85..98: String
+        "\"outer_local\"" @ 85..98: String
         "inner" @ 108..113: Function [definition]
         "x" @ 134..135: Variable
         "z" @ 137..138: Variable
         "y" @ 189..190: Variable
         "x" @ 239..240: Variable
-        "/"modified/"" @ 243..253: String
+        "\"modified\"" @ 243..253: String
         "y" @ 262..263: Variable
-        "/"modified_global/"" @ 266..283: String
+        "\"modified_global\"" @ 266..283: String
         "z" @ 292..293: Variable
-        "/"modified_local/"" @ 296..312: String
+        "\"modified_local\"" @ 296..312: String
         "deeper" @ 326..332: Function [definition]
         "x" @ 357..358: Variable
         "y" @ 398..399: Variable
@@ -2123,29 +2083,29 @@ def outer():
         "y" @ 461..462: Variable
         "deeper" @ 479..485: Function
         "inner" @ 498..503: Function
-        "#);
+        "###);
     }
 
     #[test]
     fn test_nonlocal_global_edge_cases() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 # Single variable statements
 def test():
     global x
     nonlocal y
-    
+
     # Multiple variables in one statement
     global a, b, c
     nonlocal d, e, f
-    
-    return x + y + a + b + c + d + e + f
+
+    return x + y + a + b + c + d + e + f
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "test" @ 34..38: Function [definition]
         "x" @ 53..54: Variable
         "y" @ 68..69: Variable
@@ -2168,7 +2128,7 @@ def test():
 
     #[test]
     fn test_pattern_matching() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 def process_data(data):
     match data:
@@ -2180,19 +2140,19 @@ def process_data(data):
             return sequence
         case value as fallback:
             print(f"Fallback: {fallback}")
-            return fallback
+            return fallback
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r###"
         "process_data" @ 5..17: Function [definition]
         "data" @ 18..22: Parameter
         "data" @ 35..39: Variable
-        "/"name/"" @ 55..61: String
+        "\"name\"" @ 55..61: String
         "name" @ 63..67: Variable
-        "/"age/"" @ 69..74: String
+        "\"age\"" @ 69..74: String
         "age" @ 76..79: Variable
         "rest" @ 83..87: Variable
         "person" @ 92..98: Variable
@@ -2218,12 +2178,12 @@ def process_data(data):
         "Fallback: " @ 375..385: String
         "fallback" @ 386..394: Variable
         "fallback" @ 417..425: Variable
-        "#);
+        "###);
     }
 
     #[test]
     fn test_exception_handlers() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 try:
     x = 1 / 0
@@ -2234,13 +2194,13 @@ except (TypeError, RuntimeError) as re:
 except Exception as e:
     print(e)
 finally:
-    pass
+    pass
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "x" @ 10..11: Variable
         "1" @ 14..15: Number
         "0" @ 18..19: Number
@@ -2262,7 +2222,7 @@ finally:
 
     #[test]
     fn test_self_attribute_expression() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
 from typing import Self
 
@@ -2272,15 +2232,13 @@ class C:
         self.annotated: int = 1
         self.non_annotated = 1
         self.x.test()
-        self.x()
-
-
+        self.x()
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "typing" @ 6..12: Namespace
         "Self" @ 20..24: Variable
         "C" @ 33..34: Class [definition]
@@ -2305,16 +2263,16 @@ class C:
     /// Regression test for 
     #[test]
     fn test_invalid_kwargs() {
-        let test = cursor_test(
+        let test = SemanticTokenTest::new(
             r#"
-def foo(self, **key, value=10):
+def foo(self, **key, value=10):
     return
 "#,
         );
 
-        let tokens = semantic_tokens_full_file(&test.db, test.cursor.file);
+        let tokens = test.highlight_file();
 
-        assert_snapshot!(semantic_tokens_to_snapshot(&test.db, test.cursor.file, &tokens), @r#"
+        assert_snapshot!(test.to_snapshot(&tokens), @r#"
         "foo" @ 5..8: Function [definition]
         "self" @ 9..13: Parameter
         "key" @ 17..20: Parameter
@@ -2322,4 +2280,77 @@ def foo(self, **key, value=10):
         "10" @ 28..30: Number
         "#);
     }
+
+    pub(super) struct SemanticTokenTest {
+        pub(super) db: ty_project::TestDb,
+        file: File,
+    }
+
+    impl SemanticTokenTest {
+        fn new(source: &str) -> Self {
+            let mut db = ty_project::TestDb::new(ProjectMetadata::new(
+                "test".into(),
+                SystemPathBuf::from("/"),
+            ));
+
+            db.init_program().unwrap();
+
+            let path = SystemPath::new("src/main.py");
+            db.write_file(path, ruff_python_trivia::textwrap::dedent(source))
+                .expect("Write to memory file system to always succeed");
+
+            let file = system_path_to_file(&db, path).expect("newly written file to existing");
+
+            Self { db, file }
+        }
+
+        /// Get semantic tokens for the entire file
+        fn highlight_file(&self) -> SemanticTokens {
+            semantic_tokens(&self.db, self.file, None)
+        }
+
+        /// Get semantic tokens for a specific range in the file
+        fn highlight_range(&self, range: TextRange) -> SemanticTokens {
+            semantic_tokens(&self.db, self.file, Some(range))
+        }
+
+        /// Helper function to convert semantic tokens to a snapshot-friendly text format
+        fn to_snapshot(&self, tokens: &SemanticTokens) -> String {
+            use std::fmt::Write;
+            let source = ruff_db::source::source_text(&self.db, self.file);
+            let mut result = String::new();
+
+            for token in tokens.iter() {
+                let token_text = &source[token.range()];
+                let modifiers_text = if token.modifiers.is_empty() {
+                    String::new()
+                } else {
+                    let mut mods = Vec::new();
+                    if token.modifiers.contains(SemanticTokenModifier::DEFINITION) {
+                        mods.push("definition");
+                    }
+                    if token.modifiers.contains(SemanticTokenModifier::READONLY) {
+                        mods.push("readonly");
+                    }
+                    if token.modifiers.contains(SemanticTokenModifier::ASYNC) {
+                        mods.push("async");
+                    }
+                    format!(" [{}]", mods.join(", "))
+                };
+
+                writeln!(
+                    result,
+                    "{:?} @ {}..{}: {:?}{}",
+                    token_text,
+                    u32::from(token.start()),
+                    u32::from(token.end()),
+                    token.token_type,
+                    modifiers_text
+                )
+                .unwrap();
+            }
+
+            result
+        }
+    }
 }
diff --git a/crates/ty_project/src/db.rs b/crates/ty_project/src/db.rs
index 5e2105839e..8f6ec20c95 100644
--- a/crates/ty_project/src/db.rs
+++ b/crates/ty_project/src/db.rs
@@ -516,11 +516,13 @@ pub(crate) mod tests {
     use std::sync::{Arc, Mutex};
 
     use ruff_db::Db as SourceDb;
-    use ruff_db::files::Files;
+    use ruff_db::files::{FileRootKind, Files};
     use ruff_db::system::{DbWithTestSystem, System, TestSystem};
     use ruff_db::vendored::VendoredFileSystem;
-    use ty_python_semantic::Program;
     use ty_python_semantic::lint::{LintRegistry, RuleSelection};
+    use ty_python_semantic::{
+        Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings,
+    };
 
     use crate::db::Db;
     use crate::{Project, ProjectMetadata};
@@ -560,6 +562,27 @@ pub(crate) mod tests {
             db.project = Some(project);
             db
         }
+
+        pub fn init_program(&mut self) -> anyhow::Result<()> {
+            let root = self.project().root(self);
+
+            let search_paths = SearchPathSettings::new(vec![root.to_path_buf()])
+                .to_search_paths(self.system(), self.vendored())
+                .expect("Valid search path settings");
+
+            Program::from_settings(
+                self,
+                ProgramSettings {
+                    python_version: PythonVersionWithSource::default(),
+                    python_platform: PythonPlatform::default(),
+                    search_paths,
+                },
+            );
+
+            self.files().try_add_root(self, root, FileRootKind::Project);
+
+            Ok(())
+        }
     }
 
     impl TestDb {
diff --git a/crates/ty_project/src/lib.rs b/crates/ty_project/src/lib.rs
index d47476c7dc..4c7688d47f 100644
--- a/crates/ty_project/src/lib.rs
+++ b/crates/ty_project/src/lib.rs
@@ -751,34 +751,20 @@ mod tests {
     use crate::ProjectMetadata;
     use crate::check_file_impl;
     use crate::db::tests::TestDb;
-    use ruff_db::Db as _;
     use ruff_db::files::system_path_to_file;
     use ruff_db::source::source_text;
     use ruff_db::system::{DbWithTestSystem, DbWithWritableSystem as _, SystemPath, SystemPathBuf};
     use ruff_db::testing::assert_function_query_was_not_run;
     use ruff_python_ast::name::Name;
     use ty_python_semantic::types::check_types;
-    use ty_python_semantic::{
-        Program, ProgramSettings, PythonPlatform, PythonVersionWithSource, SearchPathSettings,
-    };
 
     #[test]
     fn check_file_skips_type_checking_when_file_cant_be_read() -> ruff_db::system::Result<()> {
         let project = ProjectMetadata::new(Name::new_static("test"), SystemPathBuf::from("/"));
         let mut db = TestDb::new(project);
+        db.init_program().unwrap();
         let path = SystemPath::new("test.py");
 
-        Program::from_settings(
-            &db,
-            ProgramSettings {
-                python_version: PythonVersionWithSource::default(),
-                python_platform: PythonPlatform::default(),
-                search_paths: SearchPathSettings::new(vec![SystemPathBuf::from(".")])
-                    .to_search_paths(db.system(), db.vendored())
-                    .expect("Valid search path settings"),
-            },
-        );
-
         db.write_file(path, "x = 10")?;
         let file = system_path_to_file(&db, path).unwrap();
 

From 39f105bc4ac71ec37372c7fc535ae034fafa23ed Mon Sep 17 00:00:00 2001
From: Alex Waygood 
Date: Mon, 3 Nov 2025 10:38:20 -0500
Subject: [PATCH 074/180] [ty] Use "cannot" consistently over "can not"
 (#21255)

---
 crates/ruff_formatter/src/format_element.rs               | 2 +-
 crates/ruff_python_formatter/src/pattern/mod.rs           | 2 +-
 .../resources/mdtest/annotations/any.md                   | 2 +-
 crates/ty_python_semantic/resources/mdtest/attributes.md  | 8 ++++----
 crates/ty_python_semantic/resources/mdtest/call/dunder.md | 2 +-
 .../resources/mdtest/call/getattr_static.md               | 2 +-
 .../ty_python_semantic/resources/mdtest/call/methods.md   | 2 +-
 .../resources/mdtest/dataclasses/dataclasses.md           | 2 +-
 .../resources/mdtest/expression/yield_and_yield_from.md   | 2 +-
 .../resources/mdtest/ide_support/all_members.md           | 2 +-
 .../resources/mdtest/intersection_types.md                | 2 +-
 .../ty_python_semantic/resources/mdtest/ty_extensions.md  | 2 +-
 .../resources/mdtest/type_compendium/any.md               | 4 ++--
 .../resources/mdtest/type_compendium/integer_literals.md  | 6 +++---
 .../resources/mdtest/type_properties/is_disjoint_from.md  | 2 +-
 crates/ty_python_semantic/resources/mdtest/typed_dict.md  | 6 +++---
 crates/ty_python_semantic/resources/mdtest/unreachable.md | 4 ++--
 crates/ty_python_semantic/src/place.rs                    | 2 +-
 .../src/semantic_index/reachability_constraints.rs        | 8 ++++----
 crates/ty_python_semantic/src/subscript.rs                | 2 +-
 crates/ty_python_semantic/src/types.rs                    | 6 +++---
 crates/ty_python_semantic/src/types/diagnostic.rs         | 2 +-
 crates/ty_python_semantic/src/types/generics.rs           | 2 +-
 crates/ty_python_semantic/src/types/infer/builder.rs      | 4 ++--
 24 files changed, 39 insertions(+), 39 deletions(-)

diff --git a/crates/ruff_formatter/src/format_element.rs b/crates/ruff_formatter/src/format_element.rs
index 529992c642..715eeb3cfd 100644
--- a/crates/ruff_formatter/src/format_element.rs
+++ b/crates/ruff_formatter/src/format_element.rs
@@ -487,7 +487,7 @@ pub trait FormatElements {
 /// Represents the width by adding 1 to the actual width so that the width can be represented by a [`NonZeroU32`],
 /// allowing [`TextWidth`] or [`Option`] fit in 4 bytes rather than 8.
 ///
-/// This means that 2^32 can not be precisely represented and instead has the same value as 2^32-1.
+/// This means that 2^32 cannot be precisely represented and instead has the same value as 2^32-1.
 /// This imprecision shouldn't matter in practice because either text are longer than any configured line width
 /// and thus, the text should break.
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
diff --git a/crates/ruff_python_formatter/src/pattern/mod.rs b/crates/ruff_python_formatter/src/pattern/mod.rs
index e255d59359..557337ddc5 100644
--- a/crates/ruff_python_formatter/src/pattern/mod.rs
+++ b/crates/ruff_python_formatter/src/pattern/mod.rs
@@ -299,7 +299,7 @@ impl<'a> CanOmitOptionalParenthesesVisitor<'a> {
                 }
 
                 // `case 4+3j:` or `case 4-3j:
-                // Can not contain arbitrary expressions. Limited to complex numbers.
+                // Cannot contain arbitrary expressions. Limited to complex numbers.
                 Expr::BinOp(_) => {
                     self.update_max_precedence(OperatorPrecedence::Additive, 1);
                 }
diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/any.md b/crates/ty_python_semantic/resources/mdtest/annotations/any.md
index d6baf8cbf9..c2cc2d2461 100644
--- a/crates/ty_python_semantic/resources/mdtest/annotations/any.md
+++ b/crates/ty_python_semantic/resources/mdtest/annotations/any.md
@@ -118,7 +118,7 @@ def takes_other_protocol(f: OtherProtocol): ...
 takes_other_protocol(SubclassOfAny())
 ```
 
-A subclass of `Any` cannot be assigned to literal types, since those can not be subclassed:
+A subclass of `Any` cannot be assigned to literal types, since those cannot be subclassed:
 
 ```py
 from typing import Any, Literal
diff --git a/crates/ty_python_semantic/resources/mdtest/attributes.md b/crates/ty_python_semantic/resources/mdtest/attributes.md
index 10b6d42318..b1dbd57c78 100644
--- a/crates/ty_python_semantic/resources/mdtest/attributes.md
+++ b/crates/ty_python_semantic/resources/mdtest/attributes.md
@@ -1878,7 +1878,7 @@ date.day = 8
 date.month = 4
 date.year = 2025
 
-# error: [unresolved-attribute] "Can not assign object of type `Literal["UTC"]` to attribute `tz` on type `Date` with custom `__setattr__` method."
+# error: [unresolved-attribute] "Cannot assign object of type `Literal["UTC"]` to attribute `tz` on type `Date` with custom `__setattr__` method."
 date.tz = "UTC"
 ```
 
@@ -1894,10 +1894,10 @@ class Frozen:
     existing: int = 1
 
     def __setattr__(self, name, value) -> Never:
-        raise AttributeError("Attributes can not be modified")
+        raise AttributeError("Attributes cannot be modified")
 
 instance = Frozen()
-instance.non_existing = 2  # error: [invalid-assignment] "Can not assign to unresolved attribute `non_existing` on type `Frozen`"
+instance.non_existing = 2  # error: [invalid-assignment] "Cannot assign to unresolved attribute `non_existing` on type `Frozen`"
 instance.existing = 2  # error: [invalid-assignment] "Cannot assign to attribute `existing` on type `Frozen` whose `__setattr__` method returns `Never`/`NoReturn`"
 ```
 
@@ -1949,7 +1949,7 @@ def flag() -> bool:
 class Frozen:
     if flag():
         def __setattr__(self, name, value) -> Never:
-            raise AttributeError("Attributes can not be modified")
+            raise AttributeError("Attributes cannot be modified")
 
 instance = Frozen()
 instance.non_existing = 2  # error: [invalid-assignment]
diff --git a/crates/ty_python_semantic/resources/mdtest/call/dunder.md b/crates/ty_python_semantic/resources/mdtest/call/dunder.md
index 721517eac4..f7be30464c 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/dunder.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/dunder.md
@@ -194,7 +194,7 @@ class_with_descriptor_dunder = ClassWithDescriptorDunder()
 reveal_type(class_with_descriptor_dunder[0])  # revealed: str
 ```
 
-## Dunders can not be overwritten on instances
+## Dunders cannot be overwritten on instances
 
 If we attempt to overwrite a dunder method on an instance, it does not affect the behavior of
 implicit dunder calls:
diff --git a/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md b/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md
index a8d87bbfa6..7841d04f79 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/getattr_static.md
@@ -84,7 +84,7 @@ class E(metaclass=Meta): ...
 reveal_type(inspect.getattr_static(E, "attr"))  # revealed: int
 ```
 
-Metaclass attributes can not be added when probing an instance of the class:
+Metaclass attributes cannot be added when probing an instance of the class:
 
 ```py
 reveal_type(inspect.getattr_static(E(), "attr", "non_existent"))  # revealed: Literal["non_existent"]
diff --git a/crates/ty_python_semantic/resources/mdtest/call/methods.md b/crates/ty_python_semantic/resources/mdtest/call/methods.md
index f101aa6e64..07740c2f89 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/methods.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/methods.md
@@ -308,7 +308,7 @@ reveal_type(C.f)  # revealed: bound method .f(arg: int) -> str
 reveal_type(C.f(1))  # revealed: str
 ```
 
-The method `f` can not be accessed from an instance of the class:
+The method `f` cannot be accessed from an instance of the class:
 
 ```py
 # error: [unresolved-attribute] "Object of type `C` has no attribute `f`"
diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md
index d8619851a2..8548085302 100644
--- a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md
+++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md
@@ -424,7 +424,7 @@ from dataclasses import dataclass
 class MyFrozenClass: ...
 
 frozen = MyFrozenClass()
-frozen.x = 2  # error: [invalid-assignment] "Can not assign to unresolved attribute `x` on type `MyFrozenClass`"
+frozen.x = 2  # error: [invalid-assignment] "Cannot assign to unresolved attribute `x` on type `MyFrozenClass`"
 ```
 
 A diagnostic is also emitted if a frozen dataclass is inherited, and an attempt is made to mutate an
diff --git a/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md b/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md
index 629fd2b554..a207b3414f 100644
--- a/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md
+++ b/crates/ty_python_semantic/resources/mdtest/expression/yield_and_yield_from.md
@@ -26,7 +26,7 @@ def outer_generator():
 ## `yield from` with a custom iterable
 
 `yield from` can also be used with custom iterable types. In that case, the type of the `yield from`
-expression can not be determined
+expression cannot be determined
 
 ```py
 from typing import Generator, TypeVar, Generic
diff --git a/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md b/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md
index 03ea95b4a2..e8c19625ca 100644
--- a/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md
+++ b/crates/ty_python_semantic/resources/mdtest/ide_support/all_members.md
@@ -130,7 +130,7 @@ static_assert(has_member(C, "base_attr"))
 static_assert(not has_member(C, "non_existent"))
 ```
 
-But instance attributes can not be accessed this way:
+But instance attributes cannot be accessed this way:
 
 ```py
 static_assert(not has_member(C, "instance_attr"))
diff --git a/crates/ty_python_semantic/resources/mdtest/intersection_types.md b/crates/ty_python_semantic/resources/mdtest/intersection_types.md
index 0f5b37eb88..022e09c43b 100644
--- a/crates/ty_python_semantic/resources/mdtest/intersection_types.md
+++ b/crates/ty_python_semantic/resources/mdtest/intersection_types.md
@@ -444,7 +444,7 @@ def _(
     reveal_type(i07)  # revealed: Never
     reveal_type(i08)  # revealed: Never
 
-# `bool` is final and can not be subclassed, so `type[bool]` is equivalent to `Literal[bool]`, which
+# `bool` is final and cannot be subclassed, so `type[bool]` is equivalent to `Literal[bool]`, which
 # is disjoint from `type[str]`:
 def example_type_bool_type_str(
     i: Intersection[type[bool], type[str]],
diff --git a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md
index 22d92b54af..4ff580954e 100644
--- a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md
+++ b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md
@@ -390,7 +390,7 @@ static_assert(not is_single_valued(Literal["a"] | Literal["b"]))
 
 We use `TypeOf` to get the inferred type of an expression. This is useful when we want to refer to
 it in a type expression. For example, if we want to make sure that the class literal type `str` is a
-subtype of `type[str]`, we can not use `is_subtype_of(str, type[str])`, as that would test if the
+subtype of `type[str]`, we cannot use `is_subtype_of(str, type[str])`, as that would test if the
 type `str` itself is a subtype of `type[str]`. Instead, we can use `TypeOf[str]` to get the type of
 the expression `str`:
 
diff --git a/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md b/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md
index a0de2576b9..255e744af9 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_compendium/any.md
@@ -54,7 +54,7 @@ class Small(Medium): ...
 static_assert(is_assignable_to(Any | Medium, Big))
 static_assert(is_assignable_to(Any | Medium, Medium))
 
-# `Any | Medium` is at least as large as `Medium`, so we can not assign it to `Small`:
+# `Any | Medium` is at least as large as `Medium`, so we cannot assign it to `Small`:
 static_assert(not is_assignable_to(Any | Medium, Small))
 ```
 
@@ -84,7 +84,7 @@ static_assert(is_assignable_to(Small, Intersection[Any, Medium]))
 static_assert(is_assignable_to(Medium, Intersection[Any, Medium]))
 ```
 
-`Any & Medium` is no larger than `Medium`, so we can not assign `Big` to it. There is no possible
+`Any & Medium` is no larger than `Medium`, so we cannot assign `Big` to it. There is no possible
 materialization of `Any & Medium` that would make it as big as `Big`:
 
 ```py
diff --git a/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md b/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md
index d8d42ae7ad..66b759b9ac 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_compendium/integer_literals.md
@@ -32,8 +32,8 @@ static_assert(not is_singleton(Literal[1]))
 static_assert(not is_singleton(Literal[54165]))
 ```
 
-This has implications for type-narrowing. For example, you can not use the `is not` operator to
-check whether a variable has a specific integer literal type, but this is not a recommended practice
+This has implications for type-narrowing. For example, you cannot use the `is not` operator to check
+whether a variable has a specific integer literal type, but this is not a recommended practice
 anyway.
 
 ```py
@@ -44,7 +44,7 @@ def f(x: int):
         reveal_type(x)  # revealed: Literal[54165]
 
     if x is not 54165:
-        # But here, we can not narrow the type (to `int & ~Literal[54165]`), because `x` might also
+        # But here, we cannot narrow the type (to `int & ~Literal[54165]`), because `x` might also
         # have the value `54165`, but a different object identity.
         reveal_type(x)  # revealed: int
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md
index dfad076726..db4b0f5f98 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md
@@ -45,7 +45,7 @@ class C(B1, B2): ...
 # ... which lies in their intersection:
 static_assert(is_subtype_of(C, Intersection[B1, B2]))
 
-# However, if a class is marked final, it can not be subclassed ...
+# However, if a class is marked final, it cannot be subclassed ...
 @final
 class FinalSubclass(A): ...
 
diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md
index 14142020a2..30bbb2132b 100644
--- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md
+++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md
@@ -680,7 +680,7 @@ def _(p: Person) -> None:
     reveal_type(p.__class__)  # revealed: 
 ```
 
-Also, the "attributes" on the class definition can not be accessed. Neither on the class itself, nor
+Also, the "attributes" on the class definition cannot be accessed. Neither on the class itself, nor
 on inhabitants of the type defined by the class:
 
 ```py
@@ -714,7 +714,7 @@ reveal_type(Person.__required_keys__)  # revealed: frozenset[str]
 reveal_type(Person.__optional_keys__)  # revealed: frozenset[str]
 ```
 
-These attributes can not be accessed on inhabitants:
+These attributes cannot be accessed on inhabitants:
 
 ```py
 def _(person: Person) -> None:
@@ -723,7 +723,7 @@ def _(person: Person) -> None:
     person.__optional_keys__  # error: [unresolved-attribute]
 ```
 
-Also, they can not be accessed on `type(person)`, as that would be `dict` at runtime:
+Also, they cannot be accessed on `type(person)`, as that would be `dict` at runtime:
 
 ```py
 def _(person: Person) -> None:
diff --git a/crates/ty_python_semantic/resources/mdtest/unreachable.md b/crates/ty_python_semantic/resources/mdtest/unreachable.md
index 7321ed9b01..73e174f6a1 100644
--- a/crates/ty_python_semantic/resources/mdtest/unreachable.md
+++ b/crates/ty_python_semantic/resources/mdtest/unreachable.md
@@ -187,8 +187,8 @@ python-platform = "all"
 
 If `python-platform` is set to `all`, we treat the platform as unspecified. This means that we do
 not infer a literal type like `Literal["win32"]` for `sys.platform`, but instead fall back to
-`LiteralString` (the `typeshed` annotation for `sys.platform`). This means that we can not
-statically determine the truthiness of a branch like `sys.platform == "win32"`.
+`LiteralString` (the `typeshed` annotation for `sys.platform`). This means that we cannot statically
+determine the truthiness of a branch like `sys.platform == "win32"`.
 
 See  for a plan on how this
 could be improved.
diff --git a/crates/ty_python_semantic/src/place.rs b/crates/ty_python_semantic/src/place.rs
index 3989942b04..c0b3428345 100644
--- a/crates/ty_python_semantic/src/place.rs
+++ b/crates/ty_python_semantic/src/place.rs
@@ -733,7 +733,7 @@ pub(crate) fn place_by_id<'db>(
     };
 
     // If a symbol is undeclared, but qualified with `typing.Final`, we use the right-hand side
-    // inferred type, without unioning with `Unknown`, because it can not be modified.
+    // inferred type, without unioning with `Unknown`, because it cannot be modified.
     if let Some(qualifiers) = declared.is_bare_final() {
         let bindings = all_considered_bindings();
         return place_from_bindings_impl(db, bindings, requires_explicit_reexport)
diff --git a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs
index 1224190209..9e6d60668f 100644
--- a/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs
+++ b/crates/ty_python_semantic/src/semantic_index/reachability_constraints.rs
@@ -13,7 +13,7 @@
 //! of `test`. When evaluating a constraint, there are three possible outcomes: always true, always
 //! false, or ambiguous. For a simple constraint like this, always-true and always-false correspond
 //! to the case in which we can infer that the type of `test` is `Literal[True]` or `Literal[False]`.
-//! In any other case, like if the type of `test` is `bool` or `Unknown`, we can not statically
+//! In any other case, like if the type of `test` is `bool` or `Unknown`, we cannot statically
 //! determine whether `test` is truthy or falsy, so the outcome would be "ambiguous".
 //!
 //!
@@ -29,7 +29,7 @@
 //! Here, we would accumulate a reachability constraint of `test1 AND test2`. We can statically
 //! determine that this position is *always* reachable only if both `test1` and `test2` are
 //! always true. On the other hand, we can statically determine that this position is *never*
-//! reachable if *either* `test1` or `test2` is always false. In any other case, we can not
+//! reachable if *either* `test1` or `test2` is always false. In any other case, we cannot
 //! determine whether this position is reachable or not, so the outcome is "ambiguous". This
 //! corresponds to a ternary *AND* operation in [Kleene] logic:
 //!
@@ -60,7 +60,7 @@
 //! The third branch ends in a terminal statement [^1]. When we merge control flow, we need to consider
 //! the reachability through either the first or the second branch. The current position is only
 //! *definitely* unreachable if both `test1` and `test2` are always false. It is definitely
-//! reachable if *either* `test1` or `test2` is always true. In any other case, we can not statically
+//! reachable if *either* `test1` or `test2` is always true. In any other case, we cannot statically
 //! determine whether it is reachable or not. This operation corresponds to a ternary *OR* operation:
 //!
 //! ```text
@@ -91,7 +91,7 @@
 //! ## Explicit ambiguity
 //!
 //! In some cases, we explicitly record an “ambiguous” constraint. We do this when branching on
-//! something that we can not (or intentionally do not want to) analyze statically. `for` loops are
+//! something that we cannot (or intentionally do not want to) analyze statically. `for` loops are
 //! one example:
 //! ```py
 //! def _():
diff --git a/crates/ty_python_semantic/src/subscript.rs b/crates/ty_python_semantic/src/subscript.rs
index b7ea13db10..b51a9e597b 100644
--- a/crates/ty_python_semantic/src/subscript.rs
+++ b/crates/ty_python_semantic/src/subscript.rs
@@ -27,7 +27,7 @@ fn from_negative_i32(index: i32) -> usize {
     static_assertions::const_assert!(usize::BITS >= 32);
 
     index.checked_neg().map(from_nonnegative_i32).unwrap_or({
-        // 'checked_neg' only fails for i32::MIN. We can not
+        // 'checked_neg' only fails for i32::MIN. We cannot
         // represent -i32::MIN as a i32, but we can represent
         // it as a usize, since usize is at least 32 bits.
         from_nonnegative_i32(i32::MAX) + 1
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index b20f332999..ffe0f3066b 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -297,7 +297,7 @@ impl AttributeKind {
 /// When invoked on a class object, the fallback type (a class attribute) can shadow a
 /// non-data descriptor of the meta-type (the class's metaclass). However, this is not
 /// true for instances. When invoked on an instance, the fallback type (an attribute on
-/// the instance) can not completely shadow a non-data descriptor of the meta-type (the
+/// the instance) cannot completely shadow a non-data descriptor of the meta-type (the
 /// class), because we do not currently attempt to statically infer if an instance
 /// attribute is definitely defined (i.e. to check whether a particular method has been
 /// called).
@@ -4412,7 +4412,7 @@ impl<'db> Type<'db> {
                 };
 
                 if result.is_class_var() && self.is_typed_dict() {
-                    // `ClassVar`s on `TypedDictFallback` can not be accessed on inhabitants of `SomeTypedDict`.
+                    // `ClassVar`s on `TypedDictFallback` cannot be accessed on inhabitants of `SomeTypedDict`.
                     // They can only be accessed on `SomeTypedDict` directly.
                     return Place::Undefined.into();
                 }
@@ -12050,7 +12050,7 @@ pub(crate) mod tests {
         assert!(todo1.is_assignable_to(&db, int));
 
         // We lose information when combining several `Todo` types. This is an
-        // acknowledged limitation of the current implementation. We can not
+        // acknowledged limitation of the current implementation. We cannot
         // easily store the meta information of several `Todo`s in a single
         // variant, as `TodoType` needs to implement `Copy`, meaning it can't
         // contain `Vec`/`Box`/etc., and can't be boxed itself.
diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs
index 2dd75e57aa..6ab6f2a447 100644
--- a/crates/ty_python_semantic/src/types/diagnostic.rs
+++ b/crates/ty_python_semantic/src/types/diagnostic.rs
@@ -2000,7 +2000,7 @@ pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeR
     let Some(builder) = context.report_lint(&ZERO_STEPSIZE_IN_SLICE, node) else {
         return;
     };
-    builder.into_diagnostic("Slice step size can not be zero");
+    builder.into_diagnostic("Slice step size cannot be zero");
 }
 
 fn report_invalid_assignment_with_message(
diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs
index 98f7cb736f..444c5badd6 100644
--- a/crates/ty_python_semantic/src/types/generics.rs
+++ b/crates/ty_python_semantic/src/types/generics.rs
@@ -714,7 +714,7 @@ fn is_subtype_in_invariant_position<'db>(
         // TODO:
         // This should be removed and properly handled in the respective
         // `(Type::TypeVar(_), _) | (_, Type::TypeVar(_))` branch of
-        // `Type::has_relation_to_impl`. Right now, we can not generally
+        // `Type::has_relation_to_impl`. Right now, we cannot generally
         // return `ConstraintSet::from(true)` from that branch, as that
         // leads to union simplification, which means that we lose track
         // of type variables without recording the constraints under which
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index b74ff75404..078d49fe5e 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -3804,7 +3804,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 
                                     let msg = if !member_exists {
                                         format!(
-                                            "Can not assign to unresolved attribute `{attribute}` on type `{}`",
+                                            "Cannot assign to unresolved attribute `{attribute}` on type `{}`",
                                             object_ty.display(db)
                                         )
                                     } else if is_setattr_synthesized {
@@ -3840,7 +3840,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                                 self.context.report_lint(&UNRESOLVED_ATTRIBUTE, target)
                             {
                                 builder.into_diagnostic(format_args!(
-                                    "Can not assign object of type `{}` to attribute \
+                                    "Cannot assign object of type `{}` to attribute \
                                      `{attribute}` on type `{}` with \
                                      custom `__setattr__` method.",
                                     value_ty.display(db),

From b5305b5f328b03e3ba0ab1ba629dedb5c9f4415e Mon Sep 17 00:00:00 2001
From: Shunsuke Shibayama <45118249+mtshiba@users.noreply.github.com>
Date: Tue, 4 Nov 2025 00:41:11 +0900
Subject: [PATCH 075/180] [ty] Fix panic due to simplifying `Divergent` types
 out of intersections types (#21253)

---
 .../resources/corpus/cyclic_comprehensions.py | 10 ++++
 .../pr_20962_comprehension_panics.md          | 13 ----
 crates/ty_python_semantic/src/types.rs        | 60 +++++++++++++++----
 3 files changed, 60 insertions(+), 23 deletions(-)
 create mode 100644 crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py

diff --git a/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py b/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py
new file mode 100644
index 0000000000..28ba9d9091
--- /dev/null
+++ b/crates/ty_python_semantic/resources/corpus/cyclic_comprehensions.py
@@ -0,0 +1,10 @@
+# Regression test for https://github.com/astral-sh/ruff/pull/20962
+# error message:
+# `infer_definition_types(Id(1804)): execute: too many cycle iterations`
+
+for name_1 in {
+    {{0: name_4 for unique_name_0 in unique_name_1}: 0 for unique_name_2 in unique_name_3 if name_4}: 0
+    for unique_name_4 in name_1
+    for name_4 in name_1
+}:
+    pass
diff --git a/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md b/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md
index b011d95e8c..97bbf21049 100644
--- a/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md
+++ b/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md
@@ -35,16 +35,3 @@ else:
 async def name_5():
     pass
 ```
-
-## Too many cycle iterations in `infer_definition_types`
-
-
-
-```py
-for name_1 in {
-    {{0: name_4 for unique_name_0 in unique_name_1}: 0 for unique_name_2 in unique_name_3 if name_4}: 0
-    for unique_name_4 in name_1
-    for name_4 in name_1
-}:
-    pass
-```
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index ffe0f3066b..e79282a35d 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -873,6 +873,10 @@ impl<'db> Type<'db> {
         matches!(self, Type::Dynamic(_))
     }
 
+    const fn is_non_divergent_dynamic(&self) -> bool {
+        self.is_dynamic() && !self.is_divergent()
+    }
+
     /// Is a value of this type only usable in typing contexts?
     pub(crate) fn is_type_check_only(&self, db: &'db dyn Db) -> bool {
         match self {
@@ -1695,22 +1699,33 @@ impl<'db> Type<'db> {
             // holds true if `T` is also a dynamic type or a union that contains a dynamic type.
             // Similarly, `T <: Any` only holds true if `T` is a dynamic type or an intersection
             // that contains a dynamic type.
-            (Type::Dynamic(_), _) => ConstraintSet::from(match relation {
-                TypeRelation::Subtyping => false,
-                TypeRelation::Assignability => true,
-                TypeRelation::Redundancy => match target {
-                    Type::Dynamic(_) => true,
-                    Type::Union(union) => union.elements(db).iter().any(Type::is_dynamic),
-                    _ => false,
-                },
-            }),
+            (Type::Dynamic(dynamic), _) => {
+                // If a `Divergent` type is involved, it must not be eliminated.
+                debug_assert!(
+                    !matches!(dynamic, DynamicType::Divergent(_)),
+                    "DynamicType::Divergent should have been handled in an earlier branch"
+                );
+                ConstraintSet::from(match relation {
+                    TypeRelation::Subtyping => false,
+                    TypeRelation::Assignability => true,
+                    TypeRelation::Redundancy => match target {
+                        Type::Dynamic(_) => true,
+                        Type::Union(union) => union.elements(db).iter().any(Type::is_dynamic),
+                        _ => false,
+                    },
+                })
+            }
             (_, Type::Dynamic(_)) => ConstraintSet::from(match relation {
                 TypeRelation::Subtyping => false,
                 TypeRelation::Assignability => true,
                 TypeRelation::Redundancy => match self {
                     Type::Dynamic(_) => true,
                     Type::Intersection(intersection) => {
-                        intersection.positive(db).iter().any(Type::is_dynamic)
+                        // If a `Divergent` type is involved, it must not be eliminated.
+                        intersection
+                            .positive(db)
+                            .iter()
+                            .any(Type::is_non_divergent_dynamic)
                     }
                     _ => false,
                 },
@@ -9991,6 +10006,10 @@ pub(crate) enum TypeRelation {
     /// materialization of `Any` and `int | Any` may be the same type (`object`), but the
     /// two differ in their bottom materializations (`Never` and `int`, respectively).
     ///
+    /// Despite the above principles, there is one exceptional type that should never be union-simplified: the `Divergent` type.
+    /// This is a kind of dynamic type, but it acts as a marker to track recursive type structures.
+    /// If this type is accidentally eliminated by simplification, the fixed-point iteration will not converge.
+    ///
     /// [fully static]: https://typing.python.org/en/latest/spec/glossary.html#term-fully-static-type
     /// [materializations]: https://typing.python.org/en/latest/spec/glossary.html#term-materialize
     Redundancy,
@@ -12103,6 +12122,27 @@ pub(crate) mod tests {
         assert!(div.is_equivalent_to(&db, div));
         assert!(!div.is_equivalent_to(&db, Type::unknown()));
         assert!(!Type::unknown().is_equivalent_to(&db, div));
+        assert!(!div.is_redundant_with(&db, Type::unknown()));
+        assert!(!Type::unknown().is_redundant_with(&db, div));
+
+        let truthy_div = IntersectionBuilder::new(&db)
+            .add_positive(div)
+            .add_negative(Type::AlwaysFalsy)
+            .build();
+
+        let union = UnionType::from_elements(&db, [Type::unknown(), truthy_div]);
+        assert!(!truthy_div.is_redundant_with(&db, Type::unknown()));
+        assert_eq!(
+            union.display(&db).to_string(),
+            "Unknown | (Divergent & ~AlwaysFalsy)"
+        );
+
+        let union = UnionType::from_elements(&db, [truthy_div, Type::unknown()]);
+        assert!(!Type::unknown().is_redundant_with(&db, truthy_div));
+        assert_eq!(
+            union.display(&db).to_string(),
+            "(Divergent & ~AlwaysFalsy) | Unknown"
+        );
 
         // The `object` type has a good convergence property, that is, its union with all other types is `object`.
         // (e.g. `object | tuple[Divergent] == object`, `object | tuple[object] == object`)

From 64a255df497bede66fa4cdac80b9464d73a787ba Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 3 Nov 2025 15:55:50 +0000
Subject: [PATCH 076/180] Update to Unicode 17 for line-width calculations
 (#21231)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 Cargo.lock | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index bfe02a8697..db64440a3d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4688,9 +4688,9 @@ dependencies = [
 
 [[package]]
 name = "unicode-width"
-version = "0.2.1"
+version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
+checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254"
 
 [[package]]
 name = "unicode_names2"

From 21ec8aa7d494ec1314806f4f781b18c801c40794 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Mon, 3 Nov 2025 16:10:49 +0000
Subject: [PATCH 077/180] Update Rust crate unicode-ident to v1.0.22 (#21228)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 Cargo.lock | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index db64440a3d..14312dfa75 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -4673,9 +4673,9 @@ checksum = "70ba288e709927c043cbe476718d37be306be53fb1fafecd0dbe36d072be2580"
 
 [[package]]
 name = "unicode-ident"
-version = "1.0.19"
+version = "1.0.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d"
+checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5"
 
 [[package]]
 name = "unicode-normalization"

From 78ee7ae9254a08e49c9c6443512235f74a027584 Mon Sep 17 00:00:00 2001
From: Tom Kuson 
Date: Mon, 3 Nov 2025 19:04:59 +0000
Subject: [PATCH 078/180] [`flake8-comprehensions`] Fix typo in `C416`
 documentation (#21184)

## Summary

Adds missing curly brace to the C416 documentation.

## Test Plan

Build the docs
---
 .../flake8_comprehensions/rules/unnecessary_comprehension.rs    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs
index f384b32e6e..70108c1ba7 100644
--- a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs
+++ b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_comprehension.rs
@@ -43,7 +43,7 @@ use crate::rules::flake8_comprehensions::fixes;
 /// >>> {x: y for x, y in d1}  # Iterates over the keys of a mapping
 /// {1: 2, 4: 5}
 /// >>> dict(d1)               # Ruff's incorrect suggested fix
-/// (1, 2): 3, (4, 5): 6}
+/// {(1, 2): 3, (4, 5): 6}
 /// >>> dict(d1.keys())        # Correct fix
 /// {1: 2, 4: 5}
 /// ```

From 04335268970408aa6c81d8dae1c5279498c100e0 Mon Sep 17 00:00:00 2001
From: Wei Lee 
Date: Tue, 4 Nov 2025 04:20:20 +0800
Subject: [PATCH 079/180] [`airflow`] extend deprecated argument `concurrency`
 in `airflow..DAG` (`AIR301`) (#21220)



## Summary


* extend AIR301 to include deprecated argument `concurrency` in
`airflow....DAG`

## Test Plan



update the existing test fixture in the first commit and then reorganize
in the second one
---
 .../test/fixtures/airflow/AIR301_args.py      |   1 +
 .../src/rules/airflow/rules/removal_in_3.rs   |   1 +
 ...airflow__tests__AIR301_AIR301_args.py.snap | 316 ++++++++++--------
 3 files changed, 172 insertions(+), 146 deletions(-)

diff --git a/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py b/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py
index ce35d79338..e275a54bcd 100644
--- a/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py
+++ b/crates/ruff_linter/resources/test/fixtures/airflow/AIR301_args.py
@@ -22,6 +22,7 @@ DAG(dag_id="class_schedule_interval", schedule_interval="@hourly")
 
 DAG(dag_id="class_timetable", timetable=NullTimetable())
 
+DAG(dag_id="class_concurrency", concurrency=12)
 
 DAG(dag_id="class_fail_stop", fail_stop=True)
 
diff --git a/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs b/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs
index 78c89da0b4..562f37230d 100644
--- a/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs
+++ b/crates/ruff_linter/src/rules/airflow/rules/removal_in_3.rs
@@ -196,6 +196,7 @@ fn check_call_arguments(checker: &Checker, qualified_name: &QualifiedName, argum
     match qualified_name.segments() {
         ["airflow", .., "DAG" | "dag"] => {
             // with replacement
+            diagnostic_for_argument(checker, arguments, "concurrency", Some("max_active_tasks"));
             diagnostic_for_argument(checker, arguments, "fail_stop", Some("fail_fast"));
             diagnostic_for_argument(checker, arguments, "schedule_interval", Some("schedule"));
             diagnostic_for_argument(checker, arguments, "timetable", Some("schedule"));
diff --git a/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap b/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap
index 6f783edc9f..e0daf99000 100644
--- a/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap
+++ b/crates/ruff_linter/src/rules/airflow/snapshots/ruff_linter__rules__airflow__tests__AIR301_AIR301_args.py.snap
@@ -28,6 +28,8 @@ AIR301 [*] `timetable` is removed in Airflow 3.0
 22 |
 23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
    |                               ^^^^^^^^^
+24 |
+25 | DAG(dag_id="class_concurrency", concurrency=12)
    |
 help: Use `schedule` instead
 20 | 
@@ -36,249 +38,271 @@ help: Use `schedule` instead
    - DAG(dag_id="class_timetable", timetable=NullTimetable())
 23 + DAG(dag_id="class_timetable", schedule=NullTimetable())
 24 | 
-25 | 
-26 | DAG(dag_id="class_fail_stop", fail_stop=True)
+25 | DAG(dag_id="class_concurrency", concurrency=12)
+26 | 
 
-AIR301 [*] `fail_stop` is removed in Airflow 3.0
-  --> AIR301_args.py:26:31
+AIR301 [*] `concurrency` is removed in Airflow 3.0
+  --> AIR301_args.py:25:33
    |
-26 | DAG(dag_id="class_fail_stop", fail_stop=True)
-   |                               ^^^^^^^^^
-27 |
-28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
+23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
+24 |
+25 | DAG(dag_id="class_concurrency", concurrency=12)
+   |                                 ^^^^^^^^^^^
+26 |
+27 | DAG(dag_id="class_fail_stop", fail_stop=True)
    |
-help: Use `fail_fast` instead
+help: Use `max_active_tasks` instead
+22 | 
 23 | DAG(dag_id="class_timetable", timetable=NullTimetable())
 24 | 
-25 | 
+   - DAG(dag_id="class_concurrency", concurrency=12)
+25 + DAG(dag_id="class_concurrency", max_active_tasks=12)
+26 | 
+27 | DAG(dag_id="class_fail_stop", fail_stop=True)
+28 | 
+
+AIR301 [*] `fail_stop` is removed in Airflow 3.0
+  --> AIR301_args.py:27:31
+   |
+25 | DAG(dag_id="class_concurrency", concurrency=12)
+26 |
+27 | DAG(dag_id="class_fail_stop", fail_stop=True)
+   |                               ^^^^^^^^^
+28 |
+29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
+   |
+help: Use `fail_fast` instead
+24 | 
+25 | DAG(dag_id="class_concurrency", concurrency=12)
+26 | 
    - DAG(dag_id="class_fail_stop", fail_stop=True)
-26 + DAG(dag_id="class_fail_stop", fail_fast=True)
-27 | 
-28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
-29 | 
+27 + DAG(dag_id="class_fail_stop", fail_fast=True)
+28 | 
+29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
+30 | 
 
 AIR301 `default_view` is removed in Airflow 3.0
-  --> AIR301_args.py:28:34
+  --> AIR301_args.py:29:34
    |
-26 | DAG(dag_id="class_fail_stop", fail_stop=True)
-27 |
-28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
+27 | DAG(dag_id="class_fail_stop", fail_stop=True)
+28 |
+29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
    |                                  ^^^^^^^^^^^^
-29 |
-30 | DAG(dag_id="class_orientation", orientation="BT")
+30 |
+31 | DAG(dag_id="class_orientation", orientation="BT")
    |
 
 AIR301 `orientation` is removed in Airflow 3.0
-  --> AIR301_args.py:30:33
+  --> AIR301_args.py:31:33
    |
-28 | DAG(dag_id="class_default_view", default_view="dag_default_view")
-29 |
-30 | DAG(dag_id="class_orientation", orientation="BT")
+29 | DAG(dag_id="class_default_view", default_view="dag_default_view")
+30 |
+31 | DAG(dag_id="class_orientation", orientation="BT")
    |                                 ^^^^^^^^^^^
-31 |
-32 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
+32 |
+33 | allow_future_exec_dates_dag = DAG(dag_id="class_allow_future_exec_dates")
    |
 
 AIR301 [*] `schedule_interval` is removed in Airflow 3.0
-  --> AIR301_args.py:41:6
+  --> AIR301_args.py:42:6
    |
-41 | @dag(schedule_interval="0 * * * *")
+42 | @dag(schedule_interval="0 * * * *")
    |      ^^^^^^^^^^^^^^^^^
-42 | def decorator_schedule_interval():
-43 |     pass
+43 | def decorator_schedule_interval():
+44 |     pass
    |
 help: Use `schedule` instead
-38 |     pass
-39 | 
+39 |     pass
 40 | 
+41 | 
    - @dag(schedule_interval="0 * * * *")
-41 + @dag(schedule="0 * * * *")
-42 | def decorator_schedule_interval():
-43 |     pass
-44 | 
+42 + @dag(schedule="0 * * * *")
+43 | def decorator_schedule_interval():
+44 |     pass
+45 | 
 
 AIR301 [*] `timetable` is removed in Airflow 3.0
-  --> AIR301_args.py:46:6
+  --> AIR301_args.py:47:6
    |
-46 | @dag(timetable=NullTimetable())
+47 | @dag(timetable=NullTimetable())
    |      ^^^^^^^^^
-47 | def decorator_timetable():
-48 |     pass
+48 | def decorator_timetable():
+49 |     pass
    |
 help: Use `schedule` instead
-43 |     pass
-44 | 
+44 |     pass
 45 | 
+46 | 
    - @dag(timetable=NullTimetable())
-46 + @dag(schedule=NullTimetable())
-47 | def decorator_timetable():
-48 |     pass
-49 | 
+47 + @dag(schedule=NullTimetable())
+48 | def decorator_timetable():
+49 |     pass
+50 | 
 
 AIR301 [*] `execution_date` is removed in Airflow 3.0
-  --> AIR301_args.py:54:62
+  --> AIR301_args.py:55:62
    |
-52 | def decorator_deprecated_operator_args():
-53 |     trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
-54 |         task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
+53 | def decorator_deprecated_operator_args():
+54 |     trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
+55 |         task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
    |                                                              ^^^^^^^^^^^^^^
-55 |     )
-56 |     trigger_dagrun_op2 = TriggerDagRunOperator(
+56 |     )
+57 |     trigger_dagrun_op2 = TriggerDagRunOperator(
    |
 help: Use `logical_date` instead
-51 | @dag()
-52 | def decorator_deprecated_operator_args():
-53 |     trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
+52 | @dag()
+53 | def decorator_deprecated_operator_args():
+54 |     trigger_dagrun_op = trigger_dagrun.TriggerDagRunOperator(
    -         task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
-54 +         task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
-55 |     )
-56 |     trigger_dagrun_op2 = TriggerDagRunOperator(
-57 |         task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
+55 +         task_id="trigger_dagrun_op1", trigger_dag_id="test", logical_date="2024-12-04"
+56 |     )
+57 |     trigger_dagrun_op2 = TriggerDagRunOperator(
+58 |         task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
 
 AIR301 [*] `execution_date` is removed in Airflow 3.0
-  --> AIR301_args.py:57:62
+  --> AIR301_args.py:58:62
    |
-55 |     )
-56 |     trigger_dagrun_op2 = TriggerDagRunOperator(
-57 |         task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
+56 |     )
+57 |     trigger_dagrun_op2 = TriggerDagRunOperator(
+58 |         task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
    |                                                              ^^^^^^^^^^^^^^
-58 |     )
+59 |     )
    |
 help: Use `logical_date` instead
-54 |         task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
-55 |     )
-56 |     trigger_dagrun_op2 = TriggerDagRunOperator(
+55 |         task_id="trigger_dagrun_op1", trigger_dag_id="test", execution_date="2024-12-04"
+56 |     )
+57 |     trigger_dagrun_op2 = TriggerDagRunOperator(
    -         task_id="trigger_dagrun_op2", trigger_dag_id="test", execution_date="2024-12-04"
-57 +         task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
-58 |     )
-59 | 
-60 |     branch_dt_op = datetime.BranchDateTimeOperator(
+58 +         task_id="trigger_dagrun_op2", trigger_dag_id="test", logical_date="2024-12-04"
+59 |     )
+60 | 
+61 |     branch_dt_op = datetime.BranchDateTimeOperator(
 
 AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
-  --> AIR301_args.py:61:33
+  --> AIR301_args.py:62:33
    |
-60 |     branch_dt_op = datetime.BranchDateTimeOperator(
-61 |         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
+61 |     branch_dt_op = datetime.BranchDateTimeOperator(
+62 |         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
    |                                 ^^^^^^^^^^^^^^^^^^^^^^
-62 |     )
-63 |     branch_dt_op2 = BranchDateTimeOperator(
+63 |     )
+64 |     branch_dt_op2 = BranchDateTimeOperator(
    |
 help: Use `use_task_logical_date` instead
-58 |     )
-59 | 
-60 |     branch_dt_op = datetime.BranchDateTimeOperator(
+59 |     )
+60 | 
+61 |     branch_dt_op = datetime.BranchDateTimeOperator(
    -         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
-61 +         task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
-62 |     )
-63 |     branch_dt_op2 = BranchDateTimeOperator(
-64 |         task_id="branch_dt_op2",
+62 +         task_id="branch_dt_op", use_task_logical_date=True, task_concurrency=5
+63 |     )
+64 |     branch_dt_op2 = BranchDateTimeOperator(
+65 |         task_id="branch_dt_op2",
 
 AIR301 [*] `task_concurrency` is removed in Airflow 3.0
-  --> AIR301_args.py:61:62
+  --> AIR301_args.py:62:62
    |
-60 |     branch_dt_op = datetime.BranchDateTimeOperator(
-61 |         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
+61 |     branch_dt_op = datetime.BranchDateTimeOperator(
+62 |         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
    |                                                              ^^^^^^^^^^^^^^^^
-62 |     )
-63 |     branch_dt_op2 = BranchDateTimeOperator(
+63 |     )
+64 |     branch_dt_op2 = BranchDateTimeOperator(
    |
 help: Use `max_active_tis_per_dag` instead
-58 |     )
-59 | 
-60 |     branch_dt_op = datetime.BranchDateTimeOperator(
+59 |     )
+60 | 
+61 |     branch_dt_op = datetime.BranchDateTimeOperator(
    -         task_id="branch_dt_op", use_task_execution_day=True, task_concurrency=5
-61 +         task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
-62 |     )
-63 |     branch_dt_op2 = BranchDateTimeOperator(
-64 |         task_id="branch_dt_op2",
+62 +         task_id="branch_dt_op", use_task_execution_day=True, max_active_tis_per_dag=5
+63 |     )
+64 |     branch_dt_op2 = BranchDateTimeOperator(
+65 |         task_id="branch_dt_op2",
 
 AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
-  --> AIR301_args.py:65:9
+  --> AIR301_args.py:66:9
    |
-63 |     branch_dt_op2 = BranchDateTimeOperator(
-64 |         task_id="branch_dt_op2",
-65 |         use_task_execution_day=True,
+64 |     branch_dt_op2 = BranchDateTimeOperator(
+65 |         task_id="branch_dt_op2",
+66 |         use_task_execution_day=True,
    |         ^^^^^^^^^^^^^^^^^^^^^^
-66 |         sla=timedelta(seconds=10),
-67 |     )
+67 |         sla=timedelta(seconds=10),
+68 |     )
    |
 help: Use `use_task_logical_date` instead
-62 |     )
-63 |     branch_dt_op2 = BranchDateTimeOperator(
-64 |         task_id="branch_dt_op2",
+63 |     )
+64 |     branch_dt_op2 = BranchDateTimeOperator(
+65 |         task_id="branch_dt_op2",
    -         use_task_execution_day=True,
-65 +         use_task_logical_date=True,
-66 |         sla=timedelta(seconds=10),
-67 |     )
-68 | 
+66 +         use_task_logical_date=True,
+67 |         sla=timedelta(seconds=10),
+68 |     )
+69 | 
 
 AIR301 [*] `use_task_execution_day` is removed in Airflow 3.0
-  --> AIR301_args.py:92:9
+  --> AIR301_args.py:93:9
    |
-90 |         follow_task_ids_if_true=None,
-91 |         week_day=1,
-92 |         use_task_execution_day=True,
+91 |         follow_task_ids_if_true=None,
+92 |         week_day=1,
+93 |         use_task_execution_day=True,
    |         ^^^^^^^^^^^^^^^^^^^^^^
-93 |     )
+94 |     )
    |
 help: Use `use_task_logical_date` instead
-89 |         follow_task_ids_if_false=None,
-90 |         follow_task_ids_if_true=None,
-91 |         week_day=1,
+90 |         follow_task_ids_if_false=None,
+91 |         follow_task_ids_if_true=None,
+92 |         week_day=1,
    -         use_task_execution_day=True,
-92 +         use_task_logical_date=True,
-93 |     )
-94 | 
-95 |     trigger_dagrun_op >> trigger_dagrun_op2
+93 +         use_task_logical_date=True,
+94 |     )
+95 | 
+96 |     trigger_dagrun_op >> trigger_dagrun_op2
 
 AIR301 `filename_template` is removed in Airflow 3.0
-   --> AIR301_args.py:102:15
+   --> AIR301_args.py:103:15
     |
-101 | # deprecated filename_template argument in FileTaskHandler
-102 | S3TaskHandler(filename_template="/tmp/test")
+102 | # deprecated filename_template argument in FileTaskHandler
+103 | S3TaskHandler(filename_template="/tmp/test")
     |               ^^^^^^^^^^^^^^^^^
-103 | HdfsTaskHandler(filename_template="/tmp/test")
-104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
+104 | HdfsTaskHandler(filename_template="/tmp/test")
+105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
     |
 
 AIR301 `filename_template` is removed in Airflow 3.0
-   --> AIR301_args.py:103:17
+   --> AIR301_args.py:104:17
     |
-101 | # deprecated filename_template argument in FileTaskHandler
-102 | S3TaskHandler(filename_template="/tmp/test")
-103 | HdfsTaskHandler(filename_template="/tmp/test")
+102 | # deprecated filename_template argument in FileTaskHandler
+103 | S3TaskHandler(filename_template="/tmp/test")
+104 | HdfsTaskHandler(filename_template="/tmp/test")
     |                 ^^^^^^^^^^^^^^^^^
-104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
-105 | GCSTaskHandler(filename_template="/tmp/test")
+105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
+106 | GCSTaskHandler(filename_template="/tmp/test")
     |
 
 AIR301 `filename_template` is removed in Airflow 3.0
-   --> AIR301_args.py:104:26
+   --> AIR301_args.py:105:26
     |
-102 | S3TaskHandler(filename_template="/tmp/test")
-103 | HdfsTaskHandler(filename_template="/tmp/test")
-104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
+103 | S3TaskHandler(filename_template="/tmp/test")
+104 | HdfsTaskHandler(filename_template="/tmp/test")
+105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
     |                          ^^^^^^^^^^^^^^^^^
-105 | GCSTaskHandler(filename_template="/tmp/test")
+106 | GCSTaskHandler(filename_template="/tmp/test")
     |
 
 AIR301 `filename_template` is removed in Airflow 3.0
-   --> AIR301_args.py:105:16
+   --> AIR301_args.py:106:16
     |
-103 | HdfsTaskHandler(filename_template="/tmp/test")
-104 | ElasticsearchTaskHandler(filename_template="/tmp/test")
-105 | GCSTaskHandler(filename_template="/tmp/test")
+104 | HdfsTaskHandler(filename_template="/tmp/test")
+105 | ElasticsearchTaskHandler(filename_template="/tmp/test")
+106 | GCSTaskHandler(filename_template="/tmp/test")
     |                ^^^^^^^^^^^^^^^^^
-106 |
-107 | FabAuthManager(None)
+107 |
+108 | FabAuthManager(None)
     |
 
 AIR301 `appbuilder` is removed in Airflow 3.0
-   --> AIR301_args.py:107:15
+   --> AIR301_args.py:108:15
     |
-105 | GCSTaskHandler(filename_template="/tmp/test")
-106 |
-107 | FabAuthManager(None)
+106 | GCSTaskHandler(filename_template="/tmp/test")
+107 |
+108 | FabAuthManager(None)
     |               ^^^^^^
     |
 help: The constructor takes no parameter now

From fe4ee81b9749bd326845aec30e694af8568549e6 Mon Sep 17 00:00:00 2001
From: Carl Meyer 
Date: Mon, 3 Nov 2025 15:24:01 -0500
Subject: [PATCH 080/180] [ty] prefer submodule over module __getattr__ in
 from-imports (#21260)

Fixes https://github.com/astral-sh/ty/issues/1053

## Summary

Other type checkers prioritize a submodule over a package `__getattr__`
in `from mod import sub`, even though the runtime precedence is the
other direction. In effect, this is making an implicit assumption that a
module `__getattr__` will not handle (that is, will raise
`AttributeError`) for names that are also actual submodules, rather than
shadowing them. In practice this seems like a realistic assumption in
the ecosystem? Or at least the ecosystem has adapted to it, and we need
to adapt this precedence also, for ecosystem compatibility.

The implementation is a bit ugly, precisely because it departs from the
runtime semantics, and our implementation is oriented toward modeling
runtime semantics accurately. That is, `__getattr__` is modeled within
the member-lookup code, so it's hard to split "member lookup result from
module `__getattr__`" apart from other member lookup results. I did this
via a synthetic `TypeQualifier::FROM_MODULE_GETATTR` that we attach to a
type resulting from a member lookup, which isn't beautiful but it works
well and doesn't introduce inefficiency (e.g. redundant member lookups).

## Test Plan

Updated mdtests.

Also added a related mdtest formalizing our support for a module
`__getattr__` that is explicitly annotated to accept a limited set of
names. In principle this could be an alternative (more explicit) way to
handle the precedence problem without departing from runtime semantics,
if the ecosystem would adopt it.

### Ecosystem analysis

Lots of removed diagnostics which are an improvement because we now
infer the expected submodule.

Added diagnostics are mostly unrelated issues surfaced now because we
previously had an earlier attribute error resulting in `Unknown`; now we
correctly resolve the module so that earlier attribute error goes away,
we get an actual type instead of `Unknown`, and that triggers a new
error.

In scipy and sklearn, the module `__getattr__` which we were respecting
previously is un-annotated so returned a forgiving `Unknown`; now we
correctly see the actual module, which reveals some cases of
https://github.com/astral-sh/ty/issues/133 that were previously hidden
(`scipy/optimize/__init__.py` [imports `from
._tnc`](https://github.com/scipy/scipy/blob/eff82ca575668d2d7a4bc12b6afba98daaf6d5d0/scipy/optimize/__init__.py#L429).)

---------

Co-authored-by: Alex Waygood 
---
 .../resources/mdtest/import/module_getattr.md | 51 +++++++++++++++++--
 crates/ty_python_semantic/src/types.rs        |  9 +++-
 .../src/types/infer/builder.rs                | 50 +++++++++++++-----
 3 files changed, 92 insertions(+), 18 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md b/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md
index 4c493f4b74..79cce812fe 100644
--- a/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md
+++ b/crates/ty_python_semantic/resources/mdtest/import/module_getattr.md
@@ -60,11 +60,6 @@ def __getattr__(name: str) -> str:
 If a package's `__init__.py` (e.g. `mod/__init__.py`) defines a `__getattr__` function, and there is
 also a submodule file present (e.g. `mod/sub.py`), then:
 
-- If you do `import mod` (without importing the submodule directly), accessing `mod.sub` will call
-    `mod.__getattr__('sub')`, so `reveal_type(mod.sub)` will show the return type of `__getattr__`.
-- If you do `import mod.sub` (importing the submodule directly), then `mod.sub` refers to the actual
-    submodule, so `reveal_type(mod.sub)` will show the type of the submodule itself.
-
 `mod/__init__.py`:
 
 ```py
@@ -78,6 +73,9 @@ def __getattr__(name: str) -> str:
 value = 42
 ```
 
+If you `import mod` (without importing the submodule directly), accessing `mod.sub` will call
+`mod.__getattr__('sub')`, so `reveal_type(mod.sub)` will show the return type of `__getattr__`.
+
 `test_import_mod.py`:
 
 ```py
@@ -86,6 +84,9 @@ import mod
 reveal_type(mod.sub)  # revealed: str
 ```
 
+If you `import mod.sub` (importing the submodule directly), then `mod.sub` refers to the actual
+submodule, so `reveal_type(mod.sub)` will show the type of the submodule itself.
+
 `test_import_mod_sub.py`:
 
 ```py
@@ -93,3 +94,43 @@ import mod.sub
 
 reveal_type(mod.sub)  # revealed: <module 'mod.sub'>
 ```
+
+If you `from mod import sub`, at runtime `sub` will be the value returned by the module
+`__getattr__`, but other type checkers do not model the precedence this way. They will always prefer
+a submodule over a package `__getattr__`, and thus this is the current expectation in the ecosystem.
+Effectively, this assumes that a well-implemented package `__getattr__` will always raise
+`AttributeError` for a name that also exists as a submodule (and in fact this is the case for many
+module `__getattr__` in the ecosystem.)
+
+`test_from_import.py`:
+
+```py
+from mod import sub
+
+reveal_type(sub)  # revealed: <module 'mod.sub'>
+```
+
+## Limiting names handled by `__getattr__`
+
+If a module `__getattr__` is annotated to only accept certain string literals, then the module
+`__getattr__` will be ignored for other names. (In principle this could be a more explicit way to
+handle the precedence issues discussed above, but it's not currently used in the ecosystem.)
+
+```py
+from limited_getattr_module import known_attr
+
+# error: [unresolved-import]
+from limited_getattr_module import unknown_attr
+
+reveal_type(known_attr)  # revealed: int
+reveal_type(unknown_attr)  # revealed: Unknown
+```
+
+`limited_getattr_module.py`:
+
+```py
+from typing import Literal
+
+def __getattr__(name: Literal["known_attr"]) -> int:
+    return 3
+```
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index e79282a35d..3e3c241925 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -7909,6 +7909,10 @@ bitflags! {
         /// instance attributes that are only implicitly defined via `self.x = …` in
         /// the body of a class method.
         const IMPLICIT_INSTANCE_ATTRIBUTE = 1 << 6;
+        /// A non-standard type qualifier that marks a type returned from a module-level
+        /// `__getattr__` function. We need this in order to implement precedence of submodules
+        /// over module-level `__getattr__`, for compatibility with other type checkers.
+        const FROM_MODULE_GETATTR = 1 << 7;
     }
 }
 
@@ -11026,7 +11030,10 @@ impl<'db> ModuleLiteralType<'db> {
                     db,
                     &CallArguments::positional([Type::string_literal(db, name)]),
                 ) {
-                    return Place::Defined(outcome.return_type(db), origin, boundness).into();
+                    return PlaceAndQualifiers {
+                        place: Place::Defined(outcome.return_type(db), origin, boundness),
+                        qualifiers: TypeQualifiers::FROM_MODULE_GETATTR,
+                    };
                 }
             }
         }
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index 078d49fe5e..cbb2fe8236 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -5347,6 +5347,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             .as_module_literal()
             .is_some_and(|module| Some(self.file()) == module.module(self.db()).file(self.db()));
 
+        // Although it isn't the runtime semantics, we go to some trouble to prioritize a submodule
+        // over module `__getattr__`, because that's what other type checkers do.
+        let mut from_module_getattr = None;
+
         // First try loading the requested attribute from the module.
         if !import_is_self_referential {
             if let PlaceAndQualifiers {
@@ -5366,19 +5370,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                         ));
                     }
                 }
-                self.add_declaration_with_binding(
-                    alias.into(),
-                    definition,
-                    &DeclaredAndInferredType::MightBeDifferent {
-                        declared_ty: TypeAndQualifiers {
-                            inner: ty,
-                            origin: TypeOrigin::Declared,
-                            qualifiers,
+                if qualifiers.contains(TypeQualifiers::FROM_MODULE_GETATTR) {
+                    from_module_getattr = Some((ty, qualifiers));
+                } else {
+                    self.add_declaration_with_binding(
+                        alias.into(),
+                        definition,
+                        &DeclaredAndInferredType::MightBeDifferent {
+                            declared_ty: TypeAndQualifiers {
+                                inner: ty,
+                                origin: TypeOrigin::Declared,
+                                qualifiers,
+                            },
+                            inferred_ty: ty,
                         },
-                        inferred_ty: ty,
-                    },
-                );
-                return;
+                    );
+                    return;
+                }
             }
         }
 
@@ -5418,6 +5426,24 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             return;
         }
 
+        // We've checked for a submodule, so now we can go ahead and use a type from module
+        // `__getattr__`.
+        if let Some((ty, qualifiers)) = from_module_getattr {
+            self.add_declaration_with_binding(
+                alias.into(),
+                definition,
+                &DeclaredAndInferredType::MightBeDifferent {
+                    declared_ty: TypeAndQualifiers {
+                        inner: ty,
+                        origin: TypeOrigin::Declared,
+                        qualifiers,
+                    },
+                    inferred_ty: ty,
+                },
+            );
+            return;
+        }
+
         self.add_unknown_declaration_with_binding(alias.into(), definition);
 
         if &alias.name == "*" {

From 1fe958c694422fc283f1139533845e1ebe147356 Mon Sep 17 00:00:00 2001
From: David Peter 
Date: Mon, 3 Nov 2025 21:50:25 +0100
Subject: [PATCH 081/180] [ty] Implicit type aliases: Support for PEP 604
 unions (#21195)

## Summary

Add support for implicit type aliases that use PEP 604 unions:
```py
IntOrStr = int | str

reveal_type(IntOrStr)  # UnionType

def _(int_or_str: IntOrStr):
    reveal_type(int_or_str)  # int | str
```

## Typing conformance

The changes are either removed false positives, or new diagnostics due
to known limitations unrelated to this PR.

## Ecosystem impact

Spot checked, a mix of true positives and known limitations.

## Test Plan

New Markdown tests.
---
 crates/ruff_benchmark/benches/ty_walltime.rs  |   2 +-
 .../resources/mdtest/annotations/union.md     |   5 +-
 .../resources/mdtest/implicit_type_aliases.md | 203 ++++++++++++++++++
 .../resources/mdtest/mro.md                   |  14 ++
 .../resources/mdtest/narrow/isinstance.md     |   6 +-
 .../resources/mdtest/narrow/issubclass.md     |   3 +-
 crates/ty_python_semantic/src/types.rs        |  63 +++++-
 crates/ty_python_semantic/src/types/class.rs  |   4 +-
 .../src/types/class_base.rs                   |   3 +-
 .../src/types/ide_support.rs                  |   4 +-
 .../src/types/infer/builder.rs                |  51 ++++-
 .../types/infer/builder/type_expression.rs    |   4 +
 .../ty_python_semantic/src/types/instance.rs  |   8 +
 13 files changed, 334 insertions(+), 36 deletions(-)

diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs
index 47bff641d7..55b2415990 100644
--- a/crates/ruff_benchmark/benches/ty_walltime.rs
+++ b/crates/ruff_benchmark/benches/ty_walltime.rs
@@ -146,7 +146,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
         max_dep_date: "2025-06-17",
         python_version: PythonVersion::PY312,
     },
-    400,
+    500,
 );
 
 static PANDAS: Benchmark = Benchmark::new(
diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/union.md b/crates/ty_python_semantic/resources/mdtest/annotations/union.md
index 776d077e27..8313d7142a 100644
--- a/crates/ty_python_semantic/resources/mdtest/annotations/union.md
+++ b/crates/ty_python_semantic/resources/mdtest/annotations/union.md
@@ -72,9 +72,6 @@ def f(x: Union) -> None:
 
 ## Implicit type aliases using new-style unions
 
-We don't recognize these as type aliases yet, but we also don't emit false-positive diagnostics if
-you use them in type expressions:
-
 ```toml
 [environment]
 python-version = "3.10"
@@ -84,5 +81,5 @@ python-version = "3.10"
 X = int | str
 
 def f(y: X):
-    reveal_type(y)  # revealed: @Todo(Support for `types.UnionType` instances in type expressions)
+    reveal_type(y)  # revealed: int | str
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
index 404d308083..904921e7b3 100644
--- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
+++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
@@ -17,6 +17,209 @@ def f(x: MyInt):
 f(1)
 ```
 
+## None
+
+```py
+MyNone = None
+
+# TODO: this should not be an error
+# error: [invalid-type-form] "Variable of type `None` is not allowed in a type expression"
+def g(x: MyNone):
+    # TODO: this should be `None`
+    reveal_type(x)  # revealed: Unknown
+
+g(None)
+```
+
+## Unions
+
+We also support unions in type aliases:
+
+```py
+from typing_extensions import Any, Never
+from ty_extensions import Unknown
+
+IntOrStr = int | str
+IntOrStrOrBytes1 = int | str | bytes
+IntOrStrOrBytes2 = (int | str) | bytes
+IntOrStrOrBytes3 = int | (str | bytes)
+IntOrStrOrBytes4 = IntOrStr | bytes
+BytesOrIntOrStr = bytes | IntOrStr
+IntOrNone = int | None
+NoneOrInt = None | int
+IntOrStrOrNone = IntOrStr | None
+NoneOrIntOrStr = None | IntOrStr
+IntOrAny = int | Any
+AnyOrInt = Any | int
+NoneOrAny = None | Any
+AnyOrNone = Any | None
+NeverOrAny = Never | Any
+AnyOrNever = Any | Never
+UnknownOrInt = Unknown | int
+IntOrUnknown = int | Unknown
+
+reveal_type(IntOrStr)  # revealed: UnionType
+reveal_type(IntOrStrOrBytes1)  # revealed: UnionType
+reveal_type(IntOrStrOrBytes2)  # revealed: UnionType
+reveal_type(IntOrStrOrBytes3)  # revealed: UnionType
+reveal_type(IntOrStrOrBytes4)  # revealed: UnionType
+reveal_type(BytesOrIntOrStr)  # revealed: UnionType
+reveal_type(IntOrNone)  # revealed: UnionType
+reveal_type(NoneOrInt)  # revealed: UnionType
+reveal_type(IntOrStrOrNone)  # revealed: UnionType
+reveal_type(NoneOrIntOrStr)  # revealed: UnionType
+reveal_type(IntOrAny)  # revealed: UnionType
+reveal_type(AnyOrInt)  # revealed: UnionType
+reveal_type(NoneOrAny)  # revealed: UnionType
+reveal_type(AnyOrNone)  # revealed: UnionType
+reveal_type(NeverOrAny)  # revealed: UnionType
+reveal_type(AnyOrNever)  # revealed: UnionType
+reveal_type(UnknownOrInt)  # revealed: UnionType
+reveal_type(IntOrUnknown)  # revealed: UnionType
+
+def _(
+    int_or_str: IntOrStr,
+    int_or_str_or_bytes1: IntOrStrOrBytes1,
+    int_or_str_or_bytes2: IntOrStrOrBytes2,
+    int_or_str_or_bytes3: IntOrStrOrBytes3,
+    int_or_str_or_bytes4: IntOrStrOrBytes4,
+    bytes_or_int_or_str: BytesOrIntOrStr,
+    int_or_none: IntOrNone,
+    none_or_int: NoneOrInt,
+    int_or_str_or_none: IntOrStrOrNone,
+    none_or_int_or_str: NoneOrIntOrStr,
+    int_or_any: IntOrAny,
+    any_or_int: AnyOrInt,
+    none_or_any: NoneOrAny,
+    any_or_none: AnyOrNone,
+    never_or_any: NeverOrAny,
+    any_or_never: AnyOrNever,
+    unknown_or_int: UnknownOrInt,
+    int_or_unknown: IntOrUnknown,
+):
+    reveal_type(int_or_str)  # revealed: int | str
+    reveal_type(int_or_str_or_bytes1)  # revealed: int | str | bytes
+    reveal_type(int_or_str_or_bytes2)  # revealed: int | str | bytes
+    reveal_type(int_or_str_or_bytes3)  # revealed: int | str | bytes
+    reveal_type(int_or_str_or_bytes4)  # revealed: int | str | bytes
+    reveal_type(bytes_or_int_or_str)  # revealed: bytes | int | str
+    reveal_type(int_or_none)  # revealed: int | None
+    reveal_type(none_or_int)  # revealed: None | int
+    reveal_type(int_or_str_or_none)  # revealed: int | str | None
+    reveal_type(none_or_int_or_str)  # revealed: None | int | str
+    reveal_type(int_or_any)  # revealed: int | Any
+    reveal_type(any_or_int)  # revealed: Any | int
+    reveal_type(none_or_any)  # revealed: None | Any
+    reveal_type(any_or_none)  # revealed: Any | None
+    reveal_type(never_or_any)  # revealed: Any
+    reveal_type(any_or_never)  # revealed: Any
+    reveal_type(unknown_or_int)  # revealed: Unknown | int
+    reveal_type(int_or_unknown)  # revealed: int | Unknown
+```
+
+If a type is unioned with itself in a value expression, the result is just that type. No
+`types.UnionType` instance is created:
+
+```py
+IntOrInt = int | int
+ListOfIntOrListOfInt = list[int] | list[int]
+
+reveal_type(IntOrInt)  # revealed: <class 'int'>
+reveal_type(ListOfIntOrListOfInt)  # revealed: <class 'list[int]'>
+
+def _(int_or_int: IntOrInt, list_of_int_or_list_of_int: ListOfIntOrListOfInt):
+    reveal_type(int_or_int)  # revealed: int
+    reveal_type(list_of_int_or_list_of_int)  # revealed: list[int]
+```
+
+`NoneType` has no special or-operator behavior, so this is an error:
+
+```py
+None | None  # error: [unsupported-operator] "Operator `|` is unsupported between objects of type `None` and `None`"
+```
+
+When constructing something non-sensical like `int | 1`, we could ideally emit a diagnostic for the
+expression itself, as it leads to a `TypeError` at runtime. No other type checker supports this, so
+for now we only emit an error when it is used in a type expression:
+
+```py
+IntOrOne = int | 1
+
+# error: [invalid-type-form] "Variable of type `Literal[1]` is not allowed in a type expression"
+def _(int_or_one: IntOrOne):
+    reveal_type(int_or_one)  # revealed: Unknown
+```
+
+If you were to somehow get hold of an opaque instance of `types.UnionType`, that could not be used
+as a type expression:
+
+```py
+from types import UnionType
+
+def f(SomeUnionType: UnionType):
+    # error: [invalid-type-form] "Variable of type `UnionType` is not allowed in a type expression"
+    some_union: SomeUnionType
+
+f(int | str)
+```
+
+## Generic types
+
+Implicit type aliases can also refer to generic types:
+
+```py
+from typing_extensions import TypeVar
+
+T = TypeVar("T")
+
+MyList = list[T]
+
+def _(my_list: MyList[int]):
+    # TODO: This should be `list[int]`
+    reveal_type(my_list)  # revealed: @Todo(unknown type subscript)
+
+ListOrTuple = list[T] | tuple[T, ...]
+
+reveal_type(ListOrTuple)  # revealed: UnionType
+
+def _(list_or_tuple: ListOrTuple[int]):
+    reveal_type(list_or_tuple)  # revealed: @Todo(Generic specialization of types.UnionType)
+```
+
+## Stringified annotations?
+
+From the [typing spec on type aliases](https://typing.python.org/en/latest/spec/aliases.html):
+
+> Type aliases may be as complex as type hints in annotations – anything that is acceptable as a
+> type hint is acceptable in a type alias
+
+However, no other type checker seems to support stringified annotations in implicit type aliases. We
+currently also do not support them:
+
+```py
+AliasForStr = "str"
+
+# error: [invalid-type-form] "Variable of type `Literal["str"]` is not allowed in a type expression"
+def _(s: AliasForStr):
+    reveal_type(s)  # revealed: Unknown
+
+IntOrStr = int | "str"
+
+# error: [invalid-type-form] "Variable of type `Literal["str"]` is not allowed in a type expression"
+def _(int_or_str: IntOrStr):
+    reveal_type(int_or_str)  # revealed: Unknown
+```
+
+We *do* support stringified annotations if they appear in a position where a type expression is
+syntactically expected:
+
+```py
+ListOfInts = list["int"]
+
+def _(list_of_ints: ListOfInts):
+    reveal_type(list_of_ints)  # revealed: list[int]
+```
+
 ## Recursive
 
 ### Old union syntax
diff --git a/crates/ty_python_semantic/resources/mdtest/mro.md b/crates/ty_python_semantic/resources/mdtest/mro.md
index da9a40b4a7..81a1cfe667 100644
--- a/crates/ty_python_semantic/resources/mdtest/mro.md
+++ b/crates/ty_python_semantic/resources/mdtest/mro.md
@@ -291,6 +291,20 @@ class Foo(x): ...
 reveal_mro(Foo)  # revealed: (<class 'Foo'>, Unknown, <class 'object'>)
 ```
 
+## `UnionType` instances are not allowed as a base
+
+This is not legal:
+
+```py
+class A: ...
+class B: ...
+
+EitherOr = A | B
+
+# error: [invalid-base] "Invalid class base with type `UnionType`"
+class Foo(EitherOr): ...
+```
+
 ## `__bases__` is a union of a dynamic type and valid bases
 
 If a dynamic type such as `Any` or `Unknown` is one of the elements in the union, and all other
diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md
index 60ec2fa844..375cc55b29 100644
--- a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md
+++ b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md
@@ -146,13 +146,11 @@ def _(flag: bool):
 def _(flag: bool):
     x = 1 if flag else "a"
 
-    # TODO: this should cause us to emit a diagnostic during
-    # type checking
+    # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["a"]"
     if isinstance(x, "a"):
         reveal_type(x)  # revealed: Literal[1, "a"]
 
-    # TODO: this should cause us to emit a diagnostic during
-    # type checking
+    # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["int"]"
     if isinstance(x, "int"):
         reveal_type(x)  # revealed: Literal[1, "a"]
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md
index ce77126d32..052b4de2fe 100644
--- a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md
+++ b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md
@@ -214,8 +214,7 @@ def flag() -> bool:
 
 t = int if flag() else str
 
-# TODO: this should cause us to emit a diagnostic during
-# type checking
+# error: [invalid-argument-type] "Argument to function `issubclass` is incorrect: Expected `type | UnionType | tuple[Unknown, ...]`, found `Literal["str"]"
 if issubclass(t, "str"):
 reveal_type(t)  # revealed: <class 'int'> | <class 'str'>
 
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index 3e3c241925..60da8a8f72 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -817,13 +817,11 @@ impl<'db> Type<'db> {
     }
 
     fn is_none(&self, db: &'db dyn Db) -> bool {
-        self.as_nominal_instance()
-            .is_some_and(|instance| instance.has_known_class(db, KnownClass::NoneType))
+        self.is_instance_of(db, KnownClass::NoneType)
     }
 
     fn is_bool(&self, db: &'db dyn Db) -> bool {
-        self.as_nominal_instance()
-            .is_some_and(|instance| instance.has_known_class(db, KnownClass::Bool))
+        self.is_instance_of(db, KnownClass::Bool)
     }
 
     fn is_enum(&self, db: &'db dyn Db) -> bool {
@@ -857,8 +855,7 @@ impl<'db> Type<'db> {
     }
 
     pub(crate) fn is_notimplemented(&self, db: &'db dyn Db) -> bool {
-        self.as_nominal_instance()
-            .is_some_and(|instance| instance.has_known_class(db, KnownClass::NotImplementedType))
+        self.is_instance_of(db, KnownClass::NotImplementedType)
     }
 
     pub(crate) const fn is_todo(&self) -> bool {
@@ -6436,6 +6433,17 @@ impl<'db> Type<'db> {
                     invalid_expressions: smallvec::smallvec_inline![InvalidTypeExpression::Generic],
                     fallback_type: Type::unknown(),
                 }),
+                KnownInstanceType::UnionType(union_type) => {
+                    let mut builder = UnionBuilder::new(db);
+                    for element in union_type.elements(db) {
+                        builder = builder.add(element.in_type_expression(
+                            db,
+                            scope_id,
+                            typevar_binding_context,
+                        )?);
+                    }
+                    Ok(builder.build())
+                }
             },
 
             Type::SpecialForm(special_form) => match special_form {
@@ -6604,9 +6612,6 @@ impl<'db> Type<'db> {
                 Some(KnownClass::GenericAlias) => Ok(todo_type!(
                     "Support for `typing.GenericAlias` instances in type expressions"
                 )),
-                Some(KnownClass::UnionType) => Ok(todo_type!(
-                    "Support for `types.UnionType` instances in type expressions"
-                )),
                 _ => Err(InvalidTypeExpressionError {
                     invalid_expressions: smallvec::smallvec_inline![
                         InvalidTypeExpression::InvalidType(*self, scope_id)
@@ -7646,6 +7651,10 @@ pub enum KnownInstanceType<'db> {
     /// A constraint set, which is exposed in mdtests as an instance of
     /// `ty_extensions.ConstraintSet`.
     ConstraintSet(TrackedConstraintSet<'db>),
+
+    /// A single instance of `types.UnionType`, which stores the left- and
+    /// right-hand sides of a PEP 604 union.
+    UnionType(UnionTypeInstance<'db>),
 }
 
 fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
@@ -7672,6 +7681,11 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
                 visitor.visit_type(db, default_ty);
             }
         }
+        KnownInstanceType::UnionType(union_type) => {
+            for element in union_type.elements(db) {
+                visitor.visit_type(db, element);
+            }
+        }
     }
 }
 
@@ -7708,6 +7722,7 @@ impl<'db> KnownInstanceType<'db> {
                 // Nothing to normalize
                 Self::ConstraintSet(set)
             }
+            Self::UnionType(union_type) => Self::UnionType(union_type.normalized_impl(db, visitor)),
         }
     }
 
@@ -7722,6 +7737,7 @@ impl<'db> KnownInstanceType<'db> {
             Self::Deprecated(_) => KnownClass::Deprecated,
             Self::Field(_) => KnownClass::Field,
             Self::ConstraintSet(_) => KnownClass::ConstraintSet,
+            Self::UnionType(_) => KnownClass::UnionType,
         }
     }
 
@@ -7795,6 +7811,7 @@ impl<'db> KnownInstanceType<'db> {
                             constraints.display(self.db)
                         )
                     }
+                    KnownInstanceType::UnionType(_) => f.write_str("UnionType"),
                 }
             }
         }
@@ -8918,6 +8935,34 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
     }
 }
 
+/// An instance of `types.UnionType`.
+///
+/// # Ordering
+/// Ordering is based on the context's salsa-assigned id and not on its values.
+/// The id may change between runs, or when the context was garbage collected and recreated.
+#[salsa::interned(debug)]
+#[derive(PartialOrd, Ord)]
+pub struct UnionTypeInstance<'db> {
+    left: Type<'db>,
+    right: Type<'db>,
+}
+
+impl get_size2::GetSize for UnionTypeInstance<'_> {}
+
+impl<'db> UnionTypeInstance<'db> {
+    pub(crate) fn elements(self, db: &'db dyn Db) -> [Type<'db>; 2] {
+        [self.left(db), self.right(db)]
+    }
+
+    pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
+        UnionTypeInstance::new(
+            db,
+            self.left(db).normalized_impl(db, visitor),
+            self.right(db).normalized_impl(db, visitor),
+        )
+    }
+}
+
 /// Error returned if a type is not awaitable.
 #[derive(Debug)]
 enum AwaitError<'db> {
diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs
index c3ff51e47f..1b794aefe0 100644
--- a/crates/ty_python_semantic/src/types/class.rs
+++ b/crates/ty_python_semantic/src/types/class.rs
@@ -1307,9 +1307,7 @@ impl<'db> Field<'db> {
     /// Returns true if this field is a `dataclasses.KW_ONLY` sentinel.
     /// 
     pub(crate) fn is_kw_only_sentinel(&self, db: &'db dyn Db) -> bool {
-        self.declared_ty
-            .as_nominal_instance()
-            .is_some_and(|instance| instance.has_known_class(db, KnownClass::KwOnly))
+        self.declared_ty.is_instance_of(db, KnownClass::KwOnly)
     }
 }
 
diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs
index 4d43b58d06..bed18de8b9 100644
--- a/crates/ty_python_semantic/src/types/class_base.rs
+++ b/crates/ty_python_semantic/src/types/class_base.rs
@@ -170,7 +170,8 @@ impl<'db> ClassBase<'db> {
                 | KnownInstanceType::TypeVar(_)
                 | KnownInstanceType::Deprecated(_)
                 | KnownInstanceType::Field(_)
-                | KnownInstanceType::ConstraintSet(_) => None,
+                | KnownInstanceType::ConstraintSet(_)
+                | KnownInstanceType::UnionType(_) => None,
             },
 
             Type::SpecialForm(special_form) => match special_form {
diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs
index 02a9330299..94cb15993c 100644
--- a/crates/ty_python_semantic/src/types/ide_support.rs
+++ b/crates/ty_python_semantic/src/types/ide_support.rs
@@ -290,7 +290,9 @@ impl<'db> AllMembers<'db> {
                             }
                             Type::ClassLiteral(class) if class.is_protocol(db) => continue,
                             Type::KnownInstance(
-                                KnownInstanceType::TypeVar(_) | KnownInstanceType::TypeAliasType(_),
+                                KnownInstanceType::TypeVar(_)
+                                | KnownInstanceType::TypeAliasType(_)
+                                | KnownInstanceType::UnionType(_),
                             ) => continue,
                             Type::Dynamic(DynamicType::TodoTypeAlias) => continue,
                             _ => {}
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index cbb2fe8236..1ad7d18482 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -103,7 +103,7 @@ use crate::types::{
     TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers,
     TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity,
     TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType,
-    binding_type, todo_type,
+    UnionTypeInstance, binding_type, todo_type,
 };
 use crate::types::{ClassBase, add_inferred_python_version_hint_to_diagnostic};
 use crate::unpack::{EvaluationMode, UnpackPosition};
@@ -8449,19 +8449,48 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 )))
             }
 
-            // Special-case `X | Y` with `X` and `Y` instances of `type` to produce a `types.UnionType` instance, in order to
-            // overwrite the typeshed return type for `type.__or__`, which would result in `types.UnionType | X`. We currently
-            // do this to avoid false positives when a legacy type alias like `IntOrStr = int | str` is later used in a type
-            // expression, because `types.UnionType` will result in a `@Todo` type, while `types.UnionType | ` does
-            // not.
-            //
-            // TODO: Remove this special case once we add support for legacy type aliases.
+            // PEP 604-style union types using the `|` operator.
             (
-                Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..),
-                Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..),
+                Type::ClassLiteral(..)
+                | Type::SubclassOf(..)
+                | Type::GenericAlias(..)
+                | Type::SpecialForm(_)
+                | Type::KnownInstance(KnownInstanceType::UnionType(_)),
+                _,
+                ast::Operator::BitOr,
+            )
+            | (
+                _,
+                Type::ClassLiteral(..)
+                | Type::SubclassOf(..)
+                | Type::GenericAlias(..)
+                | Type::SpecialForm(_)
+                | Type::KnownInstance(KnownInstanceType::UnionType(_)),
                 ast::Operator::BitOr,
             ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
-                Some(KnownClass::UnionType.to_instance(self.db()))
+                // For a value expression like `int | None`, the inferred type for `None` will be
+                // a nominal instance of `NoneType`, so we need to convert it to a class literal
+                // such that it can later be converted back to a nominal instance type when calling
+                // `.in_type_expression` on the `UnionType` instance.
+                let convert_none_type = |ty: Type<'db>| {
+                    if ty.is_none(self.db()) {
+                        KnownClass::NoneType.to_class_literal(self.db())
+                    } else {
+                        ty
+                    }
+                };
+
+                if left_ty.is_equivalent_to(self.db(), right_ty) {
+                    Some(left_ty)
+                } else {
+                    Some(Type::KnownInstance(KnownInstanceType::UnionType(
+                        UnionTypeInstance::new(
+                            self.db(),
+                            convert_none_type(left_ty),
+                            convert_none_type(right_ty),
+                        ),
+                    )))
+                }
             }
 
             // We've handled all of the special cases that we support for literals, so we need to
diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
index 50d22fac10..1e1ff82c0b 100644
--- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
@@ -810,6 +810,10 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
                     self.infer_type_expression(slice);
                     todo_type!("Generic manual PEP-695 type alias")
                 }
+                KnownInstanceType::UnionType(_) => {
+                    self.infer_type_expression(slice);
+                    todo_type!("Generic specialization of types.UnionType")
+                }
             },
             Type::Dynamic(DynamicType::Todo(_)) => {
                 self.infer_type_expression(slice);
diff --git a/crates/ty_python_semantic/src/types/instance.rs b/crates/ty_python_semantic/src/types/instance.rs
index f6c7b8406d..8c5adc9e0d 100644
--- a/crates/ty_python_semantic/src/types/instance.rs
+++ b/crates/ty_python_semantic/src/types/instance.rs
@@ -95,6 +95,14 @@ impl<'db> Type<'db> {
         }
     }
 
+    /// Return `true` if `self` is a nominal instance of the given known class.
+    pub(crate) fn is_instance_of(self, db: &'db dyn Db, known_class: KnownClass) -> bool {
+        match self {
+            Type::NominalInstance(instance) => instance.class(db).is_known(db, known_class),
+            _ => false,
+        }
+    }
+
     /// Synthesize a protocol instance type with a given set of read-only property members.
     pub(super) fn protocol_with_readonly_members<'a, M>(db: &'db dyn Db, members: M) -> Self
     where

From d2fe6347fb9d6c4b2c27d2fda02a1b857452fb66 Mon Sep 17 00:00:00 2001
From: David Peter 
Date: Mon, 3 Nov 2025 22:06:56 +0100
Subject: [PATCH 082/180] [ty] Rename `UnionType` to `types.UnionType` (#21262)

---
 .../resources/mdtest/binary/classes.md        | 12 +++---
 .../resources/mdtest/implicit_type_aliases.md | 38 +++++++++----------
 .../resources/mdtest/mro.md                   |  2 +-
 crates/ty_python_semantic/src/types.rs        |  2 +-
 4 files changed, 27 insertions(+), 27 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/binary/classes.md b/crates/ty_python_semantic/resources/mdtest/binary/classes.md
index 7ae4c23e60..db42286c84 100644
--- a/crates/ty_python_semantic/resources/mdtest/binary/classes.md
+++ b/crates/ty_python_semantic/resources/mdtest/binary/classes.md
@@ -13,7 +13,7 @@ python-version = "3.10"
 class A: ...
 class B: ...
 
-reveal_type(A | B)  # revealed: UnionType
+reveal_type(A | B)  # revealed: types.UnionType
 ```
 
 ## Union of two classes (prior to 3.10)
@@ -43,14 +43,14 @@ class A: ...
 class B: ...
 
 def _(sub_a: type[A], sub_b: type[B]):
-    reveal_type(A | sub_b)  # revealed: UnionType
-    reveal_type(sub_a | B)  # revealed: UnionType
-    reveal_type(sub_a | sub_b)  # revealed: UnionType
+    reveal_type(A | sub_b)  # revealed: types.UnionType
+    reveal_type(sub_a | B)  # revealed: types.UnionType
+    reveal_type(sub_a | sub_b)  # revealed: types.UnionType
 
 class C[T]: ...
 class D[T]: ...
 
-reveal_type(C | D)  # revealed: UnionType
+reveal_type(C | D)  # revealed: types.UnionType
 
-reveal_type(C[int] | D[str])  # revealed: UnionType
+reveal_type(C[int] | D[str])  # revealed: types.UnionType
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
index 904921e7b3..a3e0319f5a 100644
--- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
+++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
@@ -58,24 +58,24 @@ AnyOrNever = Any | Never
 UnknownOrInt = Unknown | int
 IntOrUnknown = int | Unknown
 
-reveal_type(IntOrStr)  # revealed: UnionType
-reveal_type(IntOrStrOrBytes1)  # revealed: UnionType
-reveal_type(IntOrStrOrBytes2)  # revealed: UnionType
-reveal_type(IntOrStrOrBytes3)  # revealed: UnionType
-reveal_type(IntOrStrOrBytes4)  # revealed: UnionType
-reveal_type(BytesOrIntOrStr)  # revealed: UnionType
-reveal_type(IntOrNone)  # revealed: UnionType
-reveal_type(NoneOrInt)  # revealed: UnionType
-reveal_type(IntOrStrOrNone)  # revealed: UnionType
-reveal_type(NoneOrIntOrStr)  # revealed: UnionType
-reveal_type(IntOrAny)  # revealed: UnionType
-reveal_type(AnyOrInt)  # revealed: UnionType
-reveal_type(NoneOrAny)  # revealed: UnionType
-reveal_type(AnyOrNone)  # revealed: UnionType
-reveal_type(NeverOrAny)  # revealed: UnionType
-reveal_type(AnyOrNever)  # revealed: UnionType
-reveal_type(UnknownOrInt)  # revealed: UnionType
-reveal_type(IntOrUnknown)  # revealed: UnionType
+reveal_type(IntOrStr)  # revealed: types.UnionType
+reveal_type(IntOrStrOrBytes1)  # revealed: types.UnionType
+reveal_type(IntOrStrOrBytes2)  # revealed: types.UnionType
+reveal_type(IntOrStrOrBytes3)  # revealed: types.UnionType
+reveal_type(IntOrStrOrBytes4)  # revealed: types.UnionType
+reveal_type(BytesOrIntOrStr)  # revealed: types.UnionType
+reveal_type(IntOrNone)  # revealed: types.UnionType
+reveal_type(NoneOrInt)  # revealed: types.UnionType
+reveal_type(IntOrStrOrNone)  # revealed: types.UnionType
+reveal_type(NoneOrIntOrStr)  # revealed: types.UnionType
+reveal_type(IntOrAny)  # revealed: types.UnionType
+reveal_type(AnyOrInt)  # revealed: types.UnionType
+reveal_type(NoneOrAny)  # revealed: types.UnionType
+reveal_type(AnyOrNone)  # revealed: types.UnionType
+reveal_type(NeverOrAny)  # revealed: types.UnionType
+reveal_type(AnyOrNever)  # revealed: types.UnionType
+reveal_type(UnknownOrInt)  # revealed: types.UnionType
+reveal_type(IntOrUnknown)  # revealed: types.UnionType
 
 def _(
     int_or_str: IntOrStr,
@@ -180,7 +180,7 @@ def _(my_list: MyList[int]):
 
 ListOrTuple = list[T] | tuple[T, ...]
 
-reveal_type(ListOrTuple)  # revealed: UnionType
+reveal_type(ListOrTuple)  # revealed: types.UnionType
 
 def _(list_or_tuple: ListOrTuple[int]):
     reveal_type(list_or_tuple)  # revealed: @Todo(Generic specialization of types.UnionType)
diff --git a/crates/ty_python_semantic/resources/mdtest/mro.md b/crates/ty_python_semantic/resources/mdtest/mro.md
index 81a1cfe667..761fb9892d 100644
--- a/crates/ty_python_semantic/resources/mdtest/mro.md
+++ b/crates/ty_python_semantic/resources/mdtest/mro.md
@@ -301,7 +301,7 @@ class B: ...
 
 EitherOr = A | B
 
-# error: [invalid-base] "Invalid class base with type `UnionType`"
+# error: [invalid-base] "Invalid class base with type `types.UnionType`"
 class Foo(EitherOr): ...
 ```
 
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index 60da8a8f72..bdc859b095 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -7811,7 +7811,7 @@ impl<'db> KnownInstanceType<'db> {
                             constraints.display(self.db)
                         )
                     }
-                    KnownInstanceType::UnionType(_) => f.write_str("UnionType"),
+                    KnownInstanceType::UnionType(_) => f.write_str("types.UnionType"),
                 }
             }
         }

From 79a02711c12d5381a640c7b07639e058917a3234 Mon Sep 17 00:00:00 2001
From: chiri 
Date: Tue, 4 Nov 2025 00:09:02 +0300
Subject: [PATCH 083/180] [`refurb`] Expand fix safety for keyword arguments
 and `Decimal`s (`FURB164`) (#21259)

## Summary

Fixes https://github.com/astral-sh/ruff/issues/21257

## Test Plan

`cargo nextest run furb164`
---
 .../resources/test/fixtures/refurb/FURB164.py |  5 +++
 .../refurb/rules/unnecessary_from_float.rs    | 39 +++++++++--------
 ...es__refurb__tests__FURB164_FURB164.py.snap | 43 +++++++++++++++++--
 3 files changed, 67 insertions(+), 20 deletions(-)

diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py
index 9a03919ca9..81422d2cf8 100644
--- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py
+++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB164.py
@@ -64,3 +64,8 @@ _ = Decimal.from_float(True)
 _ = Decimal.from_float(float("-nan"))
 _ = Decimal.from_float(float("\x2dnan"))
 _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
+
+# See: https://github.com/astral-sh/ruff/issues/21257
+# fixes must be safe
+_ = Fraction.from_float(f=4.2)
+_ = Fraction.from_decimal(dec=4)
\ No newline at end of file
diff --git a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs
index e34357bd55..38184c8fd3 100644
--- a/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs
+++ b/crates/ruff_linter/src/rules/refurb/rules/unnecessary_from_float.rs
@@ -149,10 +149,9 @@ pub(crate) fn unnecessary_from_float(checker: &Checker, call: &ExprCall) {
 
     // Check if we should suppress the fix due to type validation concerns
     let is_type_safe = is_valid_argument_type(arg_value, method_name, constructor, checker);
-    let has_keywords = !call.arguments.keywords.is_empty();
 
     // Determine fix safety
-    let applicability = if is_type_safe && !has_keywords {
+    let applicability = if is_type_safe {
         Applicability::Safe
     } else {
         Applicability::Unsafe
@@ -210,21 +209,27 @@ fn is_valid_argument_type(
             _ => false,
         },
         // Fraction.from_decimal accepts int, bool, Decimal
-        (MethodName::FromDecimal, Constructor::Fraction) => match resolved_type {
-            ResolvedPythonType::Atom(PythonType::Number(
-                NumberLike::Integer | NumberLike::Bool,
-            )) => true,
-            ResolvedPythonType::Unknown => is_int,
-            _ => {
-                // Check if it's a Decimal instance
-                arg_expr
-                    .as_call_expr()
-                    .and_then(|call| semantic.resolve_qualified_name(&call.func))
-                    .is_some_and(|qualified_name| {
-                        matches!(qualified_name.segments(), ["decimal", "Decimal"])
-                    })
+        (MethodName::FromDecimal, Constructor::Fraction) => {
+            // First check if it's a Decimal constructor call
+            let is_decimal_call = arg_expr
+                .as_call_expr()
+                .and_then(|call| semantic.resolve_qualified_name(&call.func))
+                .is_some_and(|qualified_name| {
+                    matches!(qualified_name.segments(), ["decimal", "Decimal"])
+                });
+
+            if is_decimal_call {
+                return true;
             }
-        },
+
+            match resolved_type {
+                ResolvedPythonType::Atom(PythonType::Number(
+                    NumberLike::Integer | NumberLike::Bool,
+                )) => true,
+                ResolvedPythonType::Unknown => is_int,
+                _ => false,
+            }
+        }
         _ => false,
     }
 }
@@ -274,7 +279,7 @@ fn handle_non_finite_float_special_case(
         return None;
     }
 
-    let Expr::Call(ast::ExprCall {
+    let Expr::Call(ExprCall {
         func, arguments, ..
     }) = arg_value
     else {
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap
index e917928a64..7bd2ce8225 100644
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap
+++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB164_FURB164.py.snap
@@ -99,7 +99,6 @@ help: Replace with `Fraction` constructor
 12 | _ = Fraction.from_decimal(Decimal("-4.2"))
 13 | _ = Fraction.from_decimal(Decimal.from_float(4.2))
 14 | _ = Decimal.from_float(0.1)
-note: This is an unsafe fix and may change runtime behavior
 
 FURB164 [*] Verbose method `from_decimal` in `Fraction` construction
   --> FURB164.py:12:5
@@ -120,7 +119,6 @@ help: Replace with `Fraction` constructor
 13 | _ = Fraction.from_decimal(Decimal.from_float(4.2))
 14 | _ = Decimal.from_float(0.1)
 15 | _ = Decimal.from_float(-0.5)
-note: This is an unsafe fix and may change runtime behavior
 
 FURB164 [*] Verbose method `from_decimal` in `Fraction` construction
   --> FURB164.py:13:5
@@ -484,7 +482,6 @@ help: Replace with `Fraction` constructor
 32 | _ = Decimal.from_float(f=4.2)
 33 | 
 34 | # Cases with invalid argument counts - should not get fixes
-note: This is an unsafe fix and may change runtime behavior
 
 FURB164 Verbose method `from_float` in `Decimal` construction
   --> FURB164.py:32:5
@@ -658,6 +655,7 @@ help: Replace with `Decimal` constructor
 64 + _ = Decimal("nan")
 65 | _ = Decimal.from_float(float("\x2dnan"))
 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
+67 | 
 
 FURB164 [*] Verbose method `from_float` in `Decimal` construction
   --> FURB164.py:65:5
@@ -675,6 +673,8 @@ help: Replace with `Decimal` constructor
    - _ = Decimal.from_float(float("\x2dnan"))
 65 + _ = Decimal("nan")
 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
+67 | 
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
 
 FURB164 [*] Verbose method `from_float` in `Decimal` construction
   --> FURB164.py:66:5
@@ -683,6 +683,8 @@ FURB164 [*] Verbose method `from_float` in `Decimal` construction
 65 | _ = Decimal.from_float(float("\x2dnan"))
 66 | _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+67 |
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
    |
 help: Replace with `Decimal` constructor
 63 | # Cases with non-finite floats - should produce safe fixes
@@ -690,3 +692,38 @@ help: Replace with `Decimal` constructor
 65 | _ = Decimal.from_float(float("\x2dnan"))
    - _ = Decimal.from_float(float("\N{HYPHEN-MINUS}nan"))
 66 + _ = Decimal("nan")
+67 | 
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
+69 | # fixes must be safe
+
+FURB164 [*] Verbose method `from_float` in `Fraction` construction
+  --> FURB164.py:70:5
+   |
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
+69 | # fixes must be safe
+70 | _ = Fraction.from_float(f=4.2)
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^
+71 | _ = Fraction.from_decimal(dec=4)
+   |
+help: Replace with `Fraction` constructor
+67 | 
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
+69 | # fixes must be safe
+   - _ = Fraction.from_float(f=4.2)
+70 + _ = Fraction(4.2)
+71 | _ = Fraction.from_decimal(dec=4)
+
+FURB164 [*] Verbose method `from_decimal` in `Fraction` construction
+  --> FURB164.py:71:5
+   |
+69 | # fixes must be safe
+70 | _ = Fraction.from_float(f=4.2)
+71 | _ = Fraction.from_decimal(dec=4)
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+help: Replace with `Fraction` constructor
+68 | # See: https://github.com/astral-sh/ruff/issues/21257
+69 | # fixes must be safe
+70 | _ = Fraction.from_float(f=4.2)
+   - _ = Fraction.from_decimal(dec=4)
+71 + _ = Fraction(4)

From 42adfd40ea432b4e8dfff89155b3afc7dd803f92 Mon Sep 17 00:00:00 2001
From: Alex Waygood 
Date: Mon, 3 Nov 2025 16:53:42 -0500
Subject: [PATCH 084/180] Run py-fuzzer with `--profile=profiling` locally and
 in CI (#21266)

---
 .github/workflows/ci.yaml | 42 +++++++++++++++++++++------------------
 python/py-fuzzer/fuzz.py  |  5 +++--
 2 files changed, 26 insertions(+), 21 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 7130f15b29..44d9f2f542 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -437,6 +437,8 @@ jobs:
           workspaces: "fuzz -> target"
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Install cargo-binstall"
         uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10
       - name: "Install cargo-fuzz"
@@ -645,7 +647,6 @@ jobs:
     name: "Fuzz for new ty panics"
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
     needs:
-      - cargo-test-linux
       - determine_changes
     # Only runs on pull requests, since that is the only we way we can find the base version for comparison.
     if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
@@ -653,28 +654,29 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
+          fetch-depth: 0
           persist-credentials: false
-      - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        name: Download new ty binary
-        id: ty-new
-        with:
-          name: ty
-          path: target/debug
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download baseline ty binary
-        with:
-          name: ty
-          branch: ${{ github.event.pull_request.base.ref }}
-          workflow: "ci.yaml"
-          check_artifacts: true
       - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2
+      - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
+      - name: "Install Rust toolchain"
+        run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: Fuzz
         env:
           FORCE_COLOR: 1
-          NEW_TY: ${{ steps.ty-new.outputs.download-path }}
         run: |
-          # Make executable, since artifact download doesn't preserve this
-          chmod +x "${PWD}/ty" "${NEW_TY}/ty"
+          echo "new commit"
+          git rev-list --format=%s --max-count=1 "$GITHUB_SHA"
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-new
+
+          MERGE_BASE="$(git merge-base "$GITHUB_SHA" "origin/$GITHUB_BASE_REF")"
+          git checkout -b old_commit "$MERGE_BASE"
+          echo "old commit (merge base)"
+          git rev-list --format=%s --max-count=1 old_commit
+          cargo build --profile=profiling --bin=ty
+          mv target/profiling/ty ty-old
 
           (
             uv run \
@@ -682,8 +684,8 @@ jobs:
             --project=./python/py-fuzzer \
             --locked \
             fuzz \
-            --test-executable="${NEW_TY}/ty" \
-            --baseline-executable="${PWD}/ty" \
+            --test-executable=ty-new \
+            --baseline-executable=ty-old \
             --only-new-bugs \
             --bin=ty \
             0-1000
@@ -715,6 +717,8 @@ jobs:
       - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
       - name: "Install Rust toolchain"
         run: rustup show
+      - name: "Install mold"
+        uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
       - name: "Run ty completion evaluation"
         run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
       - name: "Ensure there are no changes"
diff --git a/python/py-fuzzer/fuzz.py b/python/py-fuzzer/fuzz.py
index e113d7e179..035838a6d1 100644
--- a/python/py-fuzzer/fuzz.py
+++ b/python/py-fuzzer/fuzz.py
@@ -395,13 +395,14 @@ def parse_args() -> ResolvedCliArgs:
 
     if not args.test_executable:
         print(
-            "Running `cargo build --release` since no test executable was specified...",
+            "Running `cargo build --profile=profiling` since no test executable was specified...",
             flush=True,
         )
         cmd: list[str] = [
             "cargo",
             "build",
-            "--release",
+            "--profile",
+            "profiling",
             "--locked",
             "--color",
             "always",

From 3c8fb68765eafe9b43766fd64d5fd9a0297bc0e4 Mon Sep 17 00:00:00 2001
From: Ibraheem Ahmed 
Date: Mon, 3 Nov 2025 16:57:49 -0500
Subject: [PATCH 085/180] [ty] `dict` is not assignable to `TypedDict` (#21238)

## Summary

A lot of the bidirectional inference work relies on `dict` not being
assignable to `TypedDict`, so I think it makes sense to add this before
fully implementing https://github.com/astral-sh/ty/issues/1387.
---
 .../resources/mdtest/bidirectional.md         |  1 +
 .../resources/mdtest/call/overloads.md        |  3 +-
 .../resources/mdtest/call/union.md            |  3 +-
 .../resources/mdtest/comprehensions/basic.md  |  5 +-
 ...ict`_-_Diagnostics_(e5289abf5c570c29).snap | 55 ++++++++--------
 .../resources/mdtest/typed_dict.md            | 52 +++++++++++----
 crates/ty_python_semantic/src/types.rs        |  5 +-
 .../ty_python_semantic/src/types/call/bind.rs |  5 ++
 .../src/types/diagnostic.rs                   | 39 +++++++++--
 .../src/types/infer/builder.rs                | 66 ++++++++++++-------
 .../src/types/typed_dict.rs                   | 10 ++-
 11 files changed, 169 insertions(+), 75 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/bidirectional.md b/crates/ty_python_semantic/resources/mdtest/bidirectional.md
index 3fee0513ed..6b90873728 100644
--- a/crates/ty_python_semantic/resources/mdtest/bidirectional.md
+++ b/crates/ty_python_semantic/resources/mdtest/bidirectional.md
@@ -76,6 +76,7 @@ def _() -> TD:
 
 def _() -> TD:
     # error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor"
+    # error: [invalid-return-type]
     return {}
 ```
 
diff --git a/crates/ty_python_semantic/resources/mdtest/call/overloads.md b/crates/ty_python_semantic/resources/mdtest/call/overloads.md
index 726d74a630..e6ef48276a 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/overloads.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/overloads.md
@@ -1685,8 +1685,7 @@ def int_or_str() -> int | str:
 x = f([{"x": 1}], int_or_str())
 reveal_type(x)  # revealed: int | str
 
-# TODO: error: [no-matching-overload] "No overload of function `f` matches arguments"
-# we currently incorrectly consider `list[dict[str, int]]` a subtype of `list[T]`
+# error: [no-matching-overload] "No overload of function `f` matches arguments"
 f([{"y": 1}], int_or_str())
 ```
 
diff --git a/crates/ty_python_semantic/resources/mdtest/call/union.md b/crates/ty_python_semantic/resources/mdtest/call/union.md
index 69695c3f5c..7bb4e02044 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/union.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/union.md
@@ -277,7 +277,6 @@ def _(flag: bool):
     x = f({"x": 1})
     reveal_type(x)  # revealed: int
 
-    # TODO: error: [invalid-argument-type] "Argument to function `f` is incorrect: Expected `T`, found `dict[str, int]`"
-    # we currently consider `TypedDict` instances to be subtypes of `dict`
+    # error: [invalid-argument-type] "Argument to function `f` is incorrect: Expected `T`, found `dict[Unknown | str, Unknown | int]`"
     f({"y": 1})
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md
index 254ac03d73..5fac394404 100644
--- a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md
+++ b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md
@@ -162,10 +162,13 @@ The type context is propagated down into the comprehension:
 class Person(TypedDict):
     name: str
 
+# TODO: This should not error.
+# error: [invalid-assignment]
 persons: list[Person] = [{"name": n} for n in ["Alice", "Bob"]]
 reveal_type(persons)  # revealed: list[Person]
 
-# TODO: This should be an error
+# TODO: This should be an invalid-key error.
+# error: [invalid-assignment]
 invalid: list[Person] = [{"misspelled": n} for n in ["Alice", "Bob"]]
 ```
 
diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap
index 155b4ea618..a5b9456acd 100644
--- a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap
+++ b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap
@@ -39,16 +39,19 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/typed_dict.md
 25 |     person[str_key] = "Alice"  # error: [invalid-key]
 26 | 
 27 | def create_with_invalid_string_key():
-28 |     alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}  # error: [invalid-key]
-29 |     bob = Person(name="Bob", age=25, unknown="Bar")  # error: [invalid-key]
-30 | from typing_extensions import ReadOnly
-31 | 
-32 | class Employee(TypedDict):
-33 |     id: ReadOnly[int]
-34 |     name: str
-35 | 
-36 | def write_to_readonly_key(employee: Employee):
-37 |     employee["id"] = 42  # error: [invalid-assignment]
+28 |     # error: [invalid-key]
+29 |     alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}
+30 | 
+31 |     # error: [invalid-key]
+32 |     bob = Person(name="Bob", age=25, unknown="Bar")
+33 | from typing_extensions import ReadOnly
+34 | 
+35 | class Employee(TypedDict):
+36 |     id: ReadOnly[int]
+37 |     name: str
+38 | 
+39 | def write_to_readonly_key(employee: Employee):
+40 |     employee["id"] = 42  # error: [invalid-assignment]
 ```
 
 # Diagnostics
@@ -158,16 +161,17 @@ info: rule `invalid-key` is enabled by default
 
 ```
 error[invalid-key]: Invalid key for TypedDict `Person`
-  --> src/mdtest_snippet.py:28:21
+  --> src/mdtest_snippet.py:29:21
    |
 27 | def create_with_invalid_string_key():
-28 |     alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}  # error: [invalid-key]
+28 |     # error: [invalid-key]
+29 |     alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}
    |                     -----------------------------^^^^^^^^^--------
    |                     |                            |
    |                     |                            Unknown key "unknown"
    |                     TypedDict `Person`
-29 |     bob = Person(name="Bob", age=25, unknown="Bar")  # error: [invalid-key]
-30 | from typing_extensions import ReadOnly
+30 |
+31 |     # error: [invalid-key]
    |
 info: rule `invalid-key` is enabled by default
 
@@ -175,13 +179,12 @@ info: rule `invalid-key` is enabled by default
 
 ```
 error[invalid-key]: Invalid key for TypedDict `Person`
-  --> src/mdtest_snippet.py:29:11
+  --> src/mdtest_snippet.py:32:11
    |
-27 | def create_with_invalid_string_key():
-28 |     alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}  # error: [invalid-key]
-29 |     bob = Person(name="Bob", age=25, unknown="Bar")  # error: [invalid-key]
+31 |     # error: [invalid-key]
+32 |     bob = Person(name="Bob", age=25, unknown="Bar")
    |           ------ TypedDict `Person`  ^^^^^^^^^^^^^ Unknown key "unknown"
-30 | from typing_extensions import ReadOnly
+33 | from typing_extensions import ReadOnly
    |
 info: rule `invalid-key` is enabled by default
 
@@ -189,21 +192,21 @@ info: rule `invalid-key` is enabled by default
 
 ```
 error[invalid-assignment]: Cannot assign to key "id" on TypedDict `Employee`
-  --> src/mdtest_snippet.py:37:5
+  --> src/mdtest_snippet.py:40:5
    |
-36 | def write_to_readonly_key(employee: Employee):
-37 |     employee["id"] = 42  # error: [invalid-assignment]
+39 | def write_to_readonly_key(employee: Employee):
+40 |     employee["id"] = 42  # error: [invalid-assignment]
    |     -------- ^^^^ key is marked read-only
    |     |
    |     TypedDict `Employee`
    |
 info: Item declaration
-  --> src/mdtest_snippet.py:33:5
+  --> src/mdtest_snippet.py:36:5
    |
-32 | class Employee(TypedDict):
-33 |     id: ReadOnly[int]
+35 | class Employee(TypedDict):
+36 |     id: ReadOnly[int]
    |     ----------------- Read-only item declared here
-34 |     name: str
+37 |     name: str
    |
 info: rule `invalid-assignment` is enabled by default
 
diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md
index 30bbb2132b..b4203ce2b6 100644
--- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md
+++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md
@@ -96,29 +96,29 @@ The construction of a `TypedDict` is checked for type correctness:
 ```py
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`"
 eve1a: Person = {"name": b"Eve", "age": None}
+
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`"
 eve1b = Person(name=b"Eve", age=None)
 
-# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts)
-reveal_type(eve1a)  # revealed: dict[Unknown | str, Unknown | bytes | None]
+reveal_type(eve1a)  # revealed: Person
 reveal_type(eve1b)  # revealed: Person
 
 # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor"
 eve2a: Person = {"age": 22}
+
 # error: [missing-typed-dict-key] "Missing required key 'name' in TypedDict `Person` constructor"
 eve2b = Person(age=22)
 
-# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts)
-reveal_type(eve2a)  # revealed: dict[Unknown | str, Unknown | int]
+reveal_type(eve2a)  # revealed: Person
 reveal_type(eve2b)  # revealed: Person
 
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 eve3a: Person = {"name": "Eve", "age": 25, "extra": True}
+
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 eve3b = Person(name="Eve", age=25, extra=True)
 
-# TODO should reveal Person (should be fixed by implementing assignability for TypedDicts)
-reveal_type(eve3a)  # revealed: dict[Unknown | str, Unknown | str | int]
+reveal_type(eve3a)  # revealed: Person
 reveal_type(eve3b)  # revealed: Person
 ```
 
@@ -238,15 +238,19 @@ All of these are missing the required `age` field:
 ```py
 # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor"
 alice2: Person = {"name": "Alice"}
+
 # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor"
 Person(name="Alice")
+
 # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor"
 Person({"name": "Alice"})
 
 # error: [missing-typed-dict-key] "Missing required key 'age' in TypedDict `Person` constructor"
+# error: [invalid-argument-type]
 accepts_person({"name": "Alice"})
 
-# TODO: this should be an error, similar to the above
+# TODO: this should be an invalid-key error, similar to the above
+# error: [invalid-assignment]
 house.owner = {"name": "Alice"}
 
 a_person: Person
@@ -259,19 +263,25 @@ All of these have an invalid type for the `name` field:
 ```py
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
 alice3: Person = {"name": None, "age": 30}
+
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
 Person(name=None, age=30)
+
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
 Person({"name": None, "age": 30})
 
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
+# error: [invalid-argument-type]
 accepts_person({"name": None, "age": 30})
-# TODO: this should be an error, similar to the above
+
+# TODO: this should be an invalid-key error
+# error: [invalid-assignment]
 house.owner = {"name": None, "age": 30}
 
 a_person: Person
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
 a_person = {"name": None, "age": 30}
+
 # error: [invalid-argument-type] "Invalid argument to key "name" with declared type `str` on TypedDict `Person`: value of type `None`"
 (a_person := {"name": None, "age": 30})
 ```
@@ -281,19 +291,25 @@ All of these have an extra field that is not defined in the `TypedDict`:
 ```py
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 alice4: Person = {"name": "Alice", "age": 30, "extra": True}
+
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 Person(name="Alice", age=30, extra=True)
+
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 Person({"name": "Alice", "age": 30, "extra": True})
 
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
+# error: [invalid-argument-type]
 accepts_person({"name": "Alice", "age": 30, "extra": True})
-# TODO: this should be an error
+
+# TODO: this should be an invalid-key error
+# error: [invalid-assignment]
 house.owner = {"name": "Alice", "age": 30, "extra": True}
 
 a_person: Person
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 a_person = {"name": "Alice", "age": 30, "extra": True}
+
 # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "extra""
 (a_person := {"name": "Alice", "age": 30, "extra": True})
 ```
@@ -490,6 +506,15 @@ dangerous(alice)
 reveal_type(alice["name"])  # revealed: str
 ```
 
+Likewise, `dict`s are not assignable to typed dictionaries:
+
+```py
+alice: dict[str, str] = {"name": "Alice"}
+
+# error: [invalid-assignment] "Object of type `dict[str, str]` is not assignable to `Person`"
+alice: Person = alice
+```
+
 ## Key-based access
 
 ### Reading
@@ -977,7 +1002,7 @@ class Person(TypedDict):
     name: str
     age: int | None
 
-# TODO: this should be an error
+# error: [invalid-assignment] "Object of type `MyDict` is not assignable to `Person`"
 x: Person = MyDict({"name": "Alice", "age": 30})
 ```
 
@@ -1029,8 +1054,11 @@ def write_to_non_literal_string_key(person: Person, str_key: str):
     person[str_key] = "Alice"  # error: [invalid-key]
 
 def create_with_invalid_string_key():
-    alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}  # error: [invalid-key]
-    bob = Person(name="Bob", age=25, unknown="Bar")  # error: [invalid-key]
+    # error: [invalid-key]
+    alice: Person = {"name": "Alice", "age": 30, "unknown": "Foo"}
+
+    # error: [invalid-key]
+    bob = Person(name="Bob", age=25, unknown="Bar")
 ```
 
 Assignment to `ReadOnly` keys:
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index bdc859b095..bee767b763 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -1987,11 +1987,14 @@ impl<'db> Type<'db> {
                 ConstraintSet::from(false)
             }
 
-            (Type::TypedDict(_), _) | (_, Type::TypedDict(_)) => {
+            (Type::TypedDict(_), _) => {
                 // TODO: Implement assignability and subtyping for TypedDict
                 ConstraintSet::from(relation.is_assignability())
             }
 
+            // A non-`TypedDict` cannot subtype a `TypedDict`
+            (_, Type::TypedDict(_)) => ConstraintSet::from(false),
+
             // Note that the definition of `Type::AlwaysFalsy` depends on the return value of `__bool__`.
             // If `__bool__` always returns True or False, it can be treated as a subtype of `AlwaysTruthy` or `AlwaysFalsy`, respectively.
             (left, Type::AlwaysFalsy) => ConstraintSet::from(left.bool(db).is_always_false()),
diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs
index b0a5cc1b91..d2739fa696 100644
--- a/crates/ty_python_semantic/src/types/call/bind.rs
+++ b/crates/ty_python_semantic/src/types/call/bind.rs
@@ -3582,6 +3582,11 @@ impl<'db> BindingError<'db> {
                 expected_ty,
                 provided_ty,
             } => {
+                // TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments
+                // here (see `diagnostic::is_invalid_typed_dict_literal`). However, we may have
+                // silenced diagnostics during overload evaluation, and rely on the assignability
+                // diagnostic being emitted here.
+
                 let range = Self::get_node(node, *argument_index);
                 let Some(builder) = context.report_lint(&INVALID_ARGUMENT_TYPE, range) else {
                     return;
diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs
index 6ab6f2a447..5d647f108f 100644
--- a/crates/ty_python_semantic/src/types/diagnostic.rs
+++ b/crates/ty_python_semantic/src/types/diagnostic.rs
@@ -2003,6 +2003,20 @@ pub(super) fn report_slice_step_size_zero(context: &InferContext, node: AnyNodeR
     builder.into_diagnostic("Slice step size cannot be zero");
 }
 
+// We avoid emitting invalid assignment diagnostic for literal assignments to a `TypedDict`, as
+// they can only occur if we already failed to validate the dict (and emitted some diagnostic).
+pub(crate) fn is_invalid_typed_dict_literal(
+    db: &dyn Db,
+    target_ty: Type,
+    source: AnyNodeRef<'_>,
+) -> bool {
+    target_ty
+        .filter_union(db, Type::is_typed_dict)
+        .as_typed_dict()
+        .is_some()
+        && matches!(source, AnyNodeRef::ExprDict(_))
+}
+
 fn report_invalid_assignment_with_message(
     context: &InferContext,
     node: AnyNodeRef,
@@ -2040,15 +2054,27 @@ pub(super) fn report_invalid_assignment<'db>(
     target_ty: Type,
     mut source_ty: Type<'db>,
 ) {
+    let value_expr = match definition.kind(context.db()) {
+        DefinitionKind::Assignment(def) => Some(def.value(context.module())),
+        DefinitionKind::AnnotatedAssignment(def) => def.value(context.module()),
+        DefinitionKind::NamedExpression(def) => Some(&*def.node(context.module()).value),
+        _ => None,
+    };
+
+    if let Some(value_expr) = value_expr
+        && is_invalid_typed_dict_literal(context.db(), target_ty, value_expr.into())
+    {
+        return;
+    }
+
     let settings =
         DisplaySettings::from_possibly_ambiguous_type_pair(context.db(), target_ty, source_ty);
 
-    if let DefinitionKind::AnnotatedAssignment(annotated_assignment) = definition.kind(context.db())
-        && let Some(value) = annotated_assignment.value(context.module())
-    {
+    if let Some(value_expr) = value_expr {
         // Re-infer the RHS of the annotated assignment, ignoring the type context for more precise
         // error messages.
-        source_ty = infer_isolated_expression(context.db(), definition.scope(context.db()), value);
+        source_ty =
+            infer_isolated_expression(context.db(), definition.scope(context.db()), value_expr);
     }
 
     report_invalid_assignment_with_message(
@@ -2070,6 +2096,11 @@ pub(super) fn report_invalid_attribute_assignment(
     source_ty: Type,
     attribute_name: &'_ str,
 ) {
+    // TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments
+    // here (see `diagnostic::is_invalid_typed_dict_literal`). However, we may have
+    // silenced diagnostics during attribute resolution, and rely on the assignability
+    // diagnostic being emitted here.
+
     report_invalid_assignment_with_message(
         context,
         node,
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index 1ad7d18482..b7608bbfff 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -5,7 +5,9 @@ use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity};
 use ruff_db::files::File;
 use ruff_db::parsed::ParsedModuleRef;
 use ruff_python_ast::visitor::{Visitor, walk_expr};
-use ruff_python_ast::{self as ast, AnyNodeRef, ExprContext, PythonVersion};
+use ruff_python_ast::{
+    self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion,
+};
 use ruff_python_stdlib::builtins::version_builtin_was_added;
 use ruff_text_size::{Ranged, TextRange};
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -5859,15 +5861,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         expression.map(|expr| self.infer_expression(expr, tcx))
     }
 
-    fn get_or_infer_expression(
-        &mut self,
-        expression: &ast::Expr,
-        tcx: TypeContext<'db>,
-    ) -> Type<'db> {
-        self.try_expression_type(expression)
-            .unwrap_or_else(|| self.infer_expression(expression, tcx))
-    }
-
     #[track_caller]
     fn infer_expression(&mut self, expression: &ast::Expr, tcx: TypeContext<'db>) -> Type<'db> {
         debug_assert!(
@@ -6223,7 +6216,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         } = list;
 
         let elts = elts.iter().map(|elt| [Some(elt)]);
-        self.infer_collection_literal(elts, tcx, KnownClass::List)
+        let infer_elt_ty = |builder: &mut Self, elt, tcx| builder.infer_expression(elt, tcx);
+        self.infer_collection_literal(elts, tcx, infer_elt_ty, KnownClass::List)
             .unwrap_or_else(|| {
                 KnownClass::List.to_specialized_instance(self.db(), [Type::unknown()])
             })
@@ -6237,7 +6231,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         } = set;
 
         let elts = elts.iter().map(|elt| [Some(elt)]);
-        self.infer_collection_literal(elts, tcx, KnownClass::Set)
+        let infer_elt_ty = |builder: &mut Self, elt, tcx| builder.infer_expression(elt, tcx);
+        self.infer_collection_literal(elts, tcx, infer_elt_ty, KnownClass::Set)
             .unwrap_or_else(|| {
                 KnownClass::Set.to_specialized_instance(self.db(), [Type::unknown()])
             })
@@ -6250,12 +6245,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             items,
         } = dict;
 
+        let mut item_types = FxHashMap::default();
+
         // Validate `TypedDict` dictionary literal assignments.
         if let Some(tcx) = tcx.annotation
             && let Some(typed_dict) = tcx
                 .filter_union(self.db(), Type::is_typed_dict)
                 .as_typed_dict()
-            && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict)
+            && let Some(ty) = self.infer_typed_dict_expression(dict, typed_dict, &mut item_types)
         {
             return ty;
         }
@@ -6271,7 +6268,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             .iter()
             .map(|item| [item.key.as_ref(), Some(&item.value)]);
 
-        self.infer_collection_literal(items, tcx, KnownClass::Dict)
+        // Avoid inferring the items multiple times if we already attempted to infer the
+        // dictionary literal as a `TypedDict`. This also allows us to infer using the
+        // type context of the expected `TypedDict` field.
+        let infer_elt_ty = |builder: &mut Self, elt: &ast::Expr, tcx| {
+            item_types
+                .get(&elt.node_index().load())
+                .copied()
+                .unwrap_or_else(|| builder.infer_expression(elt, tcx))
+        };
+
+        self.infer_collection_literal(items, tcx, infer_elt_ty, KnownClass::Dict)
             .unwrap_or_else(|| {
                 KnownClass::Dict
                     .to_specialized_instance(self.db(), [Type::unknown(), Type::unknown()])
@@ -6282,6 +6289,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         &mut self,
         dict: &ast::ExprDict,
         typed_dict: TypedDictType<'db>,
+        item_types: &mut FxHashMap<NodeIndex, Type<'db>>,
     ) -> Option<Type<'db>> {
         let ast::ExprDict {
             range: _,
@@ -6293,14 +6301,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 
         for item in items {
             let key_ty = self.infer_optional_expression(item.key.as_ref(), TypeContext::default());
+            if let Some((key, key_ty)) = item.key.as_ref().zip(key_ty) {
+                item_types.insert(key.node_index().load(), key_ty);
+            }
 
-            if let Some(Type::StringLiteral(key)) = key_ty
+            let value_ty = if let Some(Type::StringLiteral(key)) = key_ty
                 && let Some(field) = typed_dict_items.get(key.value(self.db()))
             {
-                self.infer_expression(&item.value, TypeContext::new(Some(field.declared_ty)));
+                self.infer_expression(&item.value, TypeContext::new(Some(field.declared_ty)))
             } else {
-                self.infer_expression(&item.value, TypeContext::default());
-            }
+                self.infer_expression(&item.value, TypeContext::default())
+            };
+
+            item_types.insert(item.value.node_index().load(), value_ty);
         }
 
         validate_typed_dict_dict_literal(&self.context, typed_dict, dict, dict.into(), |expr| {
@@ -6311,12 +6324,17 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
     }
 
     // Infer the type of a collection literal expression.
-    fn infer_collection_literal<'expr, const N: usize>(
+    fn infer_collection_literal<'expr, const N: usize, F, I>(
         &mut self,
-        elts: impl Iterator<Item = [Option<&'expr ast::Expr>; N]>,
+        elts: I,
         tcx: TypeContext<'db>,
+        mut infer_elt_expression: F,
         collection_class: KnownClass,
-    ) -> Option<Type<'db>> {
+    ) -> Option<Type<'db>>
+    where
+        I: Iterator<Item = [Option<&'expr ast::Expr>; N]>,
+        F: FnMut(&mut Self, &'expr ast::Expr, TypeContext<'db>) -> Type<'db>,
+    {
         // Extract the type variable `T` from `list[T]` in typeshed.
         let elt_tys = |collection_class: KnownClass| {
             let class_literal = collection_class.try_to_class_literal(self.db())?;
@@ -6332,7 +6350,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             // Infer the element types without type context, and fallback to unknown for
             // custom typesheds.
             for elt in elts.flatten().flatten() {
-                self.get_or_infer_expression(elt, TypeContext::default());
+                infer_elt_expression(self, elt, TypeContext::default());
             }
 
             return None;
@@ -6387,7 +6405,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         for elts in elts {
             // An unpacking expression for a dictionary.
             if let &[None, Some(value)] = elts.as_slice() {
-                let inferred_value_ty = self.get_or_infer_expression(value, TypeContext::default());
+                let inferred_value_ty = infer_elt_expression(self, value, TypeContext::default());
 
                 // Merge the inferred type of the nested dictionary.
                 if let Some(specialization) =
@@ -6410,7 +6428,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             {
                 let Some(elt) = elt else { continue };
 
-                let inferred_elt_ty = self.get_or_infer_expression(elt, elt_tcx);
+                let inferred_elt_ty = infer_elt_expression(self, elt, elt_tcx);
 
                 // Simplify the inference based on the declared type of the element.
                 if let Some(elt_tcx) = elt_tcx.annotation {
diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs
index e29b836d8a..83b4ae946e 100644
--- a/crates/ty_python_semantic/src/types/typed_dict.rs
+++ b/crates/ty_python_semantic/src/types/typed_dict.rs
@@ -8,7 +8,7 @@ use ruff_text_size::Ranged;
 use super::class::{ClassType, CodeGeneratorKind, Field};
 use super::context::InferContext;
 use super::diagnostic::{
-    INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, report_invalid_key_on_typed_dict,
+    self, INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, report_invalid_key_on_typed_dict,
     report_missing_typed_dict_key,
 };
 use super::{ApplyTypeMappingVisitor, Type, TypeMapping, visitor};
@@ -213,9 +213,13 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>(
         return true;
     }
 
+    let value_node = value_node.into();
+    if diagnostic::is_invalid_typed_dict_literal(context.db(), item.declared_ty, value_node) {
+        return false;
+    }
+
     // Invalid assignment - emit diagnostic
-    if let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node.into())
-    {
+    if let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node) {
         let typed_dict_ty = Type::TypedDict(typed_dict);
         let typed_dict_d = typed_dict_ty.display(db);
         let value_d = value_ty.display(db);

From 3c5e4e147779ee016320993dc3b2406c2c3fde34 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Mon, 3 Nov 2025 23:00:30 +0100
Subject: [PATCH 086/180] [ty] Update salsa (#21265)

---
 Cargo.lock      | 6 +++---
 Cargo.toml      | 2 +-
 fuzz/Cargo.toml | 2 +-
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 14312dfa75..2da7d9ff0e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3586,7 +3586,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 [[package]]
 name = "salsa"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
 dependencies = [
  "boxcar",
  "compact_str",
@@ -3610,12 +3610,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
 
 [[package]]
 name = "salsa-macros"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=cdd0b85516a52c18b8a6d17a2279a96ed6c3e198#cdd0b85516a52c18b8a6d17a2279a96ed6c3e198"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/Cargo.toml b/Cargo.toml
index ed7fbf4fcb..b2122cea97 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
 rustc-hash = { version = "2.0.0" }
 rustc-stable-hash = { version = "0.1.2" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cdd0b85516a52c18b8a6d17a2279a96ed6c3e198", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "664750a6e588ed23a0d2d9105a02cb5993c8e178", default-features = false, features = [
     "compact_str",
     "macros",
     "salsa_unstable",
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 359e59d1e1..278267fc15 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" }
 ty_vendored = { path = "../crates/ty_vendored" }
 
 libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false }
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "cdd0b85516a52c18b8a6d17a2279a96ed6c3e198", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "664750a6e588ed23a0d2d9105a02cb5993c8e178", default-features = false, features = [
     "compact_str",
     "macros",
     "salsa_unstable",

From 63b1c1ea8bd2b0b08661167c20492d2cb6e2889c Mon Sep 17 00:00:00 2001
From: Brent Westbrook <36778786+ntBre@users.noreply.github.com>
Date: Mon, 3 Nov 2025 17:06:52 -0500
Subject: [PATCH 087/180] Avoid extra parentheses for long `match` patterns
 with `as` captures (#21176)

Summary
--

This PR fixes #17796 by taking the approach mentioned in
https://github.com/astral-sh/ruff/issues/17796#issuecomment-2847943862
of simply recursing into the `MatchAs` patterns when checking if we need
parentheses. This allows us to reuse the parentheses in the inner
pattern before also breaking the `MatchAs` pattern itself:

```diff
 match class_pattern:
     case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:
         pass
-    case (
-        Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture
-    ):
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
         pass
-    case (
-        Class(
-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
-        ) as capture
-    ):
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
         pass
     case (
         Class(
@@ -685,13 +683,11 @@
 match sequence_pattern_brackets:
     case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture:
         pass
-    case (
-        [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture
-    ):
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
         pass
-    case (
-        [
-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
-        ] as capture
-    ):
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
         pass
```

I haven't really resolved the question of whether or not it's okay
always to recurse, but I'm hoping the ecosystem check on this PR might
shed some light on that.

Test Plan
--

New tests based on the issue and then reviewing the ecosystem check here
---
 .../test/fixtures/ruff/statement/match.py     |  55 +++++
 .../ruff_python_formatter/src/pattern/mod.rs  |  20 +-
 crates/ruff_python_formatter/src/preview.rs   |   9 +
 .../snapshots/format@statement__match.py.snap | 227 +++++++++++++++++-
 4 files changed, 307 insertions(+), 4 deletions(-)

diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py
index 4067d508c0..4a403b1541 100644
--- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py
+++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py
@@ -613,3 +613,58 @@ match guard_comments:
     ):
         pass
 
+
+# regression tests from https://github.com/astral-sh/ruff/issues/17796
+match class_pattern:
+    case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture:
+        pass
+
+match sequence_pattern_brackets:
+    case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture:
+        pass
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
+        pass
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
+        pass
+
+
+match class_pattern:
+    # 1
+    case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:  # 2
+        # 3
+        pass # 4
+    # 5
+    case Class( # 6
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7
+    ) as capture: # 8
+        pass
+    case Class( # 9
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10
+    ) as capture: # 11
+        pass
+    case Class( # 12
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 13
+    ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture: # 14
+        pass
+    case Class( # 0
+            # 1
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2
+            # 3
+    ) as capture:
+        pass
diff --git a/crates/ruff_python_formatter/src/pattern/mod.rs b/crates/ruff_python_formatter/src/pattern/mod.rs
index 557337ddc5..a379aeb849 100644
--- a/crates/ruff_python_formatter/src/pattern/mod.rs
+++ b/crates/ruff_python_formatter/src/pattern/mod.rs
@@ -1,5 +1,5 @@
 use ruff_formatter::{FormatOwnedWithRule, FormatRefWithRule, FormatRule, FormatRuleWithOptions};
-use ruff_python_ast::{AnyNodeRef, Expr};
+use ruff_python_ast::{AnyNodeRef, Expr, PatternMatchAs};
 use ruff_python_ast::{MatchCase, Pattern};
 use ruff_python_trivia::CommentRanges;
 use ruff_python_trivia::{
@@ -14,6 +14,7 @@ use crate::expression::parentheses::{
     NeedsParentheses, OptionalParentheses, Parentheses, optional_parentheses, parenthesized,
 };
 use crate::prelude::*;
+use crate::preview::is_avoid_parens_for_long_as_captures_enabled;
 
 pub(crate) mod pattern_arguments;
 pub(crate) mod pattern_keyword;
@@ -242,8 +243,14 @@ pub(crate) fn can_pattern_omit_optional_parentheses(
                 Pattern::MatchValue(_)
                 | Pattern::MatchSingleton(_)
                 | Pattern::MatchStar(_)
-                | Pattern::MatchAs(_)
                 | Pattern::MatchOr(_) => false,
+                Pattern::MatchAs(PatternMatchAs { pattern, .. }) => match pattern {
+                    Some(pattern) => {
+                        is_avoid_parens_for_long_as_captures_enabled(context)
+                            && has_parentheses_and_is_non_empty(pattern, context)
+                    }
+                    None => false,
+                },
                 Pattern::MatchSequence(sequence) => {
                     !sequence.patterns.is_empty() || context.comments().has_dangling(pattern)
                 }
@@ -318,7 +325,14 @@ impl<'a> CanOmitOptionalParenthesesVisitor<'a> {
                 // The pattern doesn't start with a parentheses pattern, but with the class's identifier.
                 self.first.set_if_none(First::Token);
             }
-            Pattern::MatchStar(_) | Pattern::MatchSingleton(_) | Pattern::MatchAs(_) => {}
+            Pattern::MatchAs(PatternMatchAs { pattern, .. }) => {
+                if let Some(pattern) = pattern
+                    && is_avoid_parens_for_long_as_captures_enabled(context)
+                {
+                    self.visit_sub_pattern(pattern, context);
+                }
+            }
+            Pattern::MatchStar(_) | Pattern::MatchSingleton(_) => {}
             Pattern::MatchOr(or_pattern) => {
                 self.update_max_precedence(
                     OperatorPrecedence::Or,
diff --git a/crates/ruff_python_formatter/src/preview.rs b/crates/ruff_python_formatter/src/preview.rs
index 5455fa9a12..9d307390d6 100644
--- a/crates/ruff_python_formatter/src/preview.rs
+++ b/crates/ruff_python_formatter/src/preview.rs
@@ -43,3 +43,12 @@ pub(crate) const fn is_remove_parens_around_except_types_enabled(
 pub(crate) const fn is_allow_newline_after_block_open_enabled(context: &PyFormatContext) -> bool {
     context.is_preview()
 }
+
+/// Returns `true` if the
+/// [`avoid_parens_for_long_as_captures`](https://github.com/astral-sh/ruff/pull/21176) preview
+/// style is enabled.
+pub(crate) const fn is_avoid_parens_for_long_as_captures_enabled(
+    context: &PyFormatContext,
+) -> bool {
+    context.is_preview()
+}
diff --git a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap
index 852740fa6d..a94ee5e636 100644
--- a/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap
+++ b/crates/ruff_python_formatter/tests/snapshots/format@statement__match.py.snap
@@ -1,7 +1,6 @@
 ---
 source: crates/ruff_python_formatter/tests/fixtures.rs
 input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/match.py
-snapshot_kind: text
 ---
 ## Input
 ```python
@@ -620,6 +619,61 @@ match guard_comments:
     ):
         pass
 
+
+# regression tests from https://github.com/astral-sh/ruff/issues/17796
+match class_pattern:
+    case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as capture:
+        pass
+    case Class(
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture:
+        pass
+
+match sequence_pattern_brackets:
+    case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture:
+        pass
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
+        pass
+    case [
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+    ] as capture:
+        pass
+
+
+match class_pattern:
+    # 1
+    case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:  # 2
+        # 3
+        pass # 4
+    # 5
+    case Class( # 6
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 7
+    ) as capture: # 8
+        pass
+    case Class( # 9
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 10
+    ) as capture: # 11
+        pass
+    case Class( # 12
+        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 13
+    ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture: # 14
+        pass
+    case Class( # 0
+            # 1
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx # 2
+            # 3
+    ) as capture:
+        pass
 ```
 
 ## Output
@@ -1285,4 +1339,175 @@ match guard_comments:
         # trailing own line comment
     ):
         pass
+
+
+# regression tests from https://github.com/astral-sh/ruff/issues/17796
+match class_pattern:
+    case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:
+        pass
+    case (
+        Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture
+    ):
+        pass
+    case (
+        Class(
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+        ) as capture
+    ):
+        pass
+    case (
+        Class(
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+        ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture
+    ):
+        pass
+
+match sequence_pattern_brackets:
+    case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture:
+        pass
+    case (
+        [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture
+    ):
+        pass
+    case (
+        [
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+        ] as capture
+    ):
+        pass
+
+
+match class_pattern:
+    # 1
+    case (
+        Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture
+    ):  # 2
+        # 3
+        pass  # 4
+    # 5
+    case (
+        Class(  # 6
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 7
+        ) as capture
+    ):  # 8
+        pass
+    case (
+        Class(  # 9
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 10
+        ) as capture
+    ):  # 11
+        pass
+    case (
+        Class(  # 12
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 13
+        ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture
+    ):  # 14
+        pass
+    case (
+        Class(  # 0
+            # 1
+            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 2
+            # 3
+        ) as capture
+    ):
+        pass
+```
+
+
+## Preview changes
+```diff
+--- Stable
++++ Preview
+@@ -665,15 +665,13 @@
+ match class_pattern:
+     case Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture:
+         pass
+-    case (
+-        Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture
+-    ):
++    case Class(
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
++    ) as capture:
+         pass
+-    case (
+-        Class(
+-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+-        ) as capture
+-    ):
++    case Class(
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
++    ) as capture:
+         pass
+     case (
+         Class(
+@@ -685,37 +683,31 @@
+ match sequence_pattern_brackets:
+     case [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture:
+         pass
+-    case (
+-        [xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx] as capture
+-    ):
++    case [
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
++    ] as capture:
+         pass
+-    case (
+-        [
+-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
+-        ] as capture
+-    ):
++    case [
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
++    ] as capture:
+         pass
+ 
+ 
+ match class_pattern:
+     # 1
+-    case (
+-        Class(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx) as capture
+-    ):  # 2
++    case Class(
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
++    ) as capture:  # 2
+         # 3
+         pass  # 4
+     # 5
+-    case (
+-        Class(  # 6
+-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 7
+-        ) as capture
+-    ):  # 8
++    case Class(  # 6
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 7
++    ) as capture:  # 8
+         pass
+-    case (
+-        Class(  # 9
+-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 10
+-        ) as capture
+-    ):  # 11
++    case Class(  # 9
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 10
++    ) as capture:  # 11
+         pass
+     case (
+         Class(  # 12
+@@ -723,11 +715,9 @@
+         ) as really_really_really_really_really_really_really_really_really_really_really_really_long_capture
+     ):  # 14
+         pass
+-    case (
+-        Class(  # 0
+-            # 1
+-            xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 2
+-            # 3
+-        ) as capture
+-    ):
++    case Class(  # 0
++        # 1
++        xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx  # 2
++        # 3
++    ) as capture:
+         pass
 ```

From 4fd8d4b0ee3e20c673c175455d0ad9637183c67d Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Tue, 4 Nov 2025 04:18:12 +0100
Subject: [PATCH 088/180] [ty] Update expected diagnostic count in benchmarks
 (#21269)

---
 crates/ruff_benchmark/benches/ty_walltime.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs
index 55b2415990..2b02230fd6 100644
--- a/crates/ruff_benchmark/benches/ty_walltime.rs
+++ b/crates/ruff_benchmark/benches/ty_walltime.rs
@@ -146,7 +146,7 @@ static FREQTRADE: Benchmark = Benchmark::new(
         max_dep_date: "2025-06-17",
         python_version: PythonVersion::PY312,
     },
-    500,
+    525,
 );
 
 static PANDAS: Benchmark = Benchmark::new(

From d8106d38a06fe4d957c10f075a2bb432a1d938fd Mon Sep 17 00:00:00 2001
From: Ibraheem Ahmed 
Date: Tue, 4 Nov 2025 09:59:40 -0500
Subject: [PATCH 089/180] Run codspeed benchmarks with `profiling` profile
 (#21261)

## Summary

This reduces the walltime benchmarks from 15m to 10m, and we should see an even bigger improvement once build caching kicks in, so I think it's worth the downsides.
---
 .github/workflows/ci.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 44d9f2f542..64c1d9f9b6 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -953,7 +953,7 @@ jobs:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
 
       - name: "Run benchmarks"
         uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
@@ -991,7 +991,7 @@ jobs:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty
+        run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
 
       - name: "Run benchmarks"
         uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
@@ -1029,7 +1029,7 @@ jobs:
           tool: cargo-codspeed
 
       - name: "Build benchmarks"
-        run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
+        run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark
 
       - name: "Run benchmarks"
         uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1

From 2e7ab00d51fae2e6c1fa6842b5dda4dee28b3730 Mon Sep 17 00:00:00 2001
From: David Peter 
Date: Tue, 4 Nov 2025 16:29:55 +0100
Subject: [PATCH 090/180] [ty] Allow values of type `None` in type expressions
 (#21263)

## Summary

Allow values of type `None` in type expressions. The [typing
spec](https://typing.python.org/en/latest/spec/annotations.html#type-and-annotation-expressions)
could be more explicit on whether this is actually allowed or not, but
it seems relatively harmless and does help in some use cases like:

```py
try:
    from module import MyClass
except ImportError:
    MyClass = None  # ty: ignore


def f(m: MyClass):
    pass
```

## Test Plan

Updated tests, ecosystem check.
---
 .../resources/mdtest/implicit_type_aliases.md                | 5 +----
 crates/ty_python_semantic/src/types.rs                       | 1 +
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
index a3e0319f5a..9ce736e235 100644
--- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
+++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
@@ -22,11 +22,8 @@ f(1)
 ```py
 MyNone = None
 
-# TODO: this should not be an error
-# error: [invalid-type-form] "Variable of type `None` is not allowed in a type expression"
 def g(x: MyNone):
-    # TODO: this should be `None`
-    reveal_type(x)  # revealed: Unknown
+    reveal_type(x)  # revealed: None
 
 g(None)
 ```
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index bee767b763..131d9e7630 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -6600,6 +6600,7 @@ impl<'db> Type<'db> {
             Type::Dynamic(_) => Ok(*self),
 
             Type::NominalInstance(instance) => match instance.known_class(db) {
+                Some(KnownClass::NoneType) => Ok(Type::none(db)),
                 Some(KnownClass::TypeVar) => Ok(todo_type!(
                     "Support for `typing.TypeVar` instances in type expressions"
                 )),

From 47e41ac6b66d81e2aa8a5249985d255e03e52aaa Mon Sep 17 00:00:00 2001
From: Dan Parizher <105245560+danparizher@users.noreply.github.com>
Date: Tue, 4 Nov 2025 11:02:50 -0500
Subject: [PATCH 091/180] [`refurb`] Fix false negative for underscores before
 sign in `Decimal` constructor (`FURB157`) (#21190)

## Summary

Fixes a FURB157 false negative where `Decimal("_-1")` was not flagged as
verbose when underscores precede the sign character. Fixes #21186.

## Problem Analysis

The `verbose-decimal-constructor` (FURB157) rule failed to detect
verbose `Decimal` constructors when the sign character (`+` or `-`) was
preceded by underscores. For example, `Decimal("_-1")` was not flagged,
even though it can be simplified to `Decimal(-1)`.

The bug occurred because the rule checked for the sign character at the
start of the string before stripping leading underscores. According to
Python's `Decimal` parser behavior (as documented in CPython's
`_pydecimal.py`), underscores are removed before parsing the sign. The
rule's logic didn't match this behavior, causing a false negative for
cases like `"_-1"` where the underscore came before the sign.

This was a regression introduced in version 0.14.3, as these cases were
correctly flagged in version 0.14.2.

## Approach

The fix updates the sign extraction logic to:
1. Strip leading underscores first (matching Python's Decimal parser
behavior)
2. Extract the sign from the underscore-stripped string
3. Preserve the string after the sign for normalization purposes

This ensures that cases like `Decimal("_-1")`, `Decimal("_+1")`, and
`Decimal("_-1_000")` are correctly detected and flagged. The
normalization logic was also updated to use the string after the sign
(without underscores) to avoid double signs in the replacement output.
---
 .../resources/test/fixtures/refurb/FURB157.py |  6 ++
 .../rules/verbose_decimal_constructor.rs      | 17 ++++--
 ...es__refurb__tests__FURB157_FURB157.py.snap | 59 +++++++++++++++++++
 3 files changed, 76 insertions(+), 6 deletions(-)

diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py
index d795fd1941..db49315f54 100644
--- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py
+++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB157.py
@@ -85,3 +85,9 @@ Decimal("1234_5678")    # Safe fix: preserves non-thousands separators
 Decimal("0001_2345")
 Decimal("000_1_2345")
 Decimal("000_000")
+
+# Test cases for underscores before sign
+# https://github.com/astral-sh/ruff/issues/21186
+Decimal("_-1")      # Should flag as verbose
+Decimal("_+1")      # Should flag as verbose
+Decimal("_-1_000")  # Should flag as verbose
diff --git a/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs b/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs
index 50c98026d5..28779b021a 100644
--- a/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs
+++ b/crates/ruff_linter/src/rules/refurb/rules/verbose_decimal_constructor.rs
@@ -93,16 +93,21 @@ pub(crate) fn verbose_decimal_constructor(checker: &Checker, call: &ast::ExprCal
             // https://github.com/python/cpython/blob/ac556a2ad1213b8bb81372fe6fb762f5fcb076de/Lib/_pydecimal.py#L6060-L6077
             // _after_ trimming whitespace from the string and removing all occurrences of "_".
             let original_str = str_literal.to_str().trim_whitespace();
+            // Strip leading underscores before extracting the sign, as Python's Decimal parser
+            // removes underscores before parsing the sign.
+            let sign_check_str = original_str.trim_start_matches('_');
             // Extract the unary sign, if any.
-            let (unary, original_str) = if let Some(trimmed) = original_str.strip_prefix('+') {
+            let (unary, sign_check_str) = if let Some(trimmed) = sign_check_str.strip_prefix('+') {
                 ("+", trimmed)
-            } else if let Some(trimmed) = original_str.strip_prefix('-') {
+            } else if let Some(trimmed) = sign_check_str.strip_prefix('-') {
                 ("-", trimmed)
             } else {
-                ("", original_str)
+                ("", sign_check_str)
             };
-            let mut rest = Cow::from(original_str);
-            let has_digit_separators = memchr::memchr(b'_', rest.as_bytes()).is_some();
+            // Save the string after the sign for normalization (before removing underscores)
+            let str_after_sign_for_normalization = sign_check_str;
+            let mut rest = Cow::from(sign_check_str);
+            let has_digit_separators = memchr::memchr(b'_', original_str.as_bytes()).is_some();
             if has_digit_separators {
                 rest = Cow::from(rest.replace('_', ""));
             }
@@ -123,7 +128,7 @@ pub(crate) fn verbose_decimal_constructor(checker: &Checker, call: &ast::ExprCal
 
             // If the original string had digit separators, normalize them
             let rest = if has_digit_separators {
-                Cow::from(normalize_digit_separators(original_str))
+                Cow::from(normalize_digit_separators(str_after_sign_for_normalization))
             } else {
                 Cow::from(rest)
             };
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap
index 92e8057055..3f0a1c2cf6 100644
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap
+++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB157_FURB157.py.snap
@@ -669,6 +669,7 @@ help: Replace with `1_2345`
 85 + Decimal(1_2345)
 86 | Decimal("000_1_2345")
 87 | Decimal("000_000")
+88 | 
 
 FURB157 [*] Verbose expression in `Decimal` constructor
   --> FURB157.py:86:9
@@ -686,6 +687,8 @@ help: Replace with `1_2345`
    - Decimal("000_1_2345")
 86 + Decimal(1_2345)
 87 | Decimal("000_000")
+88 | 
+89 | # Test cases for underscores before sign
 
 FURB157 [*] Verbose expression in `Decimal` constructor
   --> FURB157.py:87:9
@@ -694,6 +697,8 @@ FURB157 [*] Verbose expression in `Decimal` constructor
 86 | Decimal("000_1_2345")
 87 | Decimal("000_000")
    |         ^^^^^^^^^
+88 |
+89 | # Test cases for underscores before sign
    |
 help: Replace with `0`
 84 | # Separators _and_ leading zeros
@@ -701,3 +706,57 @@ help: Replace with `0`
 86 | Decimal("000_1_2345")
    - Decimal("000_000")
 87 + Decimal(0)
+88 | 
+89 | # Test cases for underscores before sign
+90 | # https://github.com/astral-sh/ruff/issues/21186
+
+FURB157 [*] Verbose expression in `Decimal` constructor
+  --> FURB157.py:91:9
+   |
+89 | # Test cases for underscores before sign
+90 | # https://github.com/astral-sh/ruff/issues/21186
+91 | Decimal("_-1")      # Should flag as verbose
+   |         ^^^^^
+92 | Decimal("_+1")      # Should flag as verbose
+93 | Decimal("_-1_000")  # Should flag as verbose
+   |
+help: Replace with `-1`
+88 | 
+89 | # Test cases for underscores before sign
+90 | # https://github.com/astral-sh/ruff/issues/21186
+   - Decimal("_-1")      # Should flag as verbose
+91 + Decimal(-1)      # Should flag as verbose
+92 | Decimal("_+1")      # Should flag as verbose
+93 | Decimal("_-1_000")  # Should flag as verbose
+
+FURB157 [*] Verbose expression in `Decimal` constructor
+  --> FURB157.py:92:9
+   |
+90 | # https://github.com/astral-sh/ruff/issues/21186
+91 | Decimal("_-1")      # Should flag as verbose
+92 | Decimal("_+1")      # Should flag as verbose
+   |         ^^^^^
+93 | Decimal("_-1_000")  # Should flag as verbose
+   |
+help: Replace with `+1`
+89 | # Test cases for underscores before sign
+90 | # https://github.com/astral-sh/ruff/issues/21186
+91 | Decimal("_-1")      # Should flag as verbose
+   - Decimal("_+1")      # Should flag as verbose
+92 + Decimal(+1)      # Should flag as verbose
+93 | Decimal("_-1_000")  # Should flag as verbose
+
+FURB157 [*] Verbose expression in `Decimal` constructor
+  --> FURB157.py:93:9
+   |
+91 | Decimal("_-1")      # Should flag as verbose
+92 | Decimal("_+1")      # Should flag as verbose
+93 | Decimal("_-1_000")  # Should flag as verbose
+   |         ^^^^^^^^^
+   |
+help: Replace with `-1_000`
+90 | # https://github.com/astral-sh/ruff/issues/21186
+91 | Decimal("_-1")      # Should flag as verbose
+92 | Decimal("_+1")      # Should flag as verbose
+   - Decimal("_-1_000")  # Should flag as verbose
+93 + Decimal(-1_000)  # Should flag as verbose

From f79044478c9999c58707f716d220767505bcf2d4 Mon Sep 17 00:00:00 2001
From: David Peter 
Date: Tue, 4 Nov 2025 18:36:36 +0100
Subject: [PATCH 092/180] [ty] Fix playground crash when file name includes
 path separator (#21151)

---
 playground/ty/src/Editor/SecondaryPanel.tsx | 22 ++++++++++++++++++---
 1 file changed, 19 insertions(+), 3 deletions(-)

diff --git a/playground/ty/src/Editor/SecondaryPanel.tsx b/playground/ty/src/Editor/SecondaryPanel.tsx
index a73cb57961..6c8cd0189a 100644
--- a/playground/ty/src/Editor/SecondaryPanel.tsx
+++ b/playground/ty/src/Editor/SecondaryPanel.tsx
@@ -103,11 +103,17 @@ function Content({
   }
 }
 
+const SANDBOX_BASE_DIRECTORY = "/playground/";
+
 function Run({ files, theme }: { files: ReadonlyFiles; theme: Theme }) {
   const [runOutput, setRunOutput] = useState | null>(null);
   const handleRun = () => {
     const output = (async () => {
-      const pyodide = await loadPyodide();
+      const pyodide = await loadPyodide({
+        env: {
+          HOME: SANDBOX_BASE_DIRECTORY,
+        },
+      });
 
       let combined_output = "";
 
@@ -122,7 +128,17 @@ function Run({ files, theme }: { files: ReadonlyFiles; theme: Theme }) {
 
       let fileName = "main.py";
       for (const file of files.index) {
-        pyodide.FS.writeFile(file.name, files.contents[file.id]);
+        const last_separator = file.name.lastIndexOf("/");
+
+        if (last_separator !== -1) {
+          const directory =
+            SANDBOX_BASE_DIRECTORY + file.name.slice(0, last_separator);
+          pyodide.FS.mkdirTree(directory);
+        }
+        pyodide.FS.writeFile(
+          SANDBOX_BASE_DIRECTORY + file.name,
+          files.contents[file.id],
+        );
 
         if (file.id === files.selected) {
           fileName = file.name;
@@ -133,7 +149,7 @@ function Run({ files, theme }: { files: ReadonlyFiles; theme: Theme }) {
       const globals = dict();
 
       try {
-        // Patch up reveal types
+        // Patch `reveal_type` to print runtime values
         pyodide.runPython(`
         import builtins
 

From 7009d6026014532dc69a04bb135d1ffc717effd8 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Wed, 5 Nov 2025 14:24:03 +0100
Subject: [PATCH 093/180] [ty] Refactor `Range` to/from `TextRange` conversion
 as prep for notebook support (#21230)

---
 crates/ty_server/src/document/location.rs     |  55 ++-
 crates/ty_server/src/document/range.rs        | 359 ++++++++++++------
 .../ty_server/src/document/text_document.rs   |   4 +-
 .../ty_server/src/server/api/diagnostics.rs   |   9 +-
 .../src/server/api/requests/completion.rs     |  15 +-
 .../src/server/api/requests/doc_highlights.rs |  11 +-
 .../server/api/requests/document_symbols.rs   |  42 +-
 .../server/api/requests/goto_declaration.rs   |   8 +-
 .../server/api/requests/goto_definition.rs    |   8 +-
 .../server/api/requests/goto_references.rs    |   8 +-
 .../api/requests/goto_type_definition.rs      |   8 +-
 .../src/server/api/requests/hover.rs          |  22 +-
 .../src/server/api/requests/inlay_hints.rs    |   9 +-
 .../src/server/api/requests/prepare_rename.rs |   9 +-
 .../src/server/api/requests/rename.rs         |   8 +-
 .../server/api/requests/selection_range.rs    |  10 +-
 .../api/requests/semantic_tokens_range.rs     |  11 +-
 .../src/server/api/requests/signature_help.rs |   8 +-
 .../server/api/requests/workspace_symbols.rs  |  24 +-
 .../src/server/api/semantic_tokens.rs         |   8 +-
 crates/ty_server/src/server/api/symbols.rs    |  25 +-
 crates/ty_server/src/session.rs               |   5 +
 22 files changed, 386 insertions(+), 280 deletions(-)

diff --git a/crates/ty_server/src/document/location.rs b/crates/ty_server/src/document/location.rs
index d5924595b2..f02dc20d98 100644
--- a/crates/ty_server/src/document/location.rs
+++ b/crates/ty_server/src/document/location.rs
@@ -1,12 +1,9 @@
 use crate::PositionEncoding;
 use crate::document::{FileRangeExt, ToRangeExt};
-use crate::system::file_to_url;
 use lsp_types::Location;
 use ruff_db::files::FileRange;
-use ruff_db::source::{line_index, source_text};
-use ruff_text_size::Ranged;
 use ty_ide::{NavigationTarget, ReferenceTarget};
-use ty_project::Db;
+use ty_python_semantic::Db;
 
 pub(crate) trait ToLink {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option;
@@ -21,7 +18,9 @@ pub(crate) trait ToLink {
 
 impl ToLink for NavigationTarget {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option {
-        FileRange::new(self.file(), self.focus_range()).to_location(db, encoding)
+        FileRange::new(self.file(), self.focus_range())
+            .as_lsp_range(db, encoding)
+            .to_location()
     }
 
     fn to_link(
@@ -31,22 +30,24 @@ impl ToLink for NavigationTarget {
         encoding: PositionEncoding,
     ) -> Option<lsp_types::LocationLink> {
         let file = self.file();
-        let uri = file_to_url(db, file)?;
-        let source = source_text(db, file);
-        let index = line_index(db, file);
 
-        let target_range = self.full_range().to_lsp_range(&source, &index, encoding);
-        let selection_range = self.focus_range().to_lsp_range(&source, &index, encoding);
+        // Get target_range and URI together to ensure they're consistent (same cell for notebooks)
+        let target_location = self
+            .full_range()
+            .as_lsp_range(db, file, encoding)
+            .to_location()?;
+        let target_range = target_location.range;
 
-        let src = src.map(|src| {
-            let source = source_text(db, src.file());
-            let index = line_index(db, src.file());
+        // For selection_range, we can use as_local_range since we know it's in the same document/cell
+        let selection_range = self
+            .focus_range()
+            .as_lsp_range(db, file, encoding)
+            .to_local_range();
 
-            src.range().to_lsp_range(&source, &index, encoding)
-        });
+        let src = src.map(|src| src.as_lsp_range(db, encoding).to_local_range());
 
         Some(lsp_types::LocationLink {
-            target_uri: uri,
+            target_uri: target_location.uri,
             target_range,
             target_selection_range: selection_range,
             origin_selection_range: src,
@@ -56,7 +57,7 @@ impl ToLink for NavigationTarget {
 
 impl ToLink for ReferenceTarget {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location> {
-        self.file_range().to_location(db, encoding)
+        self.file_range().as_lsp_range(db, encoding).to_location()
     }
 
     fn to_link(
@@ -65,22 +66,18 @@ impl ToLink for ReferenceTarget {
         src: Option<FileRange>,
         encoding: PositionEncoding,
     ) -> Option<lsp_types::LocationLink> {
-        let uri = file_to_url(db, self.file())?;
-        let source = source_text(db, self.file());
-        let index = line_index(db, self.file());
-
-        let target_range = self.range().to_lsp_range(&source, &index, encoding);
+        // Get target_range and URI together to ensure they're consistent (same cell for notebooks)
+        let target_location = self
+            .range()
+            .as_lsp_range(db, self.file(), encoding)
+            .to_location()?;
+        let target_range = target_location.range;
         let selection_range = target_range;
 
-        let src = src.map(|src| {
-            let source = source_text(db, src.file());
-            let index = line_index(db, src.file());
-
-            src.range().to_lsp_range(&source, &index, encoding)
-        });
+        let src = src.map(|src| src.as_lsp_range(db, encoding).to_local_range());
 
         Some(lsp_types::LocationLink {
-            target_uri: uri,
+            target_uri: target_location.uri,
             target_range,
             target_selection_range: selection_range,
             origin_selection_range: src,
diff --git a/crates/ty_server/src/document/range.rs b/crates/ty_server/src/document/range.rs
index 1d107e5a30..1e7f381ae5 100644
--- a/crates/ty_server/src/document/range.rs
+++ b/crates/ty_server/src/document/range.rs
@@ -1,148 +1,288 @@
 use super::PositionEncoding;
-use super::notebook;
 use crate::system::file_to_url;
+use ty_python_semantic::Db;
 
 use lsp_types as types;
-use lsp_types::Location;
-
-use ruff_db::files::FileRange;
+use lsp_types::{Location, Position, Url};
+use ruff_db::files::{File, FileRange};
 use ruff_db::source::{line_index, source_text};
-use ruff_notebook::NotebookIndex;
 use ruff_source_file::LineIndex;
 use ruff_source_file::{OneIndexed, SourceLocation};
 use ruff_text_size::{Ranged, TextRange, TextSize};
-use ty_python_semantic::Db;
 
-#[expect(dead_code)]
-pub(crate) struct NotebookRange {
-    pub(crate) cell: notebook::CellId,
-    pub(crate) range: types::Range,
+/// Represents a range that has been prepared for LSP conversion but requires
+/// a decision about how to use it - either as a local range within the same
+/// document/cell, or as a location that can reference any document in the project.
+#[derive(Clone)]
+pub(crate) struct LspRange<'db> {
+    file: File,
+    range: TextRange,
+    db: &'db dyn Db,
+    encoding: PositionEncoding,
+}
+
+impl std::fmt::Debug for LspRange<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("LspRange")
+            .field("range", &self.range)
+            .field("file", &self.file)
+            .field("encoding", &self.encoding)
+            .finish_non_exhaustive()
+    }
+}
+
+impl LspRange<'_> {
+    /// Convert to an LSP Range for use within the same document/cell.
+    /// Returns only the LSP Range without any URI information.
+    ///
+    /// Use this when you already have a URI context and this range is guaranteed
+    /// to be within the same document/cell:
+    /// - Selection ranges within a `LocationLink` (where `target_uri` provides context)
+    /// - Additional ranges in the same cell (e.g., `selection_range` when you already have `target_range`)
+    ///
+    /// Do NOT use this for standalone ranges - use `to_location()` instead to ensure
+    /// the URI and range are consistent.
+    pub(crate) fn to_local_range(&self) -> types::Range {
+        self.to_uri_and_range().1
+    }
+
+    /// Convert to a Location that can reference any document.
+    /// Returns a Location with both URI and Range.
+    ///
+    /// Use this for:
+    /// - Go-to-definition targets
+    /// - References
+    /// - Diagnostics related information
+    /// - Any cross-file navigation
+    pub(crate) fn to_location(&self) -> Option<Location> {
+        let (uri, range) = self.to_uri_and_range();
+        Some(Location { uri: uri?, range })
+    }
+
+    pub(crate) fn to_uri_and_range(&self) -> (Option<Url>, lsp_types::Range) {
+        let source = source_text(self.db, self.file);
+        let index = line_index(self.db, self.file);
+
+        let uri = file_to_url(self.db, self.file);
+        let range = text_range_to_lsp_range(self.range, &source, &index, self.encoding);
+        (uri, range)
+    }
+}
+
+/// Represents a position that has been prepared for LSP conversion but requires
+/// a decision about how to use it - either as a local position within the same
+/// document/cell, or as a location with a single-point range that can reference
+/// any document in the project.
+#[derive(Clone)]
+pub(crate) struct LspPosition<'db> {
+    file: File,
+    position: TextSize,
+    db: &'db dyn Db,
+    encoding: PositionEncoding,
+}
+
+impl std::fmt::Debug for LspPosition<'_> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("LspPosition")
+            .field("position", &self.position)
+            .field("file", &self.file)
+            .field("encoding", &self.encoding)
+            .finish_non_exhaustive()
+    }
+}
+
+impl LspPosition<'_> {
+    /// Convert to an LSP Position for use within the same document/cell.
+    /// Returns only the LSP Position without any URI information.
+    ///
+    /// Use this when you already have a URI context and this position is guaranteed
+    /// to be within the same document/cell:
+    /// - Inlay hints (where the document URI is already known)
+    /// - Positions within the same cell as a parent range
+    ///
+    /// Do NOT use this for standalone positions that might need a URI - use
+    /// `to_location()` instead to ensure the URI and position are consistent.
+    pub(crate) fn to_local_position(&self) -> types::Position {
+        self.to_location().1
+    }
+
+    /// Convert to a Location with a single-point range that can reference any document.
+    /// Returns a Location with both URI and a range where start == end.
+    ///
+    /// Use this for any cross-file navigation where you need both URI and position.
+    pub(crate) fn to_location(&self) -> (Option<Url>, Position) {
+        let source = source_text(self.db, self.file);
+        let index = line_index(self.db, self.file);
+
+        let uri = file_to_url(self.db, self.file);
+        let position = text_size_to_lsp_position(self.position, &source, &index, self.encoding);
+        (uri, position)
+    }
 }
 
 pub(crate) trait RangeExt {
-    fn to_text_range(&self, text: &str, index: &LineIndex, encoding: PositionEncoding)
-    -> TextRange;
+    /// Convert an LSP Range to internal `TextRange`.
+    fn to_text_range(
+        &self,
+        db: &dyn Db,
+        file: File,
+        url: &lsp_types::Url,
+        encoding: PositionEncoding,
+    ) -> TextRange;
+}
+
+impl RangeExt for lsp_types::Range {
+    fn to_text_range(
+        &self,
+        db: &dyn Db,
+        file: File,
+        url: &lsp_types::Url,
+        encoding: PositionEncoding,
+    ) -> TextRange {
+        let start = self.start.to_text_size(db, file, url, encoding);
+        let end = self.end.to_text_size(db, file, url, encoding);
+
+        TextRange::new(start, end)
+    }
 }
 
 pub(crate) trait PositionExt {
-    fn to_text_size(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> TextSize;
+    /// Convert an LSP Position to internal `TextSize`.
+    fn to_text_size(
+        &self,
+        db: &dyn Db,
+        file: File,
+        url: &lsp_types::Url,
+        encoding: PositionEncoding,
+    ) -> TextSize;
+}
+
+impl PositionExt for lsp_types::Position {
+    fn to_text_size(
+        &self,
+        db: &dyn Db,
+        file: File,
+        _url: &lsp_types::Url,
+        encoding: PositionEncoding,
+    ) -> TextSize {
+        let source = source_text(db, file);
+        let index = line_index(db, file);
+
+        lsp_position_to_text_size(*self, &source, &index, encoding)
+    }
 }
 
 pub(crate) trait TextSizeExt {
-    fn to_position(
-        self,
-        text: &str,
-        index: &LineIndex,
+    /// Converts this position to an `LspPosition`, which then requires an explicit
+    /// decision about how to use it (as a local position or as a location).
+    fn as_lsp_position<'db>(
+        &self,
+        db: &'db dyn Db,
+        file: File,
         encoding: PositionEncoding,
-    ) -> types::Position
+    ) -> LspPosition<'db>
     where
         Self: Sized;
 }
 
 impl TextSizeExt for TextSize {
-    fn to_position(
-        self,
-        text: &str,
-        index: &LineIndex,
+    fn as_lsp_position<'db>(
+        &self,
+        db: &'db dyn Db,
+        file: File,
         encoding: PositionEncoding,
-    ) -> types::Position {
-        let source_location = index.source_location(self, text, encoding.into());
-        source_location_to_position(&source_location)
+    ) -> LspPosition<'db> {
+        LspPosition {
+            file,
+            position: *self,
+            db,
+            encoding,
+        }
     }
 }
 
 pub(crate) trait ToRangeExt {
-    fn to_lsp_range(
+    /// Converts this range to an `LspRange`, which then requires an explicit
+    /// decision about how to use it (as a local range or as a location).
+    fn as_lsp_range<'db>(
         &self,
-        text: &str,
-        index: &LineIndex,
+        db: &'db dyn Db,
+        file: File,
         encoding: PositionEncoding,
-    ) -> types::Range;
-
-    #[expect(dead_code)]
-    fn to_notebook_range(
-        &self,
-        text: &str,
-        source_index: &LineIndex,
-        notebook_index: &NotebookIndex,
-        encoding: PositionEncoding,
-    ) -> NotebookRange;
+    ) -> LspRange<'db>;
 }
 
 fn u32_index_to_usize(index: u32) -> usize {
     usize::try_from(index).expect("u32 fits in usize")
 }
 
-impl PositionExt for lsp_types::Position {
-    fn to_text_size(&self, text: &str, index: &LineIndex, encoding: PositionEncoding) -> TextSize {
-        index.offset(
-            SourceLocation {
-                line: OneIndexed::from_zero_indexed(u32_index_to_usize(self.line)),
-                character_offset: OneIndexed::from_zero_indexed(u32_index_to_usize(self.character)),
-            },
-            text,
-            encoding.into(),
-        )
+fn text_size_to_lsp_position(
+    offset: TextSize,
+    text: &str,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> types::Position {
+    let source_location = index.source_location(offset, text, encoding.into());
+    source_location_to_position(&source_location)
+}
+
+fn text_range_to_lsp_range(
+    range: TextRange,
+    text: &str,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> types::Range {
+    types::Range {
+        start: text_size_to_lsp_position(range.start(), text, index, encoding),
+        end: text_size_to_lsp_position(range.end(), text, index, encoding),
     }
 }
 
-impl RangeExt for lsp_types::Range {
-    fn to_text_range(
-        &self,
-        text: &str,
-        index: &LineIndex,
-        encoding: PositionEncoding,
-    ) -> TextRange {
-        TextRange::new(
-            self.start.to_text_size(text, index, encoding),
-            self.end.to_text_size(text, index, encoding),
-        )
-    }
+/// Helper function to convert an LSP Position to internal `TextSize`.
+/// This is used internally by the `PositionExt` trait and other helpers.
+fn lsp_position_to_text_size(
+    position: lsp_types::Position,
+    text: &str,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> TextSize {
+    index.offset(
+        SourceLocation {
+            line: OneIndexed::from_zero_indexed(u32_index_to_usize(position.line)),
+            character_offset: OneIndexed::from_zero_indexed(u32_index_to_usize(position.character)),
+        },
+        text,
+        encoding.into(),
+    )
+}
+
+/// Helper function to convert an LSP Range to internal `TextRange`.
+/// This is used internally by the `RangeExt` trait and in special cases
+/// where `db` and `file` are not available (e.g., when applying document changes).
+pub(crate) fn lsp_range_to_text_range(
+    range: lsp_types::Range,
+    text: &str,
+    index: &LineIndex,
+    encoding: PositionEncoding,
+) -> TextRange {
+    TextRange::new(
+        lsp_position_to_text_size(range.start, text, index, encoding),
+        lsp_position_to_text_size(range.end, text, index, encoding),
+    )
 }
 
 impl ToRangeExt for TextRange {
-    fn to_lsp_range(
+    fn as_lsp_range<'db>(
         &self,
-        text: &str,
-        index: &LineIndex,
+        db: &'db dyn Db,
+        file: File,
         encoding: PositionEncoding,
-    ) -> types::Range {
-        types::Range {
-            start: self.start().to_position(text, index, encoding),
-            end: self.end().to_position(text, index, encoding),
-        }
-    }
-
-    fn to_notebook_range(
-        &self,
-        text: &str,
-        source_index: &LineIndex,
-        notebook_index: &NotebookIndex,
-        encoding: PositionEncoding,
-    ) -> NotebookRange {
-        let start = source_index.source_location(self.start(), text, encoding.into());
-        let mut end = source_index.source_location(self.end(), text, encoding.into());
-        let starting_cell = notebook_index.cell(start.line);
-
-        // weird edge case here - if the end of the range is where the newline after the cell got added (making it 'out of bounds')
-        // we need to move it one character back (which should place it at the end of the last line).
-        // we test this by checking if the ending offset is in a different (or nonexistent) cell compared to the cell of the starting offset.
-        if notebook_index.cell(end.line) != starting_cell {
-            end.line = end.line.saturating_sub(1);
-            let offset = self.end().checked_sub(1.into()).unwrap_or_default();
-            end.character_offset = source_index
-                .source_location(offset, text, encoding.into())
-                .character_offset;
-        }
-
-        let start = source_location_to_position(&notebook_index.translate_source_location(&start));
-        let end = source_location_to_position(&notebook_index.translate_source_location(&end));
-
-        NotebookRange {
-            cell: starting_cell
-                .map(OneIndexed::to_zero_indexed)
-                .unwrap_or_default(),
-            range: types::Range { start, end },
+    ) -> LspRange<'db> {
+        LspRange {
+            file,
+            range: *self,
+            db,
+            encoding,
         }
     }
 }
@@ -156,17 +296,18 @@ fn source_location_to_position(location: &SourceLocation) -> types::Position {
 }
 
 pub(crate) trait FileRangeExt {
-    fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location>;
+    /// Converts this file range to an `LspRange`, which then requires an explicit
+    /// decision about how to use it (as a local range or as a location).
+    fn as_lsp_range<'db>(&self, db: &'db dyn Db, encoding: PositionEncoding) -> LspRange<'db>;
 }
 
 impl FileRangeExt for FileRange {
-    fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<Location> {
-        let file = self.file();
-        let uri = file_to_url(db, file)?;
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
-
-        let range = self.range().to_lsp_range(&source, &line_index, encoding);
-        Some(Location { uri, range })
+    fn as_lsp_range<'db>(&self, db: &'db dyn Db, encoding: PositionEncoding) -> LspRange<'db> {
+        LspRange {
+            file: self.file(),
+            range: self.range(),
+            db,
+            encoding,
+        }
     }
 }
diff --git a/crates/ty_server/src/document/text_document.rs b/crates/ty_server/src/document/text_document.rs
index 9898dd670b..e6cd4c4e0b 100644
--- a/crates/ty_server/src/document/text_document.rs
+++ b/crates/ty_server/src/document/text_document.rs
@@ -3,7 +3,7 @@ use ruff_source_file::LineIndex;
 
 use crate::PositionEncoding;
 
-use super::RangeExt;
+use super::range::lsp_range_to_text_range;
 
 pub(crate) type DocumentVersion = i32;
 
@@ -114,7 +114,7 @@ impl TextDocument {
         } in changes
         {
             if let Some(range) = range {
-                let range = range.to_text_range(&new_contents, &active_index, encoding);
+                let range = lsp_range_to_text_range(range, &new_contents, &active_index, encoding);
 
                 new_contents.replace_range(
                     usize::from(range.start())..usize::from(range.end()),
diff --git a/crates/ty_server/src/server/api/diagnostics.rs b/crates/ty_server/src/server/api/diagnostics.rs
index 7680dc1bad..adbb17dcdf 100644
--- a/crates/ty_server/src/server/api/diagnostics.rs
+++ b/crates/ty_server/src/server/api/diagnostics.rs
@@ -9,7 +9,6 @@ use rustc_hash::FxHashMap;
 
 use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic};
 use ruff_db::files::FileRange;
-use ruff_db::source::{line_index, source_text};
 use ruff_db::system::SystemPathBuf;
 use ty_project::{Db, ProjectDatabase};
 
@@ -279,11 +278,9 @@ pub(super) fn to_lsp_diagnostic(
 ) -> Diagnostic {
     let range = if let Some(span) = diagnostic.primary_span() {
         let file = span.expect_ty_file();
-        let index = line_index(db, file);
-        let source = source_text(db, file);
 
         span.range()
-            .map(|range| range.to_lsp_range(&source, &index, encoding))
+            .map(|range| range.as_lsp_range(db, file, encoding).to_local_range())
             .unwrap_or_default()
     } else {
         Range::default()
@@ -365,7 +362,7 @@ fn annotation_to_related_information(
 
     let annotation_message = annotation.get_message()?;
     let range = FileRange::try_from(span).ok()?;
-    let location = range.to_location(db, encoding)?;
+    let location = range.as_lsp_range(db, encoding).to_location()?;
 
     Some(DiagnosticRelatedInformation {
         location,
@@ -383,7 +380,7 @@ fn sub_diagnostic_to_related_information(
 
     let span = primary_annotation.get_span();
     let range = FileRange::try_from(span).ok()?;
-    let location = range.to_location(db, encoding)?;
+    let location = range.as_lsp_range(db, encoding).to_location()?;
 
     Some(DiagnosticRelatedInformation {
         location,
diff --git a/crates/ty_server/src/server/api/requests/completion.rs b/crates/ty_server/src/server/api/requests/completion.rs
index bf712c5efb..6473661939 100644
--- a/crates/ty_server/src/server/api/requests/completion.rs
+++ b/crates/ty_server/src/server/api/requests/completion.rs
@@ -6,7 +6,6 @@ use lsp_types::{
     CompletionItem, CompletionItemKind, CompletionItemLabelDetails, CompletionList,
     CompletionParams, CompletionResponse, Documentation, TextEdit, Url,
 };
-use ruff_db::source::{line_index, source_text};
 use ruff_source_file::OneIndexed;
 use ruff_text_size::Ranged;
 use ty_ide::{CompletionKind, CompletionSettings, completion};
@@ -49,11 +48,10 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
         let settings = CompletionSettings {
@@ -73,9 +71,10 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler {
                 let kind = comp.kind(db).map(ty_kind_to_lsp_kind);
                 let type_display = comp.ty.map(|ty| ty.display(db).to_string());
                 let import_edit = comp.import.as_ref().map(|edit| {
-                    let range =
-                        edit.range()
-                            .to_lsp_range(&source, &line_index, snapshot.encoding());
+                    let range = edit
+                        .range()
+                        .as_lsp_range(db, file, snapshot.encoding())
+                        .to_local_range();
                     TextEdit {
                         range,
                         new_text: edit.content().map(ToString::to_string).unwrap_or_default(),
diff --git a/crates/ty_server/src/server/api/requests/doc_highlights.rs b/crates/ty_server/src/server/api/requests/doc_highlights.rs
index b5b6d0d9ab..c96c3d4fef 100644
--- a/crates/ty_server/src/server/api/requests/doc_highlights.rs
+++ b/crates/ty_server/src/server/api/requests/doc_highlights.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::DocumentHighlightRequest;
 use lsp_types::{DocumentHighlight, DocumentHighlightKind, DocumentHighlightParams, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::{ReferenceKind, document_highlights};
 use ty_project::ProjectDatabase;
 
@@ -41,11 +40,10 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
@@ -58,7 +56,8 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler {
             .map(|target| {
                 let range = target
                     .range()
-                    .to_lsp_range(&source, &line_index, snapshot.encoding());
+                    .as_lsp_range(db, file, snapshot.encoding())
+                    .to_local_range();
 
                 let kind = match target.kind() {
                     ReferenceKind::Read => Some(DocumentHighlightKind::READ),
diff --git a/crates/ty_server/src/server/api/requests/document_symbols.rs b/crates/ty_server/src/server/api/requests/document_symbols.rs
index ea5ee312c6..980e0850ef 100644
--- a/crates/ty_server/src/server/api/requests/document_symbols.rs
+++ b/crates/ty_server/src/server/api/requests/document_symbols.rs
@@ -2,10 +2,9 @@ use std::borrow::Cow;
 
 use lsp_types::request::DocumentSymbolRequest;
 use lsp_types::{DocumentSymbol, DocumentSymbolParams, SymbolInformation, Url};
-use ruff_db::source::{line_index, source_text};
-use ruff_source_file::LineIndex;
+use ruff_db::files::File;
 use ty_ide::{HierarchicalSymbols, SymbolId, SymbolInfo, document_symbols};
-use ty_project::ProjectDatabase;
+use ty_project::{Db, ProjectDatabase};
 
 use crate::document::{PositionEncoding, ToRangeExt};
 use crate::server::api::symbols::{convert_symbol_kind, convert_to_lsp_symbol_information};
@@ -30,7 +29,7 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
         db: &ProjectDatabase,
         snapshot: &DocumentSnapshot,
         _client: &Client,
-        params: DocumentSymbolParams,
+        _params: DocumentSymbolParams,
     ) -> crate::server::Result<Option<DocumentSymbolResponse>> {
         if snapshot
             .workspace_settings()
@@ -43,9 +42,6 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
-
         // Check if the client supports hierarchical document symbols
         let supports_hierarchical = snapshot
             .resolved_client_capabilities()
@@ -62,11 +58,11 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
                 .iter()
                 .map(|(id, symbol)| {
                     convert_to_lsp_document_symbol(
+                        db,
+                        file,
                         &symbols,
                         id,
                         symbol,
-                        &source,
-                        &line_index,
                         snapshot.encoding(),
                     )
                 })
@@ -77,14 +73,8 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
             // Return flattened symbols as SymbolInformation
             let lsp_symbols: Vec<SymbolInformation> = symbols
                 .iter()
-                .map(|(_, symbol)| {
-                    convert_to_lsp_symbol_information(
-                        symbol,
-                        &params.text_document.uri,
-                        &source,
-                        &line_index,
-                        snapshot.encoding(),
-                    )
+                .filter_map(|(_, symbol)| {
+                    convert_to_lsp_symbol_information(db, file, symbol, snapshot.encoding())
                 })
                 .collect();
 
@@ -96,11 +86,11 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
 impl RetriableRequestHandler for DocumentSymbolRequestHandler {}
 
 fn convert_to_lsp_document_symbol(
+    db: &dyn Db,
+    file: File,
     symbols: &HierarchicalSymbols,
     id: SymbolId,
     symbol: SymbolInfo<'_>,
-    source: &str,
-    line_index: &LineIndex,
     encoding: PositionEncoding,
 ) -> DocumentSymbol {
     let symbol_kind = convert_symbol_kind(symbol.kind);
@@ -112,15 +102,19 @@ fn convert_to_lsp_document_symbol(
         tags: None,
         #[allow(deprecated)]
         deprecated: None,
-        range: symbol.full_range.to_lsp_range(source, line_index, encoding),
-        selection_range: symbol.name_range.to_lsp_range(source, line_index, encoding),
+        range: symbol
+            .full_range
+            .as_lsp_range(db, file, encoding)
+            .to_local_range(),
+        selection_range: symbol
+            .name_range
+            .as_lsp_range(db, file, encoding)
+            .to_local_range(),
         children: Some(
             symbols
                 .children(id)
                 .map(|(child_id, child)| {
-                    convert_to_lsp_document_symbol(
-                        symbols, child_id, child, source, line_index, encoding,
-                    )
+                    convert_to_lsp_document_symbol(db, file, symbols, child_id, child, encoding)
                 })
                 .collect(),
         ),
diff --git a/crates/ty_server/src/server/api/requests/goto_declaration.rs b/crates/ty_server/src/server/api/requests/goto_declaration.rs
index 1c16a74bc5..2a8c931401 100644
--- a/crates/ty_server/src/server/api/requests/goto_declaration.rs
+++ b/crates/ty_server/src/server/api/requests/goto_declaration.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::{GotoDeclaration, GotoDeclarationParams};
 use lsp_types::{GotoDefinitionResponse, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::goto_declaration;
 use ty_project::ProjectDatabase;
 
@@ -41,11 +40,10 @@ impl BackgroundDocumentRequestHandler for GotoDeclarationRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/goto_definition.rs b/crates/ty_server/src/server/api/requests/goto_definition.rs
index bc33411778..343f90a5c9 100644
--- a/crates/ty_server/src/server/api/requests/goto_definition.rs
+++ b/crates/ty_server/src/server/api/requests/goto_definition.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::GotoDefinition;
 use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::goto_definition;
 use ty_project::ProjectDatabase;
 
@@ -41,11 +40,10 @@ impl BackgroundDocumentRequestHandler for GotoDefinitionRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/goto_references.rs b/crates/ty_server/src/server/api/requests/goto_references.rs
index 3afaf28b14..6cdb8e21a4 100644
--- a/crates/ty_server/src/server/api/requests/goto_references.rs
+++ b/crates/ty_server/src/server/api/requests/goto_references.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::References;
 use lsp_types::{Location, ReferenceParams, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::goto_references;
 use ty_project::ProjectDatabase;
 
@@ -41,11 +40,10 @@ impl BackgroundDocumentRequestHandler for ReferencesRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/goto_type_definition.rs b/crates/ty_server/src/server/api/requests/goto_type_definition.rs
index 379defa344..11564f50d7 100644
--- a/crates/ty_server/src/server/api/requests/goto_type_definition.rs
+++ b/crates/ty_server/src/server/api/requests/goto_type_definition.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::{GotoTypeDefinition, GotoTypeDefinitionParams};
 use lsp_types::{GotoDefinitionResponse, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::goto_type_definition;
 use ty_project::ProjectDatabase;
 
@@ -41,11 +40,10 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/hover.rs b/crates/ty_server/src/server/api/requests/hover.rs
index cc8f8e0dab..d051007003 100644
--- a/crates/ty_server/src/server/api/requests/hover.rs
+++ b/crates/ty_server/src/server/api/requests/hover.rs
@@ -1,6 +1,6 @@
 use std::borrow::Cow;
 
-use crate::document::{PositionExt, ToRangeExt};
+use crate::document::{FileRangeExt, PositionExt};
 use crate::server::api::traits::{
     BackgroundDocumentRequestHandler, RequestHandler, RetriableRequestHandler,
 };
@@ -8,8 +8,6 @@ use crate::session::DocumentSnapshot;
 use crate::session::client::Client;
 use lsp_types::request::HoverRequest;
 use lsp_types::{HoverContents, HoverParams, MarkupContent, Url};
-use ruff_db::source::{line_index, source_text};
-use ruff_text_size::Ranged;
 use ty_ide::{MarkupKind, hover};
 use ty_project::ProjectDatabase;
 
@@ -41,11 +39,10 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
@@ -69,11 +66,12 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
                 kind: lsp_markup_kind,
                 value: contents,
             }),
-            range: Some(range_info.file_range().range().to_lsp_range(
-                &source,
-                &line_index,
-                snapshot.encoding(),
-            )),
+            range: Some(
+                range_info
+                    .file_range()
+                    .as_lsp_range(db, snapshot.encoding())
+                    .to_local_range(),
+            ),
         }))
     }
 }
diff --git a/crates/ty_server/src/server/api/requests/inlay_hints.rs b/crates/ty_server/src/server/api/requests/inlay_hints.rs
index 21eb1d09b6..ec445f9b1e 100644
--- a/crates/ty_server/src/server/api/requests/inlay_hints.rs
+++ b/crates/ty_server/src/server/api/requests/inlay_hints.rs
@@ -8,7 +8,6 @@ use crate::session::DocumentSnapshot;
 use crate::session::client::Client;
 use lsp_types::request::InlayHintRequest;
 use lsp_types::{InlayHintParams, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::{InlayHintKind, InlayHintLabel, inlay_hints};
 use ty_project::ProjectDatabase;
 
@@ -40,12 +39,9 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler {
             return Ok(None);
         };
 
-        let index = line_index(db, file);
-        let source = source_text(db, file);
-
         let range = params
             .range
-            .to_text_range(&source, &index, snapshot.encoding());
+            .to_text_range(db, file, snapshot.url(), snapshot.encoding());
 
         let inlay_hints = inlay_hints(db, file, range, workspace_settings.inlay_hints());
 
@@ -54,7 +50,8 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler {
             .map(|hint| lsp_types::InlayHint {
                 position: hint
                     .position
-                    .to_position(&source, &index, snapshot.encoding()),
+                    .as_lsp_position(db, file, snapshot.encoding())
+                    .to_local_position(),
                 label: inlay_hint_label(&hint.label),
                 kind: Some(inlay_hint_kind(&hint.kind)),
                 tooltip: None,
diff --git a/crates/ty_server/src/server/api/requests/prepare_rename.rs b/crates/ty_server/src/server/api/requests/prepare_rename.rs
index a12541729d..2593122530 100644
--- a/crates/ty_server/src/server/api/requests/prepare_rename.rs
+++ b/crates/ty_server/src/server/api/requests/prepare_rename.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::PrepareRenameRequest;
 use lsp_types::{PrepareRenameResponse, TextDocumentPositionParams, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::can_rename;
 use ty_project::ProjectDatabase;
 
@@ -41,17 +40,17 @@ impl BackgroundDocumentRequestHandler for PrepareRenameRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params
             .position
-            .to_text_size(&source, &line_index, snapshot.encoding());
+            .to_text_size(db, file, snapshot.url(), snapshot.encoding());
 
         let Some(range) = can_rename(db, file, offset) else {
             return Ok(None);
         };
 
-        let lsp_range = range.to_lsp_range(&source, &line_index, snapshot.encoding());
+        let lsp_range = range
+            .as_lsp_range(db, file, snapshot.encoding())
+            .to_local_range();
 
         Ok(Some(PrepareRenameResponse::Range(lsp_range)))
     }
diff --git a/crates/ty_server/src/server/api/requests/rename.rs b/crates/ty_server/src/server/api/requests/rename.rs
index d434cb733e..efa3891ced 100644
--- a/crates/ty_server/src/server/api/requests/rename.rs
+++ b/crates/ty_server/src/server/api/requests/rename.rs
@@ -3,7 +3,6 @@ use std::collections::HashMap;
 
 use lsp_types::request::Rename;
 use lsp_types::{RenameParams, TextEdit, Url, WorkspaceEdit};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::rename;
 use ty_project::ProjectDatabase;
 
@@ -42,11 +41,10 @@ impl BackgroundDocumentRequestHandler for RenameRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/selection_range.rs b/crates/ty_server/src/server/api/requests/selection_range.rs
index 516ea6aeda..77d9df4c25 100644
--- a/crates/ty_server/src/server/api/requests/selection_range.rs
+++ b/crates/ty_server/src/server/api/requests/selection_range.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use lsp_types::request::SelectionRangeRequest;
 use lsp_types::{SelectionRange as LspSelectionRange, SelectionRangeParams, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_ide::selection_range;
 use ty_project::ProjectDatabase;
 
@@ -41,13 +40,10 @@ impl BackgroundDocumentRequestHandler for SelectionRangeRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
-
         let mut results = Vec::new();
 
         for position in params.positions {
-            let offset = position.to_text_size(&source, &line_index, snapshot.encoding());
+            let offset = position.to_text_size(db, file, snapshot.url(), snapshot.encoding());
 
             let ranges = selection_range(db, file, offset);
             if !ranges.is_empty() {
@@ -55,7 +51,9 @@ impl BackgroundDocumentRequestHandler for SelectionRangeRequestHandler {
                 let mut lsp_range = None;
                 for &range in &ranges {
                     lsp_range = Some(LspSelectionRange {
-                        range: range.to_lsp_range(&source, &line_index, snapshot.encoding()),
+                        range: range
+                            .as_lsp_range(db, file, snapshot.encoding())
+                            .to_local_range(),
                         parent: lsp_range.map(Box::new),
                     });
                 }
diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
index 03193b32a6..7daa116876 100644
--- a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
+++ b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
@@ -8,7 +8,6 @@ use crate::server::api::traits::{
 use crate::session::DocumentSnapshot;
 use crate::session::client::Client;
 use lsp_types::{SemanticTokens, SemanticTokensRangeParams, SemanticTokensRangeResult, Url};
-use ruff_db::source::{line_index, source_text};
 use ty_project::ProjectDatabase;
 
 pub(crate) struct SemanticTokensRangeRequestHandler;
@@ -39,13 +38,11 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRangeRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
-
         // Convert LSP range to text offsets
-        let requested_range = params
-            .range
-            .to_text_range(&source, &line_index, snapshot.encoding());
+        let requested_range =
+            params
+                .range
+                .to_text_range(db, file, snapshot.url(), snapshot.encoding());
 
         let lsp_tokens = generate_semantic_tokens(
             db,
diff --git a/crates/ty_server/src/server/api/requests/signature_help.rs b/crates/ty_server/src/server/api/requests/signature_help.rs
index f9b20cccd9..99d60c398f 100644
--- a/crates/ty_server/src/server/api/requests/signature_help.rs
+++ b/crates/ty_server/src/server/api/requests/signature_help.rs
@@ -11,7 +11,6 @@ use lsp_types::{
     Documentation, ParameterInformation, ParameterLabel, SignatureHelp, SignatureHelpParams,
     SignatureInformation, Url,
 };
-use ruff_db::source::{line_index, source_text};
 use ty_ide::signature_help;
 use ty_project::ProjectDatabase;
 
@@ -43,11 +42,10 @@ impl BackgroundDocumentRequestHandler for SignatureHelpRequestHandler {
             return Ok(None);
         };
 
-        let source = source_text(db, file);
-        let line_index = line_index(db, file);
         let offset = params.text_document_position_params.position.to_text_size(
-            &source,
-            &line_index,
+            db,
+            file,
+            snapshot.url(),
             snapshot.encoding(),
         );
 
diff --git a/crates/ty_server/src/server/api/requests/workspace_symbols.rs b/crates/ty_server/src/server/api/requests/workspace_symbols.rs
index a964954546..252857e7e4 100644
--- a/crates/ty_server/src/server/api/requests/workspace_symbols.rs
+++ b/crates/ty_server/src/server/api/requests/workspace_symbols.rs
@@ -8,8 +8,6 @@ use crate::server::api::traits::{
 };
 use crate::session::SessionSnapshot;
 use crate::session::client::Client;
-use crate::system::file_to_url;
-use ruff_db::source::{line_index, source_text};
 
 pub(crate) struct WorkspaceSymbolRequestHandler;
 
@@ -41,23 +39,19 @@ impl BackgroundRequestHandler for WorkspaceSymbolRequestHandler {
             for workspace_symbol_info in workspace_symbol_infos {
                 let WorkspaceSymbolInfo { symbol, file } = workspace_symbol_info;
 
-                // Get file information for URL conversion
-                let source = source_text(db, file);
-                let line_index = line_index(db, file);
-
-                // Convert file to URL
-                let Some(url) = file_to_url(db, file) else {
-                    tracing::debug!("Failed to convert file to URL at {}", file.path(db));
-                    continue;
-                };
-
                 // Get position encoding from session
                 let encoding = snapshot.position_encoding();
 
-                let lsp_symbol =
-                    convert_to_lsp_symbol_information(symbol, &url, &source, &line_index, encoding);
+                let Some(symbol) = convert_to_lsp_symbol_information(db, file, symbol, encoding)
+                else {
+                    tracing::debug!(
+                        "Failed to convert symbol '{}' to LSP symbol information",
+                        file.path(db)
+                    );
+                    continue;
+                };
 
-                all_symbols.push(lsp_symbol);
+                all_symbols.push(symbol);
             }
         }
 
diff --git a/crates/ty_server/src/server/api/semantic_tokens.rs b/crates/ty_server/src/server/api/semantic_tokens.rs
index b168ef7877..ee9808b791 100644
--- a/crates/ty_server/src/server/api/semantic_tokens.rs
+++ b/crates/ty_server/src/server/api/semantic_tokens.rs
@@ -1,5 +1,5 @@
 use lsp_types::SemanticToken;
-use ruff_db::source::{line_index, source_text};
+use ruff_db::source::source_text;
 use ruff_text_size::{Ranged, TextRange};
 use ty_ide::semantic_tokens;
 use ty_project::ProjectDatabase;
@@ -16,7 +16,6 @@ pub(crate) fn generate_semantic_tokens(
     multiline_token_support: bool,
 ) -> Vec {
     let source = source_text(db, file);
-    let line_index = line_index(db, file);
     let semantic_token_data = semantic_tokens(db, file, range);
 
     // Convert semantic tokens to LSP format
@@ -25,7 +24,10 @@ pub(crate) fn generate_semantic_tokens(
     let mut prev_start = 0u32;
 
     for token in &*semantic_token_data {
-        let lsp_range = token.range().to_lsp_range(&source, &line_index, encoding);
+        let lsp_range = token
+            .range()
+            .as_lsp_range(db, file, encoding)
+            .to_local_range();
         let line = lsp_range.start.line;
         let character = lsp_range.start.character;
 
diff --git a/crates/ty_server/src/server/api/symbols.rs b/crates/ty_server/src/server/api/symbols.rs
index 396f236e8d..fc6c1bc18c 100644
--- a/crates/ty_server/src/server/api/symbols.rs
+++ b/crates/ty_server/src/server/api/symbols.rs
@@ -1,9 +1,9 @@
 //! Utility functions common to language server request handlers
 //! that return symbol information.
 
-use lsp_types::{SymbolInformation, SymbolKind, Url};
-use ruff_source_file::LineIndex;
+use lsp_types::{SymbolInformation, SymbolKind};
 use ty_ide::SymbolInfo;
+use ty_project::Db;
 
 use crate::document::{PositionEncoding, ToRangeExt};
 
@@ -27,24 +27,25 @@ pub(crate) fn convert_symbol_kind(kind: ty_ide::SymbolKind) -> SymbolKind {
 
 /// Convert a `ty_ide` `SymbolInfo` to LSP `SymbolInformation`
 pub(crate) fn convert_to_lsp_symbol_information(
+    db: &dyn Db,
+    file: ruff_db::files::File,
     symbol: SymbolInfo<'_>,
-    uri: &Url,
-    source: &str,
-    line_index: &LineIndex,
     encoding: PositionEncoding,
-) -> SymbolInformation {
+) -> Option {
     let symbol_kind = convert_symbol_kind(symbol.kind);
 
-    SymbolInformation {
+    let location = symbol
+        .full_range
+        .as_lsp_range(db, file, encoding)
+        .to_location()?;
+
+    Some(SymbolInformation {
         name: symbol.name.into_owned(),
         kind: symbol_kind,
         tags: None,
         #[allow(deprecated)]
         deprecated: None,
-        location: lsp_types::Location {
-            uri: uri.clone(),
-            range: symbol.full_range.to_lsp_range(source, line_index, encoding),
-        },
+        location,
         container_name: None,
-    }
+    })
 }
diff --git a/crates/ty_server/src/session.rs b/crates/ty_server/src/session.rs
index c5daec77e3..9cc3553342 100644
--- a/crates/ty_server/src/session.rs
+++ b/crates/ty_server/src/session.rs
@@ -1028,6 +1028,11 @@ impl DocumentSnapshot {
         &self.document
     }
 
+    /// Returns the URL of the document.
+    pub(crate) fn url(&self) -> &lsp_types::Url {
+        self.document.url()
+    }
+
     pub(crate) fn notebook(&self) -> Option<&NotebookDocument> {
         self.notebook.as_deref()
     }

From 7569b09bdd8dfa4587e44cfbed63fa71ca361261 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Wed, 5 Nov 2025 14:40:07 +0100
Subject: [PATCH 094/180] [ty] Add `ty_server::Db` trait (#21241)

---
 crates/ty/src/lib.rs                          |  4 +--
 crates/ty_project/src/lib.rs                  |  8 ++---
 crates/ty_server/src/db.rs                    | 33 +++++++++++++++++++
 crates/ty_server/src/document/location.rs     |  7 ++--
 crates/ty_server/src/document/range.rs        |  2 +-
 crates/ty_server/src/lib.rs                   |  2 ++
 .../ty_server/src/server/api/diagnostics.rs   |  3 +-
 .../notifications/did_change_watched_files.rs |  2 +-
 .../server/api/requests/document_symbols.rs   |  3 +-
 .../server/api/requests/execute_command.rs    |  2 +-
 .../api/requests/workspace_diagnostic.rs      | 13 +++++---
 crates/ty_server/src/server/api/symbols.rs    |  2 +-
 crates/ty_server/src/system.rs                | 25 +++++++-------
 13 files changed, 76 insertions(+), 30 deletions(-)
 create mode 100644 crates/ty_server/src/db.rs

diff --git a/crates/ty/src/lib.rs b/crates/ty/src/lib.rs
index 2b28329f3f..9c667b0c82 100644
--- a/crates/ty/src/lib.rs
+++ b/crates/ty/src/lib.rs
@@ -450,12 +450,12 @@ impl ty_project::ProgressReporter for IndicatifReporter {
         self.bar.set_draw_target(self.printer.progress_target());
     }
 
-    fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) {
+    fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]) {
         self.collector.report_checked_file(db, file, diagnostics);
         self.bar.inc(1);
     }
 
-    fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec) {
+    fn report_diagnostics(&mut self, db: &ProjectDatabase, diagnostics: Vec) {
         self.collector.report_diagnostics(db, diagnostics);
     }
 }
diff --git a/crates/ty_project/src/lib.rs b/crates/ty_project/src/lib.rs
index 4c7688d47f..fe034b0a07 100644
--- a/crates/ty_project/src/lib.rs
+++ b/crates/ty_project/src/lib.rs
@@ -124,12 +124,12 @@ pub trait ProgressReporter: Send + Sync {
     fn set_files(&mut self, files: usize);
 
     /// Report the completion of checking a given file along with its diagnostics.
-    fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]);
+    fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]);
 
     /// Reports settings or IO related diagnostics. The diagnostics
     /// can belong to different files or no file at all.
     /// But it's never a file for which [`Self::report_checked_file`] gets called.
-    fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec);
+    fn report_diagnostics(&mut self, db: &ProjectDatabase, diagnostics: Vec);
 }
 
 /// Reporter that collects all diagnostics into a `Vec`.
@@ -149,7 +149,7 @@ impl CollectReporter {
 
 impl ProgressReporter for CollectReporter {
     fn set_files(&mut self, _files: usize) {}
-    fn report_checked_file(&self, _db: &dyn Db, _file: File, diagnostics: &[Diagnostic]) {
+    fn report_checked_file(&self, _db: &ProjectDatabase, _file: File, diagnostics: &[Diagnostic]) {
         if diagnostics.is_empty() {
             return;
         }
@@ -160,7 +160,7 @@ impl ProgressReporter for CollectReporter {
             .extend(diagnostics.iter().map(Clone::clone));
     }
 
-    fn report_diagnostics(&mut self, _db: &dyn Db, diagnostics: Vec) {
+    fn report_diagnostics(&mut self, _db: &ProjectDatabase, diagnostics: Vec) {
         self.0.get_mut().unwrap().extend(diagnostics);
     }
 }
diff --git a/crates/ty_server/src/db.rs b/crates/ty_server/src/db.rs
new file mode 100644
index 0000000000..9ddc746cf1
--- /dev/null
+++ b/crates/ty_server/src/db.rs
@@ -0,0 +1,33 @@
+use crate::NotebookDocument;
+use crate::session::index::Document;
+use crate::system::LSPSystem;
+use ruff_db::Db as _;
+use ruff_db::files::{File, FilePath};
+use ty_project::{Db as ProjectDb, ProjectDatabase};
+
+#[salsa::db]
+pub(crate) trait Db: ProjectDb {
+    /// Returns the LSP [`Document`] corresponding to `File` or
+    /// `None` if the file isn't open in the editor.
+    fn document(&self, file: File) -> Option<&Document>;
+
+    /// Returns the LSP [`NotebookDocument`] corresponding to `File` or
+    /// `None` if the file isn't open in the editor or if it isn't a notebook.
+    fn notebook_document(&self, file: File) -> Option<&NotebookDocument> {
+        self.document(file)?.as_notebook()
+    }
+}
+
+#[salsa::db]
+impl Db for ProjectDatabase {
+    fn document(&self, file: File) -> Option<&Document> {
+        self.system()
+            .as_any()
+            .downcast_ref::()
+            .and_then(|system| match file.path(self) {
+                FilePath::System(path) => system.system_path_to_document(path),
+                FilePath::SystemVirtual(path) => system.system_virtual_path_to_document(path),
+                FilePath::Vendored(_) => None,
+            })
+    }
+}
diff --git a/crates/ty_server/src/document/location.rs b/crates/ty_server/src/document/location.rs
index f02dc20d98..91a064acd3 100644
--- a/crates/ty_server/src/document/location.rs
+++ b/crates/ty_server/src/document/location.rs
@@ -1,9 +1,10 @@
-use crate::PositionEncoding;
-use crate::document::{FileRangeExt, ToRangeExt};
 use lsp_types::Location;
 use ruff_db::files::FileRange;
 use ty_ide::{NavigationTarget, ReferenceTarget};
-use ty_python_semantic::Db;
+
+use crate::Db;
+use crate::PositionEncoding;
+use crate::document::{FileRangeExt, ToRangeExt};
 
 pub(crate) trait ToLink {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option;
diff --git a/crates/ty_server/src/document/range.rs b/crates/ty_server/src/document/range.rs
index 1e7f381ae5..6d3d3eb0d4 100644
--- a/crates/ty_server/src/document/range.rs
+++ b/crates/ty_server/src/document/range.rs
@@ -1,6 +1,6 @@
 use super::PositionEncoding;
+use crate::Db;
 use crate::system::file_to_url;
-use ty_python_semantic::Db;
 
 use lsp_types as types;
 use lsp_types::{Location, Position, Url};
diff --git a/crates/ty_server/src/lib.rs b/crates/ty_server/src/lib.rs
index 374c8421cf..122f50d277 100644
--- a/crates/ty_server/src/lib.rs
+++ b/crates/ty_server/src/lib.rs
@@ -4,6 +4,7 @@ use anyhow::Context;
 use lsp_server::Connection;
 use ruff_db::system::{OsSystem, SystemPathBuf};
 
+use crate::db::Db;
 pub use crate::logging::{LogLevel, init_logging};
 pub use crate::server::{PartialWorkspaceProgress, PartialWorkspaceProgressParams, Server};
 pub use crate::session::{ClientOptions, DiagnosticMode};
@@ -11,6 +12,7 @@ pub use document::{NotebookDocument, PositionEncoding, TextDocument};
 pub(crate) use session::Session;
 
 mod capabilities;
+mod db;
 mod document;
 mod logging;
 mod server;
diff --git a/crates/ty_server/src/server/api/diagnostics.rs b/crates/ty_server/src/server/api/diagnostics.rs
index adbb17dcdf..54a0e79a2e 100644
--- a/crates/ty_server/src/server/api/diagnostics.rs
+++ b/crates/ty_server/src/server/api/diagnostics.rs
@@ -10,8 +10,9 @@ use rustc_hash::FxHashMap;
 use ruff_db::diagnostic::{Annotation, Severity, SubDiagnostic};
 use ruff_db::files::FileRange;
 use ruff_db::system::SystemPathBuf;
-use ty_project::{Db, ProjectDatabase};
+use ty_project::{Db as _, ProjectDatabase};
 
+use crate::Db;
 use crate::document::{FileRangeExt, ToRangeExt};
 use crate::session::DocumentSnapshot;
 use crate::session::client::Client;
diff --git a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs
index ce55100dee..2d9c308f36 100644
--- a/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs
+++ b/crates/ty_server/src/server/api/notifications/did_change_watched_files.rs
@@ -8,7 +8,7 @@ use crate::system::AnySystemPath;
 use lsp_types as types;
 use lsp_types::{FileChangeType, notification as notif};
 use rustc_hash::FxHashMap;
-use ty_project::Db;
+use ty_project::Db as _;
 use ty_project::watch::{ChangeEvent, ChangedKind, CreatedKind, DeletedKind};
 
 pub(crate) struct DidChangeWatchedFiles;
diff --git a/crates/ty_server/src/server/api/requests/document_symbols.rs b/crates/ty_server/src/server/api/requests/document_symbols.rs
index 980e0850ef..95edd391f4 100644
--- a/crates/ty_server/src/server/api/requests/document_symbols.rs
+++ b/crates/ty_server/src/server/api/requests/document_symbols.rs
@@ -4,8 +4,9 @@ use lsp_types::request::DocumentSymbolRequest;
 use lsp_types::{DocumentSymbol, DocumentSymbolParams, SymbolInformation, Url};
 use ruff_db::files::File;
 use ty_ide::{HierarchicalSymbols, SymbolId, SymbolInfo, document_symbols};
-use ty_project::{Db, ProjectDatabase};
+use ty_project::ProjectDatabase;
 
+use crate::Db;
 use crate::document::{PositionEncoding, ToRangeExt};
 use crate::server::api::symbols::{convert_symbol_kind, convert_to_lsp_symbol_information};
 use crate::server::api::traits::{
diff --git a/crates/ty_server/src/server/api/requests/execute_command.rs b/crates/ty_server/src/server/api/requests/execute_command.rs
index a51ece8598..8c0c0f9076 100644
--- a/crates/ty_server/src/server/api/requests/execute_command.rs
+++ b/crates/ty_server/src/server/api/requests/execute_command.rs
@@ -9,7 +9,7 @@ use lsp_server::ErrorCode;
 use lsp_types::{self as types, request as req};
 use std::fmt::Write;
 use std::str::FromStr;
-use ty_project::Db;
+use ty_project::Db as _;
 
 pub(crate) struct ExecuteCommand;
 
diff --git a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs
index 2d37436116..87c7e4c77c 100644
--- a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs
+++ b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs
@@ -26,7 +26,7 @@ use serde::{Deserialize, Serialize};
 use std::collections::BTreeMap;
 use std::sync::Mutex;
 use std::time::{Duration, Instant};
-use ty_project::{Db, ProgressReporter};
+use ty_project::{ProgressReporter, ProjectDatabase};
 
 /// Handler for [Workspace diagnostics](workspace-diagnostics)
 ///
@@ -230,7 +230,7 @@ impl ProgressReporter for WorkspaceDiagnosticsProgressReporter<'_> {
         state.report_progress(&self.work_done);
     }
 
-    fn report_checked_file(&self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) {
+    fn report_checked_file(&self, db: &ProjectDatabase, file: File, diagnostics: &[Diagnostic]) {
         // Another thread might have panicked at this point because of a salsa cancellation which
         // poisoned the result. If the response is poisoned, just don't report and wait for our thread
         // to unwind with a salsa cancellation next.
@@ -260,7 +260,7 @@ impl ProgressReporter for WorkspaceDiagnosticsProgressReporter<'_> {
         state.response.maybe_flush();
     }
 
-    fn report_diagnostics(&mut self, db: &dyn Db, diagnostics: Vec) {
+    fn report_diagnostics(&mut self, db: &ProjectDatabase, diagnostics: Vec) {
         let mut by_file: BTreeMap> = BTreeMap::new();
 
         for diagnostic in diagnostics {
@@ -358,7 +358,12 @@ impl<'a> ResponseWriter<'a> {
         }
     }
 
-    fn write_diagnostics_for_file(&mut self, db: &dyn Db, file: File, diagnostics: &[Diagnostic]) {
+    fn write_diagnostics_for_file(
+        &mut self,
+        db: &ProjectDatabase,
+        file: File,
+        diagnostics: &[Diagnostic],
+    ) {
         let Some(url) = file_to_url(db, file) else {
             tracing::debug!("Failed to convert file path to URL at {}", file.path(db));
             return;
diff --git a/crates/ty_server/src/server/api/symbols.rs b/crates/ty_server/src/server/api/symbols.rs
index fc6c1bc18c..e823e32a98 100644
--- a/crates/ty_server/src/server/api/symbols.rs
+++ b/crates/ty_server/src/server/api/symbols.rs
@@ -3,8 +3,8 @@
 
 use lsp_types::{SymbolInformation, SymbolKind};
 use ty_ide::SymbolInfo;
-use ty_project::Db;
 
+use crate::Db;
 use crate::document::{PositionEncoding, ToRangeExt};
 
 /// Convert `ty_ide` `SymbolKind` to LSP `SymbolKind`
diff --git a/crates/ty_server/src/system.rs b/crates/ty_server/src/system.rs
index 17b9bcbde6..ce93d9636b 100644
--- a/crates/ty_server/src/system.rs
+++ b/crates/ty_server/src/system.rs
@@ -4,6 +4,7 @@ use std::fmt::Display;
 use std::panic::RefUnwindSafe;
 use std::sync::Arc;
 
+use crate::Db;
 use crate::document::DocumentKey;
 use crate::session::index::{Document, Index};
 use lsp_types::Url;
@@ -16,7 +17,6 @@ use ruff_db::system::{
 };
 use ruff_notebook::{Notebook, NotebookError};
 use ty_ide::cached_vendored_path;
-use ty_python_semantic::Db;
 
 /// Returns a [`Url`] for the given [`File`].
 pub(crate) fn file_to_url(db: &dyn Db, file: File) -> Option {
@@ -112,25 +112,28 @@ impl LSPSystem {
         self.index.as_ref().unwrap()
     }
 
-    fn make_document_ref(&self, path: AnySystemPath) -> Option<&Document> {
+    fn document(&self, path: AnySystemPath) -> Option<&Document> {
         let index = self.index();
         index.document(&DocumentKey::from(path)).ok()
     }
 
-    fn system_path_to_document_ref(&self, path: &SystemPath) -> Option<&Document> {
+    pub(crate) fn system_path_to_document(&self, path: &SystemPath) -> Option<&Document> {
         let any_path = AnySystemPath::System(path.to_path_buf());
-        self.make_document_ref(any_path)
+        self.document(any_path)
     }
 
-    fn system_virtual_path_to_document_ref(&self, path: &SystemVirtualPath) -> Option<&Document> {
+    pub(crate) fn system_virtual_path_to_document(
+        &self,
+        path: &SystemVirtualPath,
+    ) -> Option<&Document> {
         let any_path = AnySystemPath::SystemVirtual(path.to_path_buf());
-        self.make_document_ref(any_path)
+        self.document(any_path)
     }
 }
 
 impl System for LSPSystem {
     fn path_metadata(&self, path: &SystemPath) -> Result {
-        let document = self.system_path_to_document_ref(path);
+        let document = self.system_path_to_document(path);
 
         if let Some(document) = document {
             Ok(Metadata::new(
@@ -152,7 +155,7 @@ impl System for LSPSystem {
     }
 
     fn read_to_string(&self, path: &SystemPath) -> Result {
-        let document = self.system_path_to_document_ref(path);
+        let document = self.system_path_to_document(path);
 
         match document {
             Some(Document::Text(document)) => Ok(document.contents().to_string()),
@@ -161,7 +164,7 @@ impl System for LSPSystem {
     }
 
     fn read_to_notebook(&self, path: &SystemPath) -> std::result::Result {
-        let document = self.system_path_to_document_ref(path);
+        let document = self.system_path_to_document(path);
 
         match document {
             Some(Document::Text(document)) => Notebook::from_source_code(document.contents()),
@@ -172,7 +175,7 @@ impl System for LSPSystem {
 
     fn read_virtual_path_to_string(&self, path: &SystemVirtualPath) -> Result {
         let document = self
-            .system_virtual_path_to_document_ref(path)
+            .system_virtual_path_to_document(path)
             .ok_or_else(|| virtual_path_not_found(path))?;
 
         if let Document::Text(document) = &document {
@@ -187,7 +190,7 @@ impl System for LSPSystem {
         path: &SystemVirtualPath,
     ) -> std::result::Result {
         let document = self
-            .system_virtual_path_to_document_ref(path)
+            .system_virtual_path_to_document(path)
             .ok_or_else(|| virtual_path_not_found(path))?;
 
         match document {

From 5c69e00d1c73a2a1cb9e2da1a1f0b76d5ccf8548 Mon Sep 17 00:00:00 2001
From: Ibraheem Ahmed 
Date: Wed, 5 Nov 2025 10:03:19 -0500
Subject: [PATCH 095/180] [ty] Simplify unions containing multiple type
 variables during inference (#21275)

## Summary

Splitting this one out from https://github.com/astral-sh/ruff/pull/21210. This is also something that should be made obsolete by the new constraint solver, but is easy enough to fix now.
---
 .../resources/mdtest/generics/pep695/functions.md      | 10 ++++++++++
 crates/ty_python_semantic/src/types/generics.rs        |  6 ++++--
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md
index a5e62f6866..5db84cfd5a 100644
--- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md
+++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md
@@ -474,6 +474,16 @@ def g(x: str):
     f(prefix=x, suffix=".tar.gz")
 ```
 
+If the type variable is present multiple times in the union, we choose the correct union element to
+infer against based on the argument type:
+
+```py
+def h[T](x: list[T] | dict[T, T]) -> T | None: ...
+def _(x: list[int], y: dict[int, int]):
+    reveal_type(h(x))  # revealed: int | None
+    reveal_type(h(y))  # revealed: int | None
+```
+
 ## Nested functions see typevars bound in outer function
 
 ```py
diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs
index 444c5badd6..992e664401 100644
--- a/crates/ty_python_semantic/src/types/generics.rs
+++ b/crates/ty_python_semantic/src/types/generics.rs
@@ -1397,11 +1397,13 @@ impl<'db> SpecializationBuilder<'db> {
             return Ok(());
         }
 
-        // Remove the union elements that are not related to `formal`.
+        // Remove the union elements from `actual` that are not related to `formal`, and vice
+        // versa.
         //
         // For example, if `formal` is `list[T]` and `actual` is `list[int] | None`, we want to specialize `T`
-        // to `int`.
+        // to `int`, and so ignore the `None`.
         let actual = actual.filter_disjoint_elements(self.db, formal, self.inferable);
+        let formal = formal.filter_disjoint_elements(self.db, actual, self.inferable);
 
         match (formal, actual) {
             // TODO: We haven't implemented a full unification solver yet. If typevars appear in

From cef6600cf3a7a22db214c6cb5d2393ede4209c37 Mon Sep 17 00:00:00 2001
From: chiri 
Date: Wed, 5 Nov 2025 20:07:33 +0300
Subject: [PATCH 096/180] [`ruff`] Fix false positives on starred arguments
 (`RUF057`) (#21256)

## Summary

Fixes https://github.com/astral-sh/ruff/issues/21209

## Test Plan

`cargo nextest run ruf057`
---
 .../ruff_linter/resources/test/fixtures/ruff/RUF057.py   | 4 ++++
 .../src/rules/ruff/rules/unnecessary_round.rs            | 9 +++++++++
 ...uff_linter__rules__ruff__tests__RUF057_RUF057.py.snap | 5 +++++
 3 files changed, 18 insertions(+)

diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py
index 2db91ac322..bb43b6d1d4 100644
--- a/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py
+++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF057.py
@@ -81,3 +81,7 @@ round(# a comment
 round(
     17 # a comment
 )
+
+# See: https://github.com/astral-sh/ruff/issues/21209
+print(round(125, **{"ndigits": -2}))
+print(round(125, *[-2]))
\ No newline at end of file
diff --git a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs
index c7fe4687e8..e2ab51e1db 100644
--- a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs
+++ b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_round.rs
@@ -143,6 +143,15 @@ pub(super) fn rounded_and_ndigits<'a>(
         return None;
     }
 
+    // *args
+    if arguments.args.iter().any(Expr::is_starred_expr) {
+        return None;
+    }
+    // **kwargs
+    if arguments.keywords.iter().any(|kw| kw.arg.is_none()) {
+        return None;
+    }
+
     let rounded = arguments.find_argument_value("number", 0)?;
     let ndigits = arguments.find_argument_value("ndigits", 1);
 
diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap
index abace6c8b4..8c536b67a8 100644
--- a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap
+++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF057_RUF057.py.snap
@@ -253,6 +253,8 @@ RUF057 [*] Value being rounded is already an integer
 82 | |     17 # a comment
 83 | | )
    | |_^
+84 |
+85 |   # See: https://github.com/astral-sh/ruff/issues/21209
    |
 help: Remove unnecessary `round` call
 78 | round(# a comment
@@ -262,4 +264,7 @@ help: Remove unnecessary `round` call
    -     17 # a comment
    - )
 81 + 17
+82 | 
+83 | # See: https://github.com/astral-sh/ruff/issues/21209
+84 | print(round(125, **{"ndigits": -2}))
 note: This is an unsafe fix and may change runtime behavior

From eda85f3c646ddb9f3dddf13315d653f51d187f64 Mon Sep 17 00:00:00 2001
From: Douglas Creager 
Date: Wed, 5 Nov 2025 12:31:53 -0500
Subject: [PATCH 097/180] [ty] Constraining a typevar with itself (possibly via
 union or intersection) (#21273)

This PR carries over some of the `has_relation_to` logic for comparing a
typevar with itself. A typevar will specialize to the same type if it's
mentioned multiple times, so it is always assignable to and a subtype of
itself. (Note that typevars can only specialize to fully static types.)
This is also true when the typevar appears in a union on the right-hand
side, or in an intersection on the left-hand side. Similarly, a typevar
is always disjoint from its negation, so when a negated typevar appears
on the left-hand side, the constraint set is never satisfiable.

(Eventually this will allow us to remove the corresponding clauses from
`has_relation_to`, but that can't happen until more of #20093 lands.)
---
 .../mdtest/type_properties/constraints.md     | 169 +++++++++++++++++-
 crates/ty_python_semantic/src/types.rs        |  34 ++++
 .../ty_python_semantic/src/types/call/bind.rs |  20 +++
 .../src/types/constraints.rs                  |  55 +++++-
 .../ty_python_semantic/src/types/display.rs   |   3 +
 .../ty_extensions/ty_extensions.pyi           |   7 +
 6 files changed, 278 insertions(+), 10 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md
index 00a3e2837f..f677298c51 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md
@@ -258,6 +258,50 @@ def _[T]() -> None:
     reveal_type(ConstraintSet.range(SubSub, T, Sub) & ConstraintSet.range(Unrelated, T, object))
 ```
 
+Expanding on this, when intersecting two upper bounds constraints (`(T ≤ Base) ∧ (T ≤ Other)`), we
+intersect the upper bounds. Any type that satisfies both `T ≤ Base` and `T ≤ Other` must necessarily
+satisfy their intersection `T ≤ Base & Other`, and vice versa.
+
+```py
+from typing import Never
+from ty_extensions import Intersection, static_assert
+
+# This is not final, so it's possible for a subclass to inherit from both Base and Other.
+class Other: ...
+
+def upper_bounds[T]():
+    intersection_type = ConstraintSet.range(Never, T, Intersection[Base, Other])
+    # revealed: ty_extensions.ConstraintSet[(T@upper_bounds ≤ Base & Other)]
+    reveal_type(intersection_type)
+
+    intersection_constraint = ConstraintSet.range(Never, T, Base) & ConstraintSet.range(Never, T, Other)
+    # revealed: ty_extensions.ConstraintSet[(T@upper_bounds ≤ Base & Other)]
+    reveal_type(intersection_constraint)
+
+    # The two constraint sets are equivalent; each satisfies the other.
+    static_assert(intersection_type.satisfies(intersection_constraint))
+    static_assert(intersection_constraint.satisfies(intersection_type))
+```
+
+For an intersection of two lower bounds constraints (`(Base ≤ T) ∧ (Other ≤ T)`), we union the lower
+bounds. Any type that satisfies both `Base ≤ T` and `Other ≤ T` must necessarily satisfy their union
+`Base | Other ≤ T`, and vice versa.
+
+```py
+def lower_bounds[T]():
+    union_type = ConstraintSet.range(Base | Other, T, object)
+    # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@lower_bounds)]
+    reveal_type(union_type)
+
+    intersection_constraint = ConstraintSet.range(Base, T, object) & ConstraintSet.range(Other, T, object)
+    # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@lower_bounds)]
+    reveal_type(intersection_constraint)
+
+    # The two constraint sets are equivalent; each satisfies the other.
+    static_assert(union_type.satisfies(intersection_constraint))
+    static_assert(intersection_constraint.satisfies(union_type))
+```
+
 ### Intersection of a range and a negated range
 
 The bounds of the range constraint provide a range of types that should be included; the bounds of
@@ -335,7 +379,7 @@ def _[T]() -> None:
     reveal_type(~ConstraintSet.range(Sub, T, Super) & ~ConstraintSet.range(Sub, T, Super))
 ```
 
-Otherwise, the union cannot be simplified.
+Otherwise, the intersection cannot be simplified.
 
 ```py
 def _[T]() -> None:
@@ -350,13 +394,14 @@ def _[T]() -> None:
 In particular, the following does not simplify, even though it seems like it could simplify to
 `¬(SubSub ≤ T@_ ≤ Super)`. The issue is that there are types that are within the bounds of
 `SubSub ≤ T@_ ≤ Super`, but which are not comparable to `Base` or `Sub`, and which therefore should
-be included in the union. An example would be the type that contains all instances of `Super`,
-`Base`, and `SubSub` (but _not_ including instances of `Sub`). (We don't have a way to spell that
-type at the moment, but it is a valid type.) That type is not in `SubSub ≤ T ≤ Base`, since it
-includes `Super`, which is outside the range. It's also not in `Sub ≤ T ≤ Super`, because it does
-not include `Sub`. That means it should be in the union. (Remember that for negated range
-constraints, the lower and upper bounds define the "hole" of types that are _not_ allowed.) Since
-that type _is_ in `SubSub ≤ T ≤ Super`, it is not correct to simplify the union in this way.
+be included in the intersection. An example would be the type that contains all instances of
+`Super`, `Base`, and `SubSub` (but _not_ including instances of `Sub`). (We don't have a way to
+spell that type at the moment, but it is a valid type.) That type is not in `SubSub ≤ T ≤ Base`,
+since it includes `Super`, which is outside the range. It's also not in `Sub ≤ T ≤ Super`, because
+it does not include `Sub`. That means it should be in the intersection. (Remember that for negated
+range constraints, the lower and upper bounds define the "hole" of types that are _not_ allowed.)
+Since that type _is_ in `SubSub ≤ T ≤ Super`, it is not correct to simplify the intersection in this
+way.
 
 ```py
 def _[T]() -> None:
@@ -441,6 +486,65 @@ def _[T]() -> None:
     reveal_type(ConstraintSet.range(SubSub, T, Base) | ConstraintSet.range(Sub, T, Super))
 ```
 
+The union of two upper bound constraints (`(T ≤ Base) ∨ (T ≤ Other)`) is different than the single
+range constraint involving the corresponding union type (`T ≤ Base | Other`). There are types (such
+as `T = Base | Other`) that satisfy the union type, but not the union constraint. But every type
+that satisfies the union constraint satisfies the union type.
+
+```py
+from typing import Never
+from ty_extensions import static_assert
+
+# This is not final, so it's possible for a subclass to inherit from both Base and Other.
+class Other: ...
+
+def union[T]():
+    union_type = ConstraintSet.range(Never, T, Base | Other)
+    # revealed: ty_extensions.ConstraintSet[(T@union ≤ Base | Other)]
+    reveal_type(union_type)
+
+    union_constraint = ConstraintSet.range(Never, T, Base) | ConstraintSet.range(Never, T, Other)
+    # revealed: ty_extensions.ConstraintSet[(T@union ≤ Base) ∨ (T@union ≤ Other)]
+    reveal_type(union_constraint)
+
+    # (T = Base | Other) satisfies (T ≤ Base | Other) but not (T ≤ Base ∨ T ≤ Other)
+    specialization = ConstraintSet.range(Base | Other, T, Base | Other)
+    # revealed: ty_extensions.ConstraintSet[(T@union = Base | Other)]
+    reveal_type(specialization)
+    static_assert(specialization.satisfies(union_type))
+    static_assert(not specialization.satisfies(union_constraint))
+
+    # Every specialization that satisfies (T ≤ Base ∨ T ≤ Other) also satisfies
+    # (T ≤ Base | Other)
+    static_assert(union_constraint.satisfies(union_type))
+```
+
+These relationships are reversed for unions involving lower bounds. `T = Base` is an example that
+satisfies the union constraint (`(Base ≤ T) ∨ (Other ≤ T)`) but not the union type
+(`Base | Other ≤ T`). And every type that satisfies the union type satisfies the union constraint.
+
+```py
+def union[T]():
+    union_type = ConstraintSet.range(Base | Other, T, object)
+    # revealed: ty_extensions.ConstraintSet[(Base | Other ≤ T@union)]
+    reveal_type(union_type)
+
+    union_constraint = ConstraintSet.range(Base, T, object) | ConstraintSet.range(Other, T, object)
+    # revealed: ty_extensions.ConstraintSet[(Base ≤ T@union) ∨ (Other ≤ T@union)]
+    reveal_type(union_constraint)
+
+    # (T = Base) satisfies (Base ≤ T ∨ Other ≤ T) but not (Base | Other ≤ T)
+    specialization = ConstraintSet.range(Base, T, Base)
+    # revealed: ty_extensions.ConstraintSet[(T@union = Base)]
+    reveal_type(specialization)
+    static_assert(not specialization.satisfies(union_type))
+    static_assert(specialization.satisfies(union_constraint))
+
+    # Every specialization that satisfies (Base | Other ≤ T) also satisfies
+    # (Base ≤ T ∨ Other ≤ T)
+    static_assert(union_type.satisfies(union_constraint))
+```
+
 ### Union of a range and a negated range
 
 The bounds of the range constraint provide a range of types that should be included; the bounds of
@@ -729,3 +833,52 @@ def f[T]():
     # revealed: ty_extensions.ConstraintSet[(T@f ≤ int | str)]
     reveal_type(ConstraintSet.range(Never, T, int | str))
 ```
+
+### Constraints on the same typevar
+
+Any particular specialization maps each typevar to one type. That means it's not useful to constrain
+a typevar with itself as an upper or lower bound. No matter what type the typevar is specialized to,
+that type is always a subtype of itself. (Remember that typevars are only specialized to fully
+static types.)
+
+```py
+from typing import Never
+from ty_extensions import ConstraintSet
+
+def same_typevar[T]():
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(Never, T, T))
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(T, T, object))
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(T, T, T))
+```
+
+This is also true when the typevar appears in a union in the upper bound, or in an intersection in
+the lower bound. (Note that this lines up with how we simplify the intersection of two constraints,
+as shown above.)
+
+```py
+from ty_extensions import Intersection
+
+def same_typevar[T]():
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(Never, T, T | None))
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(Intersection[T, None], T, object))
+    # revealed: ty_extensions.ConstraintSet[always]
+    reveal_type(ConstraintSet.range(Intersection[T, None], T, T | None))
+```
+
+Similarly, if the lower bound is an intersection containing the _negation_ of the typevar, then the
+constraint set can never be satisfied, since every type is disjoint with its negation.
+
+```py
+from ty_extensions import Not
+
+def same_typevar[T]():
+    # revealed: ty_extensions.ConstraintSet[never]
+    reveal_type(ConstraintSet.range(Intersection[Not[T], None], T, object))
+    # revealed: ty_extensions.ConstraintSet[never]
+    reveal_type(ConstraintSet.range(Not[T], T, object))
+```
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index 131d9e7630..59e1ef4030 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -4197,6 +4197,14 @@ impl<'db> Type<'db> {
                 ))
                 .into()
             }
+            Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked))
+                if name == "satisfies" =>
+            {
+                Place::bound(Type::KnownBoundMethod(
+                    KnownBoundMethodType::ConstraintSetSatisfies(tracked),
+                ))
+                .into()
+            }
             Type::KnownInstance(KnownInstanceType::ConstraintSet(tracked))
                 if name == "satisfied_by_all_typevars" =>
             {
@@ -6973,6 +6981,7 @@ impl<'db> Type<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_)
             )
             | Type::DataclassDecorator(_)
@@ -7126,6 +7135,7 @@ impl<'db> Type<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
             )
             | Type::DataclassDecorator(_)
@@ -10470,6 +10480,7 @@ pub enum KnownBoundMethodType<'db> {
     ConstraintSetAlways,
     ConstraintSetNever,
     ConstraintSetImpliesSubtypeOf(TrackedConstraintSet<'db>),
+    ConstraintSetSatisfies(TrackedConstraintSet<'db>),
     ConstraintSetSatisfiedByAllTypeVars(TrackedConstraintSet<'db>),
 }
 
@@ -10499,6 +10510,7 @@ pub(super) fn walk_method_wrapper_type<'db, V: visitor::TypeVisitor<'db> + ?Size
         | KnownBoundMethodType::ConstraintSetAlways
         | KnownBoundMethodType::ConstraintSetNever
         | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+        | KnownBoundMethodType::ConstraintSetSatisfies(_)
         | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => {}
     }
 }
@@ -10568,6 +10580,10 @@ impl<'db> KnownBoundMethodType<'db> {
                 KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_),
                 KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_),
             )
+            | (
+                KnownBoundMethodType::ConstraintSetSatisfies(_),
+                KnownBoundMethodType::ConstraintSetSatisfies(_),
+            )
             | (
                 KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
                 KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
@@ -10584,6 +10600,7 @@ impl<'db> KnownBoundMethodType<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
                 KnownBoundMethodType::FunctionTypeDunderGet(_)
                 | KnownBoundMethodType::FunctionTypeDunderCall(_)
@@ -10595,6 +10612,7 @@ impl<'db> KnownBoundMethodType<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
             ) => ConstraintSet::from(false),
         }
@@ -10649,6 +10667,10 @@ impl<'db> KnownBoundMethodType<'db> {
                 KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(left_constraints),
                 KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(right_constraints),
             )
+            | (
+                KnownBoundMethodType::ConstraintSetSatisfies(left_constraints),
+                KnownBoundMethodType::ConstraintSetSatisfies(right_constraints),
+            )
             | (
                 KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(left_constraints),
                 KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(right_constraints),
@@ -10667,6 +10689,7 @@ impl<'db> KnownBoundMethodType<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
                 KnownBoundMethodType::FunctionTypeDunderGet(_)
                 | KnownBoundMethodType::FunctionTypeDunderCall(_)
@@ -10678,6 +10701,7 @@ impl<'db> KnownBoundMethodType<'db> {
                 | KnownBoundMethodType::ConstraintSetAlways
                 | KnownBoundMethodType::ConstraintSetNever
                 | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+                | KnownBoundMethodType::ConstraintSetSatisfies(_)
                 | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_),
             ) => ConstraintSet::from(false),
         }
@@ -10703,6 +10727,7 @@ impl<'db> KnownBoundMethodType<'db> {
             | KnownBoundMethodType::ConstraintSetAlways
             | KnownBoundMethodType::ConstraintSetNever
             | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+            | KnownBoundMethodType::ConstraintSetSatisfies(_)
             | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => self,
         }
     }
@@ -10720,6 +10745,7 @@ impl<'db> KnownBoundMethodType<'db> {
             | KnownBoundMethodType::ConstraintSetAlways
             | KnownBoundMethodType::ConstraintSetNever
             | KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)
+            | KnownBoundMethodType::ConstraintSetSatisfies(_)
             | KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => {
                 KnownClass::ConstraintSet
             }
@@ -10862,6 +10888,14 @@ impl<'db> KnownBoundMethodType<'db> {
                 )))
             }
 
+            KnownBoundMethodType::ConstraintSetSatisfies(_) => {
+                Either::Right(std::iter::once(Signature::new(
+                    Parameters::new([Parameter::positional_only(Some(Name::new_static("other")))
+                        .with_annotated_type(KnownClass::ConstraintSet.to_instance(db))]),
+                    Some(KnownClass::ConstraintSet.to_instance(db)),
+                )))
+            }
+
             KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(_) => {
                 Either::Right(std::iter::once(Signature::new(
                     Parameters::new([Parameter::keyword_only(Name::new_static("inferable"))
diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs
index d2739fa696..60868cfc3f 100644
--- a/crates/ty_python_semantic/src/types/call/bind.rs
+++ b/crates/ty_python_semantic/src/types/call/bind.rs
@@ -1176,6 +1176,26 @@ impl<'db> Bindings<'db> {
                         ));
                     }
 
+                    Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfies(
+                        tracked,
+                    )) => {
+                        let [Some(other)] = overload.parameter_types() else {
+                            continue;
+                        };
+                        let Type::KnownInstance(KnownInstanceType::ConstraintSet(other)) = other
+                        else {
+                            continue;
+                        };
+
+                        let result = tracked
+                            .constraints(db)
+                            .implies(db, || other.constraints(db));
+                        let tracked = TrackedConstraintSet::new(db, result);
+                        overload.set_return_type(Type::KnownInstance(
+                            KnownInstanceType::ConstraintSet(tracked),
+                        ));
+                    }
+
                     Type::KnownBoundMethod(
                         KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(tracked),
                     ) => {
diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs
index ee66cd85f3..c1e5ac2697 100644
--- a/crates/ty_python_semantic/src/types/constraints.rs
+++ b/crates/ty_python_semantic/src/types/constraints.rs
@@ -320,6 +320,11 @@ impl<'db> ConstraintSet<'db> {
         self
     }
 
+    /// Returns a constraint set encoding that this constraint set implies another.
+    pub(crate) fn implies(self, db: &'db dyn Db, other: impl FnOnce() -> Self) -> Self {
+        self.negate(db).or(db, other)
+    }
+
     pub(crate) fn iff(self, db: &'db dyn Db, other: Self) -> Self {
         ConstraintSet {
             node: self.node.iff(db, other.node),
@@ -381,12 +386,58 @@ impl<'db> ConstrainedTypeVar<'db> {
     fn new_node(
         db: &'db dyn Db,
         typevar: BoundTypeVarInstance<'db>,
-        lower: Type<'db>,
-        upper: Type<'db>,
+        mut lower: Type<'db>,
+        mut upper: Type<'db>,
     ) -> Node<'db> {
         debug_assert_eq!(lower, lower.bottom_materialization(db));
         debug_assert_eq!(upper, upper.top_materialization(db));
 
+        // Two identical typevars must always solve to the same type, so it is not useful to have
+        // an upper or lower bound that is the typevar being constrained.
+        match lower {
+            Type::TypeVar(lower_bound_typevar)
+                if typevar.is_same_typevar_as(db, lower_bound_typevar) =>
+            {
+                lower = Type::Never;
+            }
+            Type::Intersection(intersection)
+                if intersection.positive(db).iter().any(|element| {
+                    element.as_typevar().is_some_and(|element_bound_typevar| {
+                        typevar.is_same_typevar_as(db, element_bound_typevar)
+                    })
+                }) =>
+            {
+                lower = Type::Never;
+            }
+            Type::Intersection(intersection)
+                if intersection.negative(db).iter().any(|element| {
+                    element.as_typevar().is_some_and(|element_bound_typevar| {
+                        typevar.is_same_typevar_as(db, element_bound_typevar)
+                    })
+                }) =>
+            {
+                return Node::AlwaysFalse;
+            }
+            _ => {}
+        }
+        match upper {
+            Type::TypeVar(upper_bound_typevar)
+                if typevar.is_same_typevar_as(db, upper_bound_typevar) =>
+            {
+                upper = Type::object();
+            }
+            Type::Union(union)
+                if union.elements(db).iter().any(|element| {
+                    element.as_typevar().is_some_and(|element_bound_typevar| {
+                        typevar.is_same_typevar_as(db, element_bound_typevar)
+                    })
+                }) =>
+            {
+                upper = Type::object();
+            }
+            _ => {}
+        }
+
         // If `lower ≰ upper`, then the constraint cannot be satisfied, since there is no type that
         // is both greater than `lower`, and less than `upper`.
         if !lower.is_subtype_of(db, upper) {
diff --git a/crates/ty_python_semantic/src/types/display.rs b/crates/ty_python_semantic/src/types/display.rs
index 8500c142e8..42e2373134 100644
--- a/crates/ty_python_semantic/src/types/display.rs
+++ b/crates/ty_python_semantic/src/types/display.rs
@@ -535,6 +535,9 @@ impl Display for DisplayRepresentation<'_> {
             Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetImpliesSubtypeOf(_)) => {
                 f.write_str("bound method `ConstraintSet.implies_subtype_of`")
             }
+            Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfies(_)) => {
+                f.write_str("bound method `ConstraintSet.satisfies`")
+            }
             Type::KnownBoundMethod(KnownBoundMethodType::ConstraintSetSatisfiedByAllTypeVars(
                 _,
             )) => f.write_str("bound method `ConstraintSet.satisfied_by_all_typevars`"),
diff --git a/crates/ty_vendored/ty_extensions/ty_extensions.pyi b/crates/ty_vendored/ty_extensions/ty_extensions.pyi
index d23554f0ae..744bd5af37 100644
--- a/crates/ty_vendored/ty_extensions/ty_extensions.pyi
+++ b/crates/ty_vendored/ty_extensions/ty_extensions.pyi
@@ -67,6 +67,13 @@ class ConstraintSet:
         .. _subtype: https://typing.python.org/en/latest/spec/concepts.html#subtype-supertype-and-type-equivalence
         """
 
+    def satisfies(self, other: Self) -> Self:
+        """
+        Returns whether this constraint set satisfies another — that is, whether
+        every specialization that satisfies this constraint set also satisfies
+        `other`.
+        """
+
     def satisfied_by_all_typevars(
         self, *, inferable: tuple[Any, ...] | None = None
     ) -> bool:

From cddc0fedc24d6ca90601d81bacfaab418fe50a97 Mon Sep 17 00:00:00 2001
From: Bhuminjay Soni 
Date: Thu, 6 Nov 2025 00:43:28 +0530
Subject: [PATCH 098/180] [syntax-error]: no binding for nonlocal PLE0117 as a
 semantic syntax error (#21032)



## Summary



This PR ports PLE0117 as a semantic syntax error.

## Test Plan


Tests previously written

---------

Signed-off-by: 11happy 
Co-authored-by: Brent Westbrook <36778786+ntBre@users.noreply.github.com>
Co-authored-by: Brent Westbrook 
Co-authored-by: Alex Waygood 
---
 .../src/checkers/ast/analyze/statement.rs     |  3 ---
 crates/ruff_linter/src/checkers/ast/mod.rs    | 13 ++++++++++-
 .../pylint/rules/nonlocal_without_binding.rs  | 19 ---------------
 .../ruff_python_parser/src/semantic_errors.rs | 23 ++++++++++++++++++-
 crates/ruff_python_parser/tests/fixtures.rs   |  4 ++++
 .../src/semantic_index/builder.rs             |  6 +++++
 6 files changed, 44 insertions(+), 24 deletions(-)

diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
index 7c0037e10d..2e7523891b 100644
--- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
+++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
@@ -43,9 +43,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
                     pycodestyle::rules::ambiguous_variable_name(checker, name, name.range());
                 }
             }
-            if checker.is_rule_enabled(Rule::NonlocalWithoutBinding) {
-                pylint::rules::nonlocal_without_binding(checker, nonlocal);
-            }
             if checker.is_rule_enabled(Rule::NonlocalAndGlobal) {
                 pylint::rules::nonlocal_and_global(checker, nonlocal);
             }
diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs
index 2280dc33cb..f3315b3b47 100644
--- a/crates/ruff_linter/src/checkers/ast/mod.rs
+++ b/crates/ruff_linter/src/checkers/ast/mod.rs
@@ -73,7 +73,8 @@ use crate::rules::pyflakes::rules::{
     UndefinedLocalWithNestedImportStarUsage, YieldOutsideFunction,
 };
 use crate::rules::pylint::rules::{
-    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, YieldFromInAsyncFunction,
+    AwaitOutsideAsync, LoadBeforeGlobalDeclaration, NonlocalWithoutBinding,
+    YieldFromInAsyncFunction,
 };
 use crate::rules::{flake8_pyi, flake8_type_checking, pyflakes, pyupgrade};
 use crate::settings::rule_table::RuleTable;
@@ -641,6 +642,10 @@ impl SemanticSyntaxContext for Checker<'_> {
         self.semantic.global(name)
     }
 
+    fn has_nonlocal_binding(&self, name: &str) -> bool {
+        self.semantic.nonlocal(name).is_some()
+    }
+
     fn report_semantic_error(&self, error: SemanticSyntaxError) {
         match error.kind {
             SemanticSyntaxErrorKind::LateFutureImport => {
@@ -717,6 +722,12 @@ impl SemanticSyntaxContext for Checker<'_> {
                     self.report_diagnostic(pyflakes::rules::ContinueOutsideLoop, error.range);
                 }
             }
+            SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => {
+                // PLE0117
+                if self.is_rule_enabled(Rule::NonlocalWithoutBinding) {
+                    self.report_diagnostic(NonlocalWithoutBinding { name }, error.range);
+                }
+            }
             SemanticSyntaxErrorKind::ReboundComprehensionVariable
             | SemanticSyntaxErrorKind::DuplicateTypeParameter
             | SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
diff --git a/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs b/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs
index 90f5e0dde5..f71902cb32 100644
--- a/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs
+++ b/crates/ruff_linter/src/rules/pylint/rules/nonlocal_without_binding.rs
@@ -1,9 +1,6 @@
 use ruff_macros::{ViolationMetadata, derive_message_formats};
-use ruff_python_ast as ast;
-use ruff_text_size::Ranged;
 
 use crate::Violation;
-use crate::checkers::ast::Checker;
 
 /// ## What it does
 /// Checks for `nonlocal` names without bindings.
@@ -46,19 +43,3 @@ impl Violation for NonlocalWithoutBinding {
         format!("Nonlocal name `{name}` found without binding")
     }
 }
-
-/// PLE0117
-pub(crate) fn nonlocal_without_binding(checker: &Checker, nonlocal: &ast::StmtNonlocal) {
-    if !checker.semantic().scope_id.is_global() {
-        for name in &nonlocal.names {
-            if checker.semantic().nonlocal(name).is_none() {
-                checker.report_diagnostic(
-                    NonlocalWithoutBinding {
-                        name: name.to_string(),
-                    },
-                    name.range(),
-                );
-            }
-        }
-    }
-}
diff --git a/crates/ruff_python_parser/src/semantic_errors.rs b/crates/ruff_python_parser/src/semantic_errors.rs
index f35029d4b9..2775aa9065 100644
--- a/crates/ruff_python_parser/src/semantic_errors.rs
+++ b/crates/ruff_python_parser/src/semantic_errors.rs
@@ -219,7 +219,7 @@ impl SemanticSyntaxChecker {
                     AwaitOutsideAsyncFunctionKind::AsyncWith,
                 );
             }
-            Stmt::Nonlocal(ast::StmtNonlocal { range, .. }) => {
+            Stmt::Nonlocal(ast::StmtNonlocal { names, range, .. }) => {
                 // test_ok nonlocal_declaration_at_module_level
                 // def _():
                 //     nonlocal x
@@ -234,6 +234,18 @@ impl SemanticSyntaxChecker {
                         *range,
                     );
                 }
+
+                if !ctx.in_module_scope() {
+                    for name in names {
+                        if !ctx.has_nonlocal_binding(name) {
+                            Self::add_error(
+                                ctx,
+                                SemanticSyntaxErrorKind::NonlocalWithoutBinding(name.to_string()),
+                                name.range,
+                            );
+                        }
+                    }
+                }
             }
             Stmt::Break(ast::StmtBreak { range, .. }) => {
                 if !ctx.in_loop_context() {
@@ -1154,6 +1166,9 @@ impl Display for SemanticSyntaxError {
             SemanticSyntaxErrorKind::DifferentMatchPatternBindings => {
                 write!(f, "alternative patterns bind different names")
             }
+            SemanticSyntaxErrorKind::NonlocalWithoutBinding(name) => {
+                write!(f, "no binding for nonlocal `{name}` found")
+            }
         }
     }
 }
@@ -1554,6 +1569,9 @@ pub enum SemanticSyntaxErrorKind {
     ///         ...
     /// ```
     DifferentMatchPatternBindings,
+
+    /// Represents a nonlocal statement for a name that has no binding in an enclosing scope.
+    NonlocalWithoutBinding(String),
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, get_size2::GetSize)]
@@ -2004,6 +2022,9 @@ pub trait SemanticSyntaxContext {
     /// Return the [`TextRange`] at which a name is declared as `global` in the current scope.
     fn global(&self, name: &str) -> Option<TextRange>;
 
+    /// Returns `true` if `name` has a binding in an enclosing scope.
+    fn has_nonlocal_binding(&self, name: &str) -> bool;
+
     /// Returns `true` if the visitor is currently in an async context, i.e. an async function.
     fn in_async_context(&self) -> bool;
 
diff --git a/crates/ruff_python_parser/tests/fixtures.rs b/crates/ruff_python_parser/tests/fixtures.rs
index c646fe525b..2de49e6d68 100644
--- a/crates/ruff_python_parser/tests/fixtures.rs
+++ b/crates/ruff_python_parser/tests/fixtures.rs
@@ -527,6 +527,10 @@ impl SemanticSyntaxContext for SemanticSyntaxCheckerVisitor<'_> {
         None
     }
 
+    fn has_nonlocal_binding(&self, _name: &str) -> bool {
+        true
+    }
+
     fn in_async_context(&self) -> bool {
         if let Some(scope) = self.scopes.iter().next_back() {
             match scope {
diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs
index 5645fed7d4..6affc11424 100644
--- a/crates/ty_python_semantic/src/semantic_index/builder.rs
+++ b/crates/ty_python_semantic/src/semantic_index/builder.rs
@@ -2712,6 +2712,12 @@ impl SemanticSyntaxContext for SemanticIndexBuilder<'_, '_> {
         None
     }
 
+    // We handle the one syntax error that relies on this method (`NonlocalWithoutBinding`) directly
+    // in `TypeInferenceBuilder::infer_nonlocal_statement`, so this just returns `true`.
+    fn has_nonlocal_binding(&self, _name: &str) -> bool {
+        true
+    }
+
     fn in_async_context(&self) -> bool {
         for scope_info in self.scope_stack.iter().rev() {
             let scope = &self.scopes[scope_info.file_scope_id];

From 76127e5fb538ec7642af00a4dc68230ab52cf050 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Wed, 5 Nov 2025 23:15:01 +0100
Subject: [PATCH 099/180] [ty] Update salsa (#21281)

---
 Cargo.lock                                   |  6 ++---
 Cargo.toml                                   |  2 +-
 crates/ty_python_semantic/src/types/infer.rs | 28 +++++++++-----------
 fuzz/Cargo.toml                              |  2 +-
 4 files changed, 17 insertions(+), 21 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 2da7d9ff0e..db433c9c1f 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3586,7 +3586,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
 [[package]]
 name = "salsa"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
 dependencies = [
  "boxcar",
  "compact_str",
@@ -3610,12 +3610,12 @@ dependencies = [
 [[package]]
 name = "salsa-macro-rules"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
 
 [[package]]
 name = "salsa-macros"
 version = "0.24.0"
-source = "git+https://github.com/salsa-rs/salsa.git?rev=664750a6e588ed23a0d2d9105a02cb5993c8e178#664750a6e588ed23a0d2d9105a02cb5993c8e178"
+source = "git+https://github.com/salsa-rs/salsa.git?rev=05a9af7f554b64b8aadc2eeb6f2caf73d0408d09#05a9af7f554b64b8aadc2eeb6f2caf73d0408d09"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/Cargo.toml b/Cargo.toml
index b2122cea97..cd9e05d6b8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" }
 rustc-hash = { version = "2.0.0" }
 rustc-stable-hash = { version = "0.1.2" }
 # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "664750a6e588ed23a0d2d9105a02cb5993c8e178", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [
     "compact_str",
     "macros",
     "salsa_unstable",
diff --git a/crates/ty_python_semantic/src/types/infer.rs b/crates/ty_python_semantic/src/types/infer.rs
index 78e91f5883..f8ccfc05ae 100644
--- a/crates/ty_python_semantic/src/types/infer.rs
+++ b/crates/ty_python_semantic/src/types/infer.rs
@@ -114,17 +114,15 @@ pub(crate) fn infer_definition_types<'db>(
 fn definition_cycle_recover<'db>(
     db: &'db dyn Db,
     _id: salsa::Id,
-    _last_provisional_value: &DefinitionInference<'db>,
-    _value: &DefinitionInference<'db>,
+    last_provisional_value: &DefinitionInference<'db>,
+    value: DefinitionInference<'db>,
     count: u32,
     definition: Definition<'db>,
-) -> salsa::CycleRecoveryAction<DefinitionInference<'db>> {
-    if count == ITERATIONS_BEFORE_FALLBACK {
-        salsa::CycleRecoveryAction::Fallback(DefinitionInference::cycle_fallback(
-            definition.scope(db),
-        ))
+) -> DefinitionInference<'db> {
+    if &value == last_provisional_value || count != ITERATIONS_BEFORE_FALLBACK {
+        value
     } else {
-        salsa::CycleRecoveryAction::Iterate
+        DefinitionInference::cycle_fallback(definition.scope(db))
     }
 }
 
@@ -230,17 +228,15 @@ pub(crate) fn infer_isolated_expression<'db>(
 fn expression_cycle_recover<'db>(
     db: &'db dyn Db,
     _id: salsa::Id,
-    _last_provisional_value: &ExpressionInference<'db>,
-    _value: &ExpressionInference<'db>,
+    last_provisional_value: &ExpressionInference<'db>,
+    value: ExpressionInference<'db>,
     count: u32,
     input: InferExpression<'db>,
-) -> salsa::CycleRecoveryAction<ExpressionInference<'db>> {
-    if count == ITERATIONS_BEFORE_FALLBACK {
-        salsa::CycleRecoveryAction::Fallback(ExpressionInference::cycle_fallback(
-            input.expression(db).scope(db),
-        ))
+) -> ExpressionInference<'db> {
+    if &value == last_provisional_value || count != ITERATIONS_BEFORE_FALLBACK {
+        value
     } else {
-        salsa::CycleRecoveryAction::Iterate
+        ExpressionInference::cycle_fallback(input.expression(db).scope(db))
     }
 }
 
diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml
index 278267fc15..c4e40423d3 100644
--- a/fuzz/Cargo.toml
+++ b/fuzz/Cargo.toml
@@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" }
 ty_vendored = { path = "../crates/ty_vendored" }
 
 libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false }
-salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "664750a6e588ed23a0d2d9105a02cb5993c8e178", default-features = false, features = [
+salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "05a9af7f554b64b8aadc2eeb6f2caf73d0408d09", default-features = false, features = [
     "compact_str",
     "macros",
     "salsa_unstable",

From c6573b16ace72f7db86c9f6245bd0251a1e046bb Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Lo=C3=AFc=20Riegel?=
 <96702577+LoicRiegel@users.noreply.github.com>
Date: Thu, 6 Nov 2025 02:11:29 +0100
Subject: [PATCH 100/180] docs: revise Ruff setup instructions for Zed editor
 (#20935)

Co-authored-by: Micha Reiser 
---
 docs/editors/setup.md | 79 +++++++------------------------------------
 1 file changed, 13 insertions(+), 66 deletions(-)

diff --git a/docs/editors/setup.md b/docs/editors/setup.md
index 3d81935465..17652539c3 100644
--- a/docs/editors/setup.md
+++ b/docs/editors/setup.md
@@ -422,29 +422,12 @@ bundle for TextMate.
 
 ## Zed
 
-Ruff is available as an extension for the Zed editor. To install it:
+Ruff support is now built into Zed (no separate extension required).
 
-1. Open the command palette with `Cmd+Shift+P`
-1. Search for "zed: extensions"
-1. Search for "ruff" in the extensions list and click "Install"
+By default, Zed uses Ruff for formatting and linting.
 
-To configure Zed to use the Ruff language server for Python files, add the following
-to your `settings.json` file:
-
-```json
-{
-  "languages": {
-    "Python": {
-      "language_servers": ["ruff"]
-      // Or, if there are other language servers you want to use with Python
-      // "language_servers": ["pyright", "ruff"]
-    }
-  }
-}
-```
-
-To configure the language server, you can provide the [server settings](settings.md)
-under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/configuring-zed#lsp) key:
+To set up editor-wide Ruff options, provide the [server settings](settings.md)
+under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/configuring-zed#lsp) key of your `settings.json` file:
 
 ```json
 {
@@ -452,7 +435,7 @@ under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/conf
     "ruff": {
       "initialization_options": {
         "settings": {
-          // Ruff server settings goes here
+          // Ruff server settings go here
           "lineLength": 80,
           "lint": {
             "extendSelect": ["I"],
@@ -464,22 +447,14 @@ under the [`lsp.ruff.initialization_options.settings`](https://zed.dev/docs/conf
 }
 ```
 
-You can configure Ruff to format Python code on-save by registering the Ruff formatter
-and enabling the [`format_on_save`](https://zed.dev/docs/configuring-zed#format-on-save) setting:
+[`format_on_save`](https://zed.dev/docs/configuring-zed#format-on-save) is enabled by default.
+You can disable it for Python by changing `format_on_save` in your `settings.json` file:
 
 ```json
 {
   "languages": {
     "Python": {
-      "language_servers": ["ruff"],
-      "format_on_save": "on",
-      "formatter": [
-        {
-          "language_server": {
-            "name": "ruff"
-          }
-        }
-      ]
+      "format_on_save": "off"
     }
   }
 }
@@ -492,40 +467,12 @@ You can configure Ruff to fix lint violations and/or organize imports on-save by
 {
   "languages": {
     "Python": {
-      "language_servers": ["ruff"],
-      "format_on_save": "on",
-      "formatter": [
-        // Fix all auto-fixable lint violations
-        { "code_action": "source.fixAll.ruff" },
+      "code_actions_on_format": {
         // Organize imports
-        { "code_action": "source.organizeImports.ruff" }
-      ]
-    }
-  }
-}
-```
-
-Taken together, you can configure Ruff to format, fix, and organize imports on-save via the
-following `settings.json`:
-
-!!! note
-
-    For this configuration, it is important to use the correct order of the code action and
-    formatter language server settings. The code actions should be defined before the formatter to
-    ensure that the formatter takes care of any remaining style issues after the code actions have
-    been applied.
-
-```json
-{
-  "languages": {
-    "Python": {
-      "language_servers": ["ruff"],
-      "format_on_save": "on",
-      "formatter": [
-        { "code_action": "source.fixAll.ruff" },
-        { "code_action": "source.organizeImports.ruff" },
-        { "language_server": { "name": "ruff" } }
-      ]
+        "source.organizeImports.ruff": true,
+        // Fix all auto-fixable lint violations
+        "source.fixAll.ruff": true
+      }
     }
   }
 }

From b5ff96595dd3f2b85b7178fd1527b6aba9344c2d Mon Sep 17 00:00:00 2001
From: Matthew Mckee 
Date: Thu, 6 Nov 2025 11:46:08 +0000
Subject: [PATCH 101/180] [ty] Favour imported symbols over builtin symbols
 (#21285)



## Summary

Raised by @AlexWaygood.

We previously did not favour imported symbols, when we probably
should've

## Test Plan

Add test showing that we favour imported symbol even if it is
alphabetically after other symbols that are builtin.
---
 .../completion-evaluation-tasks.csv           |  2 +-
 crates/ty_ide/src/completion.rs               | 22 ++++++++++++++++++-
 2 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv
index 4bea881bf6..92d1a3f03d 100644
--- a/crates/ty_completion_eval/completion-evaluation-tasks.csv
+++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv
@@ -17,7 +17,7 @@ numpy-array,main.py,1,1
 object-attr-instance-methods,main.py,0,1
 object-attr-instance-methods,main.py,1,1
 raise-uses-base-exception,main.py,0,2
-scope-existing-over-new-import,main.py,0,13
+scope-existing-over-new-import,main.py,0,1
 scope-prioritize-closer,main.py,0,2
 scope-simple-long-identifier,main.py,0,1
 tstring-completions,main.py,0,1
diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs
index 5b84e199f0..ae856dd7e7 100644
--- a/crates/ty_ide/src/completion.rs
+++ b/crates/ty_ide/src/completion.rs
@@ -883,9 +883,10 @@ fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool {
 /// This has the effect of putting all dunder attributes after "normal"
 /// attributes, and all single-underscore attributes after dunder attributes.
 fn compare_suggestions(c1: &Completion, c2: &Completion) -> Ordering {
-    fn key<'a>(completion: &'a Completion) -> (bool, NameKind, bool, &'a Name) {
+    fn key<'a>(completion: &'a Completion) -> (bool, bool, NameKind, bool, &'a Name) {
         (
             completion.module_name.is_some(),
+            completion.builtin,
             NameKind::classify(&completion.name),
             completion.is_type_check_only,
             &completion.name,
@@ -4196,6 +4197,25 @@ type 
         ");
     }
 
+    #[test]
+    fn favour_imported_over_builtin() {
+        let snapshot =
+            completion_test_builder("from typing import Protocol\nclass Foo(P<CURSOR>: ...")
+                .filter(|c| c.name.starts_with('P'))
+                .build()
+                .snapshot();
+
+        // Here we favour `Protocol` over the other completions
+        // because `Protocol` has been imported, and the other completions are builtin.
+        assert_snapshot!(snapshot, @r"
+        Protocol
+        PendingDeprecationWarning
+        PermissionError
+        ProcessLookupError
+        PythonFinalizationError
+        ");
+    }
+
     /// A way to create a simple single-file (named `main.py`) completion test
     /// builder.
     ///

From 5517c9943a5a7d66b0ea75e95667831ceb46dd09 Mon Sep 17 00:00:00 2001
From: Ben Beasley 
Date: Thu, 6 Nov 2025 12:43:32 +0000
Subject: [PATCH 102/180] Require ignore 0.4.24 in `Cargo.toml` (#21292)



## Summary


Since 4c4ddc8c29e, ruff uses the `WalkBuilder::current_dir` API
[introduced in `ignore` version
0.4.24](https://diff.rs/ignore/0.4.23/0.4.24/src%2Fwalk.rs), so it
should explicitly depend on this minimum version.

See also https://github.com/astral-sh/ruff/pull/20979.

## Test Plan


Source inspection verifies this version is necessary; no additional
testing is required since `Cargo.lock` already has (at least) this
version.
---
 Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Cargo.toml b/Cargo.toml
index cd9e05d6b8..525366136c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -103,7 +103,7 @@ hashbrown = { version = "0.16.0", default-features = false, features = [
     "inline-more",
 ] }
 heck = "0.5.0"
-ignore = { version = "0.4.22" }
+ignore = { version = "0.4.24" }
 imara-diff = { version = "0.1.5" }
 imperative = { version = "1.0.4" }
 indexmap = { version = "2.6.0" }

From f189aad6d2e835743d43228a6b5ff2e40b17a000 Mon Sep 17 00:00:00 2001
From: Alex Waygood 
Date: Thu, 6 Nov 2025 09:00:43 -0500
Subject: [PATCH 103/180] [ty] Make special cases for `UnionType` slightly
 narrower (#21276)

Fixes https://github.com/astral-sh/ty/issues/1478
---
 .../resources/mdtest/implicit_type_aliases.md | 90 +++++++++++++++++--
 crates/ty_python_semantic/src/types/call.rs   | 32 +++++--
 .../src/types/infer/builder.rs                | 75 +++++++++++-----
 3 files changed, 158 insertions(+), 39 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
index 9ce736e235..b557a730f7 100644
--- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
+++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
@@ -135,14 +135,15 @@ def _(int_or_int: IntOrInt, list_of_int_or_list_of_int: ListOfIntOrListOfInt):
 None | None  # error: [unsupported-operator] "Operator `|` is unsupported between objects of type `None` and `None`"
 ```
 
-When constructing something non-sensical like `int | 1`, we could ideally emit a diagnostic for the
-expression itself, as it leads to a `TypeError` at runtime. No other type checker supports this, so
-for now we only emit an error when it is used in a type expression:
+When constructing something nonsensical like `int | 1`, we emit a diagnostic for the expression
+itself, as it leads to a `TypeError` at runtime. The result of the expression is then inferred as
+`Unknown`, so we permit it to be used in a type expression.
 
 ```py
-IntOrOne = int | 1
+IntOrOne = int | 1  # error: [unsupported-operator]
+
+reveal_type(IntOrOne)  # revealed: Unknown
 
-# error: [invalid-type-form] "Variable of type `Literal[1]` is not allowed in a type expression"
 def _(int_or_one: IntOrOne):
     reveal_type(int_or_one)  # revealed: Unknown
 ```
@@ -160,6 +161,77 @@ def f(SomeUnionType: UnionType):
 f(int | str)
 ```
 
+## `|` operator between class objects and non-class objects
+
+Using the `|` operator between a class object and a non-class object does not create a `UnionType`
+instance; it calls the relevant dunder as normal:
+
+```py
+class Foo:
+    def __or__(self, other) -> str:
+        return "foo"
+
+reveal_type(Foo() | int)  # revealed: str
+reveal_type(Foo() | list[int])  # revealed: str
+
+class Bar:
+    def __ror__(self, other) -> str:
+        return "bar"
+
+reveal_type(int | Bar())  # revealed: str
+reveal_type(list[int] | Bar())  # revealed: str
+
+class Invalid:
+    def __or__(self, other: "Invalid") -> str:
+        return "Invalid"
+
+    def __ror__(self, other: "Invalid") -> str:
+        return "Invalid"
+
+# error: [unsupported-operator]
+reveal_type(int | Invalid())  # revealed: Unknown
+# error: [unsupported-operator]
+reveal_type(Invalid() | list[int])  # revealed: Unknown
+```
+
+## Custom `__(r)or__` methods on metaclasses are only partially respected
+
+A drawback of our extensive special casing of `|` operations between class objects is that
+`__(r)or__` methods on metaclasses are completely disregarded if two classes are `|`'d together. We
+respect the metaclass dunder if a class is `|`'d with a non-class, however:
+
+```py
+class Meta(type):
+    def __or__(self, other) -> str:
+        return "Meta"
+
+class Foo(metaclass=Meta): ...
+class Bar(metaclass=Meta): ...
+
+X = Foo | Bar
+
+# In an ideal world, perhaps we would respect `Meta.__or__` here and reveal `str`?
+# But we still need to record what the elements are, since (according to the typing spec)
+# `X` is still a valid type alias
+reveal_type(X)  # revealed: types.UnionType
+
+def f(obj: X):
+    reveal_type(obj)  # revealed: Foo | Bar
+
+# We do respect the metaclass `__or__` if it's used between a class and a non-class, however:
+
+Y = Foo | 42
+reveal_type(Y)  # revealed: str
+
+Z = Bar | 56
+reveal_type(Z)  # revealed: str
+
+def g(
+    arg1: Y,  # error: [invalid-type-form]
+    arg2: Z,  # error: [invalid-type-form]
+): ...
+```
+
 ## Generic types
 
 Implicit type aliases can also refer to generic types:
@@ -191,7 +263,8 @@ From the [typing spec on type aliases](https://typing.python.org/en/latest/spec/
 > type hint is acceptable in a type alias
 
 However, no other type checker seems to support stringified annotations in implicit type aliases. We
-currently also do not support them:
+currently also do not support them, and we detect places where these attempted unions cause runtime
+errors:
 
 ```py
 AliasForStr = "str"
@@ -200,9 +273,10 @@ AliasForStr = "str"
 def _(s: AliasForStr):
     reveal_type(s)  # revealed: Unknown
 
-IntOrStr = int | "str"
+IntOrStr = int | "str"  # error: [unsupported-operator]
+
+reveal_type(IntOrStr)  # revealed: Unknown
 
-# error: [invalid-type-form] "Variable of type `Literal["str"]` is not allowed in a type expression"
 def _(int_or_str: IntOrStr):
     reveal_type(int_or_str)  # revealed: Unknown
 ```
diff --git a/crates/ty_python_semantic/src/types/call.rs b/crates/ty_python_semantic/src/types/call.rs
index e2fb7dac96..084fdbcfbd 100644
--- a/crates/ty_python_semantic/src/types/call.rs
+++ b/crates/ty_python_semantic/src/types/call.rs
@@ -1,8 +1,8 @@
 use super::context::InferContext;
 use super::{Signature, Type, TypeContext};
 use crate::Db;
-use crate::types::PropertyInstanceType;
 use crate::types::call::bind::BindingError;
+use crate::types::{MemberLookupPolicy, PropertyInstanceType};
 use ruff_python_ast as ast;
 
 mod arguments;
@@ -16,6 +16,16 @@ impl<'db> Type<'db> {
         left_ty: Type<'db>,
         op: ast::Operator,
         right_ty: Type<'db>,
     ) -> Result<Bindings<'db>, CallBinOpError> {
+        Self::try_call_bin_op_with_policy(db, left_ty, op, right_ty, MemberLookupPolicy::default())
+    }
+
+    pub(crate) fn try_call_bin_op_with_policy(
+        db: &'db dyn Db,
+        left_ty: Type<'db>,
+        op: ast::Operator,
+        right_ty: Type<'db>,
+        policy: MemberLookupPolicy,
     ) -> Result<Bindings<'db>, CallBinOpError> {
         // We either want to call lhs.__op__ or rhs.__rop__. The full decision tree from
         // the Python spec [1] is:
@@ -43,39 +53,43 @@ impl<'db> Type<'db> {
                 && rhs_reflected != left_class.member(db, reflected_dunder).place
             {
                 return Ok(right_ty
-                    .try_call_dunder(
+                    .try_call_dunder_with_policy(
                         db,
                         reflected_dunder,
-                        CallArguments::positional([left_ty]),
+                        &mut CallArguments::positional([left_ty]),
                         TypeContext::default(),
+                        policy,
                     )
                     .or_else(|_| {
-                        left_ty.try_call_dunder(
+                        left_ty.try_call_dunder_with_policy(
                             db,
                             op.dunder(),
-                            CallArguments::positional([right_ty]),
+                            &mut CallArguments::positional([right_ty]),
                             TypeContext::default(),
+                            policy,
                         )
                     })?);
             }
         }
 
-        let call_on_left_instance = left_ty.try_call_dunder(
+        let call_on_left_instance = left_ty.try_call_dunder_with_policy(
             db,
             op.dunder(),
-            CallArguments::positional([right_ty]),
+            &mut CallArguments::positional([right_ty]),
             TypeContext::default(),
+            policy,
         );
 
         call_on_left_instance.or_else(|_| {
             if left_ty == right_ty {
                 Err(CallBinOpError::NotSupported)
             } else {
-                Ok(right_ty.try_call_dunder(
+                Ok(right_ty.try_call_dunder_with_policy(
                     db,
                     op.reflected_dunder(),
-                    CallArguments::positional([left_ty]),
+                    &mut CallArguments::positional([left_ty]),
                     TypeContext::default(),
+                    policy,
                 )?)
             }
         })
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index b7608bbfff..e72b4af8db 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -8474,11 +8474,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 | Type::GenericAlias(..)
                 | Type::SpecialForm(_)
                 | Type::KnownInstance(KnownInstanceType::UnionType(_)),
-                _,
-                ast::Operator::BitOr,
-            )
-            | (
-                _,
                 Type::ClassLiteral(..)
                 | Type::SubclassOf(..)
                 | Type::GenericAlias(..)
@@ -8486,30 +8481,66 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 | Type::KnownInstance(KnownInstanceType::UnionType(_)),
                 ast::Operator::BitOr,
             ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
-                // For a value expression like `int | None`, the inferred type for `None` will be
-                // a nominal instance of `NoneType`, so we need to convert it to a class literal
-                // such that it can later be converted back to a nominal instance type when calling
-                // `.in_type_expression` on the `UnionType` instance.
-                let convert_none_type = |ty: Type<'db>| {
-                    if ty.is_none(self.db()) {
-                        KnownClass::NoneType.to_class_literal(self.db())
-                    } else {
-                        ty
-                    }
-                };
-
                 if left_ty.is_equivalent_to(self.db(), right_ty) {
                     Some(left_ty)
                 } else {
                     Some(Type::KnownInstance(KnownInstanceType::UnionType(
-                        UnionTypeInstance::new(
-                            self.db(),
-                            convert_none_type(left_ty),
-                            convert_none_type(right_ty),
-                        ),
+                        UnionTypeInstance::new(self.db(), left_ty, right_ty),
                     )))
                 }
             }
+            (
+                Type::ClassLiteral(..)
+                | Type::SubclassOf(..)
+                | Type::GenericAlias(..)
+                | Type::KnownInstance(..)
+                | Type::SpecialForm(..),
+                Type::NominalInstance(instance),
+                ast::Operator::BitOr,
+            )
+            | (
+                Type::NominalInstance(instance),
+                Type::ClassLiteral(..)
+                | Type::SubclassOf(..)
+                | Type::GenericAlias(..)
+                | Type::KnownInstance(..)
+                | Type::SpecialForm(..),
+                ast::Operator::BitOr,
+            ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310
+                && instance.has_known_class(self.db(), KnownClass::NoneType) =>
+            {
+                Some(Type::KnownInstance(KnownInstanceType::UnionType(
+                    UnionTypeInstance::new(self.db(), left_ty, right_ty),
+                )))
+            }
+
+            // We avoid calling `type.__(r)or__`, as typeshed annotates these methods as
+            // accepting `Any` (since typeforms are inexpressable in the type system currently).
+            // This means that many common errors would not be caught if we fell back to typeshed's stubs here.
+            //
+            // Note that if a class had a custom metaclass that overrode `__(r)or__`, we would also ignore
+            // that custom method as we'd take one of the earlier branches.
+            // This seems like it's probably rare enough that it's acceptable, however.
+            (
+                Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..),
+                _,
+                ast::Operator::BitOr,
+            )
+            | (
+                _,
+                Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..),
+                ast::Operator::BitOr,
+            ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
+                Type::try_call_bin_op_with_policy(
+                    self.db(),
+                    left_ty,
+                    ast::Operator::BitOr,
+                    right_ty,
+                    MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK,
+                )
+                .ok()
+                .map(|binding| binding.return_type(self.db()))
+            }
 
             // We've handled all of the special cases that we support for literals, so we need to
             // fall back on looking for dunder methods on one of the operand types.

From 132d10fb6fb30db17ebf894284e97cd2cc831e10 Mon Sep 17 00:00:00 2001
From: Zanie Blue 
Date: Thu, 6 Nov 2025 08:27:49 -0600
Subject: [PATCH 104/180] [ty] Discover site-packages from the environment that
 ty is installed in (#21286)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit



## Summary



Closes https://github.com/astral-sh/ty/issues/989

There are various situations where users expect the Python packages
installed in the same environment as ty itself to be considered during
type checking. A minimal example would look like:

```
uv venv my-env
uv pip install --python my-env ty httpx
echo "import httpx" > foo.py
./my-env/bin/ty check foo.py
```

or

```
uv tool install ty --with httpx
echo "import httpx" > foo.py
ty check foo.py
```

While these are a bit contrived, there are real-world situations where a
user would expect a similar behavior to work. Notably, all of the other
type checkers consider their own environment when determining search
paths (though I'll admit that I have not verified when they choose not
to do this).

One common situation where users are encountering this today is with
`uvx --with-requirements script.py ty check script.py` — which is
currently our "best" recommendation for type checking a PEP 723 script,
but it doesn't work.

Of the options discussed in
https://github.com/astral-sh/ty/issues/989#issuecomment-3307417985, I've
chosen (2) as our criteria for including ty's environment in the search
paths.

- If no virtual environment is discovered, we will always include ty's
environment.
- If a `.venv` is discovered in the working directory, we will _prepend_
ty's environment to the search paths. The dependencies in ty's
environment (e.g., from `uvx --with`) will take precedence.
- If a virtual environment is active, e.g., `VIRTUAL_ENV` (i.e.,
including conda prefixes) is set, we will not include ty's environment.

The reason we need to special case the `.venv` case is that we both

1.  Recommend `uvx ty` today as a way to check your project
2. Want to enable `uvx --with <...> ty`

And I don't want (2) to break when you _happen_ to be in a project
(i.e., if we only included ty's environment when _no_ environment is
found) and don't want to remove support for (1).

I think long-term, I want to make `uvx <tool>` layer the environment on
_top_ of the project environment (in uv), which would obviate the need
for this change when you're using uv. However, that change is breaking
and I think users will expect this behavior in contexts where they're
not using uv, so I think we should handle it in ty regardless.

I've opted not to include the environment if it's non-virtual (i.e., a
system environment) for now. It seems better to start by being more
restrictive. I left a comment in the code.

## Test Plan

I did some manual testing with the initial commit, then subsequently
added some unit tests.

```
❯ echo "import httpx" > example.py
❯ uvx --with httpx ty check example.py
Installed 8 packages in 19ms
error[unresolved-import]: Cannot resolve imported module `httpx`
 --> foo/example.py:1:8
  |
1 | import httpx
  |        ^^^^^
  |
info: Searched in the following paths during module resolution:
info:   1. /Users/zb/workspace/ty/python (first-party code)
info:   2. /Users/zb/workspace/ty (first-party code)
info:   3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default

Found 1 diagnostic
❯ uvx --from . --with httpx ty check example.py
All checks passed!
```

```
❯ uv init --script foo.py
Initialized script at `foo.py`
❯ uv add --script foo.py httpx
warning: The Python request from `.python-version` resolved to Python 3.13.8, which is incompatible with the script's Python requirement: `>=3.14`
Updated `foo.py`
❯ echo "import httpx" >> foo.py
❯ uvx --with-requirements foo.py ty check foo.py
error[unresolved-import]: Cannot resolve imported module `httpx`
  --> foo.py:15:8
   |
13 | if __name__ == "__main__":
14 |     main()
15 | import httpx
   |        ^^^^^
   |
info: Searched in the following paths during module resolution:
info:   1. /Users/zb/workspace/ty/python (first-party code)
info:   2. /Users/zb/workspace/ty (first-party code)
info:   3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default

Found 1 diagnostic
❯ uvx --from . --with-requirements foo.py ty check foo.py
All checks passed!
```

Notice we do not include ty's environment if `VIRTUAL_ENV` is set

```
❯ VIRTUAL_ENV=.venv uvx --with httpx ty check foo/example.py
error[unresolved-import]: Cannot resolve imported module `httpx`
 --> foo/example.py:1:8
  |
1 | import httpx
  |        ^^^^^
  |
info: Searched in the following paths during module resolution:
info:   1. /Users/zb/workspace/ty/python (first-party code)
info:   2. /Users/zb/workspace/ty (first-party code)
info:   3. vendored://stdlib (stdlib typeshed stubs vendored by ty)
info:   4. /Users/zb/workspace/ty/.venv/lib/python3.13/site-packages (site-packages)
info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
info: rule `unresolved-import` is enabled by default

Found 1 diagnostic
```
---
 crates/ty/tests/cli/main.rs                   |  43 ++-
 crates/ty/tests/cli/python_environment.rs     | 274 +++++++++++++++++-
 crates/ty_project/src/metadata/options.rs     |  56 +++-
 .../ty_python_semantic/src/site_packages.rs   |  62 +++-
 4 files changed, 417 insertions(+), 18 deletions(-)

diff --git a/crates/ty/tests/cli/main.rs b/crates/ty/tests/cli/main.rs
index e911e300c0..446ba86611 100644
--- a/crates/ty/tests/cli/main.rs
+++ b/crates/ty/tests/cli/main.rs
@@ -5,6 +5,7 @@ mod python_environment;
 mod rule_selection;
 
 use anyhow::Context as _;
+use insta::Settings;
 use insta::internals::SettingsBindDropGuard;
 use insta_cmd::{assert_cmd_snapshot, get_cargo_bin};
 use std::{
@@ -760,8 +761,10 @@ fn can_handle_large_binop_expressions() -> anyhow::Result<()> {
 
 pub(crate) struct CliTest {
     _temp_dir: TempDir,
-    _settings_scope: SettingsBindDropGuard,
+    settings: Settings,
+    settings_scope: Option<SettingsBindDropGuard>,
     project_dir: PathBuf,
+    ty_binary_path: PathBuf,
 }
 
 impl CliTest {
@@ -794,7 +797,9 @@ impl CliTest {
         Ok(Self {
             project_dir,
             _temp_dir: temp_dir,
-            _settings_scope: settings_scope,
+            settings,
+            settings_scope: Some(settings_scope),
+            ty_binary_path: get_cargo_bin("ty"),
         })
     }
 
@@ -823,6 +828,30 @@ impl CliTest {
         Ok(())
     }
 
+    /// Return [`Self`] with the ty binary copied to the specified path instead.
+    pub(crate) fn with_ty_at(mut self, dest_path: impl AsRef) -> anyhow::Result {
+        let dest_path = dest_path.as_ref();
+        let dest_path = self.project_dir.join(dest_path);
+
+        Self::ensure_parent_directory(&dest_path)?;
+        std::fs::copy(&self.ty_binary_path, &dest_path)
+            .with_context(|| format!("Failed to copy ty binary to `{}`", dest_path.display()))?;
+
+        self.ty_binary_path = dest_path;
+        Ok(self)
+    }
+
+    /// Add a filter to the settings and rebind them.
+    pub(crate) fn with_filter(mut self, pattern: &str, replacement: &str) -> Self {
+        self.settings.add_filter(pattern, replacement);
+        // Drop the old scope before binding a new one, otherwise the old scope is dropped _after_
+        // binding and assigning the new one, restoring the settings to their state before the old
+        // scope was bound.
+        drop(self.settings_scope.take());
+        self.settings_scope = Some(self.settings.bind_to_scope());
+        self
+    }
+
     fn ensure_parent_directory(path: &Path) -> anyhow::Result<()> {
         if let Some(parent) = path.parent() {
             std::fs::create_dir_all(parent)
@@ -868,7 +897,7 @@ impl CliTest {
     }
 
     pub(crate) fn command(&self) -> Command {
-        let mut command = Command::new(get_cargo_bin("ty"));
+        let mut command = Command::new(&self.ty_binary_path);
         command.current_dir(&self.project_dir).arg("check");
 
         // Unset all environment variables because they can affect test behavior.
@@ -881,3 +910,11 @@ impl CliTest {
 fn tempdir_filter(path: &Path) -> String {
     format!(r"{}\\?/?", regex::escape(path.to_str().unwrap()))
 }
+
+fn site_packages_filter(python_version: &str) -> String {
+    if cfg!(windows) {
+        "Lib/site-packages".to_string()
+    } else {
+        format!("lib/python{}/site-packages", regex::escape(python_version))
+    }
+}
diff --git a/crates/ty/tests/cli/python_environment.rs b/crates/ty/tests/cli/python_environment.rs
index 04fa8be88f..de6d99aa9a 100644
--- a/crates/ty/tests/cli/python_environment.rs
+++ b/crates/ty/tests/cli/python_environment.rs
@@ -1,7 +1,7 @@
 use insta_cmd::assert_cmd_snapshot;
 use ruff_python_ast::PythonVersion;
 
-use crate::CliTest;
+use crate::{CliTest, site_packages_filter};
 
 /// Specifying an option on the CLI should take precedence over the same setting in the
 /// project's configuration. Here, this is tested for the Python version.
@@ -1654,6 +1654,278 @@ home = ./
     Ok(())
 }
 
+/// ty should include site packages from its own environment when no other environment is found.
+#[test]
+fn ty_environment_is_only_environment() -> anyhow::Result<()> {
+    let ty_venv_site_packages = if cfg!(windows) {
+        "ty-venv/Lib/site-packages"
+    } else {
+        "ty-venv/lib/python3.13/site-packages"
+    };
+
+    let ty_executable_path = if cfg!(windows) {
+        "ty-venv/Scripts/ty.exe"
+    } else {
+        "ty-venv/bin/ty"
+    };
+
+    let ty_package_path = format!("{ty_venv_site_packages}/ty_package/__init__.py");
+
+    let case = CliTest::with_files([
+        (ty_package_path.as_str(), "class TyEnvClass: ..."),
+        (
+            "ty-venv/pyvenv.cfg",
+            r"
+            home = ./
+            version = 3.13
+            ",
+        ),
+        (
+            "test.py",
+            r"
+            from ty_package import TyEnvClass
+            ",
+        ),
+    ])?;
+
+    let case = case.with_ty_at(ty_executable_path)?;
+    assert_cmd_snapshot!(case.command(), @r###"
+    success: true
+    exit_code: 0
+    ----- stdout -----
+    All checks passed!
+
+    ----- stderr -----
+    "###);
+
+    Ok(())
+}
+
+/// ty should include site packages from both its own environment and a local `.venv`. The packages
+/// from ty's environment should take precedence.
+#[test]
+fn ty_environment_and_discovered_venv() -> anyhow::Result<()> {
+    let ty_venv_site_packages = if cfg!(windows) {
+        "ty-venv/Lib/site-packages"
+    } else {
+        "ty-venv/lib/python3.13/site-packages"
+    };
+
+    let ty_executable_path = if cfg!(windows) {
+        "ty-venv/Scripts/ty.exe"
+    } else {
+        "ty-venv/bin/ty"
+    };
+
+    let local_venv_site_packages = if cfg!(windows) {
+        ".venv/Lib/site-packages"
+    } else {
+        ".venv/lib/python3.13/site-packages"
+    };
+
+    let ty_unique_package = format!("{ty_venv_site_packages}/ty_package/__init__.py");
+    let local_unique_package = format!("{local_venv_site_packages}/local_package/__init__.py");
+    let ty_conflicting_package = format!("{ty_venv_site_packages}/shared_package/__init__.py");
+    let local_conflicting_package =
+        format!("{local_venv_site_packages}/shared_package/__init__.py");
+
+    let case = CliTest::with_files([
+        (ty_unique_package.as_str(), "class TyEnvClass: ..."),
+        (local_unique_package.as_str(), "class LocalClass: ..."),
+        (ty_conflicting_package.as_str(), "class FromTyEnv: ..."),
+        (
+            local_conflicting_package.as_str(),
+            "class FromLocalVenv: ...",
+        ),
+        (
+            "ty-venv/pyvenv.cfg",
+            r"
+            home = ./
+            version = 3.13
+            ",
+        ),
+        (
+            ".venv/pyvenv.cfg",
+            r"
+            home = ./
+            version = 3.13
+            ",
+        ),
+        (
+            "test.py",
+            r"
+            # Should resolve from ty's environment
+            from ty_package import TyEnvClass
+            # Should resolve from local .venv
+            from local_package import LocalClass
+            # Should resolve from ty's environment (takes precedence)
+            from shared_package import FromTyEnv
+            # Should NOT resolve (shadowed by ty's environment version)
+            from shared_package import FromLocalVenv
+            ",
+        ),
+    ])?
+    .with_ty_at(ty_executable_path)?;
+
+    assert_cmd_snapshot!(case.command(), @r###"
+    success: false
+    exit_code: 1
+    ----- stdout -----
+    error[unresolved-import]: Module `shared_package` has no member `FromLocalVenv`
+     --> test.py:9:28
+      |
+    7 | from shared_package import FromTyEnv
+    8 | # Should NOT resolve (shadowed by ty's environment version)
+    9 | from shared_package import FromLocalVenv
+      |                            ^^^^^^^^^^^^^
+      |
+    info: rule `unresolved-import` is enabled by default
+
+    Found 1 diagnostic
+
+    ----- stderr -----
+    "###);
+
+    Ok(())
+}
+
+/// When `VIRTUAL_ENV` is set, ty should *not* discover its own environment's site-packages.
+#[test]
+fn ty_environment_and_active_environment() -> anyhow::Result<()> {
+    let ty_venv_site_packages = if cfg!(windows) {
+        "ty-venv/Lib/site-packages"
+    } else {
+        "ty-venv/lib/python3.13/site-packages"
+    };
+
+    let ty_executable_path = if cfg!(windows) {
+        "ty-venv/Scripts/ty.exe"
+    } else {
+        "ty-venv/bin/ty"
+    };
+
+    let active_venv_site_packages = if cfg!(windows) {
+        "active-venv/Lib/site-packages"
+    } else {
+        "active-venv/lib/python3.13/site-packages"
+    };
+
+    let ty_package_path = format!("{ty_venv_site_packages}/ty_package/__init__.py");
+    let active_package_path = format!("{active_venv_site_packages}/active_package/__init__.py");
+
+    let case = CliTest::with_files([
+        (ty_package_path.as_str(), "class TyEnvClass: ..."),
+        (
+            "ty-venv/pyvenv.cfg",
+            r"
+            home = ./
+            version = 3.13
+            ",
+        ),
+        (active_package_path.as_str(), "class ActiveClass: ..."),
+        (
+            "active-venv/pyvenv.cfg",
+            r"
+            home = ./
+            version = 3.13
+            ",
+        ),
+        (
+            "test.py",
+            r"
+            from ty_package import TyEnvClass
+            from active_package import ActiveClass
+            ",
+        ),
+    ])?
+    .with_ty_at(ty_executable_path)?
+    .with_filter(&site_packages_filter("3.13"), "");
+
+    assert_cmd_snapshot!(
+        case.command()
+            .env("VIRTUAL_ENV", case.root().join("active-venv")),
+        @r"
+    success: false
+    exit_code: 1
+    ----- stdout -----
+    error[unresolved-import]: Cannot resolve imported module `ty_package`
+     --> test.py:2:6
+      |
+    2 | from ty_package import TyEnvClass
+      |      ^^^^^^^^^^
+    3 | from active_package import ActiveClass
+      |
+    info: Searched in the following paths during module resolution:
+    info:   1. / (first-party code)
+    info:   2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
+    info:   3. /active-venv/ (site-packages)
+    info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
+    info: rule `unresolved-import` is enabled by default
+
+    Found 1 diagnostic
+
+    ----- stderr -----
+    "
+    );
+
+    Ok(())
+}
+
+/// When ty is installed in a system environment rather than a virtual environment, it should
+/// not include the environment's site-packages in its search path.
+#[test]
+fn ty_environment_is_system_not_virtual() -> anyhow::Result<()> {
+    let ty_system_site_packages = if cfg!(windows) {
+        "system-python/Lib/site-packages"
+    } else {
+        "system-python/lib/python3.13/site-packages"
+    };
+
+    let ty_executable_path = if cfg!(windows) {
+        "system-python/Scripts/ty.exe"
+    } else {
+        "system-python/bin/ty"
+    };
+
+    let ty_package_path = format!("{ty_system_site_packages}/system_package/__init__.py");
+
+    let case = CliTest::with_files([
+        // Package in system Python installation (should NOT be discovered)
+        (ty_package_path.as_str(), "class SystemClass: ..."),
+        // Note: NO pyvenv.cfg - this is a system installation, not a venv
+        (
+            "test.py",
+            r"
+            from system_package import SystemClass
+            ",
+        ),
+    ])?
+    .with_ty_at(ty_executable_path)?;
+
+    assert_cmd_snapshot!(case.command(), @r###"
+    success: false
+    exit_code: 1
+    ----- stdout -----
+    error[unresolved-import]: Cannot resolve imported module `system_package`
+     --> test.py:2:6
+      |
+    2 | from system_package import SystemClass
+      |      ^^^^^^^^^^^^^^
+      |
+    info: Searched in the following paths during module resolution:
+    info:   1. / (first-party code)
+    info:   2. vendored://stdlib (stdlib typeshed stubs vendored by ty)
+    info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment
+    info: rule `unresolved-import` is enabled by default
+
+    Found 1 diagnostic
+
+    ----- stderr -----
+    "###);
+
+    Ok(())
+}
+
 #[test]
 fn src_root_deprecation_warning() -> anyhow::Result<()> {
     let case = CliTest::with_files([
diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs
index 1e498f6bf8..d83270be90 100644
--- a/crates/ty_project/src/metadata/options.rs
+++ b/crates/ty_project/src/metadata/options.rs
@@ -164,14 +164,24 @@ impl Options {
                 .context("Failed to discover local Python environment")?
         };
 
-        let site_packages_paths = if let Some(python_environment) = python_environment.as_ref() {
+        let self_site_packages = self_environment_search_paths(
             python_environment
-                .site_packages_paths(system)
-                .context("Failed to discover the site-packages directory")?
+                .as_ref()
+                .map(ty_python_semantic::PythonEnvironment::origin)
+                .cloned(),
+            system,
+        )
+        .unwrap_or_default();
+
+        let site_packages_paths = if let Some(python_environment) = python_environment.as_ref() {
+            self_site_packages.concatenate(
+                python_environment
+                    .site_packages_paths(system)
+                    .context("Failed to discover the site-packages directory")?,
+            )
         } else {
             tracing::debug!("No virtual environment found");
-
-            SitePackagesPaths::default()
+            self_site_packages
         };
 
         let real_stdlib_path = python_environment.as_ref().and_then(|python_environment| {
@@ -461,6 +471,42 @@ impl Options {
     }
 }
 
+/// Return the site-packages from the environment ty is installed in, as derived from ty's
+/// executable.
+///
+/// If there's an existing environment with an origin that does not allow including site-packages
+/// from ty's environment, discovery of ty's environment is skipped and [`None`] is returned.
+///
+/// Since ty may be executed from an arbitrary non-Python location, errors during discovery of ty's
+/// environment are not raised, instead [`None`] is returned.
+fn self_environment_search_paths(
+    existing_origin: Option<SysPrefixPathOrigin>,
+    system: &dyn System,
+) -> Option<SitePackagesPaths> {
+    if existing_origin.is_some_and(|origin| !origin.allows_concatenation_with_self_environment()) {
+        return None;
+    }
+
+    let Ok(exe_path) = std::env::current_exe() else {
+        return None;
+    };
+    let ty_path = SystemPath::from_std_path(exe_path.as_path())?;
+
+    let environment = PythonEnvironment::new(ty_path, SysPrefixPathOrigin::SelfEnvironment, system)
+        .inspect_err(|err| tracing::debug!("Failed to discover ty's environment: {err}"))
+        .ok()?;
+
+    let search_paths = environment
+        .site_packages_paths(system)
+        .inspect_err(|err| {
+            tracing::debug!("Failed to discover site-packages in ty's environment: {err}");
+        })
+        .ok();
+
+    tracing::debug!("Using site-packages from ty's environment");
+    search_paths
+}
+
 #[derive(
     Debug,
     Default,
diff --git a/crates/ty_python_semantic/src/site_packages.rs b/crates/ty_python_semantic/src/site_packages.rs
index c162dfc70a..8418db6124 100644
--- a/crates/ty_python_semantic/src/site_packages.rs
+++ b/crates/ty_python_semantic/src/site_packages.rs
@@ -62,6 +62,15 @@ impl SitePackagesPaths {
         self.0.extend(other.0);
     }
 
+    /// Concatenate two instances of [`SitePackagesPaths`].
+    #[must_use]
+    pub fn concatenate(mut self, other: Self) -> Self {
+        for path in other {
+            self.0.insert(path);
+        }
+        self
+    }
+
     /// Tries to detect the version from the layout of the `site-packages` directory.
     pub fn python_version_from_layout(&self) -> Option<PythonVersion> {
         if cfg!(windows) {
@@ -252,6 +261,13 @@ impl PythonEnvironment {
             Self::System(env) => env.real_stdlib_directory(system),
         }
     }
+
+    pub fn origin(&self) -> &SysPrefixPathOrigin {
+        match self {
+            Self::Virtual(env) => &env.root_path.origin,
+            Self::System(env) => &env.root_path.origin,
+        }
+    }
 }
 
 /// Enumeration of the subdirectories of `sys.prefix` that could contain a
@@ -1393,15 +1409,15 @@ impl SysPrefixPath {
     ) -> SitePackagesDiscoveryResult<Self> {
         let sys_prefix = if !origin.must_point_directly_to_sys_prefix()
             && system.is_file(unvalidated_path)
-            && unvalidated_path
-                .file_name()
-                .is_some_and(|name| name.starts_with("python"))
-        {
-            // It looks like they passed us a path to a Python executable, e.g. `.venv/bin/python3`.
-            // Try to figure out the `sys.prefix` value from the Python executable.
+            && unvalidated_path.file_name().is_some_and(|name| {
+                name.starts_with("python")
+                    || name.eq_ignore_ascii_case(&format!("ty{}", std::env::consts::EXE_SUFFIX))
+            }) {
+            // It looks like they passed us a path to an executable, e.g. `.venv/bin/python3`. Try
+            // to figure out the `sys.prefix` value from the Python executable.
             let sys_prefix = if cfg!(windows) {
-                // On Windows, the relative path to the Python executable from `sys.prefix`
-                // is different depending on whether it's a virtual environment or a system installation.
+                // On Windows, the relative path to the executable from `sys.prefix` is different
+                // depending on whether it's a virtual environment or a system installation.
                 // System installations have their executable at `<sys.prefix>/python.exe`,
                 // whereas virtual environments have their executable at `<sys.prefix>/Scripts/python.exe`.
                 unvalidated_path.parent().and_then(|parent| {
@@ -1586,6 +1602,8 @@ pub enum SysPrefixPathOrigin {
     /// A `.venv` directory was found in the current working directory,
     /// and the `sys.prefix` path is the path to that virtual environment.
     LocalVenv,
+    /// The `sys.prefix` path came from the environment ty is installed in.
+    SelfEnvironment,
 }
 
 impl SysPrefixPathOrigin {
@@ -1599,6 +1617,13 @@ impl SysPrefixPathOrigin {
             | Self::Editor
             | Self::DerivedFromPyvenvCfg
             | Self::CondaPrefixVar => false,
+            // It's not strictly true that the self environment must be virtual, e.g., ty could be
+            // installed in a system Python environment and users may expect us to respect
+            // dependencies installed alongside it. However, we're intentionally excluding support
+            // for this to start. Note a change here has downstream implications, i.e., we probably
+            // don't want the packages in a system environment to take precedence over those in a
+            // virtual environment and would need to reverse the ordering in that case.
+            Self::SelfEnvironment => true,
         }
     }
 
@@ -1608,13 +1633,31 @@ impl SysPrefixPathOrigin {
     /// the `sys.prefix` directory, e.g. the `--python` CLI flag.
     pub(crate) const fn must_point_directly_to_sys_prefix(&self) -> bool {
         match self {
-            Self::PythonCliFlag | Self::ConfigFileSetting(..) | Self::Editor => false,
+            Self::PythonCliFlag
+            | Self::ConfigFileSetting(..)
+            | Self::Editor
+            | Self::SelfEnvironment => false,
             Self::VirtualEnvVar
             | Self::CondaPrefixVar
             | Self::DerivedFromPyvenvCfg
             | Self::LocalVenv => true,
         }
     }
+
+    /// Whether paths with this origin should allow combination with paths with a
+    /// [`SysPrefixPathOrigin::SelfEnvironment`] origin.
+    pub const fn allows_concatenation_with_self_environment(&self) -> bool {
+        match self {
+            Self::SelfEnvironment
+            | Self::CondaPrefixVar
+            | Self::VirtualEnvVar
+            | Self::Editor
+            | Self::DerivedFromPyvenvCfg
+            | Self::ConfigFileSetting(..)
+            | Self::PythonCliFlag => false,
+            Self::LocalVenv => true,
+        }
+    }
 }
 
 impl std::fmt::Display for SysPrefixPathOrigin {
@@ -1627,6 +1670,7 @@ impl std::fmt::Display for SysPrefixPathOrigin {
             Self::DerivedFromPyvenvCfg => f.write_str("derived `sys.prefix` path"),
             Self::LocalVenv => f.write_str("local virtual environment"),
             Self::Editor => f.write_str("selected interpreter in your editor"),
+            Self::SelfEnvironment => f.write_str("ty environment"),
         }
     }
 }

From cb2e277482f30c7b45dd3c5a163fef952d66281a Mon Sep 17 00:00:00 2001
From: Dhruv Manilawala 
Date: Thu, 6 Nov 2025 11:14:40 -0500
Subject: [PATCH 105/180] [ty] Understand legacy and PEP 695 `ParamSpec`
 (#21139)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## Summary

This PR adds support for understanding the legacy definition and PEP 695
definition for `ParamSpec`.

This is still very initial and doesn't really implement any of the
semantics.

Part of https://github.com/astral-sh/ty/issues/157

## Test Plan

Add mdtest cases.

## Ecosystem analysis

Most of the diagnostics in `starlette` are due to the fact that ty now
understands `ParamSpec` is not a `Todo` type, so the assignability check
fails. The code looks something like:

```py
class _MiddlewareFactory(Protocol[P]):
    def __call__(self, app: ASGIApp, /, *args: P.args, **kwargs: P.kwargs) -> ASGIApp: ...  # pragma: no cover

class Middleware:
    def __init__(
        self,
        cls: _MiddlewareFactory[P],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> None:
        self.cls = cls
        self.args = args
        self.kwargs = kwargs

# ty complains that `ServerErrorMiddleware` is not assignable to `_MiddlewareFactory[P]`
Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)
```

There are multiple diagnostics where there's an attribute access on the
`Wrapped` object of `functools` which Pyright also raises:
```py
from functools import wraps

def my_decorator(f):
    @wraps(f)
    def wrapper(*args, **kwds):
        return f(*args, **kwds)

	# Pyright: Cannot access attribute "__signature__" for class "_Wrapped[..., Unknown, ..., Unknown]"
      Attribute "__signature__" is unknown [reportAttributeAccessIssue]
	# ty: Object of type `_Wrapped[Unknown, Unknown, Unknown, Unknown]` has no attribute `__signature__` [unresolved-attribute]
    wrapper.__signature__
    return wrapper
```

There are additional diagnostics that is due to the assignability checks
failing because ty now infers the `ParamSpec` instead of using the
`Todo` type which would always succeed. This results in a few
`no-matching-overload` diagnostics because the assignability checks
fail.

There are a few diagnostics related to
https://github.com/astral-sh/ty/issues/491 where there's a variable
which is either a bound method or a variable that's annotated with
`Callable` that doesn't contain the instance as the first parameter.

Another set of (valid) diagnostics are where the code hasn't provided
all the type variables. ty is now raising diagnostics for these because
we include `ParamSpec` type variable in the signature. For example,
`staticmethod[Any]` which contains two type variables.
---
 crates/ty/docs/rules.md                       | 163 ++++----
 crates/ty_ide/src/goto_type_definition.rs     |  18 +-
 crates/ty_ide/src/hover.rs                    |   5 +-
 .../resources/mdtest/annotations/callable.md  |   5 +-
 .../annotations/unsupported_special_forms.md  |   5 +-
 .../mdtest/generics/legacy/classes.md         |   9 +-
 .../resources/mdtest/paramspec.md             | 159 ++++++++
 .../type_properties/is_assignable_to.md       |   4 +-
 crates/ty_python_semantic/src/types.rs        |  47 ++-
 .../src/types/class_base.rs                   |   5 +-
 .../src/types/diagnostic.rs                   |  25 ++
 .../src/types/infer/builder.rs                | 367 +++++++++++++++---
 .../types/infer/builder/type_expression.rs    |   4 +-
 .../src/types/type_ordering.rs                |   3 -
 .../e2e__commands__debug_command.snap         |   1 +
 ty.schema.json                                |  10 +
 16 files changed, 684 insertions(+), 146 deletions(-)
 create mode 100644 crates/ty_python_semantic/resources/mdtest/paramspec.md

diff --git a/crates/ty/docs/rules.md b/crates/ty/docs/rules.md
index 4218eee1af..951c364462 100644
--- a/crates/ty/docs/rules.md
+++ b/crates/ty/docs/rules.md
@@ -39,7 +39,7 @@ def test(): -> "int":
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -95,7 +95,7 @@ f(int)  # error
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -126,7 +126,7 @@ a = 1
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -158,7 +158,7 @@ class C(A, B): ...
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -190,7 +190,7 @@ class B(A): ...
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -217,7 +217,7 @@ class B(A, A): ...
 Default level: error ·
 Added in 0.0.1-alpha.12 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -329,7 +329,7 @@ def test(): -> "Literal[5]":
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -359,7 +359,7 @@ class C(A, B): ...
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -385,7 +385,7 @@ t[3]  # IndexError: tuple index out of range
 Default level: error ·
 Added in 0.0.1-alpha.12 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -474,7 +474,7 @@ an atypical memory layout.
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -501,7 +501,7 @@ func("foo")  # error: [invalid-argument-type]
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -529,7 +529,7 @@ a: int = ''
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -563,7 +563,7 @@ C.instance_var = 3  # error: Cannot assign to instance variable
 Default level: error ·
 Added in 0.0.1-alpha.19 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -599,7 +599,7 @@ asyncio.run(main())
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -623,7 +623,7 @@ class A(42): ...  # error: [invalid-base]
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -650,7 +650,7 @@ with 1:
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -679,7 +679,7 @@ a: str
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -723,7 +723,7 @@ except ZeroDivisionError:
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -756,7 +756,7 @@ class C[U](Generic[T]): ...
 Default level: error ·
 Added in 0.0.1-alpha.17 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -795,7 +795,7 @@ carol = Person(name="Carol", age=25)  # typo!
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -830,7 +830,7 @@ def f(t: TypeVar("U")): ...
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -864,7 +864,7 @@ class B(metaclass=f): ...
 Default level: error ·
 Added in 0.0.1-alpha.19 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -896,7 +896,7 @@ TypeError: can only inherit from a NamedTuple type and Generic
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -946,7 +946,7 @@ def foo(x: int) -> int: ...
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -966,13 +966,44 @@ weakens a type checker's ability to accurately reason about your code.
 def f(a: int = ''): ...
 ```
 
+## `invalid-paramspec`
+
+
+Default level: error ·
+Added in 0.0.1-alpha.1 ·
+Related issues ·
+View source
+
+
+
+**What it does**
+
+Checks for the creation of invalid `ParamSpec`s
+
+**Why is this bad?**
+
+There are several requirements that you must follow when creating a `ParamSpec`.
+
+**Examples**
+
+```python
+from typing import ParamSpec
+
+P1 = ParamSpec("P1")  # okay
+P2 = ParamSpec("S2")  # error: ParamSpec name must match the variable it's assigned to
+```
+
+**References**
+
+- [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec)
+
 ## `invalid-protocol`
 
 
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1006,7 +1037,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1055,7 +1086,7 @@ def g():
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1080,7 +1111,7 @@ def func() -> int:
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1138,7 +1169,7 @@ TODO #14889
 Default level: error ·
 Added in 0.0.1-alpha.6 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1165,7 +1196,7 @@ NewAlias = TypeAliasType(get_name(), int)        # error: TypeAliasType name mus
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1195,7 +1226,7 @@ TYPE_CHECKING = ''
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1225,7 +1256,7 @@ b: Annotated[int]  # `Annotated` expects at least two arguments
 Default level: error ·
 Added in 0.0.1-alpha.11 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1259,7 +1290,7 @@ f(10)  # Error
 Default level: error ·
 Added in 0.0.1-alpha.11 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1293,7 +1324,7 @@ class C:
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1328,7 +1359,7 @@ T = TypeVar('T', bound=str)  # valid bound TypeVar
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1353,7 +1384,7 @@ func()  # TypeError: func() missing 1 required positional argument: 'x'
 Default level: error ·
 Added in 0.0.1-alpha.20 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1386,7 +1417,7 @@ alice["age"]  # KeyError
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1415,7 +1446,7 @@ func("string")  # error: [no-matching-overload]
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1439,7 +1470,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1465,7 +1496,7 @@ for i in 34:  # TypeError: 'int' object is not iterable
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1492,7 +1523,7 @@ f(1, x=2)  # Error raised here
 Default level: error ·
 Added in 0.0.1-alpha.22 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1550,7 +1581,7 @@ def test(): -> "int":
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1580,7 +1611,7 @@ static_assert(int(2.0 * 3.0) == 6)  # error: does not have a statically known tr
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1609,7 +1640,7 @@ class B(A): ...  # Error raised here
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1636,7 +1667,7 @@ f("foo")  # Error raised here
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1664,7 +1695,7 @@ def _(x: int):
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1710,7 +1741,7 @@ class A:
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1737,7 +1768,7 @@ f(x=1, y=2)  # Error raised here
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1765,7 +1796,7 @@ A().foo  # AttributeError: 'A' object has no attribute 'foo'
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1790,7 +1821,7 @@ import foo  # ModuleNotFoundError: No module named 'foo'
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1815,7 +1846,7 @@ print(x)  # NameError: name 'x' is not defined
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1852,7 +1883,7 @@ b1 < b2 < b1  # exception raised here
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1880,7 +1911,7 @@ A() + A()  # TypeError: unsupported operand type(s) for +: 'A' and 'A'
 Default level: error ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1905,7 +1936,7 @@ l[1:10:0]  # ValueError: slice step cannot be zero
 Default level: warn ·
 Added in 0.0.1-alpha.20 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -1946,7 +1977,7 @@ class SubProto(BaseProto, Protocol):
 Default level: warn ·
 Added in 0.0.1-alpha.16 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2034,7 +2065,7 @@ a = 20 / 0  # type: ignore
 Default level: warn ·
 Added in 0.0.1-alpha.22 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2062,7 +2093,7 @@ A.c  # AttributeError: type object 'A' has no attribute 'c'
 Default level: warn ·
 Added in 0.0.1-alpha.22 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2094,7 +2125,7 @@ A()[0]  # TypeError: 'A' object is not subscriptable
 Default level: warn ·
 Added in 0.0.1-alpha.22 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2126,7 +2157,7 @@ from module import a  # ImportError: cannot import name 'a' from 'module'
 Default level: warn ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2153,7 +2184,7 @@ cast(int, f())  # Redundant
 Default level: warn ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2177,7 +2208,7 @@ reveal_type(1)  # NameError: name 'reveal_type' is not defined
 Default level: warn ·
 Added in 0.0.1-alpha.15 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2235,7 +2266,7 @@ def g():
 Default level: warn ·
 Added in 0.0.1-alpha.7 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2274,7 +2305,7 @@ class D(C): ...  # error: [unsupported-base]
 Default level: warn ·
 Added in 0.0.1-alpha.22 ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2337,7 +2368,7 @@ def foo(x: int | str) -> int | str:
 Default level: ignore ·
 Preview (since 0.0.1-alpha.1) ·
 Related issues ·
-View source
+View source
 
 
 
@@ -2361,7 +2392,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime.
 Default level: ignore ·
 Added in 0.0.1-alpha.1 ·
 Related issues ·
-View source
+View source
 
 
 
diff --git a/crates/ty_ide/src/goto_type_definition.rs b/crates/ty_ide/src/goto_type_definition.rs
index f39e5dc4b6..7d86743af4 100644
--- a/crates/ty_ide/src/goto_type_definition.rs
+++ b/crates/ty_ide/src/goto_type_definition.rs
@@ -276,10 +276,20 @@ mod tests {
             "#,
         );
 
-        // TODO: Goto type definition currently doesn't work for type param specs
-        // because the inference doesn't support them yet.
-        // This snapshot should show a single target pointing to `T`
-        assert_snapshot!(test.goto_type_definition(), @"No type definitions found");
+        assert_snapshot!(test.goto_type_definition(), @r"
+        info[goto-type-definition]: Type definition
+         --> main.py:2:14
+          |
+        2 | type Alias[**P = [int, str]] = Callable[P, int]
+          |              ^
+          |
+        info: Source
+         --> main.py:2:41
+          |
+        2 | type Alias[**P = [int, str]] = Callable[P, int]
+          |                                         ^
+          |
+        ");
     }
 
     #[test]
diff --git a/crates/ty_ide/src/hover.rs b/crates/ty_ide/src/hover.rs
index b555295678..1b348b82b9 100644
--- a/crates/ty_ide/src/hover.rs
+++ b/crates/ty_ide/src/hover.rs
@@ -1633,11 +1633,12 @@ def ab(a: int, *, c: int):
             "#,
         );
 
+        // TODO: This should be `P@Alias ()`
         assert_snapshot!(test.hover(), @r"
-        @Todo
+        typing.ParamSpec
         ---------------------------------------------
         ```python
-        @Todo
+        typing.ParamSpec
         ```
         ---------------------------------------------
         info[hover]: Hovered content is
diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/callable.md b/crates/ty_python_semantic/resources/mdtest/annotations/callable.md
index 780b2a87db..e7e55f7a44 100644
--- a/crates/ty_python_semantic/resources/mdtest/annotations/callable.md
+++ b/crates/ty_python_semantic/resources/mdtest/annotations/callable.md
@@ -307,8 +307,9 @@ Using a `ParamSpec` in a `Callable` annotation:
 from typing_extensions import Callable
 
 def _[**P1](c: Callable[P1, int]):
-    reveal_type(P1.args)  # revealed: @Todo(ParamSpec)
-    reveal_type(P1.kwargs)  # revealed: @Todo(ParamSpec)
+    # TODO: Should reveal `ParamSpecArgs` and `ParamSpecKwargs`
+    reveal_type(P1.args)  # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs)
+    reveal_type(P1.kwargs)  # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs)
 
     # TODO: Signature should be (**P1) -> int
     reveal_type(c)  # revealed: (...) -> int
diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md b/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md
index c61a94a8d6..c5d737d9eb 100644
--- a/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md
+++ b/crates/ty_python_semantic/resources/mdtest/annotations/unsupported_special_forms.md
@@ -21,8 +21,9 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
 
 def g() -> TypeGuard[int]: ...
 def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.kwargs) -> R_co:
-    reveal_type(args)  # revealed: tuple[@Todo(Support for `typing.ParamSpec`), ...]
-    reveal_type(kwargs)  # revealed: dict[str, @Todo(Support for `typing.ParamSpec`)]
+    # TODO: Should reveal a type representing `P.args` and `P.kwargs`
+    reveal_type(args)  # revealed: tuple[@Todo(ParamSpecArgs / ParamSpecKwargs), ...]
+    reveal_type(kwargs)  # revealed: dict[str, @Todo(ParamSpecArgs / ParamSpecKwargs)]
     return callback(42, *args, **kwargs)
 
 class Foo:
diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md
index c520b7e883..a1f47c3b11 100644
--- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md
+++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md
@@ -26,9 +26,12 @@ reveal_type(generic_context(SingleTypevar))
 # revealed: tuple[T@MultipleTypevars, S@MultipleTypevars]
 reveal_type(generic_context(MultipleTypevars))
 
-# TODO: support `ParamSpec`/`TypeVarTuple` properly (these should not reveal `None`)
-reveal_type(generic_context(SingleParamSpec))  # revealed: None
-reveal_type(generic_context(TypeVarAndParamSpec))  # revealed: None
+# revealed: tuple[P@SingleParamSpec]
+reveal_type(generic_context(SingleParamSpec))
+# revealed: tuple[P@TypeVarAndParamSpec, T@TypeVarAndParamSpec]
+reveal_type(generic_context(TypeVarAndParamSpec))
+
+# TODO: support `TypeVarTuple` properly (these should not reveal `None`)
 reveal_type(generic_context(SingleTypeVarTuple))  # revealed: None
 reveal_type(generic_context(TypeVarAndTypeVarTuple))  # revealed: None
 ```
diff --git a/crates/ty_python_semantic/resources/mdtest/paramspec.md b/crates/ty_python_semantic/resources/mdtest/paramspec.md
new file mode 100644
index 0000000000..4ebc336d7f
--- /dev/null
+++ b/crates/ty_python_semantic/resources/mdtest/paramspec.md
@@ -0,0 +1,159 @@
+# `ParamSpec`
+
+## Definition
+
+### Valid
+
+```py
+from typing import ParamSpec
+
+P = ParamSpec("P")
+reveal_type(type(P))  # revealed: <class 'ParamSpec'>
+reveal_type(P)  # revealed: typing.ParamSpec
+reveal_type(P.__name__)  # revealed: Literal["P"]
+```
+
+The `ParamSpec` name can also be provided as a keyword argument:
+
+```py
+from typing import ParamSpec
+
+P = ParamSpec(name="P")
+reveal_type(P.__name__)  # revealed: Literal["P"]
+```
+
+### Must be directly assigned to a variable
+
+```py
+from typing import ParamSpec
+
+P = ParamSpec("P")
+# error: [invalid-paramspec]
+P1: ParamSpec = ParamSpec("P1")
+
+# error: [invalid-paramspec]
+tuple_with_typevar = ("foo", ParamSpec("W"))
+reveal_type(tuple_with_typevar[1])  # revealed: ParamSpec
+```
+
+```py
+from typing_extensions import ParamSpec
+
+T = ParamSpec("T")
+# error: [invalid-paramspec]
+P1: ParamSpec = ParamSpec("P1")
+
+# error: [invalid-paramspec]
+tuple_with_typevar = ("foo", ParamSpec("P2"))
+reveal_type(tuple_with_typevar[1])  # revealed: ParamSpec
+```
+
+### `ParamSpec` parameter must match variable name
+
+```py
+from typing import ParamSpec
+
+P1 = ParamSpec("P1")
+
+# error: [invalid-paramspec]
+P2 = ParamSpec("P3")
+```
+
+### Accepts only a single `name` argument
+
+> The runtime should accept bounds and covariant and contravariant arguments in the declaration just
+> as typing.TypeVar does, but for now we will defer the standardization of the semantics of those
+> options to a later PEP.
+
+```py
+from typing import ParamSpec
+
+# error: [invalid-paramspec]
+P1 = ParamSpec("P1", bound=int)
+# error: [invalid-paramspec]
+P2 = ParamSpec("P2", int, str)
+# error: [invalid-paramspec]
+P3 = ParamSpec("P3", covariant=True)
+# error: [invalid-paramspec]
+P4 = ParamSpec("P4", contravariant=True)
+```
+
+### Defaults
+
+```toml
+[environment]
+python-version = "3.13"
+```
+
+The default value for a `ParamSpec` can be either a list of types, `...`, or another `ParamSpec`.
+
+```py
+from typing import ParamSpec
+
+P1 = ParamSpec("P1", default=[int, str])
+P2 = ParamSpec("P2", default=...)
+P3 = ParamSpec("P3", default=P2)
+```
+
+Other values are invalid.
+
+```py
+# error: [invalid-paramspec]
+P4 = ParamSpec("P4", default=int)
+```
+
+### PEP 695
+
+```toml
+[environment]
+python-version = "3.12"
+```
+
+#### Valid
+
+```py
+def foo1[**P]() -> None:
+    reveal_type(P)  # revealed: typing.ParamSpec
+
+def foo2[**P = ...]() -> None:
+    reveal_type(P)  # revealed: typing.ParamSpec
+
+def foo3[**P = [int, str]]() -> None:
+    reveal_type(P)  # revealed: typing.ParamSpec
+
+def foo4[**P, **Q = P]():
+    reveal_type(P)  # revealed: typing.ParamSpec
+    reveal_type(Q)  # revealed: typing.ParamSpec
+```
+
+#### Invalid
+
+ParamSpec, when defined using the new syntax, does not allow defining bounds or constraints.
+
+This results in a lot of syntax errors mainly because the AST doesn't accept them in this position.
+The parser could do a better job in recovering from these errors.
+
+
+
+```py
+# error: [invalid-syntax]
+# error: [invalid-syntax]
+# error: [invalid-syntax]
+# error: [invalid-syntax]
+# error: [invalid-syntax]
+# error: [invalid-syntax]
+def foo[**P: int]() -> None:
+    # error: [invalid-syntax]
+    # error: [invalid-syntax]
+    pass
+```
+
+
+
+#### Invalid default
+
+```py
+# error: [invalid-paramspec]
+def foo[**P = int]() -> None:
+    pass
+```
diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md
index a39b6a6f16..1386a9e158 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md
@@ -1171,9 +1171,7 @@ class EggsLegacy(Generic[T, P]): ...
 static_assert(not is_assignable_to(Spam, Callable[..., Any]))
 static_assert(not is_assignable_to(SpamLegacy, Callable[..., Any]))
 static_assert(not is_assignable_to(Eggs, Callable[..., Any]))
-
-# TODO: should pass
-static_assert(not is_assignable_to(EggsLegacy, Callable[..., Any]))  # error: [static-assert-error]
+static_assert(not is_assignable_to(EggsLegacy, Callable[..., Any]))
 ```
 
 ### Classes with `__call__` as attribute
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index 59e1ef4030..bc75895833 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -4358,6 +4358,13 @@ impl<'db> Type<'db> {
                     .into()
             }
 
+            Type::KnownInstance(KnownInstanceType::TypeVar(typevar))
+                if typevar.kind(db).is_paramspec()
+                    && matches!(name.as_str(), "args" | "kwargs") =>
+            {
+                Place::bound(todo_type!("ParamSpecArgs / ParamSpecKwargs")).into()
+            }
+
             Type::NominalInstance(..)
             | Type::ProtocolInstance(..)
             | Type::BooleanLiteral(..)
@@ -7024,7 +7031,7 @@ impl<'db> Type<'db> {
             Type::TypeVar(bound_typevar) => {
                 if matches!(
                     bound_typevar.typevar(db).kind(db),
-                    TypeVarKind::Legacy | TypeVarKind::TypingSelf
+                    TypeVarKind::Legacy | TypeVarKind::TypingSelf | TypeVarKind::ParamSpec
                 ) && binding_context.is_none_or(|binding_context| {
                     bound_typevar.binding_context(db) == BindingContext::Definition(binding_context)
                 }) {
@@ -7743,6 +7750,9 @@ impl<'db> KnownInstanceType<'db> {
     fn class(self, db: &'db dyn Db) -> KnownClass {
         match self {
             Self::SubscriptedProtocol(_) | Self::SubscriptedGeneric(_) => KnownClass::SpecialForm,
+            Self::TypeVar(typevar_instance) if typevar_instance.kind(db).is_paramspec() => {
+                KnownClass::ParamSpec
+            }
             Self::TypeVar(_) => KnownClass::TypeVar,
             Self::TypeAliasType(TypeAliasType::PEP695(alias)) if alias.is_specialized(db) => {
                 KnownClass::GenericAlias
@@ -7808,7 +7818,13 @@ impl<'db> KnownInstanceType<'db> {
                     // This is a legacy `TypeVar` _outside_ of any generic class or function, so we render
                     // it as an instance of `typing.TypeVar`. Inside of a generic class or function, we'll
                     // have a `Type::TypeVar(_)`, which is rendered as the typevar's name.
-                    KnownInstanceType::TypeVar(_) => f.write_str("typing.TypeVar"),
+                    KnownInstanceType::TypeVar(typevar_instance) => {
+                        if typevar_instance.kind(self.db).is_paramspec() {
+                            f.write_str("typing.ParamSpec")
+                        } else {
+                            f.write_str("typing.TypeVar")
+                        }
+                    }
                     KnownInstanceType::Deprecated(_) => f.write_str("warnings.deprecated"),
                     KnownInstanceType::Field(field) => {
                         f.write_str("dataclasses.Field")?;
@@ -7864,9 +7880,6 @@ pub enum DynamicType<'db> {
     ///
     /// This variant should be created with the `todo_type!` macro.
     Todo(TodoType),
-    /// A special Todo-variant for PEP-695 `ParamSpec` types. A temporary variant to detect and special-
-    /// case the handling of these types in `Callable` annotations.
-    TodoPEP695ParamSpec,
     /// A special Todo-variant for type aliases declared using `typing.TypeAlias`.
     /// A temporary variant to detect and special-case the handling of these aliases in autocomplete suggestions.
     TodoTypeAlias,
@@ -7894,13 +7907,6 @@ impl std::fmt::Display for DynamicType<'_> {
             // `DynamicType::Todo`'s display should be explicit that is not a valid display of
             // any other type
             DynamicType::Todo(todo) => write!(f, "@Todo{todo}"),
-            DynamicType::TodoPEP695ParamSpec => {
-                if cfg!(debug_assertions) {
-                    f.write_str("@Todo(ParamSpec)")
-                } else {
-                    f.write_str("@Todo")
-                }
-            }
             DynamicType::TodoUnpack => {
                 if cfg!(debug_assertions) {
                     f.write_str("@Todo(typing.Unpack)")
@@ -8239,12 +8245,20 @@ pub enum TypeVarKind {
     Pep695,
     /// `typing.Self`
     TypingSelf,
+    /// `P = ParamSpec("P")`
+    ParamSpec,
+    /// `def foo[**P]() -> None: ...`
+    Pep695ParamSpec,
 }
 
 impl TypeVarKind {
     const fn is_self(self) -> bool {
         matches!(self, Self::TypingSelf)
     }
+
+    const fn is_paramspec(self) -> bool {
+        matches!(self, Self::ParamSpec | Self::Pep695ParamSpec)
+    }
 }
 
 /// The identity of a type variable.
@@ -8597,6 +8611,15 @@ impl<'db> TypeVarInstance<'db> {
                 let expr = &call_expr.arguments.find_keyword("default")?.value;
                 Some(definition_expression_type(db, definition, expr))
             }
+            // PEP 695 ParamSpec
+            DefinitionKind::ParamSpec(paramspec) => {
+                let paramspec_node = paramspec.node(&module);
+                Some(definition_expression_type(
+                    db,
+                    definition,
+                    paramspec_node.default.as_ref()?,
+                ))
+            }
             _ => None,
         }
     }
diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs
index bed18de8b9..071d4b92b7 100644
--- a/crates/ty_python_semantic/src/types/class_base.rs
+++ b/crates/ty_python_semantic/src/types/class_base.rs
@@ -49,10 +49,7 @@ impl<'db> ClassBase<'db> {
             ClassBase::Dynamic(DynamicType::Any) => "Any",
             ClassBase::Dynamic(DynamicType::Unknown) => "Unknown",
             ClassBase::Dynamic(
-                DynamicType::Todo(_)
-                | DynamicType::TodoPEP695ParamSpec
-                | DynamicType::TodoTypeAlias
-                | DynamicType::TodoUnpack,
+                DynamicType::Todo(_) | DynamicType::TodoTypeAlias | DynamicType::TodoUnpack,
             ) => "@Todo",
             ClassBase::Dynamic(DynamicType::Divergent(_)) => "Divergent",
             ClassBase::Protocol => "Protocol",
diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs
index 5d647f108f..33efcc74fd 100644
--- a/crates/ty_python_semantic/src/types/diagnostic.rs
+++ b/crates/ty_python_semantic/src/types/diagnostic.rs
@@ -63,6 +63,7 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) {
     registry.register_lint(&INVALID_EXCEPTION_CAUGHT);
     registry.register_lint(&INVALID_GENERIC_CLASS);
     registry.register_lint(&INVALID_LEGACY_TYPE_VARIABLE);
+    registry.register_lint(&INVALID_PARAMSPEC);
     registry.register_lint(&INVALID_TYPE_ALIAS_TYPE);
     registry.register_lint(&INVALID_METACLASS);
     registry.register_lint(&INVALID_OVERLOAD);
@@ -880,6 +881,30 @@ declare_lint! {
     }
 }
 
+declare_lint! {
+    /// ## What it does
+    /// Checks for the creation of invalid `ParamSpec`s
+    ///
+    /// ## Why is this bad?
+    /// There are several requirements that you must follow when creating a `ParamSpec`.
+    ///
+    /// ## Examples
+    /// ```python
+    /// from typing import ParamSpec
+    ///
+    /// P1 = ParamSpec("P1")  # okay
+    /// P2 = ParamSpec("S2")  # error: ParamSpec name must match the variable it's assigned to
+    /// ```
+    ///
+    /// ## References
+    /// - [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec)
+    pub(crate) static INVALID_PARAMSPEC = {
+        summary: "detects invalid ParamSpec usage",
+        status: LintStatus::stable("0.0.1-alpha.1"),
+        default_level: Level::Error,
+    }
+}
+
 declare_lint! {
     /// ## What it does
     /// Checks for the creation of invalid `TypeAliasType`s
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index e72b4af8db..8806dff536 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -59,11 +59,12 @@ use crate::types::diagnostic::{
     DIVISION_BY_ZERO, DUPLICATE_KW_ONLY, INCONSISTENT_MRO, INVALID_ARGUMENT_TYPE,
     INVALID_ASSIGNMENT, INVALID_ATTRIBUTE_ACCESS, INVALID_BASE, INVALID_DECLARATION,
     INVALID_GENERIC_CLASS, INVALID_KEY, INVALID_LEGACY_TYPE_VARIABLE, INVALID_METACLASS,
-    INVALID_NAMED_TUPLE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, INVALID_PROTOCOL,
-    INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL, INVALID_TYPE_VARIABLE_CONSTRAINTS,
-    IncompatibleBases, NON_SUBSCRIPTABLE, POSSIBLY_MISSING_IMPLICIT_CALL, POSSIBLY_MISSING_IMPORT,
-    SUBCLASS_OF_FINAL_CLASS, UNDEFINED_REVEAL, UNRESOLVED_ATTRIBUTE, UNRESOLVED_GLOBAL,
-    UNRESOLVED_IMPORT, UNRESOLVED_REFERENCE, UNSUPPORTED_OPERATOR, USELESS_OVERLOAD_BODY,
+    INVALID_NAMED_TUPLE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, INVALID_PARAMSPEC,
+    INVALID_PROTOCOL, INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL,
+    INVALID_TYPE_VARIABLE_CONSTRAINTS, IncompatibleBases, NON_SUBSCRIPTABLE,
+    POSSIBLY_MISSING_IMPLICIT_CALL, POSSIBLY_MISSING_IMPORT, SUBCLASS_OF_FINAL_CLASS,
+    UNDEFINED_REVEAL, UNRESOLVED_ATTRIBUTE, UNRESOLVED_GLOBAL, UNRESOLVED_IMPORT,
+    UNRESOLVED_REFERENCE, UNSUPPORTED_OPERATOR, USELESS_OVERLOAD_BODY,
     hint_if_stdlib_attribute_exists_on_other_versions,
     hint_if_stdlib_submodule_exists_on_other_versions, report_attempted_protocol_instantiation,
     report_bad_dunder_set_call, report_cannot_pop_required_field_on_typed_dict,
@@ -1296,6 +1297,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             DefinitionKind::TypeVar(typevar) => {
                 self.infer_typevar_deferred(typevar.node(self.module()));
             }
+            DefinitionKind::ParamSpec(paramspec) => {
+                self.infer_paramspec_deferred(paramspec.node(self.module()));
+            }
             DefinitionKind::Assignment(assignment) => {
                 self.infer_assignment_deferred(assignment.value(self.module()));
             }
@@ -3182,18 +3186,120 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         let ast::TypeParamParamSpec {
             range: _,
             node_index: _,
-            name: _,
+            name,
             default,
         } = node;
-        self.infer_optional_expression(default.as_deref(), TypeContext::default());
-        let pep_695_todo = Type::Dynamic(DynamicType::TodoPEP695ParamSpec);
+        if default.is_some() {
+            self.deferred.insert(definition);
+        }
+        let identity = TypeVarIdentity::new(
+            self.db(),
+            &name.id,
+            Some(definition),
+            TypeVarKind::Pep695ParamSpec,
+        );
+        let ty = Type::KnownInstance(KnownInstanceType::TypeVar(TypeVarInstance::new(
+            self.db(),
+            identity,
+            None, // ParamSpec, when declared using PEP 695 syntax, has no bounds or constraints
+            None, // explicit_variance
+            default.as_deref().map(|_| TypeVarDefaultEvaluation::Lazy),
+        )));
         self.add_declaration_with_binding(
             node.into(),
             definition,
-            &DeclaredAndInferredType::are_the_same_type(pep_695_todo),
+            &DeclaredAndInferredType::are_the_same_type(ty),
         );
     }
 
+    fn infer_paramspec_deferred(&mut self, node: &ast::TypeParamParamSpec) {
+        let ast::TypeParamParamSpec {
+            range: _,
+            node_index: _,
+            name: _,
+            default: Some(default),
+        } = node
+        else {
+            return;
+        };
+        let previous_deferred_state =
+            std::mem::replace(&mut self.deferred_state, DeferredExpressionState::Deferred);
+        let default_ty = self.infer_paramspec_default(default);
+        self.store_expression_type(default, default_ty);
+        self.deferred_state = previous_deferred_state;
+    }
+
+    fn infer_paramspec_default(&mut self, default: &ast::Expr) -> Type<'db> {
+        // This is the same logic as `TypeInferenceBuilder::infer_callable_parameter_types` except
+        // for the subscript branch which is required for `Concatenate` but that cannot be
+        // specified in this context.
+        match default {
+            ast::Expr::EllipsisLiteral(_) => {
+                CallableType::single(self.db(), Signature::new(Parameters::gradual_form(), None))
+            }
+            ast::Expr::List(ast::ExprList { elts, .. }) => {
+                let mut parameter_types = Vec::with_capacity(elts.len());
+
+                // Whether to infer `Todo` for the parameters
+                let mut return_todo = false;
+
+                for param in elts {
+                    let param_type = self.infer_type_expression(param);
+                    // This is similar to what we currently do for inferring tuple type expression.
+                    // We currently infer `Todo` for the parameters to avoid invalid diagnostics
+                    // when trying to check for assignability or any other relation. For example,
+                    // `*tuple[int, str]`, `Unpack[]`, etc. are not yet supported.
+                    return_todo |= param_type.is_todo()
+                        && matches!(param, ast::Expr::Starred(_) | ast::Expr::Subscript(_));
+                    parameter_types.push(param_type);
+                }
+
+                let parameters = if return_todo {
+                    // TODO: `Unpack`
+                    Parameters::todo()
+                } else {
+                    Parameters::new(parameter_types.iter().map(|param_type| {
+                        Parameter::positional_only(None).with_annotated_type(*param_type)
+                    }))
+                };
+
+                CallableType::single(self.db(), Signature::new(parameters, None))
+            }
+            ast::Expr::Name(name) => {
+                let name_ty = self.infer_name_load(name);
+                let is_paramspec = match name_ty {
+                    Type::KnownInstance(known_instance) => {
+                        known_instance.class(self.db()) == KnownClass::ParamSpec
+                    }
+                    Type::NominalInstance(nominal) => {
+                        nominal.has_known_class(self.db(), KnownClass::ParamSpec)
+                    }
+                    _ => false,
+                };
+                if is_paramspec {
+                    name_ty
+                } else {
+                    if let Some(builder) = self.context.report_lint(&INVALID_PARAMSPEC, default) {
+                        builder.into_diagnostic(
+                            "The default value to `ParamSpec` must be either a list of types, \
+                        `ParamSpec`, or `...`",
+                        );
+                    }
+                    Type::unknown()
+                }
+            }
+            _ => {
+                if let Some(builder) = self.context.report_lint(&INVALID_PARAMSPEC, default) {
+                    builder.into_diagnostic(
+                        "The default value to `ParamSpec` must be either a list of types, \
+                        `ParamSpec`, or `...`",
+                    );
+                }
+                Type::unknown()
+            }
+        }
+    }
+
     fn infer_typevartuple_definition(
         &mut self,
         node: &ast::TypeParamTypeVarTuple,
@@ -4324,17 +4430,21 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                         TypeContext::default(),
                     );
 
-                    let typevar_class = callable_type
+                    let ty = match callable_type
                         .as_class_literal()
                         .and_then(|cls| cls.known(self.db()))
-                        .filter(|cls| {
-                            matches!(cls, KnownClass::TypeVar | KnownClass::ExtensionsTypeVar)
-                        });
-
-                    let ty = if let Some(typevar_class) = typevar_class {
-                        self.infer_legacy_typevar(target, call_expr, definition, typevar_class)
-                    } else {
-                        self.infer_call_expression_impl(call_expr, callable_type, tcx)
+                    {
+                        Some(
+                            typevar_class @ (KnownClass::TypeVar | KnownClass::ExtensionsTypeVar),
+                        ) => {
+                            self.infer_legacy_typevar(target, call_expr, definition, typevar_class)
+                        }
+                        Some(KnownClass::ParamSpec) => {
+                            self.infer_paramspec(target, call_expr, definition)
+                        }
+                        Some(_) | None => {
+                            self.infer_call_expression_impl(call_expr, callable_type, tcx)
+                        }
                     };
 
                     self.store_expression_type(value, ty);
@@ -4371,6 +4481,160 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
         target_ty
     }
 
+    fn infer_paramspec(
+        &mut self,
+        target: &ast::Expr,
+        call_expr: &ast::ExprCall,
+        definition: Definition<'db>,
+    ) -> Type<'db> {
+        fn error<'db>(
+            context: &InferContext<'db, '_>,
+            message: impl std::fmt::Display,
+            node: impl Ranged,
+        ) -> Type<'db> {
+            if let Some(builder) = context.report_lint(&INVALID_PARAMSPEC, node) {
+                builder.into_diagnostic(message);
+            }
+            // If the call doesn't create a valid paramspec, we'll emit diagnostics and fall back to
+            // just creating a regular instance of `typing.ParamSpec`.
+            KnownClass::ParamSpec.to_instance(context.db())
+        }
+
+        let db = self.db();
+        let arguments = &call_expr.arguments;
+        let assume_all_features = self.in_stub();
+        let python_version = Program::get(db).python_version(db);
+        let have_features_from =
+            |version: PythonVersion| assume_all_features || python_version >= version;
+
+        let mut default = None;
+        let mut name_param_ty = None;
+
+        if arguments.args.len() > 1 {
+            return error(
+                &self.context,
+                "`ParamSpec` can only have one positional argument",
+                call_expr,
+            );
+        }
+
+        if let Some(starred) = arguments.args.iter().find(|arg| arg.is_starred_expr()) {
+            return error(
+                &self.context,
+                "Starred arguments are not supported in `ParamSpec` creation",
+                starred,
+            );
+        }
+
+        for kwarg in &arguments.keywords {
+            let Some(identifier) = kwarg.arg.as_ref() else {
+                return error(
+                    &self.context,
+                    "Starred arguments are not supported in `ParamSpec` creation",
+                    kwarg,
+                );
+            };
+            match identifier.id().as_str() {
+                "name" => {
+                    // Duplicate keyword argument is a syntax error, so we don't have to check if
+                    // `name_param_ty.is_some()` here.
+                    if !arguments.args.is_empty() {
+                        return error(
+                            &self.context,
+                            "The `name` parameter of `ParamSpec` can only be provided once",
+                            kwarg,
+                        );
+                    }
+                    name_param_ty =
+                        Some(self.infer_expression(&kwarg.value, TypeContext::default()));
+                }
+                "bound" | "covariant" | "contravariant" | "infer_variance" => {
+                    return error(
+                        &self.context,
+                        "The variance and bound arguments for `ParamSpec` do not have defined semantics yet",
+                        call_expr,
+                    );
+                }
+                "default" => {
+                    if !have_features_from(PythonVersion::PY313) {
+                        // We don't return here; this error is informational since this will error
+                        // at runtime, but the user's intent is plain, we may as well respect it.
+                        error(
+                            &self.context,
+                            "The `default` parameter of `typing.ParamSpec` was added in Python 3.13",
+                            kwarg,
+                        );
+                    }
+                    default = Some(TypeVarDefaultEvaluation::Lazy);
+                }
+                name => {
+                    // We don't return here; this error is informational since this will error
+                    // at runtime, but it will likely cause fewer cascading errors if we just
+                    // ignore the unknown keyword and still understand as much of the typevar as we
+                    // can.
+                    error(
+                        &self.context,
+                        format_args!("Unknown keyword argument `{name}` in `ParamSpec` creation"),
+                        kwarg,
+                    );
+                    self.infer_expression(&kwarg.value, TypeContext::default());
+                }
+            }
+        }
+
+        let Some(name_param_ty) = name_param_ty.or_else(|| {
+            arguments
+                .find_positional(0)
+                .map(|arg| self.infer_expression(arg, TypeContext::default()))
+        }) else {
+            return error(
+                &self.context,
+                "The `name` parameter of `ParamSpec` is required.",
+                call_expr,
+            );
+        };
+
+        let Some(name_param) = name_param_ty.as_string_literal().map(|name| name.value(db)) else {
+            return error(
+                &self.context,
+                "The first argument to `ParamSpec` must be a string literal",
+                call_expr,
+            );
+        };
+
+        let ast::Expr::Name(ast::ExprName {
+            id: target_name, ..
+        }) = target
+        else {
+            return error(
+                &self.context,
+                "A `ParamSpec` definition must be a simple variable assignment",
+                target,
+            );
+        };
+
+        if name_param != target_name {
+            return error(
+                &self.context,
+                format_args!(
+                    "The name of a `ParamSpec` (`{name_param}`) must match \
+                    the name of the variable it is assigned to (`{target_name}`)"
+                ),
+                target,
+            );
+        }
+
+        if default.is_some() {
+            self.deferred.insert(definition);
+        }
+
+        let identity =
+            TypeVarIdentity::new(db, target_name, Some(definition), TypeVarKind::ParamSpec);
+        Type::KnownInstance(KnownInstanceType::TypeVar(TypeVarInstance::new(
+            db, identity, None, None, default,
+        )))
+    }
+
     fn infer_legacy_typevar(
         &mut self,
         target: &ast::Expr,
@@ -4617,8 +4881,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
     }
 
     fn infer_assignment_deferred(&mut self, value: &ast::Expr) {
-        // Infer deferred bounds/constraints/defaults of a legacy TypeVar.
-        let ast::Expr::Call(ast::ExprCall { arguments, .. }) = value else {
+        // Infer deferred bounds/constraints/defaults of a legacy TypeVar / ParamSpec.
+        let ast::Expr::Call(ast::ExprCall {
+            func, arguments, ..
+        }) = value
+        else {
             return;
         };
         for arg in arguments.args.iter().skip(1) {
@@ -4628,7 +4895,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             self.infer_type_expression(&bound.value);
         }
         if let Some(default) = arguments.find_keyword("default") {
-            self.infer_type_expression(&default.value);
+            let func_ty = self.get_or_infer_expression(func, TypeContext::default());
+            if func_ty.as_class_literal().is_some_and(|class_literal| {
+                class_literal.is_known(self.db(), KnownClass::ParamSpec)
+            }) {
+                self.infer_paramspec_default(&default.value);
+            } else {
+                self.infer_type_expression(&default.value);
+            }
         }
     }
 
@@ -7047,22 +7321,33 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                     .to_class_type(self.db())
                     .is_none_or(|enum_class| !class.is_subclass_of(self.db(), enum_class))
             {
-                if matches!(
-                    class.known(self.db()),
-                    Some(KnownClass::TypeVar | KnownClass::ExtensionsTypeVar)
-                ) {
-                    // Inference of correctly-placed `TypeVar` definitions is done in
-                    // `TypeInferenceBuilder::infer_legacy_typevar`, and doesn't use the full
-                    // call-binding machinery. If we reach here, it means that someone is trying to
-                    // instantiate a `typing.TypeVar` in an invalid context.
-                    if let Some(builder) = self
-                        .context
-                        .report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
-                    {
-                        builder.into_diagnostic(
-                            "A `TypeVar` definition must be a simple variable assignment",
-                        );
+                // Inference of correctly-placed `TypeVar` and `ParamSpec` definitions is done in
+                // `TypeInferenceBuilder::infer_legacy_typevar` and
+                // `TypeInferenceBuilder::infer_paramspec`, and doesn't use the full
+                // call-binding machinery. If we reach here, it means that someone is trying to
+                // instantiate a `typing.TypeVar` or `typing.ParamSpec` in an invalid context.
+                match class.known(self.db()) {
+                    Some(KnownClass::TypeVar | KnownClass::ExtensionsTypeVar) => {
+                        if let Some(builder) = self
+                            .context
+                            .report_lint(&INVALID_LEGACY_TYPE_VARIABLE, call_expression)
+                        {
+                            builder.into_diagnostic(
+                                "A `TypeVar` definition must be a simple variable assignment",
+                            );
+                        }
                     }
+                    Some(KnownClass::ParamSpec) => {
+                        if let Some(builder) = self
+                            .context
+                            .report_lint(&INVALID_PARAMSPEC, call_expression)
+                        {
+                            builder.into_diagnostic(
+                                "A `ParamSpec` definition must be a simple variable assignment",
+                            );
+                        }
+                    }
+                    _ => {}
                 }
 
                 let db = self.db();
@@ -8270,10 +8555,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
 
             (
                 todo @ Type::Dynamic(
-                    DynamicType::Todo(_)
-                    | DynamicType::TodoPEP695ParamSpec
-                    | DynamicType::TodoUnpack
-                    | DynamicType::TodoTypeAlias,
+                    DynamicType::Todo(_) | DynamicType::TodoUnpack | DynamicType::TodoTypeAlias,
                 ),
                 _,
                 _,
@@ -8281,10 +8563,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             | (
                 _,
                 todo @ Type::Dynamic(
-                    DynamicType::Todo(_)
-                    | DynamicType::TodoPEP695ParamSpec
-                    | DynamicType::TodoUnpack
-                    | DynamicType::TodoTypeAlias,
+                    DynamicType::Todo(_) | DynamicType::TodoUnpack | DynamicType::TodoTypeAlias,
                 ),
                 _,
             ) => Some(todo),
diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
index 1e1ff82c0b..c6b2bbbef0 100644
--- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
@@ -1524,7 +1524,9 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
                     self.db(),
                     self.infer_name_load(name),
                     &|ty| match ty {
-                        Type::Dynamic(DynamicType::TodoPEP695ParamSpec) => true,
+                        Type::KnownInstance(known_instance) => {
+                            known_instance.class(self.db()) == KnownClass::ParamSpec
+                        }
                         Type::NominalInstance(nominal) => {
                             nominal.has_known_class(self.db(), KnownClass::ParamSpec)
                         }
diff --git a/crates/ty_python_semantic/src/types/type_ordering.rs b/crates/ty_python_semantic/src/types/type_ordering.rs
index e45e0c9ba5..f6797f87d9 100644
--- a/crates/ty_python_semantic/src/types/type_ordering.rs
+++ b/crates/ty_python_semantic/src/types/type_ordering.rs
@@ -262,9 +262,6 @@ fn dynamic_elements_ordering(left: DynamicType, right: DynamicType) -> Ordering
         #[cfg(not(debug_assertions))]
         (DynamicType::Todo(TodoType), DynamicType::Todo(TodoType)) => Ordering::Equal,
 
-        (DynamicType::TodoPEP695ParamSpec, _) => Ordering::Less,
-        (_, DynamicType::TodoPEP695ParamSpec) => Ordering::Greater,
-
         (DynamicType::TodoUnpack, _) => Ordering::Less,
         (_, DynamicType::TodoUnpack) => Ordering::Greater,
 
diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap
index ba3b75028c..7373c4cf25 100644
--- a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap
+++ b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap
@@ -61,6 +61,7 @@ Settings: Settings {
         "invalid-named-tuple": Error (Default),
         "invalid-overload": Error (Default),
         "invalid-parameter-default": Error (Default),
+        "invalid-paramspec": Error (Default),
         "invalid-protocol": Error (Default),
         "invalid-raise": Error (Default),
         "invalid-return-type": Error (Default),
diff --git a/ty.schema.json b/ty.schema.json
index 55d5bdf996..cae55e4a1b 100644
--- a/ty.schema.json
+++ b/ty.schema.json
@@ -643,6 +643,16 @@
             }
           ]
         },
+        "invalid-paramspec": {
+          "title": "detects invalid ParamSpec usage",
+          "description": "## What it does\nChecks for the creation of invalid `ParamSpec`s\n\n## Why is this bad?\nThere are several requirements that you must follow when creating a `ParamSpec`.\n\n## Examples\n```python\nfrom typing import ParamSpec\n\nP1 = ParamSpec(\"P1\")  # okay\nP2 = ParamSpec(\"S2\")  # error: ParamSpec name must match the variable it's assigned to\n```\n\n## References\n- [Typing spec: ParamSpec](https://typing.python.org/en/latest/spec/generics.html#paramspec)",
+          "default": "error",
+          "oneOf": [
+            {
+              "$ref": "#/definitions/Level"
+            }
+          ]
+        },
         "invalid-protocol": {
           "title": "detects invalid protocol class definitions",
           "description": "## What it does\nChecks for protocol classes that will raise `TypeError` at runtime.\n\n## Why is this bad?\nAn invalidly defined protocol class may lead to the type checker inferring\nunexpected things. It may also lead to `TypeError`s at runtime.\n\n## Examples\nA `Protocol` class cannot inherit from a non-`Protocol` class;\nthis raises a `TypeError` at runtime:\n\n```pycon\n>>> from typing import Protocol\n>>> class Foo(int, Protocol): ...\n...\nTraceback (most recent call last):\n  File \"\", line 1, in \n    class Foo(int, Protocol): ...\nTypeError: Protocols can only inherit from other protocols, got \n```",

From 35640dd8534f694fb4cfdc3a96c13eed063f1015 Mon Sep 17 00:00:00 2001
From: Dhruv Manilawala 
Date: Thu, 6 Nov 2025 15:10:43 -0500
Subject: [PATCH 106/180] Fix main by using `infer_expression` (#21299)

---
 crates/ty_python_semantic/src/types/infer/builder.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index 8806dff536..8dc2b244cf 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -4895,7 +4895,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
             self.infer_type_expression(&bound.value);
         }
         if let Some(default) = arguments.find_keyword("default") {
-            let func_ty = self.get_or_infer_expression(func, TypeContext::default());
+            let func_ty = self
+                .try_expression_type(func)
+                .unwrap_or_else(|| self.infer_expression(func, TypeContext::default()));
             if func_ty.as_class_literal().is_some_and(|class_literal| {
                 class_literal.is_known(self.db(), KnownClass::ParamSpec)
             }) {

From c7ff9826d614a34a940c924f494ea98dc1030445 Mon Sep 17 00:00:00 2001
From: Dylan 
Date: Thu, 6 Nov 2025 15:47:29 -0600
Subject: [PATCH 107/180] Bump 0.14.4 (#21306)

---
 CHANGELOG.md                      | 45 +++++++++++++++++++++++++++++++
 Cargo.lock                        |  6 ++---
 README.md                         |  6 ++---
 crates/ruff/Cargo.toml            |  2 +-
 crates/ruff_linter/Cargo.toml     |  2 +-
 crates/ruff_wasm/Cargo.toml       |  2 +-
 docs/integrations.md              |  8 +++---
 docs/tutorial.md                  |  2 +-
 pyproject.toml                    |  2 +-
 scripts/benchmarks/pyproject.toml |  2 +-
 10 files changed, 61 insertions(+), 16 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e7d5ed2e2b..07e8d25786 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,50 @@
 # Changelog
 
+## 0.14.4
+
+Released on 2025-11-06.
+
+### Preview features
+
+- [formatter] Allow newlines after function headers without docstrings ([#21110](https://github.com/astral-sh/ruff/pull/21110))
+- [formatter] Avoid extra parentheses for long `match` patterns with `as` captures ([#21176](https://github.com/astral-sh/ruff/pull/21176))
+- \[`refurb`\] Expand fix safety for keyword arguments and `Decimal`s (`FURB164`) ([#21259](https://github.com/astral-sh/ruff/pull/21259))
+- \[`refurb`\] Preserve argument ordering in autofix (`FURB103`) ([#20790](https://github.com/astral-sh/ruff/pull/20790))
+
+### Bug fixes
+
+- [server] Fix missing diagnostics for notebooks ([#21156](https://github.com/astral-sh/ruff/pull/21156))
+- \[`flake8-bugbear`\] Ignore non-NFKC attribute names in `B009` and `B010` ([#21131](https://github.com/astral-sh/ruff/pull/21131))
+- \[`refurb`\] Fix false negative for underscores before sign in `Decimal` constructor (`FURB157`) ([#21190](https://github.com/astral-sh/ruff/pull/21190))
+- \[`ruff`\] Fix false positives on starred arguments (`RUF057`) ([#21256](https://github.com/astral-sh/ruff/pull/21256))
+
+### Rule changes
+
+- \[`airflow`\] extend deprecated argument `concurrency` in `airflow..DAG` (`AIR301`) ([#21220](https://github.com/astral-sh/ruff/pull/21220))
+
+### Documentation
+
+- Improve `extend` docs ([#21135](https://github.com/astral-sh/ruff/pull/21135))
+- \[`flake8-comprehensions`\] Fix typo in `C416` documentation ([#21184](https://github.com/astral-sh/ruff/pull/21184))
+- Revise Ruff setup instructions for Zed editor ([#20935](https://github.com/astral-sh/ruff/pull/20935))
+
+### Other changes
+
+- Make `ruff analyze graph` work with jupyter notebooks ([#21161](https://github.com/astral-sh/ruff/pull/21161))
+
+### Contributors
+
+- [@chirizxc](https://github.com/chirizxc)
+- [@Lee-W](https://github.com/Lee-W)
+- [@musicinmybrain](https://github.com/musicinmybrain)
+- [@MichaReiser](https://github.com/MichaReiser)
+- [@tjkuson](https://github.com/tjkuson)
+- [@danparizher](https://github.com/danparizher)
+- [@renovate](https://github.com/renovate)
+- [@ntBre](https://github.com/ntBre)
+- [@gauthsvenkat](https://github.com/gauthsvenkat)
+- [@LoicRiegel](https://github.com/LoicRiegel)
+
 ## 0.14.3
 
 Released on 2025-10-30.
diff --git a/Cargo.lock b/Cargo.lock
index db433c9c1f..2f7b83e994 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2858,7 +2858,7 @@ dependencies = [
 
 [[package]]
 name = "ruff"
-version = "0.14.3"
+version = "0.14.4"
 dependencies = [
  "anyhow",
  "argfile",
@@ -3115,7 +3115,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_linter"
-version = "0.14.3"
+version = "0.14.4"
 dependencies = [
  "aho-corasick",
  "anyhow",
@@ -3470,7 +3470,7 @@ dependencies = [
 
 [[package]]
 name = "ruff_wasm"
-version = "0.14.3"
+version = "0.14.4"
 dependencies = [
  "console_error_panic_hook",
  "console_log",
diff --git a/README.md b/README.md
index dcb399dd83..fd8cc2450e 100644
--- a/README.md
+++ b/README.md
@@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
 powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"
 
 # For a specific version.
-curl -LsSf https://astral.sh/ruff/0.14.3/install.sh | sh
-powershell -c "irm https://astral.sh/ruff/0.14.3/install.ps1 | iex"
+curl -LsSf https://astral.sh/ruff/0.14.4/install.sh | sh
+powershell -c "irm https://astral.sh/ruff/0.14.4/install.ps1 | iex"
 ```
 
 You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.3
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff-check
diff --git a/crates/ruff/Cargo.toml b/crates/ruff/Cargo.toml
index e977d5223e..a0daa3486e 100644
--- a/crates/ruff/Cargo.toml
+++ b/crates/ruff/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ruff"
-version = "0.14.3"
+version = "0.14.4"
 publish = true
 authors = { workspace = true }
 edition = { workspace = true }
diff --git a/crates/ruff_linter/Cargo.toml b/crates/ruff_linter/Cargo.toml
index 0826f28fbc..d1f48c4276 100644
--- a/crates/ruff_linter/Cargo.toml
+++ b/crates/ruff_linter/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ruff_linter"
-version = "0.14.3"
+version = "0.14.4"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
diff --git a/crates/ruff_wasm/Cargo.toml b/crates/ruff_wasm/Cargo.toml
index 2dc77f3b8e..aa10112777 100644
--- a/crates/ruff_wasm/Cargo.toml
+++ b/crates/ruff_wasm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "ruff_wasm"
-version = "0.14.3"
+version = "0.14.4"
 publish = false
 authors = { workspace = true }
 edition = { workspace = true }
diff --git a/docs/integrations.md b/docs/integrations.md
index f37ce29852..78850e7940 100644
--- a/docs/integrations.md
+++ b/docs/integrations.md
@@ -80,7 +80,7 @@ You can add the following configuration to `.gitlab-ci.yml` to run a `ruff forma
   stage: build
   interruptible: true
   image:
-    name: ghcr.io/astral-sh/ruff:0.14.3-alpine
+    name: ghcr.io/astral-sh/ruff:0.14.4-alpine
   before_script:
     - cd $CI_PROJECT_DIR
     - ruff --version
@@ -106,7 +106,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.3
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff-check
@@ -119,7 +119,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook:
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.3
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff-check
@@ -133,7 +133,7 @@ To avoid running on Jupyter Notebooks, remove `jupyter` from the list of allowed
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.3
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff-check
diff --git a/docs/tutorial.md b/docs/tutorial.md
index 4b0c43ac06..a35a58787a 100644
--- a/docs/tutorial.md
+++ b/docs/tutorial.md
@@ -369,7 +369,7 @@ This tutorial has focused on Ruff's command-line interface, but Ruff can also be
 ```yaml
 - repo: https://github.com/astral-sh/ruff-pre-commit
   # Ruff version.
-  rev: v0.14.3
+  rev: v0.14.4
   hooks:
     # Run the linter.
     - id: ruff
diff --git a/pyproject.toml b/pyproject.toml
index 28c9c93b39..70d8e6d8a1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "maturin"
 
 [project]
 name = "ruff"
-version = "0.14.3"
+version = "0.14.4"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }]
 readme = "README.md"
diff --git a/scripts/benchmarks/pyproject.toml b/scripts/benchmarks/pyproject.toml
index 92a8da6ea7..6867f98577 100644
--- a/scripts/benchmarks/pyproject.toml
+++ b/scripts/benchmarks/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "scripts"
-version = "0.14.3"
+version = "0.14.4"
 description = ""
 authors = ["Charles Marsh "]
 

From 6cc3393ccd9059439d9c1325e0e041db1d7481af Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Fri, 7 Nov 2025 15:44:23 +0100
Subject: [PATCH 108/180] [ty] Make range/position conversions fallible
 (#21297)

Co-authored-by: Aria Desires 
---
 crates/ty_server/src/document/location.rs     |  22 +-
 crates/ty_server/src/document/range.rs        | 247 ++++++++----------
 .../ty_server/src/server/api/diagnostics.rs   |   7 +-
 .../src/server/api/requests/completion.rs     |  17 +-
 .../src/server/api/requests/doc_highlights.rs |  14 +-
 .../server/api/requests/document_symbols.rs   |  18 +-
 .../server/api/requests/goto_declaration.rs   |   6 +-
 .../server/api/requests/goto_definition.rs    |   6 +-
 .../server/api/requests/goto_references.rs    |   6 +-
 .../api/requests/goto_type_definition.rs      |   6 +-
 .../src/server/api/requests/hover.rs          |  16 +-
 .../src/server/api/requests/inlay_hints.rs    |  33 ++-
 .../src/server/api/requests/prepare_rename.rs |  19 +-
 .../src/server/api/requests/rename.rs         |   6 +-
 .../server/api/requests/selection_range.rs    |  16 +-
 .../api/requests/semantic_tokens_range.rs     |   7 +-
 .../src/server/api/requests/signature_help.rs |   6 +-
 .../src/server/api/semantic_tokens.rs         |  10 +-
 crates/ty_server/src/server/api/symbols.rs    |   2 +-
 19 files changed, 239 insertions(+), 225 deletions(-)

diff --git a/crates/ty_server/src/document/location.rs b/crates/ty_server/src/document/location.rs
index 91a064acd3..67b7d0c659 100644
--- a/crates/ty_server/src/document/location.rs
+++ b/crates/ty_server/src/document/location.rs
@@ -20,7 +20,7 @@ pub(crate) trait ToLink {
 impl ToLink for NavigationTarget {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option {
         FileRange::new(self.file(), self.focus_range())
-            .as_lsp_range(db, encoding)
+            .to_lsp_range(db, encoding)?
             .to_location()
     }
 
@@ -35,17 +35,17 @@ impl ToLink for NavigationTarget {
         // Get target_range and URI together to ensure they're consistent (same cell for notebooks)
         let target_location = self
             .full_range()
-            .as_lsp_range(db, file, encoding)
-            .to_location()?;
+            .to_lsp_range(db, file, encoding)?
+            .into_location()?;
         let target_range = target_location.range;
 
+        // For selection_range, we can use local_range since we know it's in the same document/cell
         let selection_range = self
             .focus_range()
-            .as_lsp_range(db, file, encoding)
-            .to_local_range();
+            .to_lsp_range(db, file, encoding)?
+            .local_range();
 
-        let src = src.map(|src| src.as_lsp_range(db, encoding).to_local_range());
+        let src = src.and_then(|src| Some(src.to_lsp_range(db, encoding)?.local_range()));
 
         Some(lsp_types::LocationLink {
             target_uri: target_location.uri,
@@ -58,7 +58,9 @@ impl ToLink for NavigationTarget {
 
 impl ToLink for ReferenceTarget {
     fn to_location(&self, db: &dyn Db, encoding: PositionEncoding) -> Option {
-        self.file_range().as_lsp_range(db, encoding).to_location()
+        self.file_range()
+            .to_lsp_range(db, encoding)?
+            .into_location()
     }
 
     fn to_link(
@@ -70,12 +72,12 @@ impl ToLink for ReferenceTarget {
         // Get target_range and URI together to ensure they're consistent (same cell for notebooks)
         let target_location = self
             .range()
-            .as_lsp_range(db, self.file(), encoding)
-            .to_location()?;
+            .to_lsp_range(db, self.file(), encoding)?
+            .into_location()?;
         let target_range = target_location.range;
         let selection_range = target_range;
 
-        let src = src.map(|src| src.as_lsp_range(db, encoding).to_local_range());
+        let src = src.and_then(|src| Some(src.to_lsp_range(db, encoding)?.local_range()));
 
         Some(lsp_types::LocationLink {
             target_uri: target_location.uri,
diff --git a/crates/ty_server/src/document/range.rs b/crates/ty_server/src/document/range.rs
index 6d3d3eb0d4..894ef9ba09 100644
--- a/crates/ty_server/src/document/range.rs
+++ b/crates/ty_server/src/document/range.rs
@@ -2,133 +2,94 @@ use super::PositionEncoding;
 use crate::Db;
 use crate::system::file_to_url;
 
-use lsp_types as types;
-use lsp_types::{Location, Position, Url};
 use ruff_db::files::{File, FileRange};
 use ruff_db::source::{line_index, source_text};
 use ruff_source_file::LineIndex;
 use ruff_source_file::{OneIndexed, SourceLocation};
 use ruff_text_size::{Ranged, TextRange, TextSize};
 
-/// Represents a range that has been prepared for LSP conversion but requires
-/// a decision about how to use it - either as a local range within the same
-/// document/cell, or as a location that can reference any document in the project.
-#[derive(Clone)]
-pub(crate) struct LspRange<'db> {
-    file: File,
-    range: TextRange,
-    db: &'db dyn Db,
-    encoding: PositionEncoding,
+/// A range in an LSP text document (cell or a regular document).
+#[derive(Clone, Debug, Default)]
+pub(crate) struct LspRange {
+    range: lsp_types::Range,
+
+    /// The URI of this range's text document
+    uri: Option<lsp_types::Url>,
 }
 
-impl std::fmt::Debug for LspRange<'_> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("LspRange")
-            .field("range", &self.range)
-            .field("file", &self.file)
-            .field("encoding", &self.encoding)
-            .finish_non_exhaustive()
-    }
-}
-
-impl LspRange<'_> {
-    /// Convert to an LSP Range for use within the same document/cell.
-    /// Returns only the LSP Range without any URI information.
+impl LspRange {
+    /// Returns the range within this document.
     ///
-    /// Use this when you already have a URI context and this range is guaranteed
+    /// Only use `range` when you already have a URI context and this range is guaranteed
     /// to be within the same document/cell:
     /// - Selection ranges within a `LocationLink` (where `target_uri` provides context)
     /// - Additional ranges in the same cell (e.g., `selection_range` when you already have `target_range`)
     ///
-    /// Do NOT use this for standalone ranges - use `to_location()` instead to ensure
+    /// Do NOT use this for standalone ranges - use [`Self::to_location`] instead to ensure
     /// the URI and range are consistent.
-    pub(crate) fn to_local_range(&self) -> types::Range {
-        self.to_uri_and_range().1
+    pub(crate) fn local_range(&self) -> lsp_types::Range {
+        self.range
     }
 
-    /// Convert to a Location that can reference any document.
-    /// Returns a Location with both URI and Range.
+    /// Converts this range into an LSP location.
     ///
-    /// Use this for:
-    /// - Go-to-definition targets
-    /// - References
-    /// - Diagnostics related information
-    /// - Any cross-file navigation
-    pub(crate) fn to_location(&self) -> Option<Location> {
-        let (uri, range) = self.to_uri_and_range();
-        Some(Location { uri: uri?, range })
+    /// Returns `None` if the URI for this file couldn't be resolved.
+    pub(crate) fn to_location(&self) -> Option<lsp_types::Location> {
+        Some(lsp_types::Location {
+            uri: self.uri.clone()?,
+            range: self.range,
+        })
     }
 
-    pub(crate) fn to_uri_and_range(&self) -> (Option<Url>, lsp_types::Range) {
-        let source = source_text(self.db, self.file);
-        let index = line_index(self.db, self.file);
-
-        let uri = file_to_url(self.db, self.file);
-        let range = text_range_to_lsp_range(self.range, &source, &index, self.encoding);
-        (uri, range)
+    pub(crate) fn into_location(self) -> Option<lsp_types::Location> {
+        Some(lsp_types::Location {
+            uri: self.uri?,
+            range: self.range,
+        })
     }
 }
 
-/// Represents a position that has been prepared for LSP conversion but requires
-/// a decision about how to use it - either as a local position within the same
-/// document/cell, or as a location with a single-point range that can reference
-/// any document in the project.
-#[derive(Clone)]
-pub(crate) struct LspPosition<'db> {
-    file: File,
-    position: TextSize,
-    db: &'db dyn Db,
-    encoding: PositionEncoding,
+/// A position in an LSP text document (cell or a regular document).
+#[derive(Clone, Debug, Default)]
+pub(crate) struct LspPosition {
+    position: lsp_types::Position,
+
+    /// The URI of this range's text document
+    uri: Option<lsp_types::Url>,
 }
 
-impl std::fmt::Debug for LspPosition<'_> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        f.debug_struct("LspPosition")
-            .field("position", &self.position)
-            .field("file", &self.file)
-            .field("encoding", &self.encoding)
-            .finish_non_exhaustive()
-    }
-}
-
-impl LspPosition<'_> {
-    /// Convert to an LSP Position for use within the same document/cell.
-    /// Returns only the LSP Position without any URI information.
+impl LspPosition {
+    /// Returns the position within this document.
     ///
-    /// Use this when you already have a URI context and this position is guaranteed
-    /// to be within the same document/cell:
-    /// - Inlay hints (where the document URI is already known)
-    /// - Positions within the same cell as a parent range
+    /// Only use [`Self::local_position`] when you already have a URI context and this position is guaranteed
+    /// to be within the same document/cell
     ///
-    /// Do NOT use this for standalone positions that might need a URI - use
-    /// `to_location()` instead to ensure the URI and position are consistent.
-    pub(crate) fn to_local_position(&self) -> types::Position {
-        self.to_location().1
+    /// Do NOT use this for standalone positions - use [`Self::to_location`] instead to ensure
+    /// the URI and position are consistent.
+    pub(crate) fn local_position(&self) -> lsp_types::Position {
+        self.position
     }
 
-    /// Convert to a Location with a single-point range that can reference any document.
-    /// Returns a Location with both URI and a range where start == end.
-    ///
-    /// Use this for any cross-file navigation where you need both URI and position.
-    pub(crate) fn to_location(&self) -> (Option<Url>, Position) {
-        let source = source_text(self.db, self.file);
-        let index = line_index(self.db, self.file);
-
-        let uri = file_to_url(self.db, self.file);
-        let position = text_size_to_lsp_position(self.position, &source, &index, self.encoding);
-        (uri, position)
+    /// Returns the uri of the text document this position belongs to.
+    #[expect(unused)]
+    pub(crate) fn uri(&self) -> Option<&lsp_types::Url> {
+        self.uri.as_ref()
     }
 }
 
 pub(crate) trait RangeExt {
-    /// Convert an LSP Range to internal `TextRange`.
+    /// Convert an LSP Range to internal [`TextRange`].
+    ///
+    /// Returns `None` if `file` is a notebook and the
+    /// cell identified by `url` can't be looked up or if the notebook
+    /// isn't open in the editor.
     fn to_text_range(
         &self,
         db: &dyn Db,
         file: File,
         url: &lsp_types::Url,
         encoding: PositionEncoding,
-    ) -> TextRange;
+    ) -> Option<TextRange>;
 }
 
 impl RangeExt for lsp_types::Range {
@@ -138,23 +99,27 @@ impl RangeExt for lsp_types::Range {
         file: File,
         url: &lsp_types::Url,
         encoding: PositionEncoding,
-    ) -> TextRange {
-        let start = self.start.to_text_size(db, file, url, encoding);
-        let end = self.end.to_text_size(db, file, url, encoding);
+    ) -> Option<TextRange> {
+        let start = self.start.to_text_size(db, file, url, encoding)?;
+        let end = self.end.to_text_size(db, file, url, encoding)?;
 
-        TextRange::new(start, end)
+        Some(TextRange::new(start, end))
     }
 }
 
 pub(crate) trait PositionExt {
     /// Convert an LSP Position to internal `TextSize`.
+    ///
+    /// Returns `None` if `file` is a notebook and the
+    /// cell identified by `url` can't be looked up or if the notebook
+    /// isn't open in the editor.
     fn to_text_size(
         &self,
         db: &dyn Db,
         file: File,
         url: &lsp_types::Url,
         encoding: PositionEncoding,
-    ) -> TextSize;
+    ) -> Option<TextSize>;
 }
 
 impl PositionExt for lsp_types::Position {
@@ -164,52 +129,59 @@ impl PositionExt for lsp_types::Position {
         file: File,
         _url: &lsp_types::Url,
         encoding: PositionEncoding,
-    ) -> TextSize {
+    ) -> Option<TextSize> {
         let source = source_text(db, file);
         let index = line_index(db, file);
 
-        lsp_position_to_text_size(*self, &source, &index, encoding)
+        Some(lsp_position_to_text_size(*self, &source, &index, encoding))
     }
 }
 
 pub(crate) trait TextSizeExt {
-    /// Converts this position to an `LspPosition`, which then requires an explicit
-    /// decision about how to use it (as a local position or as a location).
-    fn as_lsp_position<'db>(
+    /// Converts self into a position into an LSP text document (can be a cell or regular document).
+    ///
+    /// Returns `None` if the position can't be converted:
+    ///
+    /// * If `file` is a notebook but the notebook isn't open in the editor,
+    ///   preventing us from looking up the corresponding cell.
+    /// * If `position` is out of bounds.
+    fn to_lsp_position(
         &self,
-        db: &'db dyn Db,
+        db: &dyn Db,
         file: File,
         encoding: PositionEncoding,
-    ) -> LspPosition<'db>
+    ) -> Option<LspPosition>
     where
         Self: Sized;
 }
 
 impl TextSizeExt for TextSize {
-    fn as_lsp_position<'db>(
+    fn to_lsp_position(
         &self,
-        db: &'db dyn Db,
+        db: &dyn Db,
         file: File,
         encoding: PositionEncoding,
-    ) -> LspPosition<'db> {
-        LspPosition {
-            file,
-            position: *self,
-            db,
-            encoding,
-        }
+    ) -> Option<LspPosition> {
+        let source = source_text(db, file);
+        let index = line_index(db, file);
+
+        let uri = file_to_url(db, file);
+        let position = text_size_to_lsp_position(*self, &source, &index, encoding);
+
+        Some(LspPosition { position, uri })
     }
 }
 
 pub(crate) trait ToRangeExt {
-    /// Converts this range to an `LspRange`, which then requires an explicit
-    /// decision about how to use it (as a local range or as a location).
-    fn as_lsp_range<'db>(
-        &self,
-        db: &'db dyn Db,
-        file: File,
-        encoding: PositionEncoding,
-    ) -> LspRange<'db>;
+    /// Converts self into a range into an LSP text document (can be a cell or regular document).
+    ///
+    /// Returns `None` if the range can't be converted:
+    ///
+    /// * If `file` is a notebook but the notebook isn't open in the editor,
+    ///   preventing us from looking up the corresponding cell.
+    /// * If range is out of bounds.
+    fn to_lsp_range(&self, db: &dyn Db, file: File, encoding: PositionEncoding)
+    -> Option<LspRange>;
 }
 
 fn u32_index_to_usize(index: u32) -> usize {
@@ -221,7 +193,7 @@ fn text_size_to_lsp_position(
     text: &str,
     index: &LineIndex,
     encoding: PositionEncoding,
-) -> types::Position {
+) -> lsp_types::Position {
     let source_location = index.source_location(offset, text, encoding.into());
     source_location_to_position(&source_location)
 }
@@ -231,8 +203,8 @@ fn text_range_to_lsp_range(
     text: &str,
     index: &LineIndex,
     encoding: PositionEncoding,
-) -> types::Range {
-    types::Range {
+) -> lsp_types::Range {
+    lsp_types::Range {
         start: text_size_to_lsp_position(range.start(), text, index, encoding),
         end: text_size_to_lsp_position(range.end(), text, index, encoding),
     }
@@ -272,23 +244,23 @@ pub(crate) fn lsp_range_to_text_range(
 }
 
 impl ToRangeExt for TextRange {
-    fn as_lsp_range<'db>(
+    fn to_lsp_range(
         &self,
-        db: &'db dyn Db,
+        db: &dyn Db,
         file: File,
         encoding: PositionEncoding,
-    ) -> LspRange<'db> {
-        LspRange {
-            file,
-            range: *self,
-            db,
-            encoding,
-        }
+    ) -> Option<LspRange> {
+        let source = source_text(db, file);
+        let index = line_index(db, file);
+        let range = text_range_to_lsp_range(*self, &source, &index, encoding);
+
+        let uri = file_to_url(db, file);
+        Some(LspRange { range, uri })
     }
 }
 
-fn source_location_to_position(location: &SourceLocation) -> types::Position {
-    types::Position {
+fn source_location_to_position(location: &SourceLocation) -> lsp_types::Position {
+    lsp_types::Position {
         line: u32::try_from(location.line.to_zero_indexed()).expect("line usize fits in u32"),
         character: u32::try_from(location.character_offset.to_zero_indexed())
             .expect("character usize fits in u32"),
@@ -298,16 +270,11 @@ fn source_location_to_position(location: &SourceLocation) -> types::Position {
 pub(crate) trait FileRangeExt {
     /// Converts this file range to an `LspRange`, which then requires an explicit
     /// decision about how to use it (as a local range or as a location).
-    fn as_lsp_range<'db>(&self, db: &'db dyn Db, encoding: PositionEncoding) -> LspRange<'db>;
+    fn to_lsp_range(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<LspRange>;
 }
 
 impl FileRangeExt for FileRange {
-    fn as_lsp_range<'db>(&self, db: &'db dyn Db, encoding: PositionEncoding) -> LspRange<'db> {
-        LspRange {
-            file: self.file(),
-            range: self.range(),
-            db,
-            encoding,
-        }
+    fn to_lsp_range(&self, db: &dyn Db, encoding: PositionEncoding) -> Option<LspRange> {
+        self.range().to_lsp_range(db, self.file(), encoding)
     }
 }
diff --git a/crates/ty_server/src/server/api/diagnostics.rs b/crates/ty_server/src/server/api/diagnostics.rs
index 54a0e79a2e..98d927cf2e 100644
--- a/crates/ty_server/src/server/api/diagnostics.rs
+++ b/crates/ty_server/src/server/api/diagnostics.rs
@@ -281,8 +281,9 @@ pub(super) fn to_lsp_diagnostic(
         let file = span.expect_ty_file();
 
         span.range()
-            .map(|range| range.as_lsp_range(db, file, encoding).to_local_range())
+            .and_then(|range| range.to_lsp_range(db, file, encoding))
             .unwrap_or_default()
+            .local_range()
     } else {
         Range::default()
     };
@@ -363,7 +364,7 @@ fn annotation_to_related_information(
 
     let annotation_message = annotation.get_message()?;
     let range = FileRange::try_from(span).ok()?;
-    let location = range.as_lsp_range(db, encoding).to_location()?;
+    let location = range.to_lsp_range(db, encoding)?.into_location()?;
 
     Some(DiagnosticRelatedInformation {
         location,
@@ -381,7 +382,7 @@ fn sub_diagnostic_to_related_information(
 
     let span = primary_annotation.get_span();
     let range = FileRange::try_from(span).ok()?;
-    let location = range.as_lsp_range(db, encoding).to_location()?;
+    let location = range.to_lsp_range(db, encoding)?.into_location()?;
 
     Some(DiagnosticRelatedInformation {
         location,
diff --git a/crates/ty_server/src/server/api/requests/completion.rs b/crates/ty_server/src/server/api/requests/completion.rs
index 6473661939..e99de7fb39 100644
--- a/crates/ty_server/src/server/api/requests/completion.rs
+++ b/crates/ty_server/src/server/api/requests/completion.rs
@@ -48,12 +48,15 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position.position.to_text_size(
+        let Some(offset) = params.text_document_position.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
+
         let settings = CompletionSettings {
             auto_import: snapshot.global_settings().is_auto_import_enabled(),
         };
@@ -70,15 +73,15 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler {
             .map(|(i, comp)| {
                 let kind = comp.kind(db).map(ty_kind_to_lsp_kind);
                 let type_display = comp.ty.map(|ty| ty.display(db).to_string());
-                let import_edit = comp.import.as_ref().map(|edit| {
+                let import_edit = comp.import.as_ref().and_then(|edit| {
                     let range = edit
                         .range()
-                        .as_lsp_range(db, file, snapshot.encoding())
-                        .to_local_range();
-                    TextEdit {
+                        .to_lsp_range(db, file, snapshot.encoding())?
+                        .local_range();
+                    Some(TextEdit {
                         range,
                         new_text: edit.content().map(ToString::to_string).unwrap_or_default(),
-                    }
+                    })
                 });
 
                 let name = comp.name.to_string();
diff --git a/crates/ty_server/src/server/api/requests/doc_highlights.rs b/crates/ty_server/src/server/api/requests/doc_highlights.rs
index c96c3d4fef..bf10a95310 100644
--- a/crates/ty_server/src/server/api/requests/doc_highlights.rs
+++ b/crates/ty_server/src/server/api/requests/doc_highlights.rs
@@ -40,12 +40,14 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(highlights_result) = document_highlights(db, file, offset) else {
             return Ok(None);
@@ -53,11 +55,11 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler {
 
         let highlights: Vec<_> = highlights_result
             .into_iter()
-            .map(|target| {
+            .filter_map(|target| {
                 let range = target
                     .range()
-                    .as_lsp_range(db, file, snapshot.encoding())
-                    .to_local_range();
+                    .to_lsp_range(db, file, snapshot.encoding())?
+                    .local_range();
 
                 let kind = match target.kind() {
                     ReferenceKind::Read => Some(DocumentHighlightKind::READ),
@@ -65,7 +67,7 @@ impl BackgroundDocumentRequestHandler for DocumentHighlightRequestHandler {
                     ReferenceKind::Other => Some(DocumentHighlightKind::TEXT),
                 };
 
-                DocumentHighlight { range, kind }
+                Some(DocumentHighlight { range, kind })
             })
             .collect();
 
diff --git a/crates/ty_server/src/server/api/requests/document_symbols.rs b/crates/ty_server/src/server/api/requests/document_symbols.rs
index 95edd391f4..1001d33648 100644
--- a/crates/ty_server/src/server/api/requests/document_symbols.rs
+++ b/crates/ty_server/src/server/api/requests/document_symbols.rs
@@ -57,7 +57,7 @@ impl BackgroundDocumentRequestHandler for DocumentSymbolRequestHandler {
             let symbols = symbols.to_hierarchical();
             let lsp_symbols: Vec<DocumentSymbol> = symbols
                 .iter()
-                .map(|(id, symbol)| {
+                .filter_map(|(id, symbol)| {
                     convert_to_lsp_document_symbol(
                         db,
                         file,
@@ -93,10 +93,10 @@ fn convert_to_lsp_document_symbol(
     id: SymbolId,
     symbol: SymbolInfo<'_>,
     encoding: PositionEncoding,
-) -> DocumentSymbol {
+) -> Option<DocumentSymbol> {
     let symbol_kind = convert_symbol_kind(symbol.kind);
 
-    DocumentSymbol {
+    Some(DocumentSymbol {
         name: symbol.name.into_owned(),
         detail: None,
         kind: symbol_kind,
@@ -105,19 +105,19 @@ fn convert_to_lsp_document_symbol(
         deprecated: None,
         range: symbol
             .full_range
-            .as_lsp_range(db, file, encoding)
-            .to_local_range(),
+            .to_lsp_range(db, file, encoding)?
+            .local_range(),
         selection_range: symbol
             .name_range
-            .as_lsp_range(db, file, encoding)
-            .to_local_range(),
+            .to_lsp_range(db, file, encoding)?
+            .local_range(),
         children: Some(
             symbols
                 .children(id)
-                .map(|(child_id, child)| {
+                .filter_map(|(child_id, child)| {
                     convert_to_lsp_document_symbol(db, file, symbols, child_id, child, encoding)
                 })
                 .collect(),
         ),
-    }
+    })
 }
diff --git a/crates/ty_server/src/server/api/requests/goto_declaration.rs b/crates/ty_server/src/server/api/requests/goto_declaration.rs
index 2a8c931401..7d8864ced9 100644
--- a/crates/ty_server/src/server/api/requests/goto_declaration.rs
+++ b/crates/ty_server/src/server/api/requests/goto_declaration.rs
@@ -40,12 +40,14 @@ impl BackgroundDocumentRequestHandler for GotoDeclarationRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(ranged) = goto_declaration(db, file, offset) else {
             return Ok(None);
diff --git a/crates/ty_server/src/server/api/requests/goto_definition.rs b/crates/ty_server/src/server/api/requests/goto_definition.rs
index 343f90a5c9..24dd781032 100644
--- a/crates/ty_server/src/server/api/requests/goto_definition.rs
+++ b/crates/ty_server/src/server/api/requests/goto_definition.rs
@@ -40,12 +40,14 @@ impl BackgroundDocumentRequestHandler for GotoDefinitionRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(ranged) = goto_definition(db, file, offset) else {
             return Ok(None);
diff --git a/crates/ty_server/src/server/api/requests/goto_references.rs b/crates/ty_server/src/server/api/requests/goto_references.rs
index 6cdb8e21a4..a2bba12569 100644
--- a/crates/ty_server/src/server/api/requests/goto_references.rs
+++ b/crates/ty_server/src/server/api/requests/goto_references.rs
@@ -40,12 +40,14 @@ impl BackgroundDocumentRequestHandler for ReferencesRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position.position.to_text_size(
+        let Some(offset) = params.text_document_position.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let include_declaration = params.context.include_declaration;
 
diff --git a/crates/ty_server/src/server/api/requests/goto_type_definition.rs b/crates/ty_server/src/server/api/requests/goto_type_definition.rs
index 11564f50d7..31e2816225 100644
--- a/crates/ty_server/src/server/api/requests/goto_type_definition.rs
+++ b/crates/ty_server/src/server/api/requests/goto_type_definition.rs
@@ -40,12 +40,14 @@ impl BackgroundDocumentRequestHandler for GotoTypeDefinitionRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(ranged) = goto_type_definition(db, file, offset) else {
             return Ok(None);
diff --git a/crates/ty_server/src/server/api/requests/hover.rs b/crates/ty_server/src/server/api/requests/hover.rs
index d051007003..d9e7ec6430 100644
--- a/crates/ty_server/src/server/api/requests/hover.rs
+++ b/crates/ty_server/src/server/api/requests/hover.rs
@@ -39,12 +39,14 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(range_info) = hover(db, file, offset) else {
             return Ok(None);
@@ -66,12 +68,10 @@ impl BackgroundDocumentRequestHandler for HoverRequestHandler {
                 kind: lsp_markup_kind,
                 value: contents,
             }),
-            range: Some(
-                range_info
-                    .file_range()
-                    .as_lsp_range(db, snapshot.encoding())
-                    .to_local_range(),
-            ),
+            range: range_info
+                .file_range()
+                .to_lsp_range(db, snapshot.encoding())
+                .map(|lsp_range| lsp_range.local_range()),
         }))
     }
 }
diff --git a/crates/ty_server/src/server/api/requests/inlay_hints.rs b/crates/ty_server/src/server/api/requests/inlay_hints.rs
index ec445f9b1e..2698456970 100644
--- a/crates/ty_server/src/server/api/requests/inlay_hints.rs
+++ b/crates/ty_server/src/server/api/requests/inlay_hints.rs
@@ -39,26 +39,31 @@ impl BackgroundDocumentRequestHandler for InlayHintRequestHandler {
             return Ok(None);
         };
 
-        let range = params
+        let Some(range) = params
             .range
-            .to_text_range(db, file, snapshot.url(), snapshot.encoding());
+            .to_text_range(db, file, snapshot.url(), snapshot.encoding())
+        else {
+            return Ok(None);
+        };
 
         let inlay_hints = inlay_hints(db, file, range, workspace_settings.inlay_hints());
 
         let inlay_hints = inlay_hints
             .into_iter()
-            .map(|hint| lsp_types::InlayHint {
-                position: hint
-                    .position
-                    .as_lsp_position(db, file, snapshot.encoding())
-                    .to_local_position(),
-                label: inlay_hint_label(&hint.label),
-                kind: Some(inlay_hint_kind(&hint.kind)),
-                tooltip: None,
-                padding_left: None,
-                padding_right: None,
-                data: None,
-                text_edits: None,
+            .filter_map(|hint| {
+                Some(lsp_types::InlayHint {
+                    position: hint
+                        .position
+                        .to_lsp_position(db, file, snapshot.encoding())?
+                        .local_position(),
+                    label: inlay_hint_label(&hint.label),
+                    kind: Some(inlay_hint_kind(&hint.kind)),
+                    tooltip: None,
+                    padding_left: None,
+                    padding_right: None,
+                    data: None,
+                    text_edits: None,
+                })
             })
             .collect();
 
diff --git a/crates/ty_server/src/server/api/requests/prepare_rename.rs b/crates/ty_server/src/server/api/requests/prepare_rename.rs
index 2593122530..f12dde90b7 100644
--- a/crates/ty_server/src/server/api/requests/prepare_rename.rs
+++ b/crates/ty_server/src/server/api/requests/prepare_rename.rs
@@ -40,17 +40,24 @@ impl BackgroundDocumentRequestHandler for PrepareRenameRequestHandler {
             return Ok(None);
         };
 
-        let offset = params
-            .position
-            .to_text_size(db, file, snapshot.url(), snapshot.encoding());
+        let Some(offset) =
+            params
+                .position
+                .to_text_size(db, file, snapshot.url(), snapshot.encoding())
+        else {
+            return Ok(None);
+        };
 
         let Some(range) = can_rename(db, file, offset) else {
             return Ok(None);
         };
 
-        let lsp_range = range
-            .as_lsp_range(db, file, snapshot.encoding())
-            .to_local_range();
+        let Some(lsp_range) = range
+            .to_lsp_range(db, file, snapshot.encoding())
+            .map(|lsp_range| lsp_range.local_range())
+        else {
+            return Ok(None);
+        };
 
         Ok(Some(PrepareRenameResponse::Range(lsp_range)))
     }
diff --git a/crates/ty_server/src/server/api/requests/rename.rs b/crates/ty_server/src/server/api/requests/rename.rs
index efa3891ced..978e1769df 100644
--- a/crates/ty_server/src/server/api/requests/rename.rs
+++ b/crates/ty_server/src/server/api/requests/rename.rs
@@ -41,12 +41,14 @@ impl BackgroundDocumentRequestHandler for RenameRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position.position.to_text_size(
+        let Some(offset) = params.text_document_position.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         let Some(rename_results) = rename(db, file, offset, &params.new_name) else {
             return Ok(None);
diff --git a/crates/ty_server/src/server/api/requests/selection_range.rs b/crates/ty_server/src/server/api/requests/selection_range.rs
index 77d9df4c25..46518810f6 100644
--- a/crates/ty_server/src/server/api/requests/selection_range.rs
+++ b/crates/ty_server/src/server/api/requests/selection_range.rs
@@ -43,17 +43,25 @@ impl BackgroundDocumentRequestHandler for SelectionRangeRequestHandler {
         let mut results = Vec::new();
 
         for position in params.positions {
-            let offset = position.to_text_size(db, file, snapshot.url(), snapshot.encoding());
+            let Some(offset) = position.to_text_size(db, file, snapshot.url(), snapshot.encoding())
+            else {
+                continue;
+            };
 
             let ranges = selection_range(db, file, offset);
             if !ranges.is_empty() {
                 // Convert ranges to nested LSP SelectionRange structure
                 let mut lsp_range = None;
                 for &range in &ranges {
+                    let Some(range) = range
+                        .to_lsp_range(db, file, snapshot.encoding())
+                        .map(|lsp_range| lsp_range.local_range())
+                    else {
+                        break;
+                    };
+
                     lsp_range = Some(LspSelectionRange {
-                        range: range
-                            .as_lsp_range(db, file, snapshot.encoding())
-                            .to_local_range(),
+                        range,
                         parent: lsp_range.map(Box::new),
                     });
                 }
diff --git a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
index 7daa116876..1cd0484f14 100644
--- a/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
+++ b/crates/ty_server/src/server/api/requests/semantic_tokens_range.rs
@@ -39,10 +39,13 @@ impl BackgroundDocumentRequestHandler for SemanticTokensRangeRequestHandler {
         };
 
         // Convert LSP range to text offsets
-        let requested_range =
+        let Some(requested_range) =
             params
                 .range
-                .to_text_range(db, file, snapshot.url(), snapshot.encoding());
+                .to_text_range(db, file, snapshot.url(), snapshot.encoding())
+        else {
+            return Ok(None);
+        };
 
         let lsp_tokens = generate_semantic_tokens(
             db,
diff --git a/crates/ty_server/src/server/api/requests/signature_help.rs b/crates/ty_server/src/server/api/requests/signature_help.rs
index 99d60c398f..81c31adc1b 100644
--- a/crates/ty_server/src/server/api/requests/signature_help.rs
+++ b/crates/ty_server/src/server/api/requests/signature_help.rs
@@ -42,12 +42,14 @@ impl BackgroundDocumentRequestHandler for SignatureHelpRequestHandler {
             return Ok(None);
         };
 
-        let offset = params.text_document_position_params.position.to_text_size(
+        let Some(offset) = params.text_document_position_params.position.to_text_size(
             db,
             file,
             snapshot.url(),
             snapshot.encoding(),
-        );
+        ) else {
+            return Ok(None);
+        };
 
         // Extract signature help capabilities from the client
         let resolved_capabilities = snapshot.resolved_client_capabilities();
diff --git a/crates/ty_server/src/server/api/semantic_tokens.rs b/crates/ty_server/src/server/api/semantic_tokens.rs
index ee9808b791..a6208087c0 100644
--- a/crates/ty_server/src/server/api/semantic_tokens.rs
+++ b/crates/ty_server/src/server/api/semantic_tokens.rs
@@ -24,10 +24,14 @@ pub(crate) fn generate_semantic_tokens(
     let mut prev_start = 0u32;
 
     for token in &*semantic_token_data {
-        let lsp_range = token
+        let Some(lsp_range) = token
             .range()
-            .as_lsp_range(db, file, encoding)
-            .to_local_range();
+            .to_lsp_range(db, file, encoding)
+            .map(|lsp_range| lsp_range.local_range())
+        else {
+            continue;
+        };
+
         let line = lsp_range.start.line;
         let character = lsp_range.start.character;
 
diff --git a/crates/ty_server/src/server/api/symbols.rs b/crates/ty_server/src/server/api/symbols.rs
index e823e32a98..dd0dc67dcb 100644
--- a/crates/ty_server/src/server/api/symbols.rs
+++ b/crates/ty_server/src/server/api/symbols.rs
@@ -36,7 +36,7 @@ pub(crate) fn convert_to_lsp_symbol_information(
 
     let location = symbol
         .full_range
-        .as_lsp_range(db, file, encoding)
+        .to_lsp_range(db, file, encoding)?
         .to_location()?;
 
     Some(SymbolInformation {

From 6185a2af9e0f25d87bb26a1fd588822d114f7a5c Mon Sep 17 00:00:00 2001
From: Dan Parizher <105245560+danparizher@users.noreply.github.com>
Date: Fri, 7 Nov 2025 11:01:52 -0500
Subject: [PATCH 109/180] [`pyupgrade`] Fix false positive on relative imports
 from local `.builtins` module (`UP029`) (#21309)

---
 .../resources/test/fixtures/pyupgrade/UP029_2.py            | 5 +++++
 crates/ruff_linter/src/checkers/ast/analyze/statement.rs    | 4 +++-
 crates/ruff_linter/src/rules/pyupgrade/mod.rs               | 1 +
 .../src/rules/pyupgrade/rules/unnecessary_builtin_import.rs | 6 ++++++
 .../ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap   | 4 ++++
 5 files changed, 19 insertions(+), 1 deletion(-)
 create mode 100644 crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py
 create mode 100644 crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap

diff --git a/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py
new file mode 100644
index 0000000000..34ff94fa90
--- /dev/null
+++ b/crates/ruff_linter/resources/test/fixtures/pyupgrade/UP029_2.py
@@ -0,0 +1,5 @@
+from .builtins import next
+from ..builtins import str
+from ...builtins import int
+from .builtins import next as _next
+
diff --git a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
index 2e7523891b..1232dc52a9 100644
--- a/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
+++ b/crates/ruff_linter/src/checkers/ast/analyze/statement.rs
@@ -717,7 +717,9 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
             }
             if checker.is_rule_enabled(Rule::UnnecessaryBuiltinImport) {
                 if let Some(module) = module {
-                    pyupgrade::rules::unnecessary_builtin_import(checker, stmt, module, names);
+                    pyupgrade::rules::unnecessary_builtin_import(
+                        checker, stmt, module, names, level,
+                    );
                 }
             }
             if checker.any_rule_enabled(&[
diff --git a/crates/ruff_linter/src/rules/pyupgrade/mod.rs b/crates/ruff_linter/src/rules/pyupgrade/mod.rs
index c933de5ee8..a4bd1c5ac6 100644
--- a/crates/ruff_linter/src/rules/pyupgrade/mod.rs
+++ b/crates/ruff_linter/src/rules/pyupgrade/mod.rs
@@ -99,6 +99,7 @@ mod tests {
     #[test_case(Rule::UTF8EncodingDeclaration, Path::new("UP009_many_empty_lines.py"))]
     #[test_case(Rule::UnicodeKindPrefix, Path::new("UP025.py"))]
     #[test_case(Rule::UnnecessaryBuiltinImport, Path::new("UP029_0.py"))]
+    #[test_case(Rule::UnnecessaryBuiltinImport, Path::new("UP029_2.py"))]
     #[test_case(Rule::UnnecessaryClassParentheses, Path::new("UP039.py"))]
     #[test_case(Rule::UnnecessaryDefaultTypeArgs, Path::new("UP043.py"))]
     #[test_case(Rule::UnnecessaryEncodeUTF8, Path::new("UP012.py"))]
diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs
index 41e73a3096..f15b2dfeb3 100644
--- a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs
+++ b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_builtin_import.rs
@@ -75,7 +75,13 @@ pub(crate) fn unnecessary_builtin_import(
     stmt: &Stmt,
     module: &str,
     names: &[Alias],
+    level: u32,
 ) {
+    // Ignore relative imports (they're importing from local modules, not Python's builtins).
+    if level > 0 {
+        return;
+    }
+
     // Ignore irrelevant modules.
     if !matches!(
         module,
diff --git a/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap
new file mode 100644
index 0000000000..2bacb5d540
--- /dev/null
+++ b/crates/ruff_linter/src/rules/pyupgrade/snapshots/ruff_linter__rules__pyupgrade__tests__UP029_2.py.snap
@@ -0,0 +1,4 @@
+---
+source: crates/ruff_linter/src/rules/pyupgrade/mod.rs
+---
+

From 8ba1cfebed603652152313e1bed2b67d8acdd083 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Fri, 7 Nov 2025 17:18:28 +0100
Subject: [PATCH 110/180] [ty] Add missing `heap_size` to `variance_of` queries
 (#21318)

---
 crates/ty_python_semantic/src/types/class.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs
index 1b794aefe0..de0b4c180c 100644
--- a/crates/ty_python_semantic/src/types/class.rs
+++ b/crates/ty_python_semantic/src/types/class.rs
@@ -294,7 +294,7 @@ impl<'db> From> for Type<'db> {
 
 #[salsa::tracked]
 impl<'db> VarianceInferable<'db> for GenericAlias<'db> {
-    #[salsa::tracked]
+    #[salsa::tracked(heap_size=ruff_memory_usage::heap_size)]
     fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance {
         let origin = self.origin(db);
 
@@ -3547,7 +3547,7 @@ impl<'db> From> for ClassType<'db> {
 
 #[salsa::tracked]
 impl<'db> VarianceInferable<'db> for ClassLiteral<'db> {
-    #[salsa::tracked(cycle_initial=crate::types::variance_cycle_initial)]
+    #[salsa::tracked(cycle_initial=crate::types::variance_cycle_initial, heap_size=ruff_memory_usage::heap_size)]
     fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance {
         let typevar_in_generic_context = self
             .generic_context(db)

From ed18112cfaf1d8b5a056ea5ecaf7f5ef327c8429 Mon Sep 17 00:00:00 2001
From: David Peter 
Date: Fri, 7 Nov 2025 17:46:55 +0100
Subject: [PATCH 111/180] [ty] Add support for `Literal`s in implicit type
 aliases (#21296)

## Summary

Add support for `Literal` types in implicit type aliases.

part of https://github.com/astral-sh/ty/issues/221

## Ecosystem analysis

This looks good to me, true positives and known problems.

## Test Plan

New Markdown tests.
---
 .../resources/mdtest/annotations/literal.md   | 60 ++++++----------
 .../resources/mdtest/implicit_type_aliases.md | 72 ++++++++++++++++++-
 crates/ty_python_semantic/src/types.rs        | 57 ++++++++++-----
 .../src/types/class_base.rs                   |  3 +-
 .../src/types/infer/builder.rs                | 39 ++++++++--
 .../types/infer/builder/type_expression.rs    | 16 ++++-
 6 files changed, 179 insertions(+), 68 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md
index 897be97e77..0c6a443afa 100644
--- a/crates/ty_python_semantic/resources/mdtest/annotations/literal.md
+++ b/crates/ty_python_semantic/resources/mdtest/annotations/literal.md
@@ -181,30 +181,20 @@ def _(
     bool2: Literal[Bool2],
     multiple: Literal[SingleInt, SingleStr, SingleEnum],
 ):
-    # TODO should be `Literal[1]`
-    reveal_type(single_int)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal["foo"]`
-    reveal_type(single_str)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[b"bar"]`
-    reveal_type(single_bytes)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[True]`
-    reveal_type(single_bool)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `None`
-    reveal_type(single_none)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[E.A]`
-    reveal_type(single_enum)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[1, "foo", b"bar", True, E.A] | None`
-    reveal_type(union_literals)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(single_int)  # revealed: Literal[1]
+    reveal_type(single_str)  # revealed: Literal["foo"]
+    reveal_type(single_bytes)  # revealed: Literal[b"bar"]
+    reveal_type(single_bool)  # revealed: Literal[True]
+    reveal_type(single_none)  # revealed: None
+    reveal_type(single_enum)  # revealed: Literal[E.A]
+    reveal_type(union_literals)  # revealed: Literal[1, "foo", b"bar", True, E.A] | None
     # Could also be `E`
     reveal_type(an_enum1)  # revealed: Unknown
-    # TODO should be `E`
-    reveal_type(an_enum2)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(an_enum2)  # revealed: E
     # Could also be `bool`
     reveal_type(bool1)  # revealed: Unknown
-    # TODO should be `bool`
-    reveal_type(bool2)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[1, "foo", E.A]`
-    reveal_type(multiple)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(bool2)  # revealed: bool
+    reveal_type(multiple)  # revealed: Literal[1, "foo", E.A]
 ```
 
 ### Implicit type alias
@@ -246,28 +236,18 @@ def _(
     bool2: Literal[Bool2],
     multiple: Literal[SingleInt, SingleStr, SingleEnum],
 ):
-    # TODO should be `Literal[1]`
-    reveal_type(single_int)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal["foo"]`
-    reveal_type(single_str)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[b"bar"]`
-    reveal_type(single_bytes)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[True]`
-    reveal_type(single_bool)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `None`
-    reveal_type(single_none)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[E.A]`
-    reveal_type(single_enum)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[1, "foo", b"bar", True, E.A] | None`
-    reveal_type(union_literals)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(single_int)  # revealed: Literal[1]
+    reveal_type(single_str)  # revealed: Literal["foo"]
+    reveal_type(single_bytes)  # revealed: Literal[b"bar"]
+    reveal_type(single_bool)  # revealed: Literal[True]
+    reveal_type(single_none)  # revealed: None
+    reveal_type(single_enum)  # revealed: Literal[E.A]
+    reveal_type(union_literals)  # revealed: Literal[1, "foo", b"bar", True, E.A] | None
     reveal_type(an_enum1)  # revealed: Unknown
-    # TODO should be `E`
-    reveal_type(an_enum2)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(an_enum2)  # revealed: E
     reveal_type(bool1)  # revealed: Unknown
-    # TODO should be `bool`
-    reveal_type(bool2)  # revealed: @Todo(Inference of subscript on special form)
-    # TODO should be `Literal[1, "foo", E.A]`
-    reveal_type(multiple)  # revealed: @Todo(Inference of subscript on special form)
+    reveal_type(bool2)  # revealed: bool
+    reveal_type(multiple)  # revealed: Literal[1, "foo", E.A]
 ```
 
 ## Shortening unions of literals
diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
index b557a730f7..504921c317 100644
--- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
+++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md
@@ -33,7 +33,7 @@ g(None)
 We also support unions in type aliases:
 
 ```py
-from typing_extensions import Any, Never
+from typing_extensions import Any, Never, Literal
 from ty_extensions import Unknown
 
 IntOrStr = int | str
@@ -54,6 +54,8 @@ NeverOrAny = Never | Any
 AnyOrNever = Any | Never
 UnknownOrInt = Unknown | int
 IntOrUnknown = int | Unknown
+StrOrZero = str | Literal[0]
+ZeroOrStr = Literal[0] | str
 
 reveal_type(IntOrStr)  # revealed: types.UnionType
 reveal_type(IntOrStrOrBytes1)  # revealed: types.UnionType
@@ -73,6 +75,8 @@ reveal_type(NeverOrAny)  # revealed: types.UnionType
 reveal_type(AnyOrNever)  # revealed: types.UnionType
 reveal_type(UnknownOrInt)  # revealed: types.UnionType
 reveal_type(IntOrUnknown)  # revealed: types.UnionType
+reveal_type(StrOrZero)  # revealed: types.UnionType
+reveal_type(ZeroOrStr)  # revealed: types.UnionType
 
 def _(
     int_or_str: IntOrStr,
@@ -93,6 +97,8 @@ def _(
     any_or_never: AnyOrNever,
     unknown_or_int: UnknownOrInt,
     int_or_unknown: IntOrUnknown,
+    str_or_zero: StrOrZero,
+    zero_or_str: ZeroOrStr,
 ):
     reveal_type(int_or_str)  # revealed: int | str
     reveal_type(int_or_str_or_bytes1)  # revealed: int | str | bytes
@@ -112,6 +118,8 @@ def _(
     reveal_type(any_or_never)  # revealed: Any
     reveal_type(unknown_or_int)  # revealed: Unknown | int
     reveal_type(int_or_unknown)  # revealed: int | Unknown
+    reveal_type(str_or_zero)  # revealed: str | Literal[0]
+    reveal_type(zero_or_str)  # revealed: Literal[0] | str
 ```
 
 If a type is unioned with itself in a value expression, the result is just that type. No
@@ -255,6 +263,68 @@ def _(list_or_tuple: ListOrTuple[int]):
     reveal_type(list_or_tuple)  # revealed: @Todo(Generic specialization of types.UnionType)
 ```
 
+## `Literal`s
+
+We also support `typing.Literal` in implicit type aliases.
+
+```py
+from typing import Literal
+from enum import Enum
+
+IntLiteral1 = Literal[26]
+IntLiteral2 = Literal[0x1A]
+IntLiterals = Literal[-1, 0, 1]
+NestedLiteral = Literal[Literal[1]]
+StringLiteral = Literal["a"]
+BytesLiteral = Literal[b"b"]
+BoolLiteral = Literal[True]
+MixedLiterals = Literal[1, "a", True, None]
+
+class Color(Enum):
+    RED = 0
+    GREEN = 1
+    BLUE = 2
+
+EnumLiteral = Literal[Color.RED]
+
+def _(
+    int_literal1: IntLiteral1,
+    int_literal2: IntLiteral2,
+    int_literals: IntLiterals,
+    nested_literal: NestedLiteral,
+    string_literal: StringLiteral,
+    bytes_literal: BytesLiteral,
+    bool_literal: BoolLiteral,
+    mixed_literals: MixedLiterals,
+    enum_literal: EnumLiteral,
+):
+    reveal_type(int_literal1)  # revealed: Literal[26]
+    reveal_type(int_literal2)  # revealed: Literal[26]
+    reveal_type(int_literals)  # revealed: Literal[-1, 0, 1]
+    reveal_type(nested_literal)  # revealed: Literal[1]
+    reveal_type(string_literal)  # revealed: Literal["a"]
+    reveal_type(bytes_literal)  # revealed: Literal[b"b"]
+    reveal_type(bool_literal)  # revealed: Literal[True]
+    reveal_type(mixed_literals)  # revealed: Literal[1, "a", True] | None
+    reveal_type(enum_literal)  # revealed: Literal[Color.RED]
+```
+
+We reject invalid uses:
+
+```py
+# error: [invalid-type-form] "Type arguments for `Literal` must be `None`, a literal value (int, bool, str, or bytes), or an enum member"
+LiteralInt = Literal[int]
+
+reveal_type(LiteralInt)  # revealed: Unknown
+
+def _(weird: LiteralInt):
+    reveal_type(weird)  # revealed: Unknown
+
+# error: [invalid-type-form] "`Literal[26]` is not a generic class"
+def _(weird: IntLiteral1[int]):
+    reveal_type(weird)  # revealed: Unknown
+```
+
 ## Stringified annotations?
 
 From the [typing spec on type aliases](https://typing.python.org/en/latest/spec/aliases.html):
diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs
index bc75895833..6c9cdefa20 100644
--- a/crates/ty_python_semantic/src/types.rs
+++ b/crates/ty_python_semantic/src/types.rs
@@ -6451,9 +6451,9 @@ impl<'db> Type<'db> {
                     invalid_expressions: smallvec::smallvec_inline![InvalidTypeExpression::Generic],
                     fallback_type: Type::unknown(),
                 }),
-                KnownInstanceType::UnionType(union_type) => {
+                KnownInstanceType::UnionType(list) => {
                     let mut builder = UnionBuilder::new(db);
-                    for element in union_type.elements(db) {
+                    for element in list.elements(db) {
                         builder = builder.add(element.in_type_expression(
                             db,
                             scope_id,
@@ -6462,6 +6462,7 @@ impl<'db> Type<'db> {
                     }
                     Ok(builder.build())
                 }
+                KnownInstanceType::Literal(list) => Ok(list.to_union(db)),
             },
 
             Type::SpecialForm(special_form) => match special_form {
@@ -7675,7 +7676,10 @@ pub enum KnownInstanceType<'db> {
 
     /// A single instance of `types.UnionType`, which stores the left- and
     /// right-hand sides of a PEP 604 union.
-    UnionType(UnionTypeInstance<'db>),
+    UnionType(TypeList<'db>),
+
+    /// A single instance of `typing.Literal`
+    Literal(TypeList<'db>),
 }
 
 fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
@@ -7702,9 +7706,9 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>(
                 visitor.visit_type(db, default_ty);
             }
         }
-        KnownInstanceType::UnionType(union_type) => {
-            for element in union_type.elements(db) {
-                visitor.visit_type(db, element);
+        KnownInstanceType::UnionType(list) | KnownInstanceType::Literal(list) => {
+            for element in list.elements(db) {
+                visitor.visit_type(db, *element);
             }
         }
     }
@@ -7743,7 +7747,8 @@ impl<'db> KnownInstanceType<'db> {
                 // Nothing to normalize
                 Self::ConstraintSet(set)
             }
-            Self::UnionType(union_type) => Self::UnionType(union_type.normalized_impl(db, visitor)),
+            Self::UnionType(list) => Self::UnionType(list.normalized_impl(db, visitor)),
+            Self::Literal(list) => Self::Literal(list.normalized_impl(db, visitor)),
         }
     }
 
@@ -7762,6 +7767,7 @@ impl<'db> KnownInstanceType<'db> {
             Self::Field(_) => KnownClass::Field,
             Self::ConstraintSet(_) => KnownClass::ConstraintSet,
             Self::UnionType(_) => KnownClass::UnionType,
+            Self::Literal(_) => KnownClass::GenericAlias,
         }
     }
 
@@ -7842,6 +7848,7 @@ impl<'db> KnownInstanceType<'db> {
                         )
                     }
                     KnownInstanceType::UnionType(_) => f.write_str("types.UnionType"),
+                    KnownInstanceType::Literal(_) => f.write_str("typing.Literal"),
                 }
             }
         }
@@ -8972,32 +8979,46 @@ impl<'db> TypeVarBoundOrConstraints<'db> {
     }
 }
 
-/// An instance of `types.UnionType`.
+/// A salsa-interned list of types.
 ///
 /// # Ordering
 /// Ordering is based on the context's salsa-assigned id and not on its values.
 /// The id may change between runs, or when the context was garbage collected and recreated.
 #[salsa::interned(debug)]
 #[derive(PartialOrd, Ord)]
-pub struct UnionTypeInstance<'db> {
-    left: Type<'db>,
-    right: Type<'db>,
+pub struct TypeList<'db> {
+    #[returns(deref)]
+    elements: Box<[Type<'db>]>,
 }
 
-impl get_size2::GetSize for UnionTypeInstance<'_> {}
+impl get_size2::GetSize for TypeList<'_> {}
 
-impl<'db> UnionTypeInstance<'db> {
-    pub(crate) fn elements(self, db: &'db dyn Db) -> [Type<'db>; 2] {
-        [self.left(db), self.right(db)]
+impl<'db> TypeList<'db> {
+    pub(crate) fn from_elements(
+        db: &'db dyn Db,
+        elements: impl IntoIterator>,
+    ) -> TypeList<'db> {
+        TypeList::new(db, elements.into_iter().collect::>())
+    }
+
+    pub(crate) fn singleton(db: &'db dyn Db, element: Type<'db>) -> TypeList<'db> {
+        TypeList::from_elements(db, [element])
     }
 
     pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
-        UnionTypeInstance::new(
+        TypeList::new(
             db,
-            self.left(db).normalized_impl(db, visitor),
-            self.right(db).normalized_impl(db, visitor),
+            self.elements(db)
+                .iter()
+                .map(|ty| ty.normalized_impl(db, visitor))
+                .collect::>(),
         )
     }
+
+    /// Turn this list of types `[T1, T2, ...]` into a union type `T1 | T2 | ...`.
+    pub(crate) fn to_union(self, db: &'db dyn Db) -> Type<'db> {
+        UnionType::from_elements(db, self.elements(db))
+    }
 }
 
 /// Error returned if a type is not awaitable.
diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs
index 071d4b92b7..caddc88567 100644
--- a/crates/ty_python_semantic/src/types/class_base.rs
+++ b/crates/ty_python_semantic/src/types/class_base.rs
@@ -168,7 +168,8 @@ impl<'db> ClassBase<'db> {
                 | KnownInstanceType::Deprecated(_)
                 | KnownInstanceType::Field(_)
                 | KnownInstanceType::ConstraintSet(_)
-                | KnownInstanceType::UnionType(_) => None,
+                | KnownInstanceType::UnionType(_)
+                | KnownInstanceType::Literal(_) => None,
             },
 
             Type::SpecialForm(special_form) => match special_form {
diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs
index 8dc2b244cf..3b1142b89b 100644
--- a/crates/ty_python_semantic/src/types/infer/builder.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder.rs
@@ -103,10 +103,10 @@ use crate::types::{
     DynamicType, IntersectionBuilder, IntersectionType, KnownClass, KnownInstanceType,
     MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, ParameterForm,
     Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, Type,
-    TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers,
+    TypeAliasType, TypeAndQualifiers, TypeContext, TypeList, TypeQualifiers,
     TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity,
     TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType,
-    UnionTypeInstance, binding_type, todo_type,
+    binding_type, todo_type,
 };
 use crate::types::{ClassBase, add_inferred_python_version_hint_to_diagnostic};
 use crate::unpack::{EvaluationMode, UnpackPosition};
@@ -8754,19 +8754,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 | Type::SubclassOf(..)
                 | Type::GenericAlias(..)
                 | Type::SpecialForm(_)
-                | Type::KnownInstance(KnownInstanceType::UnionType(_)),
+                | Type::KnownInstance(
+                    KnownInstanceType::UnionType(_) | KnownInstanceType::Literal(_),
+                ),
                 Type::ClassLiteral(..)
                 | Type::SubclassOf(..)
                 | Type::GenericAlias(..)
                 | Type::SpecialForm(_)
-                | Type::KnownInstance(KnownInstanceType::UnionType(_)),
+                | Type::KnownInstance(
+                    KnownInstanceType::UnionType(_) | KnownInstanceType::Literal(_),
+                ),
                 ast::Operator::BitOr,
             ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => {
                 if left_ty.is_equivalent_to(self.db(), right_ty) {
                     Some(left_ty)
                 } else {
                     Some(Type::KnownInstance(KnownInstanceType::UnionType(
-                        UnionTypeInstance::new(self.db(), left_ty, right_ty),
+                        TypeList::from_elements(self.db(), [left_ty, right_ty]),
                     )))
                 }
             }
@@ -8791,7 +8795,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 && instance.has_known_class(self.db(), KnownClass::NoneType) =>
             {
                 Some(Type::KnownInstance(KnownInstanceType::UnionType(
-                    UnionTypeInstance::new(self.db(), left_ty, right_ty),
+                    TypeList::from_elements(self.db(), [left_ty, right_ty]),
                 )))
             }
 
@@ -9924,6 +9928,29 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> {
                 );
             }
         }
+        if value_ty == Type::SpecialForm(SpecialFormType::Literal) {
+            match self.infer_literal_parameter_type(slice) {
+                Ok(result) => {
+                    return Type::KnownInstance(KnownInstanceType::Literal(TypeList::singleton(
+                        self.db(),
+                        result,
+                    )));
+                }
+                Err(nodes) => {
+                    for node in nodes {
+                        let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, node)
+                        else {
+                            continue;
+                        };
+                        builder.into_diagnostic(
+                            "Type arguments for `Literal` must be `None`, \
+                            a literal value (int, bool, str, or bytes), or an enum member",
+                        );
+                    }
+                    return Type::unknown();
+                }
+            }
+        }
 
         let slice_ty = self.infer_expression(slice, TypeContext::default());
         let result_ty = self.infer_subscript_expression_types(subscript, value_ty, slice_ty, *ctx);
diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
index c6b2bbbef0..d091487ce7 100644
--- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
+++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs
@@ -814,6 +814,16 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
                     self.infer_type_expression(slice);
                     todo_type!("Generic specialization of types.UnionType")
                 }
+                KnownInstanceType::Literal(ty) => {
+                    self.infer_type_expression(slice);
+                    if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) {
+                        builder.into_diagnostic(format_args!(
+                            "`{ty}` is not a generic class",
+                            ty = ty.to_union(self.db()).display(self.db())
+                        ));
+                    }
+                    Type::unknown()
+                }
             },
             Type::Dynamic(DynamicType::Todo(_)) => {
                 self.infer_type_expression(slice);
@@ -1367,7 +1377,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
         }
     }
 
-    fn infer_literal_parameter_type<'param>(
+    pub(crate) fn infer_literal_parameter_type<'param>(
         &mut self,
         parameters: &'param ast::Expr,
     ) -> Result, Vec<&'param ast::Expr>> {
@@ -1435,7 +1445,6 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
             // enum members and aliases to literal types
             ast::Expr::Name(_) | ast::Expr::Attribute(_) => {
                 let subscript_ty = self.infer_expression(parameters, TypeContext::default());
-                // TODO handle implicit type aliases also
                 match subscript_ty {
                     // type aliases to literal types
                     Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) => {
@@ -1444,6 +1453,9 @@ impl<'db> TypeInferenceBuilder<'db, '_> {
                             return Ok(value_ty);
                         }
                     }
+                    Type::KnownInstance(KnownInstanceType::Literal(list)) => {
+                        return Ok(list.to_union(self.db()));
+                    }
                     // `Literal[SomeEnum.Member]`
                     Type::EnumLiteral(_) => {
                         return Ok(subscript_ty);

From 276f1d0d88d7815f70fabb712af44bb4de85d9a7 Mon Sep 17 00:00:00 2001
From: Brent Westbrook <36778786+ntBre@users.noreply.github.com>
Date: Fri, 7 Nov 2025 12:47:21 -0500
Subject: [PATCH 112/180] Remove duplicate preview tests for `FURB101` and
 `FURB103` (#21303)

Summary
--

These rules are themselves in preview, so we don't need the additional
preview checks on the fixes or the separate preview tests. This has
confused me in a couple of reviews of changes to the fixes.

Test Plan
--

Existing tests, with the fixes previously only shown in the preview
tests now in the "non-preview" tests.
---
 crates/ruff_linter/src/preview.rs             |  10 -
 crates/ruff_linter/src/rules/refurb/mod.rs    |  20 --
 .../src/rules/refurb/rules/read_whole_file.rs |   4 -
 .../rules/refurb/rules/write_whole_file.rs    |   4 -
 ...es__refurb__tests__FURB101_FURB101.py.snap |  97 +++++-
 ...es__refurb__tests__FURB103_FURB103.py.snap | 154 +++++++++-
 ...rb__tests__preview_FURB101_FURB101.py.snap | 191 ------------
 ...rb__tests__preview_FURB103_FURB103.py.snap | 281 ------------------
 ...rb__tests__write_whole_file_python_39.snap | 108 ++++++-
 9 files changed, 336 insertions(+), 533 deletions(-)
 delete mode 100644 crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap
 delete mode 100644 crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap

diff --git a/crates/ruff_linter/src/preview.rs b/crates/ruff_linter/src/preview.rs
index d84035c762..836ba4feea 100644
--- a/crates/ruff_linter/src/preview.rs
+++ b/crates/ruff_linter/src/preview.rs
@@ -261,16 +261,6 @@ pub(crate) const fn is_b006_unsafe_fix_preserve_assignment_expr_enabled(
     settings.preview.is_enabled()
 }
 
-// https://github.com/astral-sh/ruff/pull/20520
-pub(crate) const fn is_fix_read_whole_file_enabled(settings: &LinterSettings) -> bool {
-    settings.preview.is_enabled()
-}
-
-// https://github.com/astral-sh/ruff/pull/20520
-pub(crate) const fn is_fix_write_whole_file_enabled(settings: &LinterSettings) -> bool {
-    settings.preview.is_enabled()
-}
-
 pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSettings) -> bool {
     settings.preview.is_enabled()
 }
diff --git a/crates/ruff_linter/src/rules/refurb/mod.rs b/crates/ruff_linter/src/rules/refurb/mod.rs
index 97d9fae7a6..9187853141 100644
--- a/crates/ruff_linter/src/rules/refurb/mod.rs
+++ b/crates/ruff_linter/src/rules/refurb/mod.rs
@@ -12,7 +12,6 @@ mod tests {
     use test_case::test_case;
 
     use crate::registry::Rule;
-    use crate::settings::types::PreviewMode;
     use crate::test::test_path;
     use crate::{assert_diagnostics, settings};
 
@@ -63,25 +62,6 @@ mod tests {
         Ok(())
     }
 
-    #[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))]
-    #[test_case(Rule::WriteWholeFile, Path::new("FURB103.py"))]
-    fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
-        let snapshot = format!(
-            "preview_{}_{}",
-            rule_code.noqa_code(),
-            path.to_string_lossy()
-        );
-        let diagnostics = test_path(
-            Path::new("refurb").join(path).as_path(),
-            &settings::LinterSettings {
-                preview: PreviewMode::Enabled,
-                ..settings::LinterSettings::for_rule(rule_code)
-            },
-        )?;
-        assert_diagnostics!(snapshot, diagnostics);
-        Ok(())
-    }
-
     #[test]
     fn write_whole_file_python_39() -> Result<()> {
         let diagnostics = test_path(
diff --git a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs
index b64f91829a..279ecb66aa 100644
--- a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs
+++ b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs
@@ -125,10 +125,6 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> {
                     open.item.range(),
                 );
 
-                if !crate::preview::is_fix_read_whole_file_enabled(self.checker.settings()) {
-                    return;
-                }
-
                 let target = match self.with_stmt.body.first() {
                     Some(Stmt::Assign(assign))
                         if assign.value.range().contains_range(expr.range()) =>
diff --git a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs
index da99733efd..f25faa3eb2 100644
--- a/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs
+++ b/crates/ruff_linter/src/rules/refurb/rules/write_whole_file.rs
@@ -141,10 +141,6 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> {
                         open.item.range(),
                     );
 
-                    if !crate::preview::is_fix_write_whole_file_enabled(self.checker.settings()) {
-                        return;
-                    }
-
                     if let Some(fix) =
                         generate_fix(self.checker, &open, self.with_stmt, &suggestion)
                     {
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap
index 3f851c3f12..4131499c0c 100644
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap
+++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap
@@ -1,7 +1,7 @@
 ---
 source: crates/ruff_linter/src/rules/refurb/mod.rs
 ---
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
   --> FURB101.py:12:6
    |
 11 | # FURB101
@@ -10,8 +10,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
 13 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_text()`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+10 | # Errors.
+11 | 
+12 | # FURB101
+   - with open("file.txt") as f:
+   -     x = f.read()
+13 + x = pathlib.Path("file.txt").read_text()
+14 | 
+15 | # FURB101
+16 | with open("file.txt", "rb") as f:
 
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
   --> FURB101.py:16:6
    |
 15 | # FURB101
@@ -20,8 +34,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
 17 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_bytes()`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+14 |     x = f.read()
+15 | 
+16 | # FURB101
+   - with open("file.txt", "rb") as f:
+   -     x = f.read()
+17 + x = pathlib.Path("file.txt").read_bytes()
+18 | 
+19 | # FURB101
+20 | with open("file.txt", mode="rb") as f:
 
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
   --> FURB101.py:20:6
    |
 19 | # FURB101
@@ -30,8 +58,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
 21 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_bytes()`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+18 |     x = f.read()
+19 | 
+20 | # FURB101
+   - with open("file.txt", mode="rb") as f:
+   -     x = f.read()
+21 + x = pathlib.Path("file.txt").read_bytes()
+22 | 
+23 | # FURB101
+24 | with open("file.txt", encoding="utf8") as f:
 
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
   --> FURB101.py:24:6
    |
 23 | # FURB101
@@ -40,8 +82,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
 25 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_text(encoding="utf8")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+22 |     x = f.read()
+23 | 
+24 | # FURB101
+   - with open("file.txt", encoding="utf8") as f:
+   -     x = f.read()
+25 + x = pathlib.Path("file.txt").read_text(encoding="utf8")
+26 | 
+27 | # FURB101
+28 | with open("file.txt", errors="ignore") as f:
 
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
   --> FURB101.py:28:6
    |
 27 | # FURB101
@@ -50,8 +106,22 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(erro
 29 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_text(errors="ignore")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+26 |     x = f.read()
+27 | 
+28 | # FURB101
+   - with open("file.txt", errors="ignore") as f:
+   -     x = f.read()
+29 + x = pathlib.Path("file.txt").read_text(errors="ignore")
+30 | 
+31 | # FURB101
+32 | with open("file.txt", mode="r") as f:  # noqa: FURB120
 
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
+FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
   --> FURB101.py:32:6
    |
 31 | # FURB101
@@ -60,6 +130,21 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
 33 |     x = f.read()
    |
 help: Replace with `Path("file.txt").read_text()`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+30 |     x = f.read()
+31 | 
+32 | # FURB101
+   - with open("file.txt", mode="r") as f:  # noqa: FURB120
+   -     x = f.read()
+33 + x = pathlib.Path("file.txt").read_text()
+34 | 
+35 | # FURB101
+36 | with open(foo(), "rb") as f:
+note: This is an unsafe fix and may change runtime behavior
 
 FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
   --> FURB101.py:36:6
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap
index 74f3749953..8148035435 100644
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap
+++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB103_FURB103.py.snap
@@ -1,7 +1,7 @@
 ---
 source: crates/ruff_linter/src/rules/refurb/mod.rs
 ---
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
   --> FURB103.py:12:6
    |
 11 | # FURB103
@@ -10,8 +10,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("t
 13 |     f.write("test")
    |
 help: Replace with `Path("file.txt").write_text("test")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+10 | # Errors.
+11 | 
+12 | # FURB103
+   - with open("file.txt", "w") as f:
+   -     f.write("test")
+13 + pathlib.Path("file.txt").write_text("test")
+14 | 
+15 | # FURB103
+16 | with open("file.txt", "wb") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
   --> FURB103.py:16:6
    |
 15 | # FURB103
@@ -20,8 +34,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(f
 17 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_bytes(foobar)`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+14 |     f.write("test")
+15 | 
+16 | # FURB103
+   - with open("file.txt", "wb") as f:
+   -     f.write(foobar)
+17 + pathlib.Path("file.txt").write_bytes(foobar)
+18 | 
+19 | # FURB103
+20 | with open("file.txt", mode="wb") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
   --> FURB103.py:20:6
    |
 19 | # FURB103
@@ -30,8 +58,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b
 21 |     f.write(b"abc")
    |
 help: Replace with `Path("file.txt").write_bytes(b"abc")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+18 |     f.write(foobar)
+19 | 
+20 | # FURB103
+   - with open("file.txt", mode="wb") as f:
+   -     f.write(b"abc")
+21 + pathlib.Path("file.txt").write_bytes(b"abc")
+22 | 
+23 | # FURB103
+24 | with open("file.txt", "w", encoding="utf8") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
   --> FURB103.py:24:6
    |
 23 | # FURB103
@@ -40,8 +82,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 25 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+22 |     f.write(b"abc")
+23 | 
+24 | # FURB103
+   - with open("file.txt", "w", encoding="utf8") as f:
+   -     f.write(foobar)
+25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8")
+26 | 
+27 | # FURB103
+28 | with open("file.txt", "w", errors="ignore") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
   --> FURB103.py:28:6
    |
 27 | # FURB103
@@ -50,8 +106,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 29 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+26 |     f.write(foobar)
+27 | 
+28 | # FURB103
+   - with open("file.txt", "w", errors="ignore") as f:
+   -     f.write(foobar)
+29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore")
+30 | 
+31 | # FURB103
+32 | with open("file.txt", mode="w") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
   --> FURB103.py:32:6
    |
 31 | # FURB103
@@ -60,6 +130,20 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 33 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar)`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+30 |     f.write(foobar)
+31 | 
+32 | # FURB103
+   - with open("file.txt", mode="w") as f:
+   -     f.write(foobar)
+33 + pathlib.Path("file.txt").write_text(foobar)
+34 | 
+35 | # FURB103
+36 | with open(foo(), "wb") as f:
 
 FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
   --> FURB103.py:36:6
@@ -105,7 +189,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
    |
 help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
   --> FURB103.py:58:6
    |
 57 | # FURB103
@@ -114,8 +198,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 59 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+56 | 
+57 | 
+58 | # FURB103
+   - with open("file.txt", "w", newline="\r\n") as f:
+   -     f.write(foobar)
+59 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
+60 | 
+61 | 
+62 | import builtins
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
   --> FURB103.py:66:6
    |
 65 | # FURB103
@@ -124,8 +222,21 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 67 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
+60 | 
+61 | 
+62 | import builtins
+63 + import pathlib
+64 | 
+65 | 
+66 | # FURB103
+   - with builtins.open("file.txt", "w", newline="\r\n") as f:
+   -     f.write(foobar)
+67 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
+68 | 
+69 | 
+70 | from builtins import open as o
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
   --> FURB103.py:74:6
    |
 73 | # FURB103
@@ -134,8 +245,21 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 75 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
+68 | 
+69 | 
+70 | from builtins import open as o
+71 + import pathlib
+72 | 
+73 | 
+74 | # FURB103
+   - with o("file.txt", "w", newline="\r\n") as f:
+   -     f.write(foobar)
+75 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
+76 | 
+77 | # Non-errors.
+78 | 
 
-FURB103 `open` and `write` should be replaced by `Path("test.json")....`
+FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
    --> FURB103.py:154:6
     |
 152 | data = {"price": 100}
@@ -145,3 +269,13 @@ FURB103 `open` and `write` should be replaced by `Path("test.json")....`
 155 |     f.write(json.dumps(data, indent=4).encode("utf-8"))
     |
 help: Replace with `Path("test.json")....`
+148 | 
+149 | # See: https://github.com/astral-sh/ruff/issues/20785
+150 | import json
+151 + import pathlib
+152 | 
+153 | data = {"price": 100}
+154 | 
+    - with open("test.json", "wb") as f:
+    -     f.write(json.dumps(data, indent=4).encode("utf-8"))
+155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8"))
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap
deleted file mode 100644
index 4131499c0c..0000000000
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB101_FURB101.py.snap
+++ /dev/null
@@ -1,191 +0,0 @@
----
-source: crates/ruff_linter/src/rules/refurb/mod.rs
----
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
-  --> FURB101.py:12:6
-   |
-11 | # FURB101
-12 | with open("file.txt") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^
-13 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_text()`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-10 | # Errors.
-11 | 
-12 | # FURB101
-   - with open("file.txt") as f:
-   -     x = f.read()
-13 + x = pathlib.Path("file.txt").read_text()
-14 | 
-15 | # FURB101
-16 | with open("file.txt", "rb") as f:
-
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
-  --> FURB101.py:16:6
-   |
-15 | # FURB101
-16 | with open("file.txt", "rb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-17 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_bytes()`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-14 |     x = f.read()
-15 | 
-16 | # FURB101
-   - with open("file.txt", "rb") as f:
-   -     x = f.read()
-17 + x = pathlib.Path("file.txt").read_bytes()
-18 | 
-19 | # FURB101
-20 | with open("file.txt", mode="rb") as f:
-
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
-  --> FURB101.py:20:6
-   |
-19 | # FURB101
-20 | with open("file.txt", mode="rb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-21 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_bytes()`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-18 |     x = f.read()
-19 | 
-20 | # FURB101
-   - with open("file.txt", mode="rb") as f:
-   -     x = f.read()
-21 + x = pathlib.Path("file.txt").read_bytes()
-22 | 
-23 | # FURB101
-24 | with open("file.txt", encoding="utf8") as f:
-
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
-  --> FURB101.py:24:6
-   |
-23 | # FURB101
-24 | with open("file.txt", encoding="utf8") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-25 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_text(encoding="utf8")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-22 |     x = f.read()
-23 | 
-24 | # FURB101
-   - with open("file.txt", encoding="utf8") as f:
-   -     x = f.read()
-25 + x = pathlib.Path("file.txt").read_text(encoding="utf8")
-26 | 
-27 | # FURB101
-28 | with open("file.txt", errors="ignore") as f:
-
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
-  --> FURB101.py:28:6
-   |
-27 | # FURB101
-28 | with open("file.txt", errors="ignore") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-29 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_text(errors="ignore")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-26 |     x = f.read()
-27 | 
-28 | # FURB101
-   - with open("file.txt", errors="ignore") as f:
-   -     x = f.read()
-29 + x = pathlib.Path("file.txt").read_text(errors="ignore")
-30 | 
-31 | # FURB101
-32 | with open("file.txt", mode="r") as f:  # noqa: FURB120
-
-FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
-  --> FURB101.py:32:6
-   |
-31 | # FURB101
-32 | with open("file.txt", mode="r") as f:  # noqa: FURB120
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-33 |     x = f.read()
-   |
-help: Replace with `Path("file.txt").read_text()`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-30 |     x = f.read()
-31 | 
-32 | # FURB101
-   - with open("file.txt", mode="r") as f:  # noqa: FURB120
-   -     x = f.read()
-33 + x = pathlib.Path("file.txt").read_text()
-34 | 
-35 | # FURB101
-36 | with open(foo(), "rb") as f:
-note: This is an unsafe fix and may change runtime behavior
-
-FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
-  --> FURB101.py:36:6
-   |
-35 | # FURB101
-36 | with open(foo(), "rb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^
-37 |     # The body of `with` is non-trivial, but the recommendation holds.
-38 |     bar("pre")
-   |
-help: Replace with `Path(foo()).read_bytes()`
-
-FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
-  --> FURB101.py:44:6
-   |
-43 | # FURB101
-44 | with open("a.txt") as a, open("b.txt", "rb") as b:
-   |      ^^^^^^^^^^^^^^^^^^
-45 |     x = a.read()
-46 |     y = b.read()
-   |
-help: Replace with `Path("a.txt").read_text()`
-
-FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
-  --> FURB101.py:44:26
-   |
-43 | # FURB101
-44 | with open("a.txt") as a, open("b.txt", "rb") as b:
-   |                          ^^^^^^^^^^^^^^^^^^^^^^^^
-45 |     x = a.read()
-46 |     y = b.read()
-   |
-help: Replace with `Path("b.txt").read_bytes()`
-
-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
-  --> FURB101.py:49:18
-   |
-48 | # FURB101
-49 | with foo() as a, open("file.txt") as b, foo() as c:
-   |                  ^^^^^^^^^^^^^^^^^^^^^
-50 |     # We have other things in here, multiple with items, but
-51 |     # the user reads the whole file and that bit they can replace.
-   |
-help: Replace with `Path("file.txt").read_text()`
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap
deleted file mode 100644
index 8148035435..0000000000
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__preview_FURB103_FURB103.py.snap
+++ /dev/null
@@ -1,281 +0,0 @@
----
-source: crates/ruff_linter/src/rules/refurb/mod.rs
----
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
-  --> FURB103.py:12:6
-   |
-11 | # FURB103
-12 | with open("file.txt", "w") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^
-13 |     f.write("test")
-   |
-help: Replace with `Path("file.txt").write_text("test")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-10 | # Errors.
-11 | 
-12 | # FURB103
-   - with open("file.txt", "w") as f:
-   -     f.write("test")
-13 + pathlib.Path("file.txt").write_text("test")
-14 | 
-15 | # FURB103
-16 | with open("file.txt", "wb") as f:
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
-  --> FURB103.py:16:6
-   |
-15 | # FURB103
-16 | with open("file.txt", "wb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-17 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_bytes(foobar)`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-14 |     f.write("test")
-15 | 
-16 | # FURB103
-   - with open("file.txt", "wb") as f:
-   -     f.write(foobar)
-17 + pathlib.Path("file.txt").write_bytes(foobar)
-18 | 
-19 | # FURB103
-20 | with open("file.txt", mode="wb") as f:
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
-  --> FURB103.py:20:6
-   |
-19 | # FURB103
-20 | with open("file.txt", mode="wb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-21 |     f.write(b"abc")
-   |
-help: Replace with `Path("file.txt").write_bytes(b"abc")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-18 |     f.write(foobar)
-19 | 
-20 | # FURB103
-   - with open("file.txt", mode="wb") as f:
-   -     f.write(b"abc")
-21 + pathlib.Path("file.txt").write_bytes(b"abc")
-22 | 
-23 | # FURB103
-24 | with open("file.txt", "w", encoding="utf8") as f:
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
-  --> FURB103.py:24:6
-   |
-23 | # FURB103
-24 | with open("file.txt", "w", encoding="utf8") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-25 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-22 |     f.write(b"abc")
-23 | 
-24 | # FURB103
-   - with open("file.txt", "w", encoding="utf8") as f:
-   -     f.write(foobar)
-25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8")
-26 | 
-27 | # FURB103
-28 | with open("file.txt", "w", errors="ignore") as f:
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
-  --> FURB103.py:28:6
-   |
-27 | # FURB103
-28 | with open("file.txt", "w", errors="ignore") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-29 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-26 |     f.write(foobar)
-27 | 
-28 | # FURB103
-   - with open("file.txt", "w", errors="ignore") as f:
-   -     f.write(foobar)
-29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore")
-30 | 
-31 | # FURB103
-32 | with open("file.txt", mode="w") as f:
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
-  --> FURB103.py:32:6
-   |
-31 | # FURB103
-32 | with open("file.txt", mode="w") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-33 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar)`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-30 |     f.write(foobar)
-31 | 
-32 | # FURB103
-   - with open("file.txt", mode="w") as f:
-   -     f.write(foobar)
-33 + pathlib.Path("file.txt").write_text(foobar)
-34 | 
-35 | # FURB103
-36 | with open(foo(), "wb") as f:
-
-FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
-  --> FURB103.py:36:6
-   |
-35 | # FURB103
-36 | with open(foo(), "wb") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^
-37 |     # The body of `with` is non-trivial, but the recommendation holds.
-38 |     bar("pre")
-   |
-help: Replace with `Path(foo()).write_bytes(bar())`
-
-FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
-  --> FURB103.py:44:6
-   |
-43 | # FURB103
-44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^
-45 |     a.write(x)
-46 |     b.write(y)
-   |
-help: Replace with `Path("a.txt").write_text(x)`
-
-FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
-  --> FURB103.py:44:31
-   |
-43 | # FURB103
-44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
-   |                               ^^^^^^^^^^^^^^^^^^^^^^^^
-45 |     a.write(x)
-46 |     b.write(y)
-   |
-help: Replace with `Path("b.txt").write_bytes(y)`
-
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
-  --> FURB103.py:49:18
-   |
-48 | # FURB103
-49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
-   |                  ^^^^^^^^^^^^^^^^^^^^^^^^^^
-50 |     # We have other things in here, multiple with items, but the user
-51 |     # writes a single time to file and that bit they can replace.
-   |
-help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
-  --> FURB103.py:58:6
-   |
-57 | # FURB103
-58 | with open("file.txt", "w", newline="\r\n") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-59 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
-1  + import pathlib
-2  | def foo():
-3  |     ...
-4  | 
---------------------------------------------------------------------------------
-56 | 
-57 | 
-58 | # FURB103
-   - with open("file.txt", "w", newline="\r\n") as f:
-   -     f.write(foobar)
-59 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
-60 | 
-61 | 
-62 | import builtins
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
-  --> FURB103.py:66:6
-   |
-65 | # FURB103
-66 | with builtins.open("file.txt", "w", newline="\r\n") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-67 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
-60 | 
-61 | 
-62 | import builtins
-63 + import pathlib
-64 | 
-65 | 
-66 | # FURB103
-   - with builtins.open("file.txt", "w", newline="\r\n") as f:
-   -     f.write(foobar)
-67 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
-68 | 
-69 | 
-70 | from builtins import open as o
-
-FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
-  --> FURB103.py:74:6
-   |
-73 | # FURB103
-74 | with o("file.txt", "w", newline="\r\n") as f:
-   |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-75 |     f.write(foobar)
-   |
-help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
-68 | 
-69 | 
-70 | from builtins import open as o
-71 + import pathlib
-72 | 
-73 | 
-74 | # FURB103
-   - with o("file.txt", "w", newline="\r\n") as f:
-   -     f.write(foobar)
-75 + pathlib.Path("file.txt").write_text(foobar, newline="\r\n")
-76 | 
-77 | # Non-errors.
-78 | 
-
-FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
-   --> FURB103.py:154:6
-    |
-152 | data = {"price": 100}
-153 |
-154 | with open("test.json", "wb") as f:
-    |      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-155 |     f.write(json.dumps(data, indent=4).encode("utf-8"))
-    |
-help: Replace with `Path("test.json")....`
-148 | 
-149 | # See: https://github.com/astral-sh/ruff/issues/20785
-150 | import json
-151 + import pathlib
-152 | 
-153 | data = {"price": 100}
-154 | 
-    - with open("test.json", "wb") as f:
-    -     f.write(json.dumps(data, indent=4).encode("utf-8"))
-155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8"))
diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap
index 140a274468..3b68b110d5 100644
--- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap
+++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__write_whole_file_python_39.snap
@@ -1,7 +1,7 @@
 ---
 source: crates/ruff_linter/src/rules/refurb/mod.rs
 ---
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
   --> FURB103.py:12:6
    |
 11 | # FURB103
@@ -10,8 +10,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text("t
 13 |     f.write("test")
    |
 help: Replace with `Path("file.txt").write_text("test")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+10 | # Errors.
+11 | 
+12 | # FURB103
+   - with open("file.txt", "w") as f:
+   -     f.write("test")
+13 + pathlib.Path("file.txt").write_text("test")
+14 | 
+15 | # FURB103
+16 | with open("file.txt", "wb") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
   --> FURB103.py:16:6
    |
 15 | # FURB103
@@ -20,8 +34,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(f
 17 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_bytes(foobar)`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+14 |     f.write("test")
+15 | 
+16 | # FURB103
+   - with open("file.txt", "wb") as f:
+   -     f.write(foobar)
+17 + pathlib.Path("file.txt").write_bytes(foobar)
+18 | 
+19 | # FURB103
+20 | with open("file.txt", mode="wb") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
   --> FURB103.py:20:6
    |
 19 | # FURB103
@@ -30,8 +58,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_bytes(b
 21 |     f.write(b"abc")
    |
 help: Replace with `Path("file.txt").write_bytes(b"abc")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+18 |     f.write(foobar)
+19 | 
+20 | # FURB103
+   - with open("file.txt", mode="wb") as f:
+   -     f.write(b"abc")
+21 + pathlib.Path("file.txt").write_bytes(b"abc")
+22 | 
+23 | # FURB103
+24 | with open("file.txt", "w", encoding="utf8") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
   --> FURB103.py:24:6
    |
 23 | # FURB103
@@ -40,8 +82,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 25 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+22 |     f.write(b"abc")
+23 | 
+24 | # FURB103
+   - with open("file.txt", "w", encoding="utf8") as f:
+   -     f.write(foobar)
+25 + pathlib.Path("file.txt").write_text(foobar, encoding="utf8")
+26 | 
+27 | # FURB103
+28 | with open("file.txt", "w", errors="ignore") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
   --> FURB103.py:28:6
    |
 27 | # FURB103
@@ -50,8 +106,22 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 29 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+26 |     f.write(foobar)
+27 | 
+28 | # FURB103
+   - with open("file.txt", "w", errors="ignore") as f:
+   -     f.write(foobar)
+29 + pathlib.Path("file.txt").write_text(foobar, errors="ignore")
+30 | 
+31 | # FURB103
+32 | with open("file.txt", mode="w") as f:
 
-FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
+FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
   --> FURB103.py:32:6
    |
 31 | # FURB103
@@ -60,6 +130,20 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(fo
 33 |     f.write(foobar)
    |
 help: Replace with `Path("file.txt").write_text(foobar)`
+1  + import pathlib
+2  | def foo():
+3  |     ...
+4  | 
+--------------------------------------------------------------------------------
+30 |     f.write(foobar)
+31 | 
+32 | # FURB103
+   - with open("file.txt", mode="w") as f:
+   -     f.write(foobar)
+33 + pathlib.Path("file.txt").write_text(foobar)
+34 | 
+35 | # FURB103
+36 | with open(foo(), "wb") as f:
 
 FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
   --> FURB103.py:36:6
@@ -105,7 +189,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
    |
 help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
 
-FURB103 `open` and `write` should be replaced by `Path("test.json")....`
+FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
    --> FURB103.py:154:6
     |
 152 | data = {"price": 100}
@@ -115,3 +199,13 @@ FURB103 `open` and `write` should be replaced by `Path("test.json")....`
 155 |     f.write(json.dumps(data, indent=4).encode("utf-8"))
     |
 help: Replace with `Path("test.json")....`
+148 | 
+149 | # See: https://github.com/astral-sh/ruff/issues/20785
+150 | import json
+151 + import pathlib
+152 | 
+153 | data = {"price": 100}
+154 | 
+    - with open("test.json", "wb") as f:
+    -     f.write(json.dumps(data, indent=4).encode("utf-8"))
+155 + pathlib.Path("test.json").write_bytes(json.dumps(data, indent=4).encode("utf-8"))

From faae72b836817cad39eec797046ae9cfea3820c8 Mon Sep 17 00:00:00 2001
From: Douglas Creager 
Date: Fri, 7 Nov 2025 14:01:39 -0500
Subject: [PATCH 113/180] [ty] Clarify behavior of constraint sets for gradual
 upper bounds and constraints (#21287)

When checking whether a constraint set is satisfied, if a typevar has a
non-fully-static upper bound or constraint, we are free to choose any
materialization that makes the check succeed.

In non-inferable positions, we have to show that the constraint set is
satisfied for all valid specializations, so it's best to choose the most
restrictive materialization, since that minimizes the set of valid
specializations that have to pass.

In inferable positions, we only have to show that the constraint set is
satisfied for _some_ valid specializations, so it's best to choose the
most permissive materialization, since that maximizes our chances of
finding a specialization that passes.
---
 .../satisfied_by_all_typevars.md              | 262 ++++++++++++++++++
 .../src/types/constraints.rs                  | 161 ++++++++---
 2 files changed, 388 insertions(+), 35 deletions(-)

diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md
index 8d9f563250..865cfa8395 100644
--- a/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md
+++ b/crates/ty_python_semantic/resources/mdtest/type_properties/satisfied_by_all_typevars.md
@@ -141,6 +141,97 @@ def bounded[T: Base]():
     static_assert(not constraints.satisfied_by_all_typevars())
 ```
 
+If the upper bound is a gradual type, we are free to choose any materialization of the upper bound
+that makes the test succeed. In non-inferable positions, it is most helpful to choose the bottom
+materialization as the upper bound. That is the most restrictive possible choice, which minimizes
+the number of valid specializations that must satisfy the constraint set. In inferable positions,
+the opposite is true: it is most helpful to choose the top materialization. That is the most
+permissive possible choice, which maximizes the number of valid specializations that might satisfy
+the constraint set.
+
+```py
+from typing import Any
+
+def bounded_by_gradual[T: Any]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # If we choose Base as the materialization for the upper bound, then (T = Base) is a valid
+    # specialization, which satisfies (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T]))
+    # We are free to choose any materialization of the upper bound, and only have to show that the
+    # constraint set holds for that one materialization. Having chosen one materialization, we then
+    # have to show that the constraint set holds for all valid specializations of that
+    # materialization. If we choose Never as the materialization, then all valid specializations
+    # must satisfy (T ≤ Never). That means there is only one valid specialization, (T = Never),
+    # which satisfies (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization, then (T = Unrelated) is a valid specialization,
+    # which satisfies (T ≤ Unrelated).
+    constraints = ConstraintSet.range(Never, T, Unrelated)
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Never as the materialization, then (T = Never) is the only valid specialization,
+    # which satisfies (T ≤ Unrelated).
+    static_assert(constraints.satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization, then (T = Unrelated) is a valid specialization,
+    # which satisfies (T ≤ Unrelated ∧ T ≠ Never).
+    constraints = constraints & ~ConstraintSet.range(Never, T, Never)
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # There is no upper bound that we can choose to satisfy this constraint set in non-inferable
+    # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy
+    # (T ≤ Unrelated ∧ T ≠ Never).
+    static_assert(not constraints.satisfied_by_all_typevars())
+```
+
+When the upper bound is a more complex gradual type, we are still free to choose any materialization
+that causes the check to succeed, and we will still choose the bottom materialization in
+non-inferable position, and the top materialization in inferable position. The variance of the
+typevar does not affect whether there is a materialization we can choose. Below, we test the most
+restrictive variance (i.e., invariance), but we get the same results for other variances as well.
+
+```py
+def bounded_by_gradual[T: list[Any]]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # If we choose list[Base] as the materialization of the upper bound, then (T = list[Base]) is a
+    # valid specialization, which satisfies (T ≤ list[Base]).
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Base] as the materialization, then all valid specializations must satisfy
+    # (T ≤ list[Base]).
+    # We are free to choose any materialization of the upper bound, and only have to show that the
+    # constraint set holds for that one materialization. Having chosen one materialization, we then
+    # have to show that the constraint set holds for all valid specializations of that
+    # materialization. If we choose list[Base] as the materialization, then all valid specializations
+    # must satisfy (T ≤ list[Base]), which is exactly the constraint set that we need to satisfy.
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated]).
+    constraints = ConstraintSet.range(Never, T, list[Unrelated])
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Unrelated as the materialization, then all valid specializations must satisfy
+    # (T ≤ list[Unrelated]).
+    static_assert(constraints.satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never).
+    constraints = constraints & ~ConstraintSet.range(Never, T, Never)
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # There is no upper bound that we can choose to satisfy this constraint set in non-inferable
+    # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy
+    # (T ≤ list[Unrelated] ∧ T ≠ Never).
+    static_assert(not constraints.satisfied_by_all_typevars())
+```
+
 ## Constrained typevar
 
 If a typevar has constraints, then it must specialize to one of those specific types. (Not to a
@@ -218,3 +309,174 @@ def constrained[T: (Base, Unrelated)]():
     # (T = Base) is a valid specialization, which does not satisfy (T = Sub ∨ T = Unrelated).
     static_assert(not constraints.satisfied_by_all_typevars())
 ```
+
+If any of the constraints is a gradual type, we are free to choose any materialization of that
+constraint that makes the test succeed. In non-inferable positions, it is most helpful to choose the
+bottom materialization as the constraint. That is the most restrictive possible choice, which
+minimizes the number of valid specializations that must satisfy the constraint set. In inferable
+positions, the opposite is true: it is most helpful to choose the top materialization. That is the
+most permissive possible choice, which maximizes the number of valid specializations that might
+satisfy the constraint set.
+
+```py
+from typing import Any
+
+def constrained_by_gradual[T: (Base, Any)]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization of the gradual constraint, then (T = Unrelated)
+    # is a valid specialization, which satisfies (T ≤ Unrelated).
+    static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T]))
+    # No matter which materialization we choose, (T = Base) is a valid specialization, which does
+    # not satisfy (T ≤ Unrelated).
+    static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars())
+
+    # If we choose Super as the materialization, then (T = Super) is a valid specialization, which
+    # satisfies (T ≤ Super).
+    static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Never as the materialization, then (T = Base) and (T = Never) are the only valid
+    # specializations, both of which satisfy (T ≤ Super).
+    static_assert(ConstraintSet.range(Never, T, Super).satisfied_by_all_typevars())
+
+    # If we choose Base as the materialization, then (T = Base) is a valid specialization, which
+    # satisfies (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Never as the materialization, then (T = Base) and (T = Never) are the only valid
+    # specializations, both of which satisfy (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars())
+
+def constrained_by_two_gradual[T: (Any, Any)]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # If we choose Unrelated as the materialization of either constraint, then (T = Unrelated) is a
+    # valid specialization, which satisfies (T ≤ Unrelated).
+    static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Unrelated as the materialization of both constraints, then (T = Unrelated) is the
+    # only valid specialization, which satisfies (T ≤ Unrelated).
+    static_assert(ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars())
+
+    # If we choose Base as the materialization of either constraint, then (T = Base) is a valid
+    # specialization, which satisfies (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose Never as the materialization of both constraints, then (T = Never) is the only
+    # valid specialization, which satisfies (T ≤ Base).
+    static_assert(ConstraintSet.range(Never, T, Base).satisfied_by_all_typevars())
+```
+
+When a constraint is a more complex gradual type, we are still free to choose any materialization
+that causes the check to succeed, and we will still choose the bottom materialization in
+non-inferable position, and the top materialization in inferable position. The variance of the
+typevar does not affect whether there is a materialization we can choose. Below, we test the most
+restrictive variance (i.e., invariance), but we get the same results for other variances as well.
+
+```py
+def constrained_by_gradual[T: (list[Base], list[Any])]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # No matter which materialization we choose, every valid specialization will be of the form
+    # (T = list[X]). Because Unrelated is final, it is disjoint from all lists. There is therefore
+    # no materialization or specialization that satisfies (T ≤ Unrelated).
+    static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars())
+
+    # If we choose list[Super] as the materialization, then (T = list[Super]) is a valid
+    # specialization, which satisfies (T ≤ list[Super]).
+    static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which
+    # does not satisfy (T ≤ list[Super]).
+    static_assert(not ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars())
+
+    # If we choose list[Base] as the materialization, then (T = list[Base]) is a valid
+    # specialization, which satisfies (T ≤ list[Base]).
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Base] as the materialization, then all valid specializations must satisfy
+    # (T ≤ list[Base]).
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars())
+
+    # If we choose list[Sub] as the materialization, then (T = list[Sub]) is a valid specialization,
+    # which satisfies (T ≤ list[Sub]).
+    static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which
+    # does not satisfy (T ≤ list[Sub]).
+    static_assert(not ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars())
+
+    # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated]).
+    constraints = ConstraintSet.range(Never, T, list[Unrelated])
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # No matter which materialization we choose, (T = list[Base]) is a valid specialization, which
+    # does not satisfy (T ≤ list[Unrelated]).
+    static_assert(not constraints.satisfied_by_all_typevars())
+
+    # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never).
+    constraints = constraints & ~ConstraintSet.range(Never, T, Never)
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # There is no materialization that we can choose to satisfy this constraint set in non-inferable
+    # position. (T = Never) will be a valid assignment no matter what, and that does not satisfy
+    # (T ≤ list[Unrelated] ∧ T ≠ Never).
+    static_assert(not constraints.satisfied_by_all_typevars())
+
+def constrained_by_two_gradual[T: (list[Any], list[Any])]():
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(ConstraintSet.always().satisfied_by_all_typevars())
+
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.never().satisfied_by_all_typevars())
+
+    # No matter which materialization we choose, every valid specialization will be of the form
+    # (T = list[X]). Because Unrelated is final, it is disjoint from all lists. There is therefore
+    # no materialization or specialization that satisfies (T ≤ Unrelated).
+    static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars(inferable=tuple[T]))
+    static_assert(not ConstraintSet.range(Never, T, Unrelated).satisfied_by_all_typevars())
+
+    # If we choose list[Super] as the materialization, then (T = list[Super]) is a valid
+    # specialization, which satisfies (T ≤ list[Super]).
+    static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Super] as the materialization of both constraints, then (T = list[Super])
+    # is the only valid specialization, which satisfies (T ≤ list[Super]).
+    static_assert(ConstraintSet.range(Never, T, list[Super]).satisfied_by_all_typevars())
+
+    # If we choose list[Base] as the materialization, then (T = list[Base]) is a valid
+    # specialization, which satisfies (T ≤ list[Base]).
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Base] as the materialization, then all valid specializations must satisfy
+    # (T ≤ list[Base]).
+    static_assert(ConstraintSet.range(Never, T, list[Base]).satisfied_by_all_typevars())
+
+    # If we choose list[Sub] as the materialization, then (T = list[Sub]) is a valid specialization,
+    # which satisfies (T ≤ list[Sub]).
+    static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Sub] as the materialization of both constraints, then (T = list[Sub]) is
+    # the only valid specialization, which satisfies (T ≤ list[Sub]).
+    static_assert(ConstraintSet.range(Never, T, list[Sub]).satisfied_by_all_typevars())
+
+    # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated]).
+    constraints = ConstraintSet.range(Never, T, list[Unrelated])
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Unrelated] as the materialization of both constraints, then
+    # (T = list[Unrelated]) is the only valid specialization, which satisfies (T ≤ list[Unrelated]).
+    static_assert(constraints.satisfied_by_all_typevars())
+
+    # If we choose list[Unrelated] as the materialization, then (T = list[Unrelated]) is a valid
+    # specialization, which satisfies (T ≤ list[Unrelated] ∧ T ≠ Never).
+    constraints = constraints & ~ConstraintSet.range(Never, T, Never)
+    static_assert(constraints.satisfied_by_all_typevars(inferable=tuple[T]))
+    # If we choose list[Unrelated] as the materialization of both constraints, then
+    # (T = list[Unrelated]) is the only valid specialization, which satisfies
+    # (T ≤ list[Unrelated] ∧ T ≠ Never); unlike the bounded case, (T = Never) is not forced here.
+    static_assert(constraints.satisfied_by_all_typevars())
+```
diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs
index c1e5ac2697..d2520deb08 100644
--- a/crates/ty_python_semantic/src/types/constraints.rs
+++ b/crates/ty_python_semantic/src/types/constraints.rs
@@ -869,25 +869,56 @@ impl<'db> Node<'db> {
             typevars.insert(constraint.typevar(db));
         });
 
-        for typevar in typevars {
-            // Determine which valid specializations of this typevar satisfy the constraint set.
-            let valid_specializations = typevar.valid_specializations(db).node;
-            let when_satisfied = valid_specializations
+        // Returns if some specialization satisfies this constraint set.
+        let some_specialization_satisfies = move |specializations: Node<'db>| {
+            let when_satisfied = specializations
                 .satisfies(db, self)
-                .and(db, valid_specializations);
-            let satisfied = if typevar.is_inferable(db, inferable) {
-                // If the typevar is inferable, then we only need one valid specialization to
-                // satisfy the constraint set.
-                !when_satisfied.is_never_satisfied()
+                .and(db, specializations)
+                .simplify(db);
+            !when_satisfied.is_never_satisfied()
+        };
+
+        // Returns if all specializations satisfy this constraint set.
+        let all_specializations_satisfy = move |specializations: Node<'db>| {
+            let when_satisfied = specializations
+                .satisfies(db, self)
+                .and(db, specializations)
+                .simplify(db);
+            when_satisfied
+                .iff(db, specializations)
+                .is_always_satisfied(db)
+        };
+
+        for typevar in typevars {
+            if typevar.is_inferable(db, inferable) {
+                // If the typevar is in inferable position, we need to verify that some valid
+                // specialization satisfies the constraint set.
+                let valid_specializations = typevar.valid_specializations(db);
+                if !some_specialization_satisfies(valid_specializations) {
+                    return false;
+                }
             } else {
-                // If the typevar is non-inferable, then we need _all_ valid specializations to
-                // satisfy the constraint set.
-                when_satisfied
-                    .iff(db, valid_specializations)
-                    .is_always_satisfied(db)
-            };
-            if !satisfied {
-                return false;
+                // If the typevar is in non-inferable position, we need to verify that all required
+                // specializations satisfy the constraint set. Complicating things, the typevar
+                // might have gradual constraints. For those, we need to know the range of valid
+                // materializations, but we only need some materialization to satisfy the
+                // constraint set.
+                //
+                // NB: We could also model this by introducing a synthetic typevar for the gradual
+                // constraint, treating that synthetic typevar as always inferable (so that we only
+                // need to verify for some materialization), and then update this typevar's
+                // constraint to refer to the synthetic typevar instead of the original gradual
+                // constraint.
+                let (static_specializations, gradual_constraints) =
+                    typevar.required_specializations(db);
+                if !all_specializations_satisfy(static_specializations) {
+                    return false;
+                }
+                for gradual_constraint in gradual_constraints {
+                    if !some_specialization_satisfies(gradual_constraint) {
+                        return false;
+                    }
+                }
             }
         }
 
@@ -1982,28 +2013,88 @@ impl<'db> SatisfiedClauses<'db> {
     }
 }
 
-/// Returns a constraint set describing the valid specializations of a typevar.
 impl<'db> BoundTypeVarInstance<'db> {
-    pub(crate) fn valid_specializations(self, db: &'db dyn Db) -> ConstraintSet<'db> {
+    /// Returns the valid specializations of a typevar. This is used when checking a constraint set
+    /// when this typevar is in inferable position, where we only need _some_ specialization to
+    /// satisfy the constraint set.
+    fn valid_specializations(self, db: &'db dyn Db) -> Node<'db> {
+        // For gradual upper bounds and constraints, we are free to choose any materialization that
+        // makes the check succeed. In inferable positions, it is most helpful to choose a
+        // materialization that is as permissive as possible, since that maximizes the number of
+        // valid specializations that might satisfy the check. We therefore take the top
+        // materialization of the bound or constraints.
+        //
+        // Moreover, for a gradual constraint, we don't need to worry that typevar constraints are
+        // _equality_ comparisons, not _subtyping_ comparisons — since we are only going to check
+        // that _some_ valid specialization satisfies the constraint set, it's correct for us to
+        // return the range of valid materializations that we can choose from.
         match self.typevar(db).bound_or_constraints(db) {
-            None => ConstraintSet::from(true),
-            Some(TypeVarBoundOrConstraints::UpperBound(bound)) => ConstraintSet::constrain_typevar(
-                db,
-                self,
-                Type::Never,
-                bound,
-                TypeRelation::Assignability,
-            ),
+            None => Node::AlwaysTrue,
+            Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
+                let bound = bound.top_materialization(db);
+                ConstrainedTypeVar::new_node(db, self, Type::Never, bound)
+            }
             Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
-                constraints.elements(db).iter().when_any(db, |constraint| {
-                    ConstraintSet::constrain_typevar(
+                let mut specializations = Node::AlwaysFalse;
+                for constraint in constraints.elements(db) {
+                    let constraint_lower = constraint.bottom_materialization(db);
+                    let constraint_upper = constraint.top_materialization(db);
+                    specializations = specializations.or(
                         db,
-                        self,
-                        *constraint,
-                        *constraint,
-                        TypeRelation::Assignability,
-                    )
-                })
+                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper),
+                    );
+                }
+                specializations
+            }
+        }
+    }
+
+    /// Returns the required specializations of a typevar. This is used when checking a constraint
+    /// set when this typevar is in non-inferable position, where we need _all_ specializations to
+    /// satisfy the constraint set.
+    ///
+    /// That causes complications if this is a constrained typevar, where one of the constraints is
+    /// gradual. In that case, we need to return the range of valid materializations, but we don't
+    /// want to require that all of those materializations satisfy the constraint set.
+    ///
+    /// To handle this, we return a "primary" result, and an iterator of any gradual constraints.
+    /// For an unbounded/unconstrained typevar or a bounded typevar, the primary result fully
+    /// specifies the required specializations, and the iterator will be empty. For a constrained
+    /// typevar, the primary result will include the fully static constraints, and the iterator
+    /// will include an entry for each non-fully-static constraint.
+    fn required_specializations(
+        self,
+        db: &'db dyn Db,
+    ) -> (Node<'db>, impl IntoIterator>) {
+        // For upper bounds and constraints, we are free to choose any materialization that makes
+        // the check succeed. In non-inferable positions, it is most helpful to choose a
+        // materialization that is as restrictive as possible, since that minimizes the number of
+        // valid specializations that must satisfy the check. We therefore take the bottom
+        // materialization of the bound or constraints.
+        match self.typevar(db).bound_or_constraints(db) {
+            None => (Node::AlwaysTrue, Vec::new()),
+            Some(TypeVarBoundOrConstraints::UpperBound(bound)) => {
+                let bound = bound.bottom_materialization(db);
+                (
+                    ConstrainedTypeVar::new_node(db, self, Type::Never, bound),
+                    Vec::new(),
+                )
+            }
+            Some(TypeVarBoundOrConstraints::Constraints(constraints)) => {
+                let mut non_gradual_constraints = Node::AlwaysFalse;
+                let mut gradual_constraints = Vec::new();
+                for constraint in constraints.elements(db) {
+                    let constraint_lower = constraint.bottom_materialization(db);
+                    let constraint_upper = constraint.top_materialization(db);
+                    let constraint =
+                        ConstrainedTypeVar::new_node(db, self, constraint_lower, constraint_upper);
+                    if constraint_lower == constraint_upper {
+                        non_gradual_constraints = non_gradual_constraints.or(db, constraint);
+                    } else {
+                        gradual_constraints.push(constraint);
+                    }
+                }
+                (non_gradual_constraints, gradual_constraints)
             }
         }
     }

From 1617292e9f1d3139359f912e64a01be5334a0059 Mon Sep 17 00:00:00 2001
From: Micha Reiser 
Date: Fri, 7 Nov 2025 20:39:52 +0100
Subject: [PATCH 114/180] Update CodSpeedHQ/action action to v4.3.3 (#21254)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 .github/workflows/ci.yaml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 64c1d9f9b6..9f56f96e15 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -956,7 +956,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -994,7 +994,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,instrumented" --profile profiling --no-default-features -p ruff_benchmark --bench ty
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         with:
           mode: instrumentation
           run: cargo codspeed run
@@ -1032,7 +1032,7 @@ jobs:
         run: cargo codspeed build --features "codspeed,walltime" --profile profiling --no-default-features -p ruff_benchmark
 
       - name: "Run benchmarks"
-        uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
+        uses: CodSpeedHQ/action@bb005fe1c1eea036d3894f02c049cb6b154a1c27 # v4.3.3
         env:
           # enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
           # appear to provide much useful insight for our walltime benchmarks right now

From 39c21d7c6cc8682fa791d93ac23aeac0b32fbca6 Mon Sep 17 00:00:00 2001
From: Alex Waygood 
Date: Fri, 7 Nov 2025 16:26:30 -0500
Subject: [PATCH 115/180] [ty] Generalize some infrastructure around type
 visitors (#21323)

We have lots of `TypeVisitor`s that end up having very similar
`visit_type` implementations. This PR consolidates some of the code for
these so that there's less repetition and duplication.
---
 crates/ty_python_semantic/src/types/class.rs  | 15 ++-----
 .../ty_python_semantic/src/types/generics.rs  | 17 ++------
 .../ty_python_semantic/src/types/visitor.rs   | 42 +++++++++++++------
 3 files changed, 37 insertions(+), 37 deletions(-)

diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs
index de0b4c180c..862ed4b974 100644
--- a/crates/ty_python_semantic/src/types/class.rs
+++ b/crates/ty_python_semantic/src/types/class.rs
@@ -30,7 +30,7 @@ use crate::types::member::{Member, class_member};
 use crate::types::signatures::{CallableSignature, Parameter, Parameters, Signature};
 use crate::types::tuple::{TupleSpec, TupleType};
 use crate::types::typed_dict::typed_dict_params_from_class_def;
-use crate::types::visitor::{NonAtomicType, TypeKind, TypeVisitor, walk_non_atomic_type};
+use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard};
 use crate::types::{
     ApplyTypeMappingVisitor, Binding, BoundSuperType, CallableType, DataclassFlags,
     DataclassParams, DeprecatedInstance, FindLegacyTypeVarsVisitor, HasRelationToVisitor,
@@ -1437,7 +1437,7 @@ impl<'db> ClassLiteral<'db> {
         #[derive(Default)]
         struct CollectTypeVars<'db> {
             typevars: RefCell>>,
-            seen_types: RefCell>>,
+            recursion_guard: TypeCollector<'db>,
         }
 
         impl<'db> TypeVisitor<'db> for CollectTypeVars<'db> {
@@ -1454,16 +1454,7 @@ impl<'db> ClassLiteral<'db> {
             }
 
             fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) {
-                match TypeKind::from(ty) {
-                    TypeKind::Atomic => {}
-                    TypeKind::NonAtomic(non_atomic_type) => {
-                        if !self.seen_types.borrow_mut().insert(non_atomic_type) {
-                            // If we have already seen this type, we can skip it.
-                            return;
-                        }
-                        walk_non_atomic_type(db, non_atomic_type, self);
-                    }
-                }
+                walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard);
             }
         }
 
diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs
index 992e664401..555ab47f01 100644
--- a/crates/ty_python_semantic/src/types/generics.rs
+++ b/crates/ty_python_semantic/src/types/generics.rs
@@ -14,7 +14,7 @@ use crate::types::constraints::ConstraintSet;
 use crate::types::instance::{Protocol, ProtocolInstanceType};
 use crate::types::signatures::{Parameter, Parameters, Signature};
 use crate::types::tuple::{TupleSpec, TupleType, walk_tuple_type};
-use crate::types::visitor::{NonAtomicType, TypeKind, TypeVisitor, walk_non_atomic_type};
+use crate::types::visitor::{TypeCollector, TypeVisitor, walk_type_with_recursion_guard};
 use crate::types::{
     ApplyTypeMappingVisitor, BoundTypeVarIdentity, BoundTypeVarInstance, ClassLiteral,
     FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor,
@@ -22,7 +22,7 @@ use crate::types::{
     TypeMapping, TypeRelation, TypeVarBoundOrConstraints, TypeVarIdentity, TypeVarInstance,
     TypeVarKind, TypeVarVariance, UnionType, declaration_type, walk_bound_type_var_type,
 };
-use crate::{Db, FxIndexSet, FxOrderMap, FxOrderSet};
+use crate::{Db, FxOrderMap, FxOrderSet};
 
 /// Returns an iterator of any generic context introduced by the given scope or any enclosing
 /// scope.
@@ -288,7 +288,7 @@ impl<'db> GenericContext<'db> {
         #[derive(Default)]
         struct CollectTypeVars<'db> {
             typevars: RefCell>>,
-            seen_types: RefCell>>,
+            recursion_guard: TypeCollector<'db>,
         }
 
         impl<'db> TypeVisitor<'db> for CollectTypeVars<'db> {
@@ -308,16 +308,7 @@ impl<'db> GenericContext<'db> {
             }
 
             fn visit_type(&self, db: &'db dyn Db, ty: Type<'db>) {
-                match TypeKind::from(ty) {
-                    TypeKind::Atomic => {}
-                    TypeKind::NonAtomic(non_atomic_type) => {
-                        if !self.seen_types.borrow_mut().insert(non_atomic_type) {
-                            // If we have already seen this type, we can skip it.
-                            return;
-                        }
-                        walk_non_atomic_type(db, non_atomic_type, self);
-                    }
-                }
+                walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard);
             }
         }
 
diff --git a/crates/ty_python_semantic/src/types/visitor.rs b/crates/ty_python_semantic/src/types/visitor.rs
index d58bf046f1..dd1ddfdfe5 100644
--- a/crates/ty_python_semantic/src/types/visitor.rs
+++ b/crates/ty_python_semantic/src/types/visitor.rs
@@ -242,6 +242,33 @@ pub(super) fn walk_non_atomic_type<'db, V: TypeVisitor<'db> + ?Sized>(
     }
 }
 
+pub(crate) fn walk_type_with_recursion_guard<'db>(
+    db: &'db dyn Db,
+    ty: Type<'db>,
+    visitor: &impl TypeVisitor<'db>,
+    recursion_guard: &TypeCollector<'db>,
+) {
+    match TypeKind::from(ty) {
+        TypeKind::Atomic => {}
+        TypeKind::NonAtomic(non_atomic_type) => {
+            if recursion_guard.type_was_already_seen(ty) {
+                // If we have already seen this type, we can skip it.
+                return;
+            }
+            walk_non_atomic_type(db, non_atomic_type, visitor);
+        }
+    }
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct TypeCollector<'db>(RefCell>>);
+
+impl<'db> TypeCollector<'db> {
+    pub(crate) fn type_was_already_seen(&self, ty: Type<'db>) -> bool {
+        !self.0.borrow_mut().insert(ty)
+    }
+}
+
 /// Return `true` if `ty`, or any of the types contained in `ty`, match the closure passed in.
 ///
 /// The function guards against infinite recursion
@@ -258,7 +285,7 @@ pub(super) fn any_over_type<'db>(
 ) -> bool {
     struct AnyOverTypeVisitor<'db, 'a> {
         query: &'a dyn Fn(Type<'db>) -> bool,
-        seen_types: RefCell>>,
+        recursion_guard: TypeCollector<'db>,
         found_matching_type: Cell,
         should_visit_lazy_type_attributes: bool,
     }
@@ -278,22 +305,13 @@ pub(super) fn any_over_type<'db>(
             if found {
                 return;
             }
-            match TypeKind::from(ty) {
-                TypeKind::Atomic => {}
-                TypeKind::NonAtomic(non_atomic_type) => {
-                    if !self.seen_types.borrow_mut().insert(non_atomic_type) {
-                        // If we have already seen this type, we can skip it.
-                        return;
-                    }
-                    walk_non_atomic_type(db, non_atomic_type, self);
-                }
-            }
+            walk_type_with_recursion_guard(db, ty, self, &self.recursion_guard);
         }
     }
 
     let visitor = AnyOverTypeVisitor {
         query,
-        seen_types: RefCell::new(FxIndexSet::default()),
+        recursion_guard: TypeCollector::default(),
         found_matching_type: Cell::new(false),
         should_visit_lazy_type_attributes,
     };

From b6add3ee6d15aa17dc6b3b77e0133d3a4d8e65cb Mon Sep 17 00:00:00 2001
From: William Woodruff 
Date: Fri, 7 Nov 2025 17:09:29 -0500
Subject: [PATCH 116/180] chore: bump dist, remove old commenting workflows
 (#21302)

---
 .github/workflows/ci.yaml                     |  12 +-
 .github/workflows/mypy_primer.yaml            |   9 +-
 .github/workflows/mypy_primer_comment.yaml    | 122 ------------------
 .github/workflows/pr-comment.yaml             |  88 -------------
 .github/workflows/release.yml                 |   6 +-
 .github/workflows/ty-ecosystem-analyzer.yaml  |  10 +-
 .../ty-ecosystem-analyzer_comment.yaml        |  85 ------------
 .github/workflows/typing_conformance.yaml     |  11 +-
 .../workflows/typing_conformance_comment.yaml | 112 ----------------
 .github/zizmor.yml                            |   3 -
 dist-workspace.toml                           |   2 +-
 11 files changed, 14 insertions(+), 446 deletions(-)
 delete mode 100644 .github/workflows/mypy_primer_comment.yaml
 delete mode 100644 .github/workflows/pr-comment.yaml
 delete mode 100644 .github/workflows/ty-ecosystem-analyzer_comment.yaml
 delete mode 100644 .github/workflows/typing_conformance_comment.yaml

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 9f56f96e15..b4b642df4d 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -627,16 +627,8 @@ jobs:
           cat ecosystem-result-format-preview >> ecosystem-result
           echo "" >> ecosystem-result
 
-      - name: Export pull request number
-        run: |
-          echo ${{ github.event.number }} > pr-number
-
-      - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        name: Upload PR Number
-        with:
-          name: pr-number
-          path: pr-number
-
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         name: Upload Results
         with:
diff --git a/.github/workflows/mypy_primer.yaml b/.github/workflows/mypy_primer.yaml
index 89028a2235..4e292823e4 100644
--- a/.github/workflows/mypy_primer.yaml
+++ b/.github/workflows/mypy_primer.yaml
@@ -59,20 +59,15 @@ jobs:
         run: |
           cd ruff
           scripts/mypy_primer.sh
-          echo ${{ github.event.number }} > ../pr-number
 
+      # NOTE: astral-sh-bot uses this artifact to post comments on PRs.
+      # Make sure to update the bot if you rename the artifact.
       - name: Upload diff
         uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: mypy_primer_diff
           path: mypy_primer.diff
 
-      - name: Upload pr-number
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
-        with:
-          name: pr-number
-          path: pr-number
-
   memory_usage:
     name: Run memory statistics
     runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
diff --git a/.github/workflows/mypy_primer_comment.yaml b/.github/workflows/mypy_primer_comment.yaml
deleted file mode 100644
index 895956e766..0000000000
--- a/.github/workflows/mypy_primer_comment.yaml
+++ /dev/null
@@ -1,122 +0,0 @@
-name: PR comment (mypy_primer)
-
-on: # zizmor: ignore[dangerous-triggers]
-  workflow_run:
-    workflows: [Run mypy_primer]
-    types: [completed]
-  workflow_dispatch:
-    inputs:
-      workflow_run_id:
-        description: The mypy_primer workflow that triggers the workflow run
-        required: true
-
-jobs:
-  comment:
-    runs-on: ubuntu-24.04
-    permissions:
-      pull-requests: write
-    steps:
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: Download PR number
-        with:
-          name: pr-number
-          run_id: ${{ github.event.workflow_run.id ||  github.event.inputs.workflow_run_id }}
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Parse pull request number
-        id: pr-number
-        run: |
-          if [[ -f pr-number ]]
-          then
-            echo "pr-number=$(> "$GITHUB_OUTPUT"
-          fi
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download mypy_primer results"
-        id: download-mypy_primer_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: mypy_primer_diff
-          workflow: mypy_primer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/mypy_primer_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8
-        name: "Download mypy_primer memory results"
-        id: download-mypy_primer_memory_diff
-        if: steps.pr-number.outputs.pr-number
-        with:
-          name: mypy_primer_memory_diff
-          workflow: mypy_primer.yaml
-          pr: ${{ steps.pr-number.outputs.pr-number }}
-          path: pr/mypy_primer_memory_diff
-          workflow_conclusion: completed
-          if_no_artifact_found: ignore
-          allow_forks: true
-
-      - name: Generate comment content
-        id: generate-comment
-        if: ${{ steps.download-mypy_primer_diff.outputs.found_artifact == 'true' && steps.download-mypy_primer_memory_diff.outputs.found_artifact == 'true' }}
-        run: |
-          # Guard against malicious mypy_primer results that symlink to a secret
-          # file on this runner
-          if [[ -L pr/mypy_primer_diff/mypy_primer.diff ]] || [[ -L pr/mypy_primer_memory_diff/mypy_primer_memory.diff ]]
-          then
-              echo "Error: mypy_primer.diff and mypy_primer_memory.diff cannot be a symlink"
-              exit 1
-          fi
-
-          # Note this identifier is used to find the comment to update on
-          # subsequent runs
-          echo '' >> comment.txt
-
-          echo '## `mypy_primer` results' >> comment.txt
-          if [ -s "pr/mypy_primer_diff/mypy_primer.diff" ]; then
-            echo '
' >> comment.txt - echo 'Changes were detected when running on open source projects' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/mypy_primer_diff/mypy_primer.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No ecosystem changes detected ✅' >> comment.txt - fi - - if [ -s "pr/mypy_primer_memory_diff/mypy_primer_memory.diff" ]; then - echo '
' >> comment.txt - echo 'Memory usage changes were detected when running on open source projects' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/mypy_primer_memory_diff/mypy_primer_memory.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No memory usage changes detected ✅' >> comment.txt - fi - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/workflows/pr-comment.yaml b/.github/workflows/pr-comment.yaml deleted file mode 100644 index 0ef00644c1..0000000000 --- a/.github/workflows/pr-comment.yaml +++ /dev/null @@ -1,88 +0,0 @@ -name: Ecosystem check comment - -on: - workflow_run: - workflows: [CI] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The ecosystem workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download pull request number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download ecosystem results" - id: download-ecosystem-result - if: 
steps.pr-number.outputs.pr-number - with: - name: ecosystem-result - workflow: ci.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/ecosystem - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: steps.download-ecosystem-result.outputs.found_artifact == 'true' - run: | - # Guard against malicious ecosystem results that symlink to a secret - # file on this runner - if [[ -L pr/ecosystem/ecosystem-result ]] - then - echo "Error: ecosystem-result cannot be a symlink" - exit 1 - fi - - # Note this identifier is used to find the comment to update on - # subsequent runs - echo '' >> comment.txt - - echo '## `ruff-ecosystem` results' >> comment.txt - cat pr/ecosystem/ecosystem-result >> comment.txt - echo "" >> comment.txt - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e2a385715f..10730750a5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -68,7 +68,7 @@ jobs: # we specify bash to get pipefail; it guards against the `curl` command # failing. 
otherwise `sh` won't catch that `curl` returned non-0 shell: bash - run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.0/cargo-dist-installer.sh | sh" + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.30.2/cargo-dist-installer.sh | sh" - name: Cache dist uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 with: @@ -166,8 +166,8 @@ jobs: - custom-build-binaries - custom-build-docker - build-global-artifacts - # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) - if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }} + # Only run if we're "publishing", and only if plan, local and global didn't fail (skipped is fine) + if: ${{ always() && needs.plan.result == 'success' && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.custom-build-binaries.result == 'skipped' || needs.custom-build-binaries.result == 'success') && (needs.custom-build-docker.result == 'skipped' || needs.custom-build-docker.result == 'success') }} env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} runs-on: "depot-ubuntu-latest-4" diff --git a/.github/workflows/ty-ecosystem-analyzer.yaml b/.github/workflows/ty-ecosystem-analyzer.yaml index cd763c3db1..417cf7d1f5 100644 --- a/.github/workflows/ty-ecosystem-analyzer.yaml +++ b/.github/workflows/ty-ecosystem-analyzer.yaml @@ -112,8 +112,6 @@ jobs: cat diff-statistics.md >> "$GITHUB_STEP_SUMMARY" - echo ${{ github.event.number }} > pr-number - - name: "Deploy to Cloudflare 
Pages" if: ${{ env.CF_API_TOKEN_EXISTS == 'true' }} id: deploy @@ -131,18 +129,14 @@ jobs: echo >> comment.md echo "**[Full report with detailed diff]($DEPLOYMENT_URL/diff)** ([timing results]($DEPLOYMENT_URL/timing))" >> comment.md + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. - name: Upload comment uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: comment.md path: comment.md - - name: Upload pr-number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: pr-number - path: pr-number - - name: Upload diagnostics diff uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: diff --git a/.github/workflows/ty-ecosystem-analyzer_comment.yaml b/.github/workflows/ty-ecosystem-analyzer_comment.yaml deleted file mode 100644 index f237f45e1e..0000000000 --- a/.github/workflows/ty-ecosystem-analyzer_comment.yaml +++ /dev/null @@ -1,85 +0,0 @@ -name: PR comment (ty ecosystem-analyzer) - -on: # zizmor: ignore[dangerous-triggers] - workflow_run: - workflows: [ty ecosystem-analyzer] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The ty ecosystem-analyzer workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download PR number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download comment.md" - id: 
download-comment - if: steps.pr-number.outputs.pr-number - with: - name: comment.md - workflow: ty-ecosystem-analyzer.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/comment - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: ${{ steps.download-comment.outputs.found_artifact == 'true' }} - run: | - # Guard against malicious ty ecosystem-analyzer results that symlink to a secret - # file on this runner - if [[ -L pr/comment/comment.md ]] - then - echo "Error: comment.md cannot be a symlink" - exit 1 - fi - - # Note: this identifier is used to find the comment to update on subsequent runs - echo '' > comment.md - echo >> comment.md - cat pr/comment/comment.md >> comment.md - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.md >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.md - edit-mode: replace diff --git a/.github/workflows/typing_conformance.yaml b/.github/workflows/typing_conformance.yaml index aa99f6dd72..aefe0b6c40 100644 --- a/.github/workflows/typing_conformance.yaml +++ b/.github/workflows/typing_conformance.yaml @@ -94,21 +94,18 @@ jobs: touch typing_conformance_diagnostics.diff fi - echo ${{ github.event.number }} > pr-number echo "${CONFORMANCE_SUITE_COMMIT}" > conformance-suite-commit + # NOTE: astral-sh-bot uses this 
artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. - name: Upload diff uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: name: typing_conformance_diagnostics_diff path: typing_conformance_diagnostics.diff - - name: Upload pr-number - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: pr-number - path: pr-number - + # NOTE: astral-sh-bot uses this artifact to post comments on PRs. + # Make sure to update the bot if you rename the artifact. - name: Upload conformance suite commit uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 with: diff --git a/.github/workflows/typing_conformance_comment.yaml b/.github/workflows/typing_conformance_comment.yaml deleted file mode 100644 index f596507448..0000000000 --- a/.github/workflows/typing_conformance_comment.yaml +++ /dev/null @@ -1,112 +0,0 @@ -name: PR comment (typing_conformance) - -on: # zizmor: ignore[dangerous-triggers] - workflow_run: - workflows: [Run typing conformance] - types: [completed] - workflow_dispatch: - inputs: - workflow_run_id: - description: The typing_conformance workflow that triggers the workflow run - required: true - -jobs: - comment: - runs-on: ubuntu-24.04 - permissions: - pull-requests: write - steps: - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download PR number - with: - name: pr-number - run_id: ${{ github.event.workflow_run.id || github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - name: Parse pull request number - id: pr-number - run: | - if [[ -f pr-number ]] - then - echo "pr-number=$(> "$GITHUB_OUTPUT" - fi - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download typing conformance suite commit - with: - name: conformance-suite-commit - run_id: ${{ github.event.workflow_run.id || 
github.event.inputs.workflow_run_id }} - if_no_artifact_found: ignore - allow_forks: true - - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: "Download typing_conformance results" - id: download-typing_conformance_diff - if: steps.pr-number.outputs.pr-number - with: - name: typing_conformance_diagnostics_diff - workflow: typing_conformance.yaml - pr: ${{ steps.pr-number.outputs.pr-number }} - path: pr/typing_conformance_diagnostics_diff - workflow_conclusion: completed - if_no_artifact_found: ignore - allow_forks: true - - - name: Generate comment content - id: generate-comment - if: ${{ steps.download-typing_conformance_diff.outputs.found_artifact == 'true' }} - run: | - # Guard against malicious typing_conformance results that symlink to a secret - # file on this runner - if [[ -L pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff ]] - then - echo "Error: typing_conformance_diagnostics.diff cannot be a symlink" - exit 1 - fi - - # Note this identifier is used to find the comment to update on - # subsequent runs - echo '' >> comment.txt - - if [[ -f conformance-suite-commit ]] - then - echo "## Diagnostic diff on [typing conformance tests](https://github.com/python/typing/tree/$(> comment.txt - else - echo "conformance-suite-commit file not found" - echo "## Diagnostic diff on typing conformance tests" >> comment.txt - fi - - if [ -s "pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff" ]; then - echo '
' >> comment.txt - echo 'Changes were detected when running ty on typing conformance tests' >> comment.txt - echo '' >> comment.txt - echo '```diff' >> comment.txt - cat pr/typing_conformance_diagnostics_diff/typing_conformance_diagnostics.diff >> comment.txt - echo '```' >> comment.txt - echo '
' >> comment.txt - else - echo 'No changes detected when running ty on typing conformance tests ✅' >> comment.txt - fi - - echo 'comment<> "$GITHUB_OUTPUT" - cat comment.txt >> "$GITHUB_OUTPUT" - echo 'EOF' >> "$GITHUB_OUTPUT" - - - name: Find existing comment - uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3.1.0 - if: steps.generate-comment.outcome == 'success' - id: find-comment - with: - issue-number: ${{ steps.pr-number.outputs.pr-number }} - comment-author: "github-actions[bot]" - body-includes: "" - - - name: Create or update comment - if: steps.find-comment.outcome == 'success' - uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043 # v4 - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - issue-number: ${{ steps.pr-number.outputs.pr-number }} - body-path: comment.txt - edit-mode: replace diff --git a/.github/zizmor.yml b/.github/zizmor.yml index 237af95e7b..28ec2a61ef 100644 --- a/.github/zizmor.yml +++ b/.github/zizmor.yml @@ -3,9 +3,6 @@ # # TODO: can we remove the ignores here so that our workflows are more secure? 
rules: - dangerous-triggers: - ignore: - - pr-comment.yaml cache-poisoning: ignore: - build-docker.yml diff --git a/dist-workspace.toml b/dist-workspace.toml index 20f123b05a..ddc157cdf3 100644 --- a/dist-workspace.toml +++ b/dist-workspace.toml @@ -5,7 +5,7 @@ packages = ["ruff"] # Config for 'dist' [dist] # The preferred dist version to use in CI (Cargo.toml SemVer syntax) -cargo-dist-version = "0.30.0" +cargo-dist-version = "0.30.2" # Whether to consider the binaries in a package for distribution (defaults true) dist = false # CI backends to support From e06e108095b9283e6352e48694503bc81e46974b Mon Sep 17 00:00:00 2001 From: Brent Westbrook <36778786+ntBre@users.noreply.github.com> Date: Fri, 7 Nov 2025 18:45:53 -0500 Subject: [PATCH 117/180] [`flake8-annotations`] Add link to `allow-star-arg-any` option (`ANN401`) (#21326) Summary -- Addresses https://github.com/astral-sh/ruff/issues/19152#issuecomment-3501373508 by adding a link to the configuration option to the rule page. Test Plan -- Built the docs locally and made sure the link was present and working --- .../src/rules/flake8_annotations/rules/definition.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs index a70659f99e..d111bbf525 100644 --- a/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs +++ b/crates/ruff_linter/src/rules/flake8_annotations/rules/definition.rs @@ -513,6 +513,9 @@ impl Violation for MissingReturnTypeClassMethod { /// def foo(x: MyAny): ... 
/// ``` /// +/// ## Options +/// - `lint.flake8-annotations.allow-star-arg-any` +/// /// ## References /// - [Typing spec: `Any`](https://typing.python.org/en/latest/spec/special-types.html#any) /// - [Python documentation: `typing.Any`](https://docs.python.org/3/library/typing.html#typing.Any) From 16de4aa3ccc3bb8acfb6e750fad639a767b68a0e Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Fri, 7 Nov 2025 19:04:45 -0500 Subject: [PATCH 118/180] [`refurb`] Auto-fix annotated assignments (`FURB101`) (#21278) ## Summary Fixed FURB101 (`read-whole-file`) to handle annotated assignments. Previously, the rule would detect violations in code like `contents: str = f.read()` but fail to generate a fix. Now it correctly generates fixes that preserve type annotations (e.g., `contents: str = Path("file.txt").read_text(encoding="utf-8")`). Fixes #21274 ## Problem Analysis The FURB101 rule was only checking for `Stmt::Assign` statements when determining whether a fix could be applied. When encountering annotated assignments (`Stmt::AnnAssign`) like `contents: str = f.read()`, the rule would: 1. Correctly detect the violation (the diagnostic was reported) 2. Fail to generate a fix because: - The `visit_expr` method only matched `Stmt::Assign`, not `Stmt::AnnAssign` - The `generate_fix` function only accepted `Stmt::Assign` in its body validation - The replacement code generation didn't account for type annotations This occurred because Python's AST represents annotated assignments as a different node type (`StmtAnnAssign`) with separate fields for the target, annotation, and value, unlike regular assignments which use a list of targets. ## Approach The fix extends the rule to handle both assignment types: 1. **Updated `visit_expr` method**: Now matches both `Stmt::Assign` and `Stmt::AnnAssign`, extracting: - Variable name from the target expression - Type annotation code (when present) using the code generator 2. 
**Updated `generate_fix` function**: - Added `annotation: Option` parameter to accept annotation code - Updated body validation to accept both `Stmt::Assign` and `Stmt::AnnAssign` - Modified replacement code generation to preserve annotations: `{var}: {annotation} = {binding}({filename_code}).{suggestion}` 3. **Added test case**: Added an annotated assignment test case to verify the fix works correctly. The implementation maintains backward compatibility with regular assignments while adding support for annotated assignments, ensuring type annotations are preserved in the generated fixes. --------- Co-authored-by: Brent Westbrook --- .../resources/test/fixtures/refurb/FURB101.py | 15 +++++ .../src/rules/refurb/rules/read_whole_file.rs | 55 +++++++++++++------ ...es__refurb__tests__FURB101_FURB101.py.snap | 55 +++++++++++++++++++ 3 files changed, 107 insertions(+), 18 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py index 31b1ccd341..77306cfe18 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB101.py @@ -125,3 +125,18 @@ with open(*filename, mode="r") as f: # `buffering`. 
with open(*filename, file="file.txt", mode="r") as f: x = f.read() + +# FURB101 +with open("file.txt", encoding="utf-8") as f: + contents: str = f.read() + +# FURB101 but no fix because it would remove the assignment to `x` +with open("file.txt", encoding="utf-8") as f: + contents, x = f.read(), 2 + +# FURB101 but no fix because it would remove the `process_contents` call +with open("file.txt", encoding="utf-8") as f: + contents = process_contents(f.read()) + +with open("file.txt", encoding="utf-8") as f: + contents: str = process_contents(f.read()) diff --git a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs index 279ecb66aa..2b43af89a8 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/read_whole_file.rs @@ -125,20 +125,8 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> { open.item.range(), ); - let target = match self.with_stmt.body.first() { - Some(Stmt::Assign(assign)) - if assign.value.range().contains_range(expr.range()) => - { - match assign.targets.first() { - Some(Expr::Name(name)) => Some(name.id.as_str()), - _ => None, - } - } - _ => None, - }; - if let Some(fix) = - generate_fix(self.checker, &open, target, self.with_stmt, &suggestion) + generate_fix(self.checker, &open, expr, self.with_stmt, &suggestion) { diagnostic.set_fix(fix); } @@ -190,15 +178,16 @@ fn make_suggestion(open: &FileOpen<'_>, generator: Generator) -> String { fn generate_fix( checker: &Checker, open: &FileOpen, - target: Option<&str>, + expr: &Expr, with_stmt: &ast::StmtWith, suggestion: &str, ) -> Option { - if !(with_stmt.items.len() == 1 && matches!(with_stmt.body.as_slice(), [Stmt::Assign(_)])) { + if with_stmt.items.len() != 1 { return None; } let locator = checker.locator(); + let filename_code = locator.slice(open.filename.range()); let (import_edit, binding) = checker @@ -210,9 +199,39 @@ fn generate_fix( ) .ok()?; - let replacement 
= match target { - Some(var) => format!("{var} = {binding}({filename_code}).{suggestion}"), - None => format!("{binding}({filename_code}).{suggestion}"), + // Only replace context managers with a single assignment or annotated assignment in the body. + // The assignment's RHS must also be the same as the `read` call in `expr`, otherwise this fix + // would remove the rest of the expression. + let replacement = match with_stmt.body.as_slice() { + [Stmt::Assign(ast::StmtAssign { targets, value, .. })] if value.range() == expr.range() => { + match targets.as_slice() { + [Expr::Name(name)] => { + format!( + "{name} = {binding}({filename_code}).{suggestion}", + name = name.id + ) + } + _ => return None, + } + } + [ + Stmt::AnnAssign(ast::StmtAnnAssign { + target, + annotation, + value: Some(value), + .. + }), + ] if value.range() == expr.range() => match target.as_ref() { + Expr::Name(name) => { + format!( + "{var}: {ann} = {binding}({filename_code}).{suggestion}", + var = name.id, + ann = locator.slice(annotation.range()) + ) + } + _ => return None, + }, + _ => return None, }; let applicability = if checker.comment_ranges().intersects(with_stmt.range()) { diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap index 4131499c0c..3fea418d76 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB101_FURB101.py.snap @@ -189,3 +189,58 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()` 51 | # the user reads the whole file and that bit they can replace. 
| help: Replace with `Path("file.txt").read_text()` + +FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:130:6 + | +129 | # FURB101 +130 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +131 | contents: str = f.read() + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` +1 + import pathlib +2 | def foo(): +3 | ... +4 | +-------------------------------------------------------------------------------- +128 | x = f.read() +129 | +130 | # FURB101 + - with open("file.txt", encoding="utf-8") as f: + - contents: str = f.read() +131 + contents: str = pathlib.Path("file.txt").read_text(encoding="utf-8") +132 | +133 | # FURB101 but no fix because it would remove the assignment to `x` +134 | with open("file.txt", encoding="utf-8") as f: + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:134:6 + | +133 | # FURB101 but no fix because it would remove the assignment to `x` +134 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +135 | contents, x = f.read(), 2 + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:138:6 + | +137 | # FURB101 but no fix because it would remove the `process_contents` call +138 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +139 | contents = process_contents(f.read()) + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` + +FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")` + --> FURB101.py:141:6 + | +139 | contents = process_contents(f.read()) +140 | +141 | with open("file.txt", encoding="utf-8") as f: + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +142 | contents: str = 
process_contents(f.read()) + | +help: Replace with `Path("file.txt").read_text(encoding="utf-8")` From 76efc8061d1db56fdd9beba6771b58aaa36dc8a7 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Sat, 8 Nov 2025 15:10:24 +0100 Subject: [PATCH 119/180] [ty] Make `variance_of` logging `trace` only (#21339) --- crates/ty_python_semantic/src/types.rs | 2 +- crates/ty_python_semantic/src/types/signatures.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 6c9cdefa20..e3b8b8e89e 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -7400,7 +7400,7 @@ impl<'db> From<&Type<'db>> for Type<'db> { impl<'db> VarianceInferable<'db> for Type<'db> { fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { - tracing::debug!( + tracing::trace!( "Checking variance of '{tvar}' in `{ty:?}`", tvar = typevar.typevar(db).name(db), ty = self.display(db), diff --git a/crates/ty_python_semantic/src/types/signatures.rs b/crates/ty_python_semantic/src/types/signatures.rs index 11979100bb..fb96b59679 100644 --- a/crates/ty_python_semantic/src/types/signatures.rs +++ b/crates/ty_python_semantic/src/types/signatures.rs @@ -1111,7 +1111,7 @@ impl<'db> Signature<'db> { impl<'db> VarianceInferable<'db> for &Signature<'db> { fn variance_of(self, db: &'db dyn Db, typevar: BoundTypeVarInstance<'db>) -> TypeVarVariance { - tracing::debug!( + tracing::trace!( "Checking variance of `{tvar}` in `{self:?}`", tvar = typevar.typevar(db).name(db) ); From 09e6af16c81dfef6ee607ebaf72a6a253035b941 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade <1324225+hugovk@users.noreply.github.com> Date: Sat, 8 Nov 2025 18:17:14 +0200 Subject: [PATCH 120/180] [ruff+ty] Add colour to `--help` (#21337) --- crates/ruff/src/args.rs | 10 ++++++++++ crates/ty/src/args.rs | 10 ++++++++++ 2 files changed, 20 insertions(+) diff --git 
a/crates/ruff/src/args.rs b/crates/ruff/src/args.rs index eb4bcd0a92..e1c114a66e 100644 --- a/crates/ruff/src/args.rs +++ b/crates/ruff/src/args.rs @@ -7,6 +7,8 @@ use std::sync::Arc; use crate::commands::completions::config::{OptionString, OptionStringParser}; use anyhow::bail; +use clap::builder::Styles; +use clap::builder::styling::{AnsiColor, Effects}; use clap::builder::{TypedValueParser, ValueParserFactory}; use clap::{Parser, Subcommand, command}; use colored::Colorize; @@ -78,6 +80,13 @@ impl GlobalConfigArgs { } } +// Configures Clap v3-style help menu colors +const STYLES: Styles = Styles::styled() + .header(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .usage(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD)) + .placeholder(AnsiColor::Cyan.on_default()); + #[derive(Debug, Parser)] #[command( author, @@ -86,6 +95,7 @@ impl GlobalConfigArgs { after_help = "For help with a specific command, see: `ruff help `." 
)] #[command(version)] +#[command(styles = STYLES)] pub struct Args { #[command(subcommand)] pub(crate) command: Command, diff --git a/crates/ty/src/args.rs b/crates/ty/src/args.rs index f6a52a3c8c..ac334f37bf 100644 --- a/crates/ty/src/args.rs +++ b/crates/ty/src/args.rs @@ -1,5 +1,7 @@ use crate::logging::Verbosity; use crate::python_version::PythonVersion; +use clap::builder::Styles; +use clap::builder::styling::{AnsiColor, Effects}; use clap::error::ErrorKind; use clap::{ArgAction, ArgMatches, Error, Parser}; use ruff_db::system::SystemPathBuf; @@ -8,9 +10,17 @@ use ty_project::metadata::options::{EnvironmentOptions, Options, SrcOptions, Ter use ty_project::metadata::value::{RangedValue, RelativeGlobPattern, RelativePathBuf, ValueSource}; use ty_python_semantic::lint; +// Configures Clap v3-style help menu colors +const STYLES: Styles = Styles::styled() + .header(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .usage(AnsiColor::Green.on_default().effects(Effects::BOLD)) + .literal(AnsiColor::Cyan.on_default().effects(Effects::BOLD)) + .placeholder(AnsiColor::Cyan.on_default()); + #[derive(Debug, Parser)] #[command(author, name = "ty", about = "An extremely fast Python type checker.")] #[command(long_version = crate::version::version())] +#[command(styles = STYLES)] pub struct Cli { #[command(subcommand)] pub(crate) command: Command, From 020ff1723b428e8aa9cbcee743a6dcb9fd95cf8b Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Sat, 8 Nov 2025 18:20:46 +0000 Subject: [PATCH 121/180] [ty] Add narrowing for `isinstance()` and `issubclass()` checks that use PEP-604 unions (#21334) --- .../resources/mdtest/narrow/isinstance.md | 68 +++++++++++++++++++ .../resources/mdtest/narrow/issubclass.md | 68 +++++++++++++++++++ crates/ty_python_semantic/src/types/narrow.rs | 23 ++++++- 3 files changed, 156 insertions(+), 3 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md 
b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md index 375cc55b29..0d3e11b996 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md @@ -70,6 +70,74 @@ def _(flag: bool): reveal_type(x) # revealed: Literal["a"] ``` +## `classinfo` is a PEP-604 union of types + +```toml +[environment] +python-version = "3.10" +``` + +```py +def _(x: int | str | bytes | memoryview | range): + if isinstance(x, int | str): + reveal_type(x) # revealed: int | str + elif isinstance(x, bytes | memoryview): + reveal_type(x) # revealed: bytes | memoryview[Unknown] + else: + reveal_type(x) # revealed: range +``` + +Although `isinstance()` usually only works if all elements in the `UnionType` are class objects, at +runtime a special exception is made for `None` so that `isinstance(x, int | None)` can work: + +```py +def _(x: int | str | bytes | range | None): + if isinstance(x, int | str | None): + reveal_type(x) # revealed: int | str | None + else: + reveal_type(x) # revealed: bytes | range +``` + +## `classinfo` is an invalid PEP-604 union of types + +Except for the `None` special case mentioned above, narrowing can only take place if all elements in +the PEP-604 union are class literals. If any elements are generic aliases or other types, the +`isinstance()` call may fail at runtime, so no narrowing can take place: + +```toml +[environment] +python-version = "3.10" +``` + +```py +def _(x: int | list[int] | bytes): + # TODO: this fails at runtime; we should emit a diagnostic + # (requires special-casing of the `isinstance()` signature) + if isinstance(x, int | list[int]): + reveal_type(x) # revealed: int | list[int] | bytes + else: + reveal_type(x) # revealed: int | list[int] | bytes +``` + +## PEP-604 unions on Python \<3.10 + +PEP-604 unions were added in Python 3.10, so attempting to use them on Python 3.9 does not lead to +any type narrowing. 
+ +```toml +[environment] +python-version = "3.9" +``` + +```py +def _(x: int | str | bytes): + # error: [unsupported-operator] + if isinstance(x, int | str): + reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown) + else: + reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown) +``` + ## Class types ```py diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md index 052b4de2fe..11eb2ebaf4 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md @@ -131,6 +131,74 @@ def _(flag1: bool, flag2: bool): reveal_type(t) # revealed: ``` +## `classinfo` is a PEP-604 union of types + +```toml +[environment] +python-version = "3.10" +``` + +```py +def f(x: type[int | str | bytes | range]): + if issubclass(x, int | str): + reveal_type(x) # revealed: type[int] | type[str] + elif issubclass(x, bytes | memoryview): + reveal_type(x) # revealed: type[bytes] + else: + reveal_type(x) # revealed: +``` + +Although `issubclass()` usually only works if all elements in the `UnionType` are class objects, at +runtime a special exception is made for `None` so that `issubclass(x, int | None)` can work: + +```py +def _(x: type): + if issubclass(x, int | str | None): + reveal_type(x) # revealed: type[int] | type[str] | + else: + reveal_type(x) # revealed: type & ~type[int] & ~type[str] & ~ +``` + +## `classinfo` is an invalid PEP-604 union of types + +Except for the `None` special case mentioned above, narrowing can only take place if all elements in +the PEP-604 union are class literals. 
If any elements are generic aliases or other types, the +`issubclass()` call may fail at runtime, so no narrowing can take place: + +```toml +[environment] +python-version = "3.10" +``` + +```py +def _(x: type[int | list | bytes]): + # TODO: this fails at runtime; we should emit a diagnostic + # (requires special-casing of the `issubclass()` signature) + if issubclass(x, int | list[int]): + reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] + else: + reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +``` + +## PEP-604 unions on Python \<3.10 + +PEP-604 unions were added in Python 3.10, so attempting to use them on Python 3.9 does not lead to +any type narrowing. + +```toml +[environment] +python-version = "3.9" +``` + +```py +def _(x: type[int | str | bytes]): + # error: [unsupported-operator] + if issubclass(x, int | str): + reveal_type(x) # revealed: (type[int] & Unknown) | (type[str] & Unknown) | (type[bytes] & Unknown) + else: + reveal_type(x) # revealed: (type[int] & Unknown) | (type[str] & Unknown) | (type[bytes] & Unknown) +``` + ## Special cases ### Emit a diagnostic if the first argument is of wrong type diff --git a/crates/ty_python_semantic/src/types/narrow.rs b/crates/ty_python_semantic/src/types/narrow.rs index 5b709551f5..2e81c92448 100644 --- a/crates/ty_python_semantic/src/types/narrow.rs +++ b/crates/ty_python_semantic/src/types/narrow.rs @@ -11,9 +11,9 @@ use crate::types::enums::{enum_member_literals, enum_metadata}; use crate::types::function::KnownFunction; use crate::types::infer::infer_same_file_expression_type; use crate::types::{ - ClassLiteral, ClassType, IntersectionBuilder, KnownClass, SpecialFormType, SubclassOfInner, - SubclassOfType, Truthiness, Type, TypeContext, TypeVarBoundOrConstraints, UnionBuilder, - infer_expression_types, + ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType, SpecialFormType, + SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext, 
TypeVarBoundOrConstraints, + UnionBuilder, infer_expression_types, }; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; @@ -212,6 +212,23 @@ impl ClassInfoConstraintFunction { ) }), + Type::KnownInstance(KnownInstanceType::UnionType(elements)) => { + UnionType::try_from_elements( + db, + elements.elements(db).iter().map(|element| { + // A special case is made for `None` at runtime + // (it's implicitly converted to `NoneType` in `int | None`) + // which means that `isinstance(x, int | None)` works even though + // `None` is not a class literal. + if element.is_none(db) { + self.generate_constraint(db, KnownClass::NoneType.to_class_literal(db)) + } else { + self.generate_constraint(db, *element) + } + }), + ) + } + Type::AlwaysFalsy | Type::AlwaysTruthy | Type::BooleanLiteral(_) From dd751e8d07b1fe3fa90bc437a2981b38fcaed3d0 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 01:35:42 +0000 Subject: [PATCH 122/180] Update dependency ruff to v0.14.4 (#21353) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- docs/requirements-insiders.txt | 2 +- docs/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements-insiders.txt b/docs/requirements-insiders.txt index 5d0d47a756..954e47c74c 100644 --- a/docs/requirements-insiders.txt +++ b/docs/requirements-insiders.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.14.3 +ruff==0.14.4 mkdocs==1.6.1 mkdocs-material @ git+ssh://git@github.com/astral-sh/mkdocs-material-insiders.git@39da7a5e761410349e9a1b8abf593b0cdd5453ff mkdocs-redirects==1.2.2 diff --git a/docs/requirements.txt b/docs/requirements.txt index 9ccce87029..01c34ac2f6 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,5 +1,5 @@ PyYAML==6.0.3 -ruff==0.14.3 +ruff==0.14.4 mkdocs==1.6.1 mkdocs-material==9.5.38 mkdocs-redirects==1.2.2 From 2a1d412f729831b6182ab4703657aeb0e5c59c5f Mon Sep 17 00:00:00 2001 From: 
"renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:14:48 +0100 Subject: [PATCH 123/180] Update Rust crate syn to v2.0.110 (#21357) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [syn](https://redirect.github.com/dtolnay/syn) | workspace.dependencies | patch | `2.0.108` -> `2.0.110` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
dtolnay/syn (syn) ### [`v2.0.110`](https://redirect.github.com/dtolnay/syn/releases/tag/2.0.110) [Compare Source](https://redirect.github.com/dtolnay/syn/compare/2.0.109...2.0.110) - Tweaks to improve build speed ([#​1939](https://redirect.github.com/dtolnay/syn/issues/1939), thanks [@​dishmaker](https://redirect.github.com/dishmaker)) - Make `syn::ext::IdentExt::unraw` available without "parsing" feature ([#​1940](https://redirect.github.com/dtolnay/syn/issues/1940)) - Support parsing `syn::Meta` followed by `=>` ([#​1944](https://redirect.github.com/dtolnay/syn/issues/1944)) ### [`v2.0.109`](https://redirect.github.com/dtolnay/syn/releases/tag/2.0.109) [Compare Source](https://redirect.github.com/dtolnay/syn/compare/2.0.108...2.0.109) - Tweaks to improve build speed ([#​1927](https://redirect.github.com/dtolnay/syn/issues/1927), [#​1928](https://redirect.github.com/dtolnay/syn/issues/1928), [#​1930](https://redirect.github.com/dtolnay/syn/issues/1930), [#​1932](https://redirect.github.com/dtolnay/syn/issues/1932), [#​1934](https://redirect.github.com/dtolnay/syn/issues/1934), thanks [@​dishmaker](https://redirect.github.com/dishmaker))
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f7b83e994..602cd19935 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3927,9 +3927,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.108" +version = "2.0.110" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917" +checksum = "a99801b5bd34ede4cf3fc688c5919368fea4e4814a4664359503e6015b280aea" dependencies = [ "proc-macro2", "quote", From f4f259395c0bc2ddd11fa5f510e9161e15f0a50d Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:15:35 +0100 Subject: [PATCH 124/180] Update taiki-e/install-action action to v2.62.49 (#21358) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [taiki-e/install-action](https://redirect.github.com/taiki-e/install-action) | action | patch | `v2.62.45` -> `v2.62.49` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
taiki-e/install-action (taiki-e/install-action) ### [`v2.62.49`](https://redirect.github.com/taiki-e/install-action/blob/HEAD/CHANGELOG.md#100---2021-12-30) [Compare Source](https://redirect.github.com/taiki-e/install-action/compare/v2.62.48...v2.62.49) Initial release [Unreleased]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.49...HEAD [2.62.49]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.48...v2.62.49 [2.62.48]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.47...v2.62.48 [2.62.47]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.46...v2.62.47 [2.62.46]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.45...v2.62.46 [2.62.45]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.44...v2.62.45 [2.62.44]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.43...v2.62.44 [2.62.43]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.42...v2.62.43 [2.62.42]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.41...v2.62.42 [2.62.41]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.40...v2.62.41 [2.62.40]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.39...v2.62.40 [2.62.39]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.38...v2.62.39 [2.62.38]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.37...v2.62.38 [2.62.37]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.36...v2.62.37 [2.62.36]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.35...v2.62.36 [2.62.35]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.34...v2.62.35 [2.62.34]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.33...v2.62.34 [2.62.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.32...v2.62.33 [2.62.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.31...v2.62.32 [2.62.31]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.62.30...v2.62.31 [2.62.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.29...v2.62.30 [2.62.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.28...v2.62.29 [2.62.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.27...v2.62.28 [2.62.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.26...v2.62.27 [2.62.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.25...v2.62.26 [2.62.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.24...v2.62.25 [2.62.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.23...v2.62.24 [2.62.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.22...v2.62.23 [2.62.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.21...v2.62.22 [2.62.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.20...v2.62.21 [2.62.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.19...v2.62.20 [2.62.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.18...v2.62.19 [2.62.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.17...v2.62.18 [2.62.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.16...v2.62.17 [2.62.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.15...v2.62.16 [2.62.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.14...v2.62.15 [2.62.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.13...v2.62.14 [2.62.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.12...v2.62.13 [2.62.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.11...v2.62.12 [2.62.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.10...v2.62.11 [2.62.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.9...v2.62.10 [2.62.9]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.62.8...v2.62.9 [2.62.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.7...v2.62.8 [2.62.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.6...v2.62.7 [2.62.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.5...v2.62.6 [2.62.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.4...v2.62.5 [2.62.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.3...v2.62.4 [2.62.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.2...v2.62.3 [2.62.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.1...v2.62.2 [2.62.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.62.0...v2.62.1 [2.62.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.13...v2.62.0 [2.61.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.12...v2.61.13 [2.61.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.11...v2.61.12 [2.61.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.10...v2.61.11 [2.61.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.9...v2.61.10 [2.61.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.8...v2.61.9 [2.61.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.7...v2.61.8 [2.61.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.6...v2.61.7 [2.61.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.5...v2.61.6 [2.61.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.4...v2.61.5 [2.61.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.3...v2.61.4 [2.61.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.2...v2.61.3 [2.61.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.61.1...v2.61.2 [2.61.1]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.61.0...v2.61.1 [2.61.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.60.0...v2.61.0 [2.60.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.59.1...v2.60.0 [2.59.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.59.0...v2.59.1 [2.59.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.33...v2.59.0 [2.58.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.32...v2.58.33 [2.58.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.31...v2.58.32 [2.58.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.30...v2.58.31 [2.58.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.29...v2.58.30 [2.58.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.28...v2.58.29 [2.58.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.27...v2.58.28 [2.58.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.26...v2.58.27 [2.58.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.25...v2.58.26 [2.58.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.24...v2.58.25 [2.58.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.23...v2.58.24 [2.58.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.22...v2.58.23 [2.58.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.21...v2.58.22 [2.58.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.20...v2.58.21 [2.58.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.19...v2.58.20 [2.58.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.18...v2.58.19 [2.58.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.17...v2.58.18 [2.58.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.16...v2.58.17 [2.58.16]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.58.15...v2.58.16 [2.58.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.14...v2.58.15 [2.58.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.13...v2.58.14 [2.58.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.12...v2.58.13 [2.58.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.11...v2.58.12 [2.58.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.10...v2.58.11 [2.58.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.9...v2.58.10 [2.58.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.8...v2.58.9 [2.58.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.7...v2.58.8 [2.58.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.6...v2.58.7 [2.58.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.5...v2.58.6 [2.58.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.4...v2.58.5 [2.58.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.3...v2.58.4 [2.58.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.2...v2.58.3 [2.58.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.1...v2.58.2 [2.58.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.58.0...v2.58.1 [2.58.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.8...v2.58.0 [2.57.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.7...v2.57.8 [2.57.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.6...v2.57.7 [2.57.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.5...v2.57.6 [2.57.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.4...v2.57.5 [2.57.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.3...v2.57.4 [2.57.3]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.57.2...v2.57.3 [2.57.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.1...v2.57.2 [2.57.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.57.0...v2.57.1 [2.57.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.24...v2.57.0 [2.56.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.23...v2.56.24 [2.56.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.22...v2.56.23 [2.56.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.21...v2.56.22 [2.56.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.20...v2.56.21 [2.56.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.19...v2.56.20 [2.56.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.18...v2.56.19 [2.56.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.17...v2.56.18 [2.56.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.16...v2.56.17 [2.56.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.15...v2.56.16 [2.56.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.14...v2.56.15 [2.56.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.13...v2.56.14 [2.56.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.12...v2.56.13 [2.56.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.11...v2.56.12 [2.56.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.10...v2.56.11 [2.56.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.9...v2.56.10 [2.56.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.8...v2.56.9 [2.56.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.7...v2.56.8 [2.56.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.6...v2.56.7 [2.56.6]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.56.5...v2.56.6 [2.56.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.4...v2.56.5 [2.56.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.3...v2.56.4 [2.56.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.2...v2.56.3 [2.56.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.1...v2.56.2 [2.56.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.56.0...v2.56.1 [2.56.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.55.4...v2.56.0 [2.55.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.55.3...v2.55.4 [2.55.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.55.2...v2.55.3 [2.55.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.55.1...v2.55.2 [2.55.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.55.0...v2.55.1 [2.55.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.54.3...v2.55.0 [2.54.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.54.2...v2.54.3 [2.54.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.54.1...v2.54.2 [2.54.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.54.0...v2.54.1 [2.54.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.53.2...v2.54.0 [2.53.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.53.1...v2.53.2 [2.53.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.53.0...v2.53.1 [2.53.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.8...v2.53.0 [2.52.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.7...v2.52.8 [2.52.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.6...v2.52.7 [2.52.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.5...v2.52.6 [2.52.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.4...v2.52.5 
[2.52.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.3...v2.52.4 [2.52.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.2...v2.52.3 [2.52.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.1...v2.52.2 [2.52.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.52.0...v2.52.1 [2.52.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.51.3...v2.52.0 [2.51.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.51.2...v2.51.3 [2.51.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.51.1...v2.51.2 [2.51.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.51.0...v2.51.1 [2.51.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.10...v2.51.0 [2.50.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.9...v2.50.10 [2.50.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.8...v2.50.9 [2.50.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.7...v2.50.8 [2.50.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.6...v2.50.7 [2.50.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.5...v2.50.6 [2.50.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.4...v2.50.5 [2.50.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.3...v2.50.4 [2.50.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.2...v2.50.3 [2.50.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.1...v2.50.2 [2.50.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.50.0...v2.50.1 [2.50.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.50...v2.50.0 [2.49.50]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.49...v2.49.50 [2.49.49]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.48...v2.49.49 [2.49.48]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.49.47...v2.49.48 [2.49.47]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.46...v2.49.47 [2.49.46]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.45...v2.49.46 [2.49.45]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.44...v2.49.45 [2.49.44]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.43...v2.49.44 [2.49.43]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.42...v2.49.43 [2.49.42]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.41...v2.49.42 [2.49.41]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.40...v2.49.41 [2.49.40]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.39...v2.49.40 [2.49.39]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.38...v2.49.39 [2.49.38]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.37...v2.49.38 [2.49.37]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.36...v2.49.37 [2.49.36]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.35...v2.49.36 [2.49.35]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.34...v2.49.35 [2.49.34]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.33...v2.49.34 [2.49.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.32...v2.49.33 [2.49.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.31...v2.49.32 [2.49.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.30...v2.49.31 [2.49.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.29...v2.49.30 [2.49.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.28...v2.49.29 [2.49.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.27...v2.49.28 [2.49.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.26...v2.49.27 [2.49.26]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.49.25...v2.49.26 [2.49.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.24...v2.49.25 [2.49.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.23...v2.49.24 [2.49.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.22...v2.49.23 [2.49.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.21...v2.49.22 [2.49.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.20...v2.49.21 [2.49.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.19...v2.49.20 [2.49.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.18...v2.49.19 [2.49.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.17...v2.49.18 [2.49.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.16...v2.49.17 [2.49.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.15...v2.49.16 [2.49.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.14...v2.49.15 [2.49.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.13...v2.49.14 [2.49.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.12...v2.49.13 [2.49.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.11...v2.49.12 [2.49.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.10...v2.49.11 [2.49.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.9...v2.49.10 [2.49.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.8...v2.49.9 [2.49.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.7...v2.49.8 [2.49.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.6...v2.49.7 [2.49.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.5...v2.49.6 [2.49.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.4...v2.49.5 [2.49.4]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.49.3...v2.49.4 [2.49.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.2...v2.49.3 [2.49.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.1...v2.49.2 [2.49.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.49.0...v2.49.1 [2.49.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.22...v2.49.0 [2.48.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.21...v2.48.22 [2.48.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.20...v2.48.21 [2.48.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.19...v2.48.20 [2.48.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.18...v2.48.19 [2.48.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.17...v2.48.18 [2.48.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.16...v2.48.17 [2.48.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.15...v2.48.16 [2.48.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.14...v2.48.15 [2.48.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.13...v2.48.14 [2.48.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.12...v2.48.13 [2.48.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.11...v2.48.12 [2.48.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.10...v2.48.11 [2.48.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.9...v2.48.10 [2.48.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.8...v2.48.9 [2.48.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.7...v2.48.8 [2.48.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.6...v2.48.7 [2.48.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.5...v2.48.6 [2.48.5]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.48.4...v2.48.5 [2.48.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.3...v2.48.4 [2.48.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.2...v2.48.3 [2.48.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.1...v2.48.2 [2.48.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.48.0...v2.48.1 [2.48.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.32...v2.48.0 [2.47.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.31...v2.47.32 [2.47.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.30...v2.47.31 [2.47.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.29...v2.47.30 [2.47.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.28...v2.47.29 [2.47.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.27...v2.47.28 [2.47.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.26...v2.47.27 [2.47.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.25...v2.47.26 [2.47.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.24...v2.47.25 [2.47.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.23...v2.47.24 [2.47.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.22...v2.47.23 [2.47.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.21...v2.47.22 [2.47.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.20...v2.47.21 [2.47.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.19...v2.47.20 [2.47.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.18...v2.47.19 [2.47.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.17...v2.47.18 [2.47.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.16...v2.47.17 [2.47.16]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.47.15...v2.47.16 [2.47.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.14...v2.47.15 [2.47.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.13...v2.47.14 [2.47.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.12...v2.47.13 [2.47.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.11...v2.47.12 [2.47.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.10...v2.47.11 [2.47.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.9...v2.47.10 [2.47.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.8...v2.47.9 [2.47.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.7...v2.47.8 [2.47.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.6...v2.47.7 [2.47.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.5...v2.47.6 [2.47.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.4...v2.47.5 [2.47.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.3...v2.47.4 [2.47.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.2...v2.47.3 [2.47.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.1...v2.47.2 [2.47.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.47.0...v2.47.1 [2.47.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.20...v2.47.0 [2.46.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.19...v2.46.20 [2.46.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.18...v2.46.19 [2.46.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.17...v2.46.18 [2.46.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.16...v2.46.17 [2.46.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.15...v2.46.16 [2.46.15]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.46.14...v2.46.15 [2.46.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.13...v2.46.14 [2.46.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.12...v2.46.13 [2.46.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.11...v2.46.12 [2.46.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.10...v2.46.11 [2.46.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.9...v2.46.10 [2.46.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.8...v2.46.9 [2.46.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.7...v2.46.8 [2.46.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.6...v2.46.7 [2.46.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.5...v2.46.6 [2.46.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.4...v2.46.5 [2.46.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.3...v2.46.4 [2.46.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.2...v2.46.3 [2.46.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.1...v2.46.2 [2.46.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.46.0...v2.46.1 [2.46.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.15...v2.46.0 [2.45.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.14...v2.45.15 [2.45.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.13...v2.45.14 [2.45.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.12...v2.45.13 [2.45.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.11...v2.45.12 [2.45.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.10...v2.45.11 [2.45.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.9...v2.45.10 [2.45.9]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.45.8...v2.45.9 [2.45.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.7...v2.45.8 [2.45.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.6...v2.45.7 [2.45.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.5...v2.45.6 [2.45.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.4...v2.45.5 [2.45.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.3...v2.45.4 [2.45.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.2...v2.45.3 [2.45.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.1...v2.45.2 [2.45.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.45.0...v2.45.1 [2.45.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.72...v2.45.0 [2.44.72]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.71...v2.44.72 [2.44.71]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.70...v2.44.71 [2.44.70]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.69...v2.44.70 [2.44.69]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.68...v2.44.69 [2.44.68]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.67...v2.44.68 [2.44.67]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.66...v2.44.67 [2.44.66]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.65...v2.44.66 [2.44.65]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.64...v2.44.65 [2.44.64]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.63...v2.44.64 [2.44.63]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.62...v2.44.63 [2.44.62]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.61...v2.44.62 [2.44.61]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.60...v2.44.61 [2.44.60]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.44.59...v2.44.60 [2.44.59]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.58...v2.44.59 [2.44.58]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.57...v2.44.58 [2.44.57]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.56...v2.44.57 [2.44.56]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.55...v2.44.56 [2.44.55]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.54...v2.44.55 [2.44.54]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.53...v2.44.54 [2.44.53]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.52...v2.44.53 [2.44.52]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.51...v2.44.52 [2.44.51]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.50...v2.44.51 [2.44.50]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.49...v2.44.50 [2.44.49]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.48...v2.44.49 [2.44.48]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.47...v2.44.48 [2.44.47]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.46...v2.44.47 [2.44.46]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.45...v2.44.46 [2.44.45]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.44...v2.44.45 [2.44.44]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.43...v2.44.44 [2.44.43]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.42...v2.44.43 [2.44.42]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.41...v2.44.42 [2.44.41]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.40...v2.44.41 [2.44.40]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.39...v2.44.40 [2.44.39]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.38...v2.44.39 [2.44.38]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.44.37...v2.44.38 [2.44.37]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.36...v2.44.37 [2.44.36]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.35...v2.44.36 [2.44.35]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.34...v2.44.35 [2.44.34]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.33...v2.44.34 [2.44.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.32...v2.44.33 [2.44.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.31...v2.44.32 [2.44.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.30...v2.44.31 [2.44.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.29...v2.44.30 [2.44.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.28...v2.44.29 [2.44.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.27...v2.44.28 [2.44.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.26...v2.44.27 [2.44.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.25...v2.44.26 [2.44.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.24...v2.44.25 [2.44.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.23...v2.44.24 [2.44.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.22...v2.44.23 [2.44.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.21...v2.44.22 [2.44.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.20...v2.44.21 [2.44.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.19...v2.44.20 [2.44.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.18...v2.44.19 [2.44.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.17...v2.44.18 [2.44.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.16...v2.44.17 [2.44.16]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.44.15...v2.44.16 [2.44.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.14...v2.44.15 [2.44.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.13...v2.44.14 [2.44.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.12...v2.44.13 [2.44.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.11...v2.44.12 [2.44.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.10...v2.44.11 [2.44.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.9...v2.44.10 [2.44.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.8...v2.44.9 [2.44.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.7...v2.44.8 [2.44.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.6...v2.44.7 [2.44.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.5...v2.44.6 [2.44.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.4...v2.44.5 [2.44.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.3...v2.44.4 [2.44.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.2...v2.44.3 [2.44.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.1...v2.44.2 [2.44.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.44.0...v2.44.1 [2.44.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.7...v2.44.0 [2.43.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.6...v2.43.7 [2.43.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.5...v2.43.6 [2.43.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.4...v2.43.5 [2.43.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.3...v2.43.4 [2.43.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.2...v2.43.3 [2.43.2]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.43.1...v2.43.2 [2.43.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.43.0...v2.43.1 [2.43.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.42...v2.43.0 [2.42.42]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.41...v2.42.42 [2.42.41]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.40...v2.42.41 [2.42.40]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.39...v2.42.40 [2.42.39]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.38...v2.42.39 [2.42.38]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.37...v2.42.38 [2.42.37]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.36...v2.42.37 [2.42.36]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.35...v2.42.36 [2.42.35]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.34...v2.42.35 [2.42.34]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.33...v2.42.34 [2.42.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.32...v2.42.33 [2.42.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.31...v2.42.32 [2.42.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.30...v2.42.31 [2.42.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.29...v2.42.30 [2.42.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.28...v2.42.29 [2.42.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.27...v2.42.28 [2.42.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.26...v2.42.27 [2.42.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.25...v2.42.26 [2.42.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.24...v2.42.25 [2.42.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.23...v2.42.24 [2.42.23]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.42.22...v2.42.23 [2.42.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.21...v2.42.22 [2.42.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.20...v2.42.21 [2.42.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.19...v2.42.20 [2.42.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.18...v2.42.19 [2.42.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.17...v2.42.18 [2.42.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.16...v2.42.17 [2.42.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.15...v2.42.16 [2.42.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.14...v2.42.15 [2.42.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.13...v2.42.14 [2.42.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.12...v2.42.13 [2.42.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.11...v2.42.12 [2.42.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.10...v2.42.11 [2.42.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.9...v2.42.10 [2.42.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.8...v2.42.9 [2.42.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.7...v2.42.8 [2.42.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.6...v2.42.7 [2.42.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.5...v2.42.6 [2.42.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.4...v2.42.5 [2.42.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.3...v2.42.4 [2.42.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.2...v2.42.3 [2.42.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.42.1...v2.42.2 [2.42.1]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.42.0...v2.42.1 [2.42.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.18...v2.42.0 [2.41.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.17...v2.41.18 [2.41.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.16...v2.41.17 [2.41.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.15...v2.41.16 [2.41.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.14...v2.41.15 [2.41.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.13...v2.41.14 [2.41.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.12...v2.41.13 [2.41.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.11...v2.41.12 [2.41.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.10...v2.41.11 [2.41.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.9...v2.41.10 [2.41.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.8...v2.41.9 [2.41.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.7...v2.41.8 [2.41.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.6...v2.41.7 [2.41.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.5...v2.41.6 [2.41.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.4...v2.41.5 [2.41.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.3...v2.41.4 [2.41.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.2...v2.41.3 [2.41.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.1...v2.41.2 [2.41.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.41.0...v2.41.1 [2.41.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.40.2...v2.41.0 [2.40.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.40.1...v2.40.2 [2.40.1]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.40.0...v2.40.1 [2.40.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.39.2...v2.40.0 [2.39.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.39.1...v2.39.2 [2.39.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.39.0...v2.39.1 [2.39.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.7...v2.39.0 [2.38.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.6...v2.38.7 [2.38.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.5...v2.38.6 [2.38.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.4...v2.38.5 [2.38.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.3...v2.38.4 [2.38.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.2...v2.38.3 [2.38.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.1...v2.38.2 [2.38.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.38.0...v2.38.1 [2.38.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.37.0...v2.38.0 [2.37.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.36.0...v2.37.0 [2.36.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.35.0...v2.36.0 [2.35.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.34.3...v2.35.0 [2.34.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.34.2...v2.34.3 [2.34.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.34.1...v2.34.2 [2.34.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.34.0...v2.34.1 [2.34.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.36...v2.34.0 [2.33.36]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.35...v2.33.36 [2.33.35]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.34...v2.33.35 [2.33.34]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.33.33...v2.33.34 [2.33.33]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.32...v2.33.33 [2.33.32]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.31...v2.33.32 [2.33.31]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.30...v2.33.31 [2.33.30]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.29...v2.33.30 [2.33.29]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.28...v2.33.29 [2.33.28]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.27...v2.33.28 [2.33.27]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.26...v2.33.27 [2.33.26]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.25...v2.33.26 [2.33.25]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.24...v2.33.25 [2.33.24]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.23...v2.33.24 [2.33.23]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.22...v2.33.23 [2.33.22]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.21...v2.33.22 [2.33.21]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.20...v2.33.21 [2.33.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.19...v2.33.20 [2.33.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.18...v2.33.19 [2.33.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.17...v2.33.18 [2.33.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.16...v2.33.17 [2.33.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.15...v2.33.16 [2.33.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.14...v2.33.15 [2.33.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.13...v2.33.14 [2.33.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.12...v2.33.13 [2.33.12]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.33.11...v2.33.12 [2.33.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.10...v2.33.11 [2.33.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.9...v2.33.10 [2.33.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.8...v2.33.9 [2.33.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.7...v2.33.8 [2.33.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.6...v2.33.7 [2.33.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.5...v2.33.6 [2.33.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.4...v2.33.5 [2.33.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.3...v2.33.4 [2.33.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.2...v2.33.3 [2.33.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.1...v2.33.2 [2.33.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.33.0...v2.33.1 [2.33.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.20...v2.33.0 [2.32.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.19...v2.32.20 [2.32.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.18...v2.32.19 [2.32.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.17...v2.32.18 [2.32.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.16...v2.32.17 [2.32.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.15...v2.32.16 [2.32.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.14...v2.32.15 [2.32.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.13...v2.32.14 [2.32.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.12...v2.32.13 [2.32.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.11...v2.32.12 [2.32.11]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.32.10...v2.32.11 [2.32.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.9...v2.32.10 [2.32.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.8...v2.32.9 [2.32.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.7...v2.32.8 [2.32.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.6...v2.32.7 [2.32.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.5...v2.32.6 [2.32.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.4...v2.32.5 [2.32.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.3...v2.32.4 [2.32.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.2...v2.32.3 [2.32.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.1...v2.32.2 [2.32.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.32.0...v2.32.1 [2.32.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.31.3...v2.32.0 [2.31.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.31.2...v2.31.3 [2.31.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.31.1...v2.31.2 [2.31.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.31.0...v2.31.1 [2.31.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.30.0...v2.31.0 [2.30.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.8...v2.30.0 [2.29.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.7...v2.29.8 [2.29.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.6...v2.29.7 [2.29.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.5...v2.29.6 [2.29.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.4...v2.29.5 [2.29.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.3...v2.29.4 [2.29.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.2...v2.29.3 
[2.29.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.1...v2.29.2 [2.29.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.29.0...v2.29.1 [2.29.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.16...v2.29.0 [2.28.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.15...v2.28.16 [2.28.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.14...v2.28.15 [2.28.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.13...v2.28.14 [2.28.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.12...v2.28.13 [2.28.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.11...v2.28.12 [2.28.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.10...v2.28.11 [2.28.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.9...v2.28.10 [2.28.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.8...v2.28.9 [2.28.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.7...v2.28.8 [2.28.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.6...v2.28.7 [2.28.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.5...v2.28.6 [2.28.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.4...v2.28.5 [2.28.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.3...v2.28.4 [2.28.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.2...v2.28.3 [2.28.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.1...v2.28.2 [2.28.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.28.0...v2.28.1 [2.28.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.15...v2.28.0 [2.27.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.14...v2.27.15 [2.27.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.13...v2.27.14 [2.27.13]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.27.12...v2.27.13 [2.27.12]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.11...v2.27.12 [2.27.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.10...v2.27.11 [2.27.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.9...v2.27.10 [2.27.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.8...v2.27.9 [2.27.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.7...v2.27.8 [2.27.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.6...v2.27.7 [2.27.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.5...v2.27.6 [2.27.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.4...v2.27.5 [2.27.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.3...v2.27.4 [2.27.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.2...v2.27.3 [2.27.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.1...v2.27.2 [2.27.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.27.0...v2.27.1 [2.27.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.20...v2.27.0 [2.26.20]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.19...v2.26.20 [2.26.19]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.18...v2.26.19 [2.26.18]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.17...v2.26.18 [2.26.17]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.16...v2.26.17 [2.26.16]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.15...v2.26.16 [2.26.15]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.14...v2.26.15 [2.26.14]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.13...v2.26.14 [2.26.13]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.12...v2.26.13 [2.26.12]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.26.11...v2.26.12 [2.26.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.10...v2.26.11 [2.26.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.9...v2.26.10 [2.26.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.8...v2.26.9 [2.26.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.7...v2.26.8 [2.26.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.6...v2.26.7 [2.26.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.5...v2.26.6 [2.26.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.4...v2.26.5 [2.26.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.3...v2.26.4 [2.26.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.2...v2.26.3 [2.26.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.1...v2.26.2 [2.26.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.26.0...v2.26.1 [2.26.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.11...v2.26.0 [2.25.11]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.10...v2.25.11 [2.25.10]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.9...v2.25.10 [2.25.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.8...v2.25.9 [2.25.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.7...v2.25.8 [2.25.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.6...v2.25.7 [2.25.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.5...v2.25.6 [2.25.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.4...v2.25.5 [2.25.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.3...v2.25.4 [2.25.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.2...v2.25.3 [2.25.2]: 
https://redirect.github.com/taiki-e/install-action/compare/v2.25.1...v2.25.2 [2.25.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.25.0...v2.25.1 [2.25.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.24.4...v2.25.0 [2.24.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.24.3...v2.24.4 [2.24.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.24.2...v2.24.3 [2.24.2]: https://redirect.github.com/taiki-e/install-action/compare/v2.24.1...v2.24.2 [2.24.1]: https://redirect.github.com/taiki-e/install-action/compare/v2.24.0...v2.24.1 [2.24.0]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.9...v2.24.0 [2.23.9]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.8...v2.23.9 [2.23.8]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.7...v2.23.8 [2.23.7]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.6...v2.23.7 [2.23.6]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.5...v2.23.6 [2.23.5]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.4...v2.23.5 [2.23.4]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.3...v2.23.4 [2.23.3]: https://redirect.github.com/taiki-e/install-action/compare/v2.23.2...v2.23.3 [2.23.2]: https://redirect.github.com/taiki-e/in
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 16 ++++++++-------- .github/workflows/sync_typeshed.yaml | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b4b642df4d..af608d977a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -256,11 +256,11 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: "Install uv" @@ -320,11 +320,11 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" - uses: 
taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: "Install uv" @@ -353,7 +353,7 @@ jobs: - name: "Install Rust toolchain" run: rustup show - name: "Install cargo nextest" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install uv" @@ -940,7 +940,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed @@ -978,7 +978,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed @@ -1016,7 +1016,7 @@ jobs: run: rustup show - name: "Install codspeed" - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-codspeed diff --git a/.github/workflows/sync_typeshed.yaml b/.github/workflows/sync_typeshed.yaml index 3d84483c2b..cadb98cc0b 100644 --- a/.github/workflows/sync_typeshed.yaml +++ b/.github/workflows/sync_typeshed.yaml @@ -207,12 +207,12 @@ jobs: uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo nextest" if: ${{ success() }} - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-nextest - name: "Install cargo insta" if: ${{ success() }} - uses: taiki-e/install-action@81ee1d48d9194cdcab880cbdc7d36e87d39874cb # 
v2.62.45 + uses: taiki-e/install-action@44c6d64aa62cd779e873306675c7a58e86d6d532 # v2.62.49 with: tool: cargo-insta - name: Update snapshots From e0a3cbb048b8bc1db95f979a646218eb88368850 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:15:51 +0100 Subject: [PATCH 125/180] Update Rust crate quote to v1.0.42 (#21356) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [quote](https://redirect.github.com/dtolnay/quote) | workspace.dependencies | patch | `1.0.41` -> `1.0.42` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
dtolnay/quote (quote) ### [`v1.0.42`](https://redirect.github.com/dtolnay/quote/releases/tag/1.0.42) [Compare Source](https://redirect.github.com/dtolnay/quote/compare/1.0.41...1.0.42) - Tweaks to improve build speed ([#​305](https://redirect.github.com/dtolnay/quote/issues/305), [#​306](https://redirect.github.com/dtolnay/quote/issues/306), [#​307](https://redirect.github.com/dtolnay/quote/issues/307), [#​308](https://redirect.github.com/dtolnay/quote/issues/308), thanks [@​dishmaker](https://redirect.github.com/dishmaker))
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 602cd19935..bd2aded88c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2650,9 +2650,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.41" +version = "1.0.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" +checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" dependencies = [ "proc-macro2", ] From ca51feb319f623e01e5d2b6986a91ce48b6241c4 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:16:50 +0100 Subject: [PATCH 126/180] Update Rust crate jiff to v0.2.16 (#21354) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [jiff](https://redirect.github.com/BurntSushi/jiff) | workspace.dependencies | patch | `0.2.15` -> `0.2.16` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
BurntSushi/jiff (jiff) ### [`v0.2.16`](https://redirect.github.com/BurntSushi/jiff/blob/HEAD/CHANGELOG.md#0216-2025-11-07) [Compare Source](https://redirect.github.com/BurntSushi/jiff/compare/0.2.15...0.2.16) \=================== This release contains a number of enhancements and bug fixes that have accrued over the last few months. Most are small polishes. A couple of the bug fixes apply to panics that could occur when parsing invalid `TZ` strings or invalid `strptime` format strings. Also, parsing into a `Span` should now be much faster (for both the ISO 8601 and "friendly" duration formats). Enhancements: - [#​298](https://redirect.github.com/BurntSushi/jiff/issues/298): Add Serde helpers for (de)serializing `std::time::Duration` values. - [#​396](https://redirect.github.com/BurntSushi/jiff/issues/396): Add `Sub` and `Add` trait implementations for `Zoned` (in addition to the already existing trait implementations for `&Zoned`). - [#​397](https://redirect.github.com/BurntSushi/jiff/pull/397): Add `BrokenDownTime::set_meridiem` and ensure it overrides the hour when formatting. - [#​409](https://redirect.github.com/BurntSushi/jiff/pull/409): Switch dependency on `serde` to `serde_core`. This should help speed up compilation times in some cases. - [#​430](https://redirect.github.com/BurntSushi/jiff/pull/430): Add new `Zoned::series` API, making it consistent with the same API on other datetime types. - [#​432](https://redirect.github.com/BurntSushi/jiff/pull/432): When `lenient` mode is enabled for `strftime`, Jiff will no longer error when the formatting string contains invalid UTF-8. - [#​432](https://redirect.github.com/BurntSushi/jiff/pull/432): Formatting of `%y` and `%g` no longer fails based on the specific year value. - [#​432](https://redirect.github.com/BurntSushi/jiff/pull/432): Parsing of `%s` is now a bit more consistent with other fields. 
Moreover, `BrokenDownTime::{to_timestamp,to_zoned}` will now prefer timestamps parsed with `%s` over any other fields that have been parsed. - [#​433](https://redirect.github.com/BurntSushi/jiff/pull/433): Allow parsing just a `%s` into a `Zoned` via the `Etc/Unknown` time zone. Bug fixes: - [#​386](https://redirect.github.com/BurntSushi/jiff/issues/386): Fix a bug where `2087-12-31T23:00:00Z` in the `Africa/Casablanca` time zone could not be round-tripped (because its offset was calculated incorrectly as a result of not handling "permanent DST" POSIX time zones). - [#​407](https://redirect.github.com/BurntSushi/jiff/issues/407): Fix a panic that occurred when parsing an empty string as a POSIX time zone. - [#​410](https://redirect.github.com/BurntSushi/jiff/issues/410): Fix a panic that could occur when parsing `%:` via `strptime` APIs. - [#​414](https://redirect.github.com/BurntSushi/jiff/pull/414): Update some parts of the documentation to indicate that `TimeZone::unknown()` is a fallback for `TimeZone::system()` (instead of the `jiff 0.1` behavior of using `TimeZone::UTC`). - [#​423](https://redirect.github.com/BurntSushi/jiff/issues/423): Fix a panicking bug when reading malformed TZif data. - [#​426](https://redirect.github.com/BurntSushi/jiff/issues/426): Fix a panicking bug when parsing century (`%C`) via `strptime`. - [#​445](https://redirect.github.com/BurntSushi/jiff/pull/445): Fixed bugs with parsing durations like `-9223372036854775808s` and `-PT9223372036854775808S`. Performance: - [#​445](https://redirect.github.com/BurntSushi/jiff/pull/445): Parsing into `Span` or `SignedDuration` is now a fair bit faster in some cases.
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bd2aded88c..d4342c6c0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -642,7 +642,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" dependencies = [ "lazy_static", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -651,7 +651,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1016,7 +1016,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -1698,7 +1698,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ "hermit-abi", "libc", - "windows-sys 0.59.0", + "windows-sys 0.52.0", ] [[package]] @@ -1752,24 +1752,24 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jiff" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" +checksum = "49cce2b81f2098e7e3efc35bc2e0a6b7abec9d34128283d7a26fa8f32a6dbb35" dependencies = [ "jiff-static", "jiff-tzdb-platform", "log", "portable-atomic", "portable-atomic-util", - "serde", - "windows-sys 0.59.0", + "serde_core", + "windows-sys 0.52.0", ] [[package]] name = "jiff-static" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" +checksum = "980af8b43c3ad5d8d349ace167ec8170839f753a42d233ba19e08afe1850fa69" dependencies = [ "proc-macro2", "quote", From 0a6d6b6194ae1edf2374b1f5d56c0c8fd32e8546 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:17:37 +0100 Subject: [PATCH 127/180] Update cargo-bins/cargo-binstall action to v1.15.11 (#21352) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [cargo-bins/cargo-binstall](https://redirect.github.com/cargo-bins/cargo-binstall) | action | patch | `v1.15.10` -> `v1.15.11` | --- > [!WARNING] > Some dependencies could not be looked up. Check the Dependency Dashboard for more information. --- ### Release Notes
cargo-bins/cargo-binstall (cargo-bins/cargo-binstall) ### [`v1.15.11`](https://redirect.github.com/cargo-bins/cargo-binstall/releases/tag/v1.15.11) [Compare Source](https://redirect.github.com/cargo-bins/cargo-binstall/compare/v1.15.10...v1.15.11) *Binstall is a tool to fetch and install Rust-based executables as binaries. It aims to be a drop-in replacement for `cargo install` in most cases. Install it today with `cargo install cargo-binstall`, from the binaries below, or if you already have it, upgrade with `cargo binstall cargo-binstall`.* ##### In this release: - Fix binstalk-downloader cannot decode some zip files on macos (x64 and arm64) platforms ([#​2049](https://redirect.github.com/cargo-bins/cargo-binstall/issues/2049) [#​2362](https://redirect.github.com/cargo-bins/cargo-binstall/issues/2362)) - Fix grammer in `HELP.md` and `--help` output ([#​2357](https://redirect.github.com/cargo-bins/cargo-binstall/issues/2357) [#​2359](https://redirect.github.com/cargo-bins/cargo-binstall/issues/2359)) - Update documentation link in Cargo.toml ([#​2355](https://redirect.github.com/cargo-bins/cargo-binstall/issues/2355)) ##### Other changes: - Upgrade dependencies
--- ### Configuration 📅 **Schedule**: Branch creation - "before 4am on Monday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR was generated by [Mend Renovate](https://mend.io/renovate/). View the [repository job log](https://developer.mend.io/github/astral-sh/ruff). Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index af608d977a..9e04254a49 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -440,7 +440,7 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Install cargo-binstall" - uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10 + uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11 - name: "Install cargo-fuzz" # Download the latest version from quick install and not the github releases because github releases only has MUSL targets. 
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm @@ -692,7 +692,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: persist-credentials: false - - uses: cargo-bins/cargo-binstall@b3f755e95653da9a2d25b99154edfdbd5b356d0a # v1.15.10 + - uses: cargo-bins/cargo-binstall@ae04fb5e853ae6cd3ad7de4a1d554a8b646d12aa # v1.15.11 - run: cargo binstall --no-confirm cargo-shear - run: cargo shear From 52bd22003b9a29dcc67c3ab707ab28b5e30cf3d1 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:39:46 +0100 Subject: [PATCH 128/180] Update Rust crate libcst to v1.8.6 (#21355) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d4342c6c0e..d9f831125d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1851,9 +1851,9 @@ checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976" [[package]] name = "libcst" -version = "1.8.5" +version = "1.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d56bcd52d9b5e5f43e7fba20eb1f423ccb18c84cdf1cb506b8c1b95776b0b49" +checksum = "6aea7143e4a0ed59b87a1ee71e198500889f8b005311136be15e84c97a6fcd8d" dependencies = [ "annotate-snippets", "libcst_derive", @@ -1866,9 +1866,9 @@ dependencies = [ [[package]] name = "libcst_derive" -version = "1.8.5" +version = "1.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fcf5a725c4db703660124fe0edb98285f1605d0b87b7ee8684b699764a4f01a" +checksum = "0903173ea316c34a44d0497161e04d9210af44f5f5e89bf2f55d9a254c9a0e8d" dependencies = [ "quote", "syn", From 73b1fce74a9b4b3a67373e6b6cd9d70346d12989 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Mon, 10 Nov 2025 08:46:31 +0000 Subject: [PATCH 129/180] [ty] Add diagnostics for `isinstance()` and 
`issubclass()` calls that use invalid PEP-604 unions for their second argument (#21343) ## Summary This PR adds extra validation for `isinstance()` and `issubclass()` calls that use `UnionType` instances for their second argument. According to typeshed's annotations, any `UnionType` is accepted for the second argument, but this isn't true at runtime: at runtime, all elements in the `UnionType` must either be class objects or be `None` in order for the `isinstance()` or `issubclass()` call to reliably succeed: ```pycon % uvx python3.14 Python 3.14.0 (main, Oct 10 2025, 12:54:13) [Clang 20.1.4 ] on darwin Type "help", "copyright", "credits" or "license" for more information. >>> from typing import LiteralString >>> import types >>> type(LiteralString | int) is types.UnionType True >>> isinstance(42, LiteralString | int) Traceback (most recent call last): File "", line 1, in isinstance(42, LiteralString | int) ~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^ File "/Users/alexw/Library/Application Support/uv/python/cpython-3.14.0-macos-aarch64-none/lib/python3.14/typing.py", line 559, in __instancecheck__ raise TypeError(f"{self} cannot be used with isinstance()") TypeError: typing.LiteralString cannot be used with isinstance() ``` ## Test Plan Added mdtests/snapshots --- .../resources/mdtest/narrow/isinstance.md | 15 +++- .../resources/mdtest/narrow/issubclass.md | 5 +- ...classinfo`_is_an_in…_(eeef56c0ef87a30b).snap | 88 +++++++++++++++++++ ...classinfo`_is_an_in…_(7bb66a0f412caac1).snap | 42 +++++++++ .../ty_python_semantic/src/types/function.rs | 71 ++++++++++++++- 5 files changed, 213 insertions(+), 8 deletions(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap diff --git 
a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md index 0d3e11b996..48df6acd30 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md @@ -104,16 +104,25 @@ Except for the `None` special case mentioned above, narrowing can only take plac the PEP-604 union are class literals. If any elements are generic aliases or other types, the `isinstance()` call may fail at runtime, so no narrowing can take place: + + ```toml [environment] python-version = "3.10" ``` ```py +from typing import Any, Literal, NamedTuple + def _(x: int | list[int] | bytes): - # TODO: this fails at runtime; we should emit a diagnostic - # (requires special-casing of the `isinstance()` signature) - if isinstance(x, int | list[int]): + # error: [invalid-argument-type] + if isinstance(x, list[int] | int): + reveal_type(x) # revealed: int | list[int] | bytes + # error: [invalid-argument-type] + elif isinstance(x, Literal[42] | list[int] | bytes): + reveal_type(x) # revealed: int | list[int] | bytes + # error: [invalid-argument-type] + elif isinstance(x, Any | NamedTuple | list[int]): reveal_type(x) # revealed: int | list[int] | bytes else: reveal_type(x) # revealed: int | list[int] | bytes diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md index 11eb2ebaf4..139c479843 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md @@ -165,6 +165,8 @@ Except for the `None` special case mentioned above, narrowing can only take plac the PEP-604 union are class literals. 
If any elements are generic aliases or other types, the `issubclass()` call may fail at runtime, so no narrowing can take place: + + ```toml [environment] python-version = "3.10" @@ -172,8 +174,7 @@ python-version = "3.10" ```py def _(x: type[int | list | bytes]): - # TODO: this fails at runtime; we should emit a diagnostic - # (requires special-casing of the `issubclass()` signature) + # error: [invalid-argument-type] if issubclass(x, int | list[int]): reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] else: diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap new file mode 100644 index 0000000000..34383c8fd0 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap @@ -0,0 +1,88 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: isinstance.md - Narrowing for `isinstance` checks - `classinfo` is an invalid PEP-604 union of types +mdtest path: crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import Any, Literal, NamedTuple + 2 | + 3 | def _(x: int | list[int] | bytes): + 4 | # error: [invalid-argument-type] + 5 | if isinstance(x, list[int] | int): + 6 | reveal_type(x) # revealed: int | list[int] | bytes + 7 | # error: [invalid-argument-type] + 8 | elif isinstance(x, Literal[42] | list[int] | bytes): + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] +11 | elif isinstance(x, Any | NamedTuple | list[int]): +12 | reveal_type(x) # revealed: int | list[int] | bytes +13 | else: +14 | reveal_type(x) # revealed: int | list[int] | bytes +``` + 
+# Diagnostics + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:5:8 + | +3 | def _(x: int | list[int] | bytes): +4 | # error: [invalid-argument-type] +5 | if isinstance(x, list[int] | int): + | ^^^^^^^^^^^^^^---------------^ + | | + | This `UnionType` instance contains non-class elements +6 | reveal_type(x) # revealed: int | list[int] | bytes +7 | # error: [invalid-argument-type] + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects +info: Element `` in the union is not a class object +info: rule `invalid-argument-type` is enabled by default + +``` + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:8:10 + | + 6 | reveal_type(x) # revealed: int | list[int] | bytes + 7 | # error: [invalid-argument-type] + 8 | elif isinstance(x, Literal[42] | list[int] | bytes): + | ^^^^^^^^^^^^^^-------------------------------^ + | | + | This `UnionType` instance contains non-class elements + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects +info: Elements `typing.Literal` and `` in the union are not class objects +info: rule `invalid-argument-type` is enabled by default + +``` + +``` +error[invalid-argument-type]: Invalid second argument to `isinstance` + --> src/mdtest_snippet.py:11:10 + | + 9 | reveal_type(x) # revealed: int | list[int] | bytes +10 | # error: [invalid-argument-type] +11 | elif isinstance(x, Any | NamedTuple | list[int]): + | ^^^^^^^^^^^^^^----------------------------^ + | | + | This `UnionType` instance contains non-class elements +12 | reveal_type(x) # revealed: int | list[int] | bytes +13 | else: + | +info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects 
+info: Element `typing.Any` in the union, and 2 more elements, are not class objects +info: rule `invalid-argument-type` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap new file mode 100644 index 0000000000..27318dfe2b --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/issubclass.md_-_Narrowing_for_`issub…_-_`classinfo`_is_an_in…_(7bb66a0f412caac1).snap @@ -0,0 +1,42 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: issubclass.md - Narrowing for `issubclass` checks - `classinfo` is an invalid PEP-604 union of types +mdtest path: crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(x: type[int | list | bytes]): +2 | # error: [invalid-argument-type] +3 | if issubclass(x, int | list[int]): +4 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +5 | else: +6 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +``` + +# Diagnostics + +``` +error[invalid-argument-type]: Invalid second argument to `issubclass` + --> src/mdtest_snippet.py:3:8 + | +1 | def _(x: type[int | list | bytes]): +2 | # error: [invalid-argument-type] +3 | if issubclass(x, int | list[int]): + | ^^^^^^^^^^^^^^---------------^ + | | + | This `UnionType` instance contains non-class elements +4 | reveal_type(x) # revealed: type[int] | type[list[Unknown]] | type[bytes] +5 | else: + | +info: A `UnionType` instance can only be used as the second argument to `issubclass` if all elements are class objects +info: Element `` in the union is not a class object +info: rule `invalid-argument-type` is enabled by default + +``` diff --git 
a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 6244b0a85a..2462748d03 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -81,9 +81,9 @@ use crate::types::visitor::any_over_type; use crate::types::{ ApplyTypeMappingVisitor, BoundMethodType, BoundTypeVarInstance, CallableType, ClassBase, ClassLiteral, ClassType, DeprecatedInstance, DynamicType, FindLegacyTypeVarsVisitor, - HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, NormalizedVisitor, - SpecialFormType, Truthiness, Type, TypeContext, TypeMapping, TypeRelation, UnionBuilder, - binding_type, todo_type, walk_signature, + HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, KnownInstanceType, + NormalizedVisitor, SpecialFormType, Truthiness, Type, TypeContext, TypeMapping, TypeRelation, + UnionBuilder, binding_type, todo_type, walk_signature, }; use crate::{Db, FxOrderSet, ModuleName, resolve_module}; @@ -1755,6 +1755,71 @@ impl KnownFunction { diagnostic .set_primary_message("This call will raise `TypeError` at runtime"); } + + Type::KnownInstance(KnownInstanceType::UnionType(_)) => { + fn find_invalid_elements<'db>( + db: &'db dyn Db, + ty: Type<'db>, + invalid_elements: &mut Vec>, + ) { + match ty { + Type::ClassLiteral(_) => {} + Type::NominalInstance(instance) + if instance.has_known_class(db, KnownClass::NoneType) => {} + Type::KnownInstance(KnownInstanceType::UnionType(union)) => { + for element in union.elements(db) { + find_invalid_elements(db, *element, invalid_elements); + } + } + _ => invalid_elements.push(ty), + } + } + + let mut invalid_elements = vec![]; + find_invalid_elements(db, *second_argument, &mut invalid_elements); + + let Some((first_invalid_element, other_invalid_elements)) = + invalid_elements.split_first() + else { + return; + }; + + let Some(builder) = + context.report_lint(&INVALID_ARGUMENT_TYPE, call_expression) + 
else { + return; + }; + + let function_name: &str = self.into(); + + let mut diagnostic = builder.into_diagnostic(format_args!( + "Invalid second argument to `{function_name}`" + )); + diagnostic.info(format_args!( + "A `UnionType` instance can only be used as the second argument to \ + `{function_name}` if all elements are class objects" + )); + diagnostic.annotate( + Annotation::secondary(context.span(&call_expression.arguments.args[1])) + .message("This `UnionType` instance contains non-class elements"), + ); + match other_invalid_elements { + [] => diagnostic.info(format_args!( + "Element `{}` in the union is not a class object", + first_invalid_element.display(db) + )), + [single] => diagnostic.info(format_args!( + "Elements `{}` and `{}` in the union are not class objects", + first_invalid_element.display(db), + single.display(db), + )), + _ => diagnostic.info(format_args!( + "Element `{}` in the union, and {} more elements, are not class objects", + first_invalid_element.display(db), + other_invalid_elements.len(), + )) + } + } _ => {} } } From 3fa609929ff56b2c67e2bd80d3878ac95e5b0335 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Mon, 10 Nov 2025 10:04:44 +0100 Subject: [PATCH 130/180] Save rust cache for CI jobs on main only (#21359) --- .github/workflows/ci.yaml | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9e04254a49..1f9ddad127 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -231,6 +231,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: | rustup component add clippy @@ -251,6 +253,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - 
name: "Install Rust toolchain" run: rustup show - name: "Install mold" @@ -315,6 +319,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" @@ -350,6 +356,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install cargo nextest" @@ -376,6 +384,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 @@ -411,6 +421,8 @@ jobs: file: "Cargo.toml" field: "workspace.package.rust-version" - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" env: MSRV: ${{ steps.msrv.outputs.value }} @@ -435,6 +447,7 @@ jobs: - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: workspaces: "fuzz -> target" + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" @@ -496,6 +509,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" run: rustup component add rustfmt @@ -650,6 +665,8 @@ jobs: persist-credentials: false - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - 
uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" @@ -707,6 +724,8 @@ jobs: persist-credentials: false - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Install mold" @@ -730,6 +749,8 @@ jobs: python-version: ${{ env.PYTHON_VERSION }} architecture: x64 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Prep README.md" run: python scripts/transform_readme.py --target pypi - name: "Build wheels" @@ -754,6 +775,8 @@ jobs: persist-credentials: false - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 @@ -784,6 +807,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Add SSH key" if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }} uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1 @@ -826,6 +851,8 @@ jobs: with: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show - name: "Run checks" @@ -896,6 +923,8 @@ jobs: - name: "Install Rust toolchain" run: rustup target add wasm32-unknown-unknown - uses: 
Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0 with: node-version: 22 @@ -934,6 +963,8 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" @@ -972,6 +1003,8 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" @@ -1010,6 +1043,8 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + save-if: ${{ github.ref == 'refs/heads/main' }} - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - name: "Install Rust toolchain" From 238f151371649218369da79815cb86d4f2029f6c Mon Sep 17 00:00:00 2001 From: David Peter Date: Mon, 10 Nov 2025 10:24:38 +0100 Subject: [PATCH 131/180] [ty] Add support for `Optional` and `Annotated` in implicit type aliases (#21321) ## Summary Add support for `Optional` and `Annotated` in implicit type aliases part of https://github.com/astral-sh/ty/issues/221 ## Typing conformance changes New expected diagnostics. ## Ecosystem A lot of true positives, some known limitations unrelated to this PR. 
## Test Plan New Markdown tests --- .../resources/mdtest/annotations/annotated.md | 3 +- .../resources/mdtest/implicit_type_aliases.md | 143 ++++++++++++- ...classinfo`_is_an_in…_(eeef56c0ef87a30b).snap | 2 +- crates/ty_python_semantic/src/types.rs | 68 ++++-- .../src/types/class_base.rs | 1 + .../src/types/infer/builder.rs | 200 ++++++++++++------ .../types/infer/builder/type_expression.rs | 51 ++--- 7 files changed, 343 insertions(+), 125 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md b/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md index 5089804119..7c0ba50f1a 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/annotated.md @@ -76,8 +76,7 @@ from ty_extensions import reveal_mro class C(Annotated[int, "foo"]): ... -# TODO: Should be `(, , )` -reveal_mro(C) # revealed: (, @Todo(Inference of subscript on special form), ) +reveal_mro(C) # revealed: (, , ) ``` ### Not parameterized diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md index 504921c317..690225d9b2 100644 --- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md +++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md @@ -33,7 +33,7 @@ g(None) We also support unions in type aliases: ```py -from typing_extensions import Any, Never, Literal +from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional from ty_extensions import Unknown IntOrStr = int | str @@ -56,6 +56,14 @@ UnknownOrInt = Unknown | int IntOrUnknown = int | Unknown StrOrZero = str | Literal[0] ZeroOrStr = Literal[0] | str +LiteralStringOrInt = LiteralString | int +IntOrLiteralString = int | LiteralString +NoneOrTuple = None | Tuple[int, str] +TupleOrNone = Tuple[int, str] | None +IntOrAnnotated = int | Annotated[str, "meta"] 
+AnnotatedOrInt = Annotated[str, "meta"] | int +IntOrOptional = int | Optional[str] +OptionalOrInt = Optional[str] | int reveal_type(IntOrStr) # revealed: types.UnionType reveal_type(IntOrStrOrBytes1) # revealed: types.UnionType @@ -77,6 +85,14 @@ reveal_type(UnknownOrInt) # revealed: types.UnionType reveal_type(IntOrUnknown) # revealed: types.UnionType reveal_type(StrOrZero) # revealed: types.UnionType reveal_type(ZeroOrStr) # revealed: types.UnionType +reveal_type(IntOrLiteralString) # revealed: types.UnionType +reveal_type(LiteralStringOrInt) # revealed: types.UnionType +reveal_type(NoneOrTuple) # revealed: types.UnionType +reveal_type(TupleOrNone) # revealed: types.UnionType +reveal_type(IntOrAnnotated) # revealed: types.UnionType +reveal_type(AnnotatedOrInt) # revealed: types.UnionType +reveal_type(IntOrOptional) # revealed: types.UnionType +reveal_type(OptionalOrInt) # revealed: types.UnionType def _( int_or_str: IntOrStr, @@ -99,6 +115,14 @@ def _( int_or_unknown: IntOrUnknown, str_or_zero: StrOrZero, zero_or_str: ZeroOrStr, + literal_string_or_int: LiteralStringOrInt, + int_or_literal_string: IntOrLiteralString, + none_or_tuple: NoneOrTuple, + tuple_or_none: TupleOrNone, + int_or_annotated: IntOrAnnotated, + annotated_or_int: AnnotatedOrInt, + int_or_optional: IntOrOptional, + optional_or_int: OptionalOrInt, ): reveal_type(int_or_str) # revealed: int | str reveal_type(int_or_str_or_bytes1) # revealed: int | str | bytes @@ -120,6 +144,14 @@ def _( reveal_type(int_or_unknown) # revealed: int | Unknown reveal_type(str_or_zero) # revealed: str | Literal[0] reveal_type(zero_or_str) # revealed: Literal[0] | str + reveal_type(literal_string_or_int) # revealed: LiteralString | int + reveal_type(int_or_literal_string) # revealed: int | LiteralString + reveal_type(none_or_tuple) # revealed: None | tuple[int, str] + reveal_type(tuple_or_none) # revealed: tuple[int, str] | None + reveal_type(int_or_annotated) # revealed: int | str + reveal_type(annotated_or_int) # 
revealed: str | int + reveal_type(int_or_optional) # revealed: int | str | None + reveal_type(optional_or_int) # revealed: str | None | int ``` If a type is unioned with itself in a value expression, the result is just that type. No @@ -325,6 +357,115 @@ def _(weird: IntLiteral1[int]): reveal_type(weird) # revealed: Unknown ``` +## `Annotated` + +Basic usage: + +```py +from typing import Annotated + +MyAnnotatedInt = Annotated[int, "some metadata", 1, 2, 3] + +def _(annotated_int: MyAnnotatedInt): + reveal_type(annotated_int) # revealed: int +``` + +Usage with generics: + +```py +from typing import TypeVar + +T = TypeVar("T") + +Deprecated = Annotated[T, "deprecated attribute"] + +class C: + old: Deprecated[int] + +# TODO: Should be `int` +reveal_type(C().old) # revealed: @Todo(Generic specialization of typing.Annotated) +``` + +If the metadata argument is missing, we emit an error (because this code fails at runtime), but +still use the first element as the type, when used in annotations: + +```py +# error: [invalid-type-form] "Special form `typing.Annotated` expected at least 2 arguments (one type and at least one metadata element)" +WronglyAnnotatedInt = Annotated[int] + +def _(wrongly_annotated_int: WronglyAnnotatedInt): + reveal_type(wrongly_annotated_int) # revealed: int +``` + +## `Optional` + +Starting with Python 3.14, `Optional[int]` creates an instance of `typing.Union`, which is an alias +for `types.UnionType`. 
We only support this new behavior and do not attempt to model the details of +the pre-3.14 behavior: + +```py +from typing import Optional + +MyOptionalInt = Optional[int] + +reveal_type(MyOptionalInt) # revealed: types.UnionType + +def _(optional_int: MyOptionalInt): + reveal_type(optional_int) # revealed: int | None +``` + +A special case is `Optional[None]`, which is equivalent to `None`: + +```py +JustNone = Optional[None] + +reveal_type(JustNone) # revealed: None + +def _(just_none: JustNone): + reveal_type(just_none) # revealed: None +``` + +Invalid uses: + +```py +# error: [invalid-type-form] "`typing.Optional` requires exactly one argument" +Optional[int, str] +``` + +## `LiteralString`, `NoReturn`, `Never` + +```py +from typing_extensions import LiteralString, NoReturn, Never + +MyLiteralString = LiteralString +MyNoReturn = NoReturn +MyNever = Never + +reveal_type(MyLiteralString) # revealed: typing.LiteralString +reveal_type(MyNoReturn) # revealed: typing.NoReturn +reveal_type(MyNever) # revealed: typing.Never + +def _( + ls: MyLiteralString, + nr: MyNoReturn, + nv: MyNever, +): + reveal_type(ls) # revealed: LiteralString + reveal_type(nr) # revealed: Never + reveal_type(nv) # revealed: Never +``` + +## `Tuple` + +```py +from typing import Tuple + +IntAndStr = Tuple[int, str] + +def _(int_and_str: IntAndStr): + reveal_type(int_and_str) # revealed: tuple[int, str] +``` + ## Stringified annotations? 
From the [typing spec on type aliases](https://typing.python.org/en/latest/spec/aliases.html): diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap index 34383c8fd0..822f9319a1 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/isinstance.md_-_Narrowing_for_`isins…_-_`classinfo`_is_an_in…_(eeef56c0ef87a30b).snap @@ -63,7 +63,7 @@ error[invalid-argument-type]: Invalid second argument to `isinstance` 10 | # error: [invalid-argument-type] | info: A `UnionType` instance can only be used as the second argument to `isinstance` if all elements are class objects -info: Elements `typing.Literal` and `` in the union are not class objects +info: Elements `` and `` in the union are not class objects info: rule `invalid-argument-type` is enabled by default ``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index e3b8b8e89e..e69d1fa3a3 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -6462,7 +6462,12 @@ impl<'db> Type<'db> { } Ok(builder.build()) } - KnownInstanceType::Literal(list) => Ok(list.to_union(db)), + KnownInstanceType::Literal(ty) => Ok(ty.inner(db)), + KnownInstanceType::Annotated(ty) => { + Ok(ty + .inner(db) + .in_type_expression(db, scope_id, typevar_binding_context)?) + } }, Type::SpecialForm(special_form) => match special_form { @@ -7676,10 +7681,13 @@ pub enum KnownInstanceType<'db> { /// A single instance of `types.UnionType`, which stores the left- and /// right-hand sides of a PEP 604 union. 
- UnionType(TypeList<'db>), + UnionType(InternedTypes<'db>), /// A single instance of `typing.Literal` - Literal(TypeList<'db>), + Literal(InternedType<'db>), + + /// A single instance of `typing.Annotated` + Annotated(InternedType<'db>), } fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -7706,11 +7714,14 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( visitor.visit_type(db, default_ty); } } - KnownInstanceType::UnionType(list) | KnownInstanceType::Literal(list) => { + KnownInstanceType::UnionType(list) => { for element in list.elements(db) { visitor.visit_type(db, *element); } } + KnownInstanceType::Literal(ty) | KnownInstanceType::Annotated(ty) => { + visitor.visit_type(db, ty.inner(db)); + } } } @@ -7748,7 +7759,8 @@ impl<'db> KnownInstanceType<'db> { Self::ConstraintSet(set) } Self::UnionType(list) => Self::UnionType(list.normalized_impl(db, visitor)), - Self::Literal(list) => Self::Literal(list.normalized_impl(db, visitor)), + Self::Literal(ty) => Self::Literal(ty.normalized_impl(db, visitor)), + Self::Annotated(ty) => Self::Annotated(ty.normalized_impl(db, visitor)), } } @@ -7768,6 +7780,7 @@ impl<'db> KnownInstanceType<'db> { Self::ConstraintSet(_) => KnownClass::ConstraintSet, Self::UnionType(_) => KnownClass::UnionType, Self::Literal(_) => KnownClass::GenericAlias, + Self::Annotated(_) => KnownClass::GenericAlias, } } @@ -7848,7 +7861,10 @@ impl<'db> KnownInstanceType<'db> { ) } KnownInstanceType::UnionType(_) => f.write_str("types.UnionType"), - KnownInstanceType::Literal(_) => f.write_str("typing.Literal"), + KnownInstanceType::Literal(_) => f.write_str(""), + KnownInstanceType::Annotated(_) => { + f.write_str("") + } } } } @@ -8045,7 +8061,7 @@ impl<'db> InvalidTypeExpressionError<'db> { fn into_fallback_type( self, context: &InferContext, - node: &ast::Expr, + node: &impl Ranged, is_reachable: bool, ) -> Type<'db> { let InvalidTypeExpressionError { @@ -8984,29 +9000,25 @@ impl<'db> 
TypeVarBoundOrConstraints<'db> { /// # Ordering /// Ordering is based on the context's salsa-assigned id and not on its values. /// The id may change between runs, or when the context was garbage collected and recreated. -#[salsa::interned(debug)] +#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] #[derive(PartialOrd, Ord)] -pub struct TypeList<'db> { +pub struct InternedTypes<'db> { #[returns(deref)] elements: Box<[Type<'db>]>, } -impl get_size2::GetSize for TypeList<'_> {} +impl get_size2::GetSize for InternedTypes<'_> {} -impl<'db> TypeList<'db> { +impl<'db> InternedTypes<'db> { pub(crate) fn from_elements( db: &'db dyn Db, elements: impl IntoIterator>, - ) -> TypeList<'db> { - TypeList::new(db, elements.into_iter().collect::>()) - } - - pub(crate) fn singleton(db: &'db dyn Db, element: Type<'db>) -> TypeList<'db> { - TypeList::from_elements(db, [element]) + ) -> InternedTypes<'db> { + InternedTypes::new(db, elements.into_iter().collect::>()) } pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { - TypeList::new( + InternedTypes::new( db, self.elements(db) .iter() @@ -9014,10 +9026,24 @@ impl<'db> TypeList<'db> { .collect::>(), ) } +} - /// Turn this list of types `[T1, T2, ...]` into a union type `T1 | T2 | ...`. - pub(crate) fn to_union(self, db: &'db dyn Db) -> Type<'db> { - UnionType::from_elements(db, self.elements(db)) +/// A salsa-interned `Type` +/// +/// # Ordering +/// Ordering is based on the context's salsa-assigned id and not on its values. +/// The id may change between runs, or when the context was garbage collected and recreated. 
+#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] +#[derive(PartialOrd, Ord)] +pub struct InternedType<'db> { + inner: Type<'db>, +} + +impl get_size2::GetSize for InternedType<'_> {} + +impl<'db> InternedType<'db> { + pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { + InternedType::new(db, self.inner(db).normalized_impl(db, visitor)) } } diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs index caddc88567..87417f8314 100644 --- a/crates/ty_python_semantic/src/types/class_base.rs +++ b/crates/ty_python_semantic/src/types/class_base.rs @@ -170,6 +170,7 @@ impl<'db> ClassBase<'db> { | KnownInstanceType::ConstraintSet(_) | KnownInstanceType::UnionType(_) | KnownInstanceType::Literal(_) => None, + KnownInstanceType::Annotated(ty) => Self::try_from_type(db, ty.inner(db), subclass), }, Type::SpecialForm(special_form) => match special_form { diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 3b1142b89b..37cb64ff0b 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -69,12 +69,12 @@ use crate::types::diagnostic::{ hint_if_stdlib_submodule_exists_on_other_versions, report_attempted_protocol_instantiation, report_bad_dunder_set_call, report_cannot_pop_required_field_on_typed_dict, report_duplicate_bases, report_implicit_return_type, report_index_out_of_bounds, - report_instance_layout_conflict, report_invalid_assignment, - report_invalid_attribute_assignment, report_invalid_exception_caught, - report_invalid_exception_cause, report_invalid_exception_raised, - report_invalid_generator_function_return_type, report_invalid_key_on_typed_dict, - report_invalid_or_unsupported_base, report_invalid_return_type, - report_invalid_type_checking_constant, + report_instance_layout_conflict, 
report_invalid_arguments_to_annotated, + report_invalid_assignment, report_invalid_attribute_assignment, + report_invalid_exception_caught, report_invalid_exception_cause, + report_invalid_exception_raised, report_invalid_generator_function_return_type, + report_invalid_key_on_typed_dict, report_invalid_or_unsupported_base, + report_invalid_return_type, report_invalid_type_checking_constant, report_namedtuple_field_without_default_after_field_with_default, report_non_subscriptable, report_possibly_missing_attribute, report_possibly_unresolved_reference, report_rebound_typevar, report_slice_step_size_zero, @@ -100,10 +100,10 @@ use crate::types::typed_dict::{ use crate::types::visitor::any_over_type; use crate::types::{ CallDunderError, CallableBinding, CallableType, ClassLiteral, ClassType, DataclassParams, - DynamicType, IntersectionBuilder, IntersectionType, KnownClass, KnownInstanceType, - MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, ParameterForm, - Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, Type, - TypeAliasType, TypeAndQualifiers, TypeContext, TypeList, TypeQualifiers, + DynamicType, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, KnownClass, + KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, + ParameterForm, Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, + Type, TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType, binding_type, todo_type, @@ -8755,14 +8755,18 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::GenericAlias(..) 
| Type::SpecialForm(_) | Type::KnownInstance( - KnownInstanceType::UnionType(_) | KnownInstanceType::Literal(_), + KnownInstanceType::UnionType(_) + | KnownInstanceType::Literal(_) + | KnownInstanceType::Annotated(_), ), Type::ClassLiteral(..) | Type::SubclassOf(..) | Type::GenericAlias(..) | Type::SpecialForm(_) | Type::KnownInstance( - KnownInstanceType::UnionType(_) | KnownInstanceType::Literal(_), + KnownInstanceType::UnionType(_) + | KnownInstanceType::Literal(_) + | KnownInstanceType::Annotated(_), ), ast::Operator::BitOr, ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => { @@ -8770,7 +8774,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Some(left_ty) } else { Some(Type::KnownInstance(KnownInstanceType::UnionType( - TypeList::from_elements(self.db(), [left_ty, right_ty]), + InternedTypes::from_elements(self.db(), [left_ty, right_ty]), ))) } } @@ -8795,7 +8799,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { && instance.has_known_class(self.db(), KnownClass::NoneType) => { Some(Type::KnownInstance(KnownInstanceType::UnionType( - TypeList::from_elements(self.db(), [left_ty, right_ty]), + InternedTypes::from_elements(self.db(), [left_ty, right_ty]), ))) } @@ -9863,14 +9867,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } fn infer_subscript_load(&mut self, subscript: &ast::ExprSubscript) -> Type<'db> { + let value_ty = self.infer_expression(&subscript.value, TypeContext::default()); + self.infer_subscript_load_impl(value_ty, subscript) + } + + fn infer_subscript_load_impl( + &mut self, + value_ty: Type<'db>, + subscript: &ast::ExprSubscript, + ) -> Type<'db> { let ast::ExprSubscript { range: _, node_index: _, - value, + value: _, slice, ctx, } = subscript; - let value_ty = self.infer_expression(value, TypeContext::default()); + let mut constraint_keys = vec![]; // If `value` is a valid reference, we attempt type narrowing by assignment. 
@@ -9896,60 +9909,117 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Type::from(tuple.to_class_type(db)) }; - // HACK ALERT: If we are subscripting a generic class, short-circuit the rest of the - // subscript inference logic and treat this as an explicit specialization. - // TODO: Move this logic into a custom callable, and update `find_name_in_mro` to return - // this callable as the `__class_getitem__` method on `type`. That probably requires - // updating all of the subscript logic below to use custom callables for all of the _other_ - // special cases, too. - if let Type::ClassLiteral(class) = value_ty { - if class.is_tuple(self.db()) { + match value_ty { + Type::ClassLiteral(class) => { + // HACK ALERT: If we are subscripting a generic class, short-circuit the rest of the + // subscript inference logic and treat this as an explicit specialization. + // TODO: Move this logic into a custom callable, and update `find_name_in_mro` to return + // this callable as the `__class_getitem__` method on `type`. That probably requires + // updating all of the subscript logic below to use custom callables for all of the _other_ + // special cases, too. 
+ if class.is_tuple(self.db()) { + return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); + } + if let Some(generic_context) = class.generic_context(self.db()) { + return self.infer_explicit_class_specialization( + subscript, + value_ty, + class, + generic_context, + ); + } + } + Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) => { + if let Some(generic_context) = type_alias.generic_context(self.db()) { + return self.infer_explicit_type_alias_specialization( + subscript, + value_ty, + type_alias, + generic_context, + ); + } + } + Type::SpecialForm(SpecialFormType::Tuple) => { return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); } - if let Some(generic_context) = class.generic_context(self.db()) { - return self.infer_explicit_class_specialization( - subscript, - value_ty, - class, - generic_context, - ); - } - } - if let Type::SpecialForm(SpecialFormType::Tuple) = value_ty { - return tuple_generic_alias(self.db(), self.infer_tuple_type_expression(slice)); - } - if let Type::KnownInstance(KnownInstanceType::TypeAliasType(type_alias)) = value_ty { - if let Some(generic_context) = type_alias.generic_context(self.db()) { - return self.infer_explicit_type_alias_specialization( - subscript, - value_ty, - type_alias, - generic_context, - ); - } - } - if value_ty == Type::SpecialForm(SpecialFormType::Literal) { - match self.infer_literal_parameter_type(slice) { - Ok(result) => { - return Type::KnownInstance(KnownInstanceType::Literal(TypeList::singleton( - self.db(), - result, - ))); - } - Err(nodes) => { - for node in nodes { - let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, node) - else { - continue; - }; - builder.into_diagnostic( - "Type arguments for `Literal` must be `None`, \ - a literal value (int, bool, str, or bytes), or an enum member", - ); + Type::SpecialForm(SpecialFormType::Literal) => { + match self.infer_literal_parameter_type(slice) { + Ok(result) => { + return 
Type::KnownInstance(KnownInstanceType::Literal(InternedType::new( + self.db(), + result, + ))); + } + Err(nodes) => { + for node in nodes { + let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, node) + else { + continue; + }; + builder.into_diagnostic( + "Type arguments for `Literal` must be `None`, \ + a literal value (int, bool, str, or bytes), or an enum member", + ); + } + return Type::unknown(); } - return Type::unknown(); } } + Type::SpecialForm(SpecialFormType::Annotated) => { + let ast::Expr::Tuple(ast::ExprTuple { + elts: ref arguments, + .. + }) = **slice + else { + report_invalid_arguments_to_annotated(&self.context, subscript); + + return self.infer_expression(slice, TypeContext::default()); + }; + + if arguments.len() < 2 { + report_invalid_arguments_to_annotated(&self.context, subscript); + } + + let [type_expr, metadata @ ..] = &arguments[..] else { + for argument in arguments { + self.infer_expression(argument, TypeContext::default()); + } + self.store_expression_type(slice, Type::unknown()); + return Type::unknown(); + }; + + for element in metadata { + self.infer_expression(element, TypeContext::default()); + } + + let ty = self.infer_expression(type_expr, TypeContext::default()); + + return Type::KnownInstance(KnownInstanceType::Annotated(InternedType::new( + self.db(), + ty, + ))); + } + Type::SpecialForm(SpecialFormType::Optional) => { + if matches!(**slice, ast::Expr::Tuple(_)) { + if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { + builder.into_diagnostic(format_args!( + "`typing.Optional` requires exactly one argument" + )); + } + } + + let ty = self.infer_expression(slice, TypeContext::default()); + + // `Optional[None]` is equivalent to `None`: + if ty.is_none(self.db()) { + return ty; + } + + return Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements(self.db(), [ty, Type::none(self.db())]), + )); + } + _ => {} } let slice_ty = self.infer_expression(slice, 
TypeContext::default()); diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index d091487ce7..7694839c15 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -4,7 +4,7 @@ use ruff_python_ast as ast; use super::{DeferredExpressionState, TypeInferenceBuilder}; use crate::types::diagnostic::{ self, INVALID_TYPE_FORM, NON_SUBSCRIPTABLE, report_invalid_argument_number_to_special_form, - report_invalid_arguments_to_annotated, report_invalid_arguments_to_callable, + report_invalid_arguments_to_callable, }; use crate::types::signatures::Signature; use crate::types::string_annotation::parse_string_annotation; @@ -819,11 +819,15 @@ impl<'db> TypeInferenceBuilder<'db, '_> { if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { builder.into_diagnostic(format_args!( "`{ty}` is not a generic class", - ty = ty.to_union(self.db()).display(self.db()) + ty = ty.inner(self.db()).display(self.db()) )); } Type::unknown() } + KnownInstanceType::Annotated(_) => { + self.infer_type_expression(slice); + todo_type!("Generic specialization of typing.Annotated") + } }, Type::Dynamic(DynamicType::Todo(_)) => { self.infer_type_expression(slice); @@ -900,7 +904,7 @@ impl<'db> TypeInferenceBuilder<'db, '_> { ty } - fn infer_parameterized_special_form_type_expression( + pub(crate) fn infer_parameterized_special_form_type_expression( &mut self, subscript: &ast::ExprSubscript, special_form: SpecialFormType, @@ -909,36 +913,13 @@ impl<'db> TypeInferenceBuilder<'db, '_> { let arguments_slice = &*subscript.slice; match special_form { SpecialFormType::Annotated => { - let ast::Expr::Tuple(ast::ExprTuple { - elts: arguments, .. 
- }) = arguments_slice - else { - report_invalid_arguments_to_annotated(&self.context, subscript); - - // `Annotated[]` with less than two arguments is an error at runtime. - // However, we still treat `Annotated[T]` as `T` here for the purpose of - // giving better diagnostics later on. - // Pyright also does this. Mypy doesn't; it falls back to `Any` instead. - return self.infer_type_expression(arguments_slice); - }; - - if arguments.len() < 2 { - report_invalid_arguments_to_annotated(&self.context, subscript); - } - - let [type_expr, metadata @ ..] = &arguments[..] else { - for argument in arguments { - self.infer_expression(argument, TypeContext::default()); - } - self.store_expression_type(arguments_slice, Type::unknown()); - return Type::unknown(); - }; - - for element in metadata { - self.infer_expression(element, TypeContext::default()); - } - - let ty = self.infer_type_expression(type_expr); + let ty = self + .infer_subscript_load_impl( + Type::SpecialForm(SpecialFormType::Annotated), + subscript, + ) + .in_type_expression(db, self.scope(), None) + .unwrap_or_else(|err| err.into_fallback_type(&self.context, subscript, true)); self.store_expression_type(arguments_slice, ty); ty } @@ -1453,8 +1434,8 @@ impl<'db> TypeInferenceBuilder<'db, '_> { return Ok(value_ty); } } - Type::KnownInstance(KnownInstanceType::Literal(list)) => { - return Ok(list.to_union(self.db())); + Type::KnownInstance(KnownInstanceType::Literal(ty)) => { + return Ok(ty.inner(self.db())); } // `Literal[SomeEnum.Member]` Type::EnumLiteral(_) => { From a6f2dee33bf50279dbfb0a0b2d6785bf08349066 Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Mon, 10 Nov 2025 04:42:09 -0500 Subject: [PATCH 132/180] Add upstream linter URL to `ruff linter --output-format=json` (#21316) --- crates/ruff/src/commands/linter.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/ruff/src/commands/linter.rs b/crates/ruff/src/commands/linter.rs index 083102c09c..19e7d3df20 100644 --- 
a/crates/ruff/src/commands/linter.rs +++ b/crates/ruff/src/commands/linter.rs @@ -16,6 +16,8 @@ struct LinterInfo { prefix: &'static str, name: &'static str, #[serde(skip_serializing_if = "Option::is_none")] + url: Option<&'static str>, + #[serde(skip_serializing_if = "Option::is_none")] categories: Option>, } @@ -50,6 +52,7 @@ pub(crate) fn linter(format: HelpFormat) -> Result<()> { .map(|linter_info| LinterInfo { prefix: linter_info.common_prefix(), name: linter_info.name(), + url: linter_info.url(), categories: linter_info.upstream_categories().map(|cats| { cats.iter() .map(|c| LinterCategoryInfo { From ab46c8de0fef117afb6ac29c3f6b6a0fe0e51393 Mon Sep 17 00:00:00 2001 From: David Peter Date: Mon, 10 Nov 2025 11:13:36 +0100 Subject: [PATCH 133/180] [ty] Add support for properties that return `Self` (#21335) ## Summary Detect usages of implicit `self` in property getters, which allows us to treat their signature as being generic. closes https://github.com/astral-sh/ty/issues/1502 ## Typing conformance Two new type assertions that are succeeding. ## Ecosystem results Mostly look good. There are a few new false positives related to a bug with constrained typevars that is unrelated to the work here. I reported this as https://github.com/astral-sh/ty/issues/1503. ## Test Plan Added regression tests. --- .../resources/mdtest/annotations/self.md | 6 +- .../resources/mdtest/properties.md | 34 +++++++++ .../src/types/signatures.rs | 70 ++++++++++++++----- 3 files changed, 88 insertions(+), 22 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/self.md b/crates/ty_python_semantic/resources/mdtest/annotations/self.md index b635104a75..4d794fe6c4 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/self.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/self.md @@ -139,7 +139,7 @@ The first parameter of instance methods always has type `Self`, if it is not exp The name `self` is not special in any way. 
```py -def some_decorator(f: Callable) -> Callable: +def some_decorator[**P, R](f: Callable[P, R]) -> Callable[P, R]: return f class B: @@ -188,10 +188,10 @@ class B: reveal_type(B().name_does_not_matter()) # revealed: B reveal_type(B().positional_only(1)) # revealed: B reveal_type(B().keyword_only(x=1)) # revealed: B +# TODO: This should deally be `B` reveal_type(B().decorated_method()) # revealed: Unknown -# TODO: this should be B -reveal_type(B().a_property) # revealed: Unknown +reveal_type(B().a_property) # revealed: B async def _(): reveal_type(await B().async_method()) # revealed: B diff --git a/crates/ty_python_semantic/resources/mdtest/properties.md b/crates/ty_python_semantic/resources/mdtest/properties.md index 90634eba39..b4c2abae9f 100644 --- a/crates/ty_python_semantic/resources/mdtest/properties.md +++ b/crates/ty_python_semantic/resources/mdtest/properties.md @@ -49,6 +49,40 @@ c.my_property = 2 c.my_property = "a" ``` +## Properties returning `Self` + +A property that returns `Self` refers to an instance of the class: + +```py +from typing_extensions import Self + +class Path: + @property + def parent(self) -> Self: + raise NotImplementedError + +reveal_type(Path().parent) # revealed: Path +``` + +This also works when a setter is defined: + +```py +class Node: + @property + def parent(self) -> Self: + raise NotImplementedError + + @parent.setter + def parent(self, value: Self) -> None: + pass + +root = Node() +child = Node() +child.parent = root + +reveal_type(child.parent) # revealed: Node +``` + ## `property.getter` `property.getter` can be used to overwrite the getter method of a property. 
This does not overwrite diff --git a/crates/ty_python_semantic/src/types/signatures.rs b/crates/ty_python_semantic/src/types/signatures.rs index fb96b59679..7c48b4c289 100644 --- a/crates/ty_python_semantic/src/types/signatures.rs +++ b/crates/ty_python_semantic/src/types/signatures.rs @@ -13,6 +13,7 @@ use std::{collections::HashMap, slice::Iter}; use itertools::{EitherOrBoth, Itertools}; +use ruff_db::parsed::parsed_module; use ruff_python_ast::ParameterWithDefault; use smallvec::{SmallVec, smallvec_inline}; @@ -20,9 +21,9 @@ use super::{ DynamicType, Type, TypeVarVariance, definition_expression_type, infer_definition_types, semantic_index, }; -use crate::semantic_index::definition::Definition; +use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; -use crate::types::function::FunctionType; +use crate::types::function::{is_implicit_classmethod, is_implicit_staticmethod}; use crate::types::generics::{ GenericContext, InferableTypeVars, typing_self, walk_generic_context, }; @@ -36,8 +37,11 @@ use crate::{Db, FxOrderSet}; use ruff_python_ast::{self as ast, name::Name}; #[derive(Clone, Copy, Debug)] +#[expect(clippy::struct_excessive_bools)] struct MethodInformation<'db> { - method: FunctionType<'db>, + is_staticmethod: bool, + is_classmethod: bool, + method_may_be_generic: bool, class_literal: ClassLiteral<'db>, class_is_generic: bool, } @@ -46,17 +50,49 @@ fn infer_method_information<'db>( db: &'db dyn Db, definition: Definition<'db>, ) -> Option> { + let DefinitionKind::Function(function_definition) = definition.kind(db) else { + return None; + }; + let class_scope_id = definition.scope(db); let file = class_scope_id.file(db); + let module = parsed_module(db, file).load(db); let index = semantic_index(db, file); let class_scope = index.scope(class_scope_id.file_scope_id(db)); let class_node = class_scope.node().as_class()?; - let method = infer_definition_types(db, 
definition) - .declaration_type(definition) - .inner_type() - .as_function_literal()?; + let function_node = function_definition.node(&module); + let function_name = &function_node.name; + + let mut is_staticmethod = is_implicit_classmethod(function_name); + let mut is_classmethod = is_implicit_staticmethod(function_name); + + let inference = infer_definition_types(db, definition); + for decorator in &function_node.decorator_list { + let decorator_ty = inference.expression_type(&decorator.expression); + + match decorator_ty + .as_class_literal() + .and_then(|class| class.known(db)) + { + Some(KnownClass::Staticmethod) => { + is_staticmethod = true; + } + Some(KnownClass::Classmethod) => { + is_classmethod = true; + } + _ => {} + } + } + + let method_may_be_generic = match inference.declaration_type(definition).inner_type() { + Type::FunctionLiteral(f) => f.signature(db).overloads.iter().any(|s| { + s.generic_context + .is_some_and(|context| context.variables(db).any(|v| v.typevar(db).is_self(db))) + }), + _ => true, + }; let class_def = index.expect_single_definition(class_node); let (class_literal, class_is_generic) = match infer_definition_types(db, class_def) @@ -71,7 +107,9 @@ fn infer_method_information<'db>( }; Some(MethodInformation { - method, + is_staticmethod, + is_classmethod, + method_may_be_generic, class_literal, class_is_generic, }) @@ -1270,27 +1308,21 @@ impl<'db> Parameters<'db> { }; let method_info = infer_method_information(db, definition); - let is_static_or_classmethod = method_info - .is_some_and(|f| f.method.is_staticmethod(db) || f.method.is_classmethod(db)); + let is_static_or_classmethod = + method_info.is_some_and(|f| f.is_staticmethod || f.is_classmethod); let inferred_annotation = |arg: &ParameterWithDefault| { if let Some(MethodInformation { - method, + method_may_be_generic, class_literal, class_is_generic, + .. 
}) = method_info && !is_static_or_classmethod && arg.parameter.annotation().is_none() && parameters.index(arg.name().id()) == Some(0) { - let method_has_self_in_generic_context = - method.signature(db).overloads.iter().any(|s| { - s.generic_context.is_some_and(|context| { - context.variables(db).any(|v| v.typevar(db).is_self(db)) - }) - }); - - if method_has_self_in_generic_context + if method_may_be_generic || class_is_generic || class_literal .known(db) From f44598dc113a7380ca52d6c969431ca929a93473 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Mon, 10 Nov 2025 12:52:45 +0100 Subject: [PATCH 134/180] [ty] Fix `--exclude` and `src.exclude` merging (#21341) --- crates/ty/tests/cli/file_selection.rs | 75 +++++++++++++++++++++++ crates/ty_project/src/metadata/options.rs | 10 +++ crates/ty_project/src/metadata/value.rs | 13 ++-- crates/ty_python_semantic/src/types.rs | 2 +- 4 files changed, 92 insertions(+), 8 deletions(-) diff --git a/crates/ty/tests/cli/file_selection.rs b/crates/ty/tests/cli/file_selection.rs index 5668e5829d..46f9106c21 100644 --- a/crates/ty/tests/cli/file_selection.rs +++ b/crates/ty/tests/cli/file_selection.rs @@ -589,6 +589,81 @@ fn explicit_path_overrides_exclude() -> anyhow::Result<()> { Ok(()) } +#[test] +fn cli_and_configuration_exclude() -> anyhow::Result<()> { + let case = CliTest::with_files([ + ( + "src/main.py", + r#" + print(undefined_var) # error: unresolved-reference + "#, + ), + ( + "tests/generated.py", + r#" + print(dist_undefined_var) # error: unresolved-reference + "#, + ), + ( + "my_dist/other.py", + r#" + print(other_undefined_var) # error: unresolved-reference + "#, + ), + ( + "ty.toml", + r#" + [src] + exclude = ["tests/"] + "#, + ), + ])?; + + assert_cmd_snapshot!(case.command(), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-reference]: Name `other_undefined_var` used when not defined + --> my_dist/other.py:2:7 + | + 2 | print(other_undefined_var) # error: unresolved-reference + | 
^^^^^^^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + error[unresolved-reference]: Name `undefined_var` used when not defined + --> src/main.py:2:7 + | + 2 | print(undefined_var) # error: unresolved-reference + | ^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command().arg("--exclude").arg("my_dist/"), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-reference]: Name `undefined_var` used when not defined + --> src/main.py:2:7 + | + 2 | print(undefined_var) # error: unresolved-reference + | ^^^^^^^^^^^^^ + | + info: rule `unresolved-reference` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + "); + + Ok(()) +} + #[test] fn invalid_include_pattern() -> anyhow::Result<()> { let case = CliTest::with_files([ diff --git a/crates/ty_project/src/metadata/options.rs b/crates/ty_project/src/metadata/options.rs index d83270be90..8fb75ab201 100644 --- a/crates/ty_project/src/metadata/options.rs +++ b/crates/ty_project/src/metadata/options.rs @@ -1186,6 +1186,16 @@ impl From for DiagnosticFormat { } } +impl Combine for OutputFormat { + #[inline(always)] + fn combine_with(&mut self, _other: Self) {} + + #[inline] + fn combine(self, _other: Self) -> Self { + self + } +} + #[derive( Debug, Default, diff --git a/crates/ty_project/src/metadata/value.rs b/crates/ty_project/src/metadata/value.rs index 22d940df58..c69b5df51e 100644 --- a/crates/ty_project/src/metadata/value.rs +++ b/crates/ty_project/src/metadata/value.rs @@ -179,14 +179,13 @@ impl RangedValue { } } -impl Combine for RangedValue { - fn combine(self, _other: Self) -> Self - where - Self: Sized, - { - self +impl Combine for RangedValue +where + T: Combine, +{ + fn combine_with(&mut self, other: Self) { + self.value.combine_with(other.value); } - fn combine_with(&mut self, _other: Self) {} } impl IntoIterator for RangedValue diff 
--git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index e69d1fa3a3..e3326791ac 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -7493,7 +7493,7 @@ impl<'db> VarianceInferable<'db> for Type<'db> { | Type::TypeAlias(_) => TypeVarVariance::Bivariant, }; - tracing::debug!( + tracing::trace!( "Result of variance of '{tvar}' in `{ty:?}` is `{v:?}`", tvar = typevar.typevar(db).name(db), ty = self.display(db), From 84a810736d40537bcb56362ba818f2a3022e7cf3 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Mon, 10 Nov 2025 14:27:07 +0100 Subject: [PATCH 135/180] Rebuild ruff binary instead of sharing it across jobs (#21361) --- .github/workflows/ci.yaml | 138 ++++++++---------- .../ruff-ecosystem/ruff_ecosystem/projects.py | 3 - 2 files changed, 61 insertions(+), 80 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1f9ddad127..81996d5432 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -254,6 +254,7 @@ jobs: persist-credentials: false - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: + shared-key: ruff-linux-debug save-if: ${{ github.ref == 'refs/heads/main' }} - name: "Install Rust toolchain" run: rustup show @@ -295,14 +296,6 @@ jobs: env: # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025). 
RUSTDOCFLAGS: "-D warnings" - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ruff - path: target/debug/ruff - - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 - with: - name: ty - path: target/debug/ty cargo-test-linux-release: name: "cargo test (linux, release)" @@ -462,9 +455,7 @@ jobs: fuzz-parser: name: "fuzz parser" runs-on: ubuntu-latest - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.parser == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }} timeout-minutes: 20 env: @@ -474,26 +465,23 @@ jobs: with: persist-credentials: false - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download Ruff binary to test - id: download-cached-binary + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: - name: ruff - path: ruff-to-test + shared-key: ruff-linux-debug + save-if: false + - name: "Install Rust toolchain" + run: rustup show + - name: Build Ruff binary + run: cargo build --bin ruff - name: Fuzz - env: - DOWNLOAD_PATH: ${{ steps.download-cached-binary.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x "${DOWNLOAD_PATH}/ruff" - ( uv run \ --python="${PYTHON_VERSION}" \ --project=./python/py-fuzzer \ --locked \ fuzz \ - --test-executable="${DOWNLOAD_PATH}/ruff" \ + --test-executable=target/debug/ruff \ --bin=ruff \ 0-500 ) @@ -535,9 +523,7 @@ jobs: ecosystem: name: "ecosystem" runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }} - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes # Only runs on pull requests, since that is the only we way we can find the 
base version for comparison. # Ecosystem check needs linter and/or formatter changes. if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && needs.determine_changes.outputs.code == 'true' }} @@ -545,26 +531,37 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 with: + ref: ${{ github.event.pull_request.base.ref }} persist-credentials: false + - uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # v7.1.2 with: python-version: ${{ env.PYTHON_VERSION }} activate-environment: true - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download comparison Ruff binary - id: ruff-target - with: - name: ruff - path: target/debug + - name: "Install Rust toolchain" + run: rustup show - - uses: dawidd6/action-download-artifact@20319c5641d495c8a52e688b7dc5fada6c3a9fbc # v8 - name: Download baseline Ruff binary + - name: "Install mold" + uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 + + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 with: - name: ruff - branch: ${{ github.event.pull_request.base.ref }} - workflow: "ci.yaml" - check_artifacts: true + shared-key: ruff-linux-debug + save-if: false + + - name: Build baseline version + run: | + cargo build --bin ruff + mv target/debug/ruff target/debug/ruff-baseline + + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + persist-credentials: false + clean: false + + - name: Build comparison version + run: cargo build --bin ruff - name: Install ruff-ecosystem run: | @@ -572,16 +569,11 @@ jobs: - name: Run `ruff check` stable ecosystem check if: ${{ needs.determine_changes.outputs.linter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail 
to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable + ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-check-stable cat ecosystem-result-check-stable > "$GITHUB_STEP_SUMMARY" echo "### Linter (stable)" > ecosystem-result @@ -590,16 +582,11 @@ jobs: - name: Run `ruff check` preview ecosystem check if: ${{ needs.determine_changes.outputs.linter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem check ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview + ruff-ecosystem check ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-check-preview cat ecosystem-result-check-preview > "$GITHUB_STEP_SUMMARY" echo "### Linter (preview)" >> ecosystem-result @@ -608,16 +595,11 @@ jobs: - name: Run `ruff format` stable ecosystem check if: ${{ needs.determine_changes.outputs.formatter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable + ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown | tee ecosystem-result-format-stable cat ecosystem-result-format-stable > 
"$GITHUB_STEP_SUMMARY" echo "### Formatter (stable)" >> ecosystem-result @@ -626,16 +608,11 @@ jobs: - name: Run `ruff format` preview ecosystem check if: ${{ needs.determine_changes.outputs.formatter == 'true' }} - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | - # Make executable, since artifact download doesn't preserve this - chmod +x ./ruff "${DOWNLOAD_PATH}/ruff" - # Set pipefail to avoid hiding errors with tee set -eo pipefail - ruff-ecosystem format ./ruff "${DOWNLOAD_PATH}/ruff" --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview + ruff-ecosystem format ./target/debug/ruff-baseline ./target/debug/ruff --cache ./checkouts --output-format markdown --force-preview | tee ecosystem-result-format-preview cat ecosystem-result-format-preview > "$GITHUB_STEP_SUMMARY" echo "### Formatter (preview)" >> ecosystem-result @@ -731,7 +708,7 @@ jobs: - name: "Install mold" uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1 - name: "Run ty completion evaluation" - run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv + run: cargo run --profile profiling --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv - name: "Ensure there are no changes" run: diff ./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv @@ -866,9 +843,7 @@ jobs: name: "test ruff-lsp" runs-on: ubuntu-latest timeout-minutes: 5 - needs: - - cargo-test-linux - - determine_changes + needs: determine_changes if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }} steps: - uses: extractions/setup-just@e33e0265a09d6d736e2ee1e0eb685ef1de4669ff # v3.0.0 @@ -876,37 +851,46 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - uses: 
actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - name: "Download ruff-lsp source" + name: "Checkout ruff source" + with: + persist-credentials: false + + - uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1 + with: + shared-key: ruff-linux-debug + save-if: false + + - name: "Install Rust toolchain" + run: rustup show + + - name: Build Ruff binary + run: cargo build -p ruff --bin ruff + + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + name: "Checkout ruff-lsp source" with: persist-credentials: false repository: "astral-sh/ruff-lsp" + path: ruff-lsp - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0 with: # installation fails on 3.13 and newer python-version: "3.12" - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0 - name: Download development ruff binary - id: ruff-target - with: - name: ruff - path: target/debug - - name: Install ruff-lsp dependencies run: | + cd ruff-lsp just install - name: Run ruff-lsp tests - env: - DOWNLOAD_PATH: ${{ steps.ruff-target.outputs.download-path }} run: | # Setup development binary pip uninstall --yes ruff - chmod +x "${DOWNLOAD_PATH}/ruff" - export PATH="${DOWNLOAD_PATH}:${PATH}" + export PATH="${PWD}/target/debug:${PATH}" ruff version + cd ruff-lsp just test check-playground: diff --git a/python/ruff-ecosystem/ruff_ecosystem/projects.py b/python/ruff-ecosystem/ruff_ecosystem/projects.py index cf7b732409..38d5623159 100644 --- a/python/ruff-ecosystem/ruff_ecosystem/projects.py +++ b/python/ruff-ecosystem/ruff_ecosystem/projects.py @@ -206,9 +206,6 @@ class CheckOptions(CommandOptions): "check", "--no-cache", "--exit-zero", - # Ignore internal test rules - "--ignore", - "RUF9", # Never apply fixes, as they pollute the comparison results "--no-fix", # Use the concise format for comparing violations From 04e7cecab3ee51df32f9f9d01652b04d7395db0c Mon Sep 17 00:00:00 2001 From: Dan Parizher 
<105245560+danparizher@users.noreply.github.com> Date: Mon, 10 Nov 2025 08:27:31 -0500 Subject: [PATCH 136/180] [`flake8-simplify`] Fix SIM222 false positive for `tuple(generator) or None` (`SIM222`) (#21187) Co-authored-by: Micha Reiser --- .../test/fixtures/flake8_simplify/SIM222.py | 12 ++++++++++++ ...__flake8_simplify__tests__SIM222_SIM222.py.snap | 8 ++++++++ crates/ruff_python_ast/src/helpers.rs | 14 ++++++++++++-- 3 files changed, 32 insertions(+), 2 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py index e1e299c98e..71fc606386 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM222.py @@ -204,3 +204,15 @@ x = 1 print(f"{x=}" or "bar") # SIM222 (lambda: 1) or True # SIM222 (i for i in range(1)) or "bar" # SIM222 + +# https://github.com/astral-sh/ruff/issues/21136 +def get_items(): + return tuple(item for item in Item.objects.all()) or None # OK + + +def get_items_list(): + return tuple([item for item in items]) or None # OK + + +def get_items_set(): + return tuple({item for item in items}) or None # OK diff --git a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap index f6c8bba110..0e65033b21 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap +++ b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__SIM222_SIM222.py.snap @@ -1101,6 +1101,7 @@ help: Replace with `f"{x=}"` 204 + print(f"{x=}") # SIM222 205 | (lambda: 1) or True # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 +207 | note: This is an unsafe fix and may change 
runtime behavior SIM222 [*] Use `lambda: 1` instead of `lambda: 1 or ...` @@ -1119,6 +1120,8 @@ help: Replace with `lambda: 1` - (lambda: 1) or True # SIM222 205 + lambda: 1 # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 note: This is an unsafe fix and may change runtime behavior SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...` @@ -1128,6 +1131,8 @@ SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...` 205 | (lambda: 1) or True # SIM222 206 | (i for i in range(1)) or "bar" # SIM222 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 | help: Replace with `(i for i in range(1))` 203 | x = 1 @@ -1135,4 +1140,7 @@ help: Replace with `(i for i in range(1))` 205 | (lambda: 1) or True # SIM222 - (i for i in range(1)) or "bar" # SIM222 206 + (i for i in range(1)) # SIM222 +207 | +208 | # https://github.com/astral-sh/ruff/issues/21136 +209 | def get_items(): note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_python_ast/src/helpers.rs b/crates/ruff_python_ast/src/helpers.rs index 9d5159a829..66ad66d9b1 100644 --- a/crates/ruff_python_ast/src/helpers.rs +++ b/crates/ruff_python_ast/src/helpers.rs @@ -1318,9 +1318,19 @@ impl Truthiness { if arguments.is_empty() { // Ex) `list()` Self::Falsey - } else if arguments.args.len() == 1 && arguments.keywords.is_empty() { + } else if let [argument] = &*arguments.args + && arguments.keywords.is_empty() + { // Ex) `list([1, 2, 3])` - Self::from_expr(&arguments.args[0], is_builtin) + // For tuple(generator), we can't determine statically if the result will + // be empty or not, so return Unknown. The generator itself is truthy, but + // tuple(empty_generator) is falsy. ListComp and SetComp are handled by + // recursing into Self::from_expr below, which returns Unknown for them. 
+ if argument.is_generator_expr() { + Self::Unknown + } else { + Self::from_expr(argument, is_builtin) + } } else { Self::Unknown } From 8d1efe964a36f44cf6b14ef888a57ba145cb8d0e Mon Sep 17 00:00:00 2001 From: Brent Westbrook <36778786+ntBre@users.noreply.github.com> Date: Mon, 10 Nov 2025 09:12:32 -0500 Subject: [PATCH 137/180] Add a new "Opening a PR" section to the contribution guide (#21298) Summary -- This PR adds a new section to CONTRIBUTING.md describing the expected contents of the PR summary and test plan, using the ecosystem report, and communicating the status of a PR. This seemed like a pretty good place to insert this in the document, at the end of the advice on preparing actual code changes, but I'm certainly open to other suggestions about both the content and placement. Test Plan -- Future PRs :) --------- Co-authored-by: Micha Reiser --- CONTRIBUTING.md | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6c9a99aed0..bb6758451f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -280,6 +280,55 @@ Note that plugin-specific configuration options are defined in their own modules Finally, regenerate the documentation and generated code with `cargo dev generate-all`. +### Opening a PR + +After you finish your changes, the next step is to open a PR. By default, two +sections will be filled into the PR body: the summary and the test plan. + +#### The summary + +The summary is intended to give us as maintainers information about your PR. +This should typically include a link to the relevant issue(s) you're addressing +in your PR, as well as a summary of the issue and your approach to fixing it. If +you have any questions about your approach or design, or if you considered +alternative approaches, that can also be helpful to include. 
+ +AI can be helpful in generating both the code and summary of your PR, but a +successful contribution should still be carefully reviewed by you and the +summary editorialized before submitting a PR. A great summary is thorough but +also succinct and gives us the context we need to review your PR. + +You can find examples of excellent issues and PRs by searching for the +[`great writeup`](https://github.com/astral-sh/ruff/issues?q=label%3A%22great%20writeup%22) +label. + +#### The test plan + +The test plan is likely to be shorter than the summary and can be as simple as +"Added new snapshot tests for `RUF123`," at least for rule bugs. For LSP or some +types of CLI changes, in particular, it can also be helpful to include +screenshots or recordings of your change in action. + +#### Ecosystem report + +After opening the PR, an ecosystem report will be run as part of CI. This shows +a diff of linter and formatter behavior before and after the changes in your PR. +Going through these changes and reporting your findings in the PR summary or an +additional comment help us to review your PR more efficiently. It's also a great +way to find new test cases to incorporate into your PR if you identify any +issues. + +#### PR status + +To help us know when your PR is ready for review again, please either move your +PR back to a draft while working on it (marking it ready for review afterwards +will ping the previous reviewers) or explicitly re-request a review. This helps +us to avoid re-reviewing a PR while you're still working on it and also to +prioritize PRs that are definitely ready for review. + +You can also thumbs-up or mark as resolved any comments we leave to let us know +you addressed them. 
+ ## MkDocs > [!NOTE] From 835e31b3ff8b1ab7665a64c519632550ac54acf1 Mon Sep 17 00:00:00 2001 From: Brent Westbrook <36778786+ntBre@users.noreply.github.com> Date: Mon, 10 Nov 2025 10:51:51 -0500 Subject: [PATCH 138/180] Fix syntax error false positive on alternative `match` patterns (#21362) Summary -- Fixes #21360 by using the union of names instead of overwriting them, as Micha suggested originally on #21104. This avoids overwriting the `n` name in the `Subscript` by the empty set of names visited in the nested OR pattern before visiting the other arm of the outer OR pattern. Test Plan -- A new inline test case taken from the issue --- .../inline/ok/nested_alternative_patterns.py | 2 + .../ruff_python_parser/src/semantic_errors.rs | 4 +- ...syntax@nested_alternative_patterns.py.snap | 212 +++++++++++++++++- 3 files changed, 216 insertions(+), 2 deletions(-) diff --git a/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py b/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py index d322fa3899..a12be52ac3 100644 --- a/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py +++ b/crates/ruff_python_parser/resources/inline/ok/nested_alternative_patterns.py @@ -5,3 +5,5 @@ match 42: case [[x] | [x]] | x: ... match 42: case [[x | x] | [x]] | x: ... +match 42: + case ast.Subscript(n, ast.Constant() | ast.Slice()) | ast.Attribute(n): ... diff --git a/crates/ruff_python_parser/src/semantic_errors.rs b/crates/ruff_python_parser/src/semantic_errors.rs index 2775aa9065..1577f80cb5 100644 --- a/crates/ruff_python_parser/src/semantic_errors.rs +++ b/crates/ruff_python_parser/src/semantic_errors.rs @@ -1868,6 +1868,8 @@ impl<'a, Ctx: SemanticSyntaxContext> MatchPatternVisitor<'a, Ctx> { // case [[x] | [x]] | x: ... // match 42: // case [[x | x] | [x]] | x: ... + // match 42: + // case ast.Subscript(n, ast.Constant() | ast.Slice()) | ast.Attribute(n): ... 
SemanticSyntaxChecker::add_error( self.ctx, SemanticSyntaxErrorKind::DifferentMatchPatternBindings, @@ -1875,7 +1877,7 @@ impl<'a, Ctx: SemanticSyntaxContext> MatchPatternVisitor<'a, Ctx> { ); break; } - self.names = visitor.names; + self.names.extend(visitor.names); } } } diff --git a/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap b/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap index 8d241b04ef..b685a0656d 100644 --- a/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/valid_syntax@nested_alternative_patterns.py.snap @@ -8,7 +8,7 @@ input_file: crates/ruff_python_parser/resources/inline/ok/nested_alternative_pat Module( ModModule { node_index: NodeIndex(None), - range: 0..181, + range: 0..271, body: [ Match( StmtMatch { @@ -489,6 +489,216 @@ Module( ], }, ), + Match( + StmtMatch { + node_index: NodeIndex(None), + range: 181..270, + subject: NumberLiteral( + ExprNumberLiteral { + node_index: NodeIndex(None), + range: 187..189, + value: Int( + 42, + ), + }, + ), + cases: [ + MatchCase { + range: 195..270, + node_index: NodeIndex(None), + pattern: MatchOr( + PatternMatchOr { + node_index: NodeIndex(None), + range: 200..265, + patterns: [ + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 200..246, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 200..213, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 200..203, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Subscript"), + range: 204..213, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 213..246, + node_index: NodeIndex(None), + patterns: [ + MatchAs( + PatternMatchAs { + node_index: NodeIndex(None), + range: 214..215, + pattern: None, + name: Some( + Identifier { + id: Name("n"), + range: 
214..215, + node_index: NodeIndex(None), + }, + ), + }, + ), + MatchOr( + PatternMatchOr { + node_index: NodeIndex(None), + range: 217..245, + patterns: [ + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 217..231, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 217..229, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 217..220, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Constant"), + range: 221..229, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 229..231, + node_index: NodeIndex(None), + patterns: [], + keywords: [], + }, + }, + ), + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 234..245, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 234..243, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 234..237, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Slice"), + range: 238..243, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 243..245, + node_index: NodeIndex(None), + patterns: [], + keywords: [], + }, + }, + ), + ], + }, + ), + ], + keywords: [], + }, + }, + ), + MatchClass( + PatternMatchClass { + node_index: NodeIndex(None), + range: 249..265, + cls: Attribute( + ExprAttribute { + node_index: NodeIndex(None), + range: 249..262, + value: Name( + ExprName { + node_index: NodeIndex(None), + range: 249..252, + id: Name("ast"), + ctx: Load, + }, + ), + attr: Identifier { + id: Name("Attribute"), + range: 253..262, + node_index: NodeIndex(None), + }, + ctx: Load, + }, + ), + arguments: PatternArguments { + range: 262..265, + node_index: NodeIndex(None), + patterns: [ + MatchAs( + PatternMatchAs { + node_index: NodeIndex(None), + range: 263..264, + pattern: None, + name: Some( + Identifier { + id: Name("n"), + range: 263..264, + node_index: NodeIndex(None), + 
}, + ), + }, + ), + ], + keywords: [], + }, + }, + ), + ], + }, + ), + guard: None, + body: [ + Expr( + StmtExpr { + node_index: NodeIndex(None), + range: 267..270, + value: EllipsisLiteral( + ExprEllipsisLiteral { + node_index: NodeIndex(None), + range: 267..270, + }, + ), + }, + ), + ], + }, + ], + }, + ), ], }, ) From 4821c050ef94a55ad3973754bbc00e3f8516b169 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 10 Nov 2025 11:42:12 -0500 Subject: [PATCH 139/180] [ty] elide redundant inlay hints for function args (#21365) This elides the following inlay hints: ```py foo([x=]x) foo([x=]y.x) foo([x=]x[0]) foo([x=]x(...)) # composes to complex situations foo([x=]y.x(..)[0]) ``` Fixes https://github.com/astral-sh/ty/issues/1514 --- crates/ty_ide/src/inlay_hints.rs | 199 ++++++++++++++++++++++++++++++- 1 file changed, 197 insertions(+), 2 deletions(-) diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs index 47859c39e3..790ba064aa 100644 --- a/crates/ty_ide/src/inlay_hints.rs +++ b/crates/ty_ide/src/inlay_hints.rs @@ -4,7 +4,7 @@ use crate::Db; use ruff_db::files::File; use ruff_db::parsed::parsed_module; use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor, TraversalSignal}; -use ruff_python_ast::{AnyNodeRef, Expr, Stmt}; +use ruff_python_ast::{AnyNodeRef, ArgOrKeyword, Expr, Stmt}; use ruff_text_size::{Ranged, TextRange, TextSize}; use ty_python_semantic::types::Type; use ty_python_semantic::types::ide_support::inlay_hint_function_argument_details; @@ -283,7 +283,9 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { self.visit_expr(&call.func); for (index, arg_or_keyword) in call.arguments.arguments_source_order().enumerate() { - if let Some(name) = argument_names.get(&index) { + if let Some(name) = argument_names.get(&index) + && !arg_matches_name(&arg_or_keyword, name) + { self.add_call_argument_name(arg_or_keyword.range().start(), name); } self.visit_expr(arg_or_keyword.value()); @@ -296,6 +298,32 @@ impl 
SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { } } +/// Given a positional argument, check if the expression is the "same name" +/// as the function argument itself. +/// +/// This allows us to filter out reptitive inlay hints like `x=x`, `x=y.x`, etc. +fn arg_matches_name(arg_or_keyword: &ArgOrKeyword, name: &str) -> bool { + // Only care about positional args + let ArgOrKeyword::Arg(arg) = arg_or_keyword else { + return false; + }; + + let mut expr = *arg; + loop { + match expr { + // `x=x(1, 2)` counts as a match, recurse for it + Expr::Call(expr_call) => expr = &expr_call.func, + // `x=x[0]` is a match, recurse for it + Expr::Subscript(expr_subscript) => expr = &expr_subscript.value, + // `x=x` is a match + Expr::Name(expr_name) => return expr_name.id.as_str() == name, + // `x=y.x` is a match + Expr::Attribute(expr_attribute) => return expr_attribute.attr.as_str() == name, + _ => return false, + } + } +} + #[cfg(test)] mod tests { use super::*; @@ -485,6 +513,173 @@ mod tests { "); } + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_name() { + let test = inlay_hint_test( + " + def foo(x: int): pass + x = 1 + y = 2 + foo(x) + foo(y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + x[: Literal[1]] = 1 + y[: Literal[2]] = 2 + foo(x) + foo([x=]y) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_attribute() { + let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(): + self.x: int = 1 + self.y: int = 2 + val = MyClass() + + foo(val.x) + foo(val.y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(): + self.x: int = 1 + self.y: int = 2 + val[: MyClass] = MyClass() + + foo(val.x) + foo([x=]val.y) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_attribute_not() { + // This one checks that we don't allow elide `x=` for `x.y` + 
let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(): + self.x: int = 1 + self.y: int = 2 + x = MyClass() + + foo(x.x) + foo(x.y)", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(): + self.x: int = 1 + self.y: int = 2 + x[: MyClass] = MyClass() + + foo(x.x) + foo([x=]x.y) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_call() { + let test = inlay_hint_test( + " + def foo(x: int): pass + class MyClass: + def __init__(): + def x() -> int: + return 1 + def y() -> int: + return 2 + val = MyClass() + + foo(val.x()) + foo(val.y())", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + class MyClass: + def __init__(): + def x() -> int: + return 1 + def y() -> int: + return 2 + val[: MyClass] = MyClass() + + foo(val.x()) + foo([x=]val.y()) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_complex() { + let test = inlay_hint_test( + " + from typing import List + + def foo(x: int): pass + class MyClass: + def __init__(): + def x() -> List[int]: + return 1 + def y() -> List[int]: + return 2 + val = MyClass() + + foo(val.x()[0]) + foo(val.y()[1])", + ); + + assert_snapshot!(test.inlay_hints(), @r" + from typing import List + + def foo(x: int): pass + class MyClass: + def __init__(): + def x() -> List[int]: + return 1 + def y() -> List[int]: + return 2 + val[: MyClass] = MyClass() + + foo(val.x()[0]) + foo([x=]val.y()[1]) + "); + } + + #[test] + fn test_function_call_with_positional_or_keyword_parameter_redundant_subscript() { + let test = inlay_hint_test( + " + def foo(x: int): pass + x = [1] + y = [2] + + foo(x[0]) + foo(y[0])", + ); + + assert_snapshot!(test.inlay_hints(), @r" + def foo(x: int): pass + x[: list[Unknown | int]] = [1] + y[: list[Unknown | int]] = [2] + + foo(x[0]) + foo([x=]y[0]) + "); + } + #[test] fn test_function_call_with_positional_only_parameter() { 
let test = inlay_hint_test( From 1d188476b608c94cea5cb2a97ae248d70a0e2154 Mon Sep 17 00:00:00 2001 From: Matthew Mckee Date: Mon, 10 Nov 2025 16:59:45 +0000 Subject: [PATCH 140/180] [ty] provide `import` completion when in `from ` statement (#21291) ## Summary Resolves https://github.com/astral-sh/ty/issues/1494 ## Test Plan Add a test showing if we are in `from ` we provide the keyword completion "import" --- crates/ty_ide/src/completion.rs | 236 +++++++++++++++++++++++++++++++- 1 file changed, 232 insertions(+), 4 deletions(-) diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index ae856dd7e7..4ea6cc18d9 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -160,6 +160,20 @@ impl<'db> Completion<'db> { .and_then(|ty| imp(db, ty, &CompletionKindVisitor::default())) }) } + + fn keyword(name: &str) -> Self { + Completion { + name: name.into(), + insert: None, + ty: None, + kind: Some(CompletionKind::Keyword), + module_name: None, + import: None, + builtin: false, + is_type_check_only: false, + documentation: None, + } + } } /// The "kind" of a completion. @@ -212,14 +226,16 @@ pub fn completion<'db>( offset: TextSize, ) -> Vec> { let parsed = parsed_module(db, file).load(db); - let tokens = tokens_start_before(parsed.tokens(), offset); + let typed = find_typed_text(db, file, &parsed, offset); - if is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file) { + if is_in_no_completions_place(db, tokens, file) { return vec![]; } + if let Some(completions) = only_keyword_completion(tokens, typed.as_deref()) { + return vec![completions]; + } - let typed = find_typed_text(db, file, &parsed, offset); let typed_query = typed .as_deref() .map(QueryPattern::new) @@ -309,6 +325,17 @@ fn add_keyword_value_completions<'db>( } } +/// When the tokens indicate that the last token should be precisely one +/// possible keyword, we provide a single completion for it. 
+/// +/// `typed` should be the text that we think the user has typed so far. +fn only_keyword_completion<'db>(tokens: &[Token], typed: Option<&str>) -> Option> { + if is_import_from_incomplete(tokens, typed) { + return Some(Completion::keyword("import")); + } + None +} + /// Adds completions not in scope. /// /// `scoped` should be information about the identified scope @@ -801,6 +828,67 @@ fn import_tokens(tokens: &[Token]) -> Option<(&Token, &Token)> { None } +/// Looks for the start of a `from module ` statement. +/// +/// If found, `true` is returned. +/// +/// `typed` should be the text that we think the user has typed so far. +fn is_import_from_incomplete(tokens: &[Token], typed: Option<&str>) -> bool { + // N.B. The implementation here is very similar to + // `from_import_tokens`. The main difference is that + // we're just looking for whether we should suggest + // the `import` keyword. So this is a little simpler. + + use TokenKind as TK; + + const LIMIT: usize = 1_000; + + /// A state used to "parse" the tokens preceding the user's cursor, + /// in reverse, to detect a "from import" statement. + enum S { + Start, + ImportKeyword, + ModulePossiblyDotted, + ModuleOnlyDotted, + } + + let mut state = S::Start; + if typed.is_none() { + state = S::ImportKeyword; + } + // Move backward through the tokens until we get to + // the `from` token. + for token in tokens.iter().rev().take(LIMIT) { + state = match (state, token.kind()) { + // Match an incomplete `import` keyword. + // + // It's okay to pop off a newline token here initially, + // since it may occur before the user starts typing + // `import` but after the module name. + (S::Start, TK::Newline | TK::Name | TK::Import) => S::ImportKeyword, + // We are a bit more careful with how we parse the module + // here than in `from_import_tokens`. In particular, we + // want to make sure we don't incorrectly suggest `import` + // for `from os.i`. 
If we aren't careful, then + // `i` could be considered an incomplete `import` keyword + // and `os.` is the module. But of course, ending with a + // `.` (unless the entire module is dots) is invalid. + (S::ImportKeyword, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted, + (S::ImportKeyword, TK::Name | TK::Case | TK::Match | TK::Type | TK::Unknown) => { + S::ModulePossiblyDotted + } + (S::ModuleOnlyDotted, TK::Dot | TK::Ellipsis) => S::ModuleOnlyDotted, + ( + S::ModulePossiblyDotted, + TK::Name | TK::Dot | TK::Ellipsis | TK::Case | TK::Match | TK::Type | TK::Unknown, + ) => S::ModulePossiblyDotted, + (S::ModulePossiblyDotted | S::ModuleOnlyDotted, TK::From) => return true, + _ => return false, + }; + } + false +} + /// Looks for the text typed immediately before the cursor offset /// given. /// @@ -815,7 +903,10 @@ fn find_typed_text( let source = source_text(db, file); let tokens = tokens_start_before(parsed.tokens(), offset); let last = tokens.last()?; - if !matches!(last.kind(), TokenKind::Name) { + // It's odd to include `TokenKind::Import` here, but it + // indicates that the user has typed `import`. This is + // useful to know in some contexts. + if !matches!(last.kind(), TokenKind::Name | TokenKind::Import) { return None; } // This one's weird, but if the cursor is beyond @@ -830,6 +921,11 @@ fn find_typed_text( Some(source[last.range()].to_string()) } +/// Whether the last token is in a place where we should not provide completions. +fn is_in_no_completions_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { + is_in_comment(tokens) || is_in_string(tokens) || is_in_definition_place(db, tokens, file) +} + /// Whether the last token is within a comment or not. 
fn is_in_comment(tokens: &[Token]) -> bool { tokens.last().is_some_and(|t| t.kind().is_comment()) @@ -4216,6 +4312,138 @@ type "); } + #[test] + fn from_import_i_suggests_import() { + let builder = completion_test_builder("from typing i"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_import_suggests_nothing() { + let builder = completion_test_builder("from typing import"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_importt_suggests_import() { + let builder = completion_test_builder("from typing importt"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_space_suggests_import() { + let builder = completion_test_builder("from typing "); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_no_space_not_suggests_import() { + let builder = completion_test_builder("from typing"); + assert_snapshot!(builder.build().snapshot(), @r" + typing + typing_extensions + "); + } + + #[test] + fn from_import_two_imports_suggests_import() { + let builder = completion_test_builder( + "from collections.abc import Sequence + from typing i", + ); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + /// The following behaviour may not be reflected in editors, since LSP + /// clients may do their own filtering of completion suggestions. 
+ #[test] + fn from_import_random_name_suggests_import() { + let builder = completion_test_builder("from typing aa"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_dotted_name_suggests_import() { + let builder = completion_test_builder("from collections.abc i"); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("main.py", "from .foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("main.py", "from .foo.bar i") + .source("foo/bar.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_nested_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + .source("src/main.py", "from ..foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_nested_very_dotted_name_relative_import_suggests_import() { + let builder = CursorTest::builder() + // N.B. the `...` tokenizes as `TokenKind::Ellipsis` + .source("src/main.py", "from ...foo i") + .source("foo.py", "") + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_only_dot() { + let builder = CursorTest::builder() + .source( + "main.py", + " + import_zqzqzq = 1 + from . 
+ ", + ) + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_only_dot_incomplete() { + let builder = CursorTest::builder() + .source( + "main.py", + " + import_zqzqzq = 1 + from .imp + ", + ) + .completion_test_builder(); + assert_snapshot!(builder.build().snapshot(), @"import"); + } + + #[test] + fn from_import_incomplete() { + let builder = completion_test_builder( + "from collections.abc i + + ZQZQZQ = 1 + ZQ", + ); + assert_snapshot!(builder.build().snapshot(), @"ZQZQZQ"); + } + /// A way to create a simple single-file (named `main.py`) completion test /// builder. /// From e4dc406a3dd69822de9282aa9d656816152da4e3 Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Mon, 10 Nov 2025 12:41:44 -0500 Subject: [PATCH 141/180] [`refurb`] Detect empty f-strings (`FURB105`) (#21348) ## Summary Fixes FURB105 (`print-empty-string`) to detect empty f-strings in addition to regular empty strings. Previously, the rule only flagged `print("")` but missed `print(f"")`. This fix ensures both cases are detected and can be automatically fixed. Fixes #21346 ## Problem Analysis The FURB105 rule checks for unnecessary empty strings passed to `print()` calls. The `is_empty_string` helper function was only checking for `Expr::StringLiteral` with empty values, but did not handle `Expr::FString` (f-strings). As a result, `print(f"")` was not being flagged as a violation, even though it's semantically equivalent to `print("")` and should be simplified to `print()`. The issue occurred because the function used a `matches!` macro that only checked for string literals: ```rust fn is_empty_string(expr: &Expr) -> bool { matches!( expr, Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) if value.is_empty() ) } ``` ## Approach 1. 
**Import the helper function**: Added `is_empty_f_string` to the imports from `ruff_python_ast::helpers`, which already provides logic to detect empty f-strings. 2. **Update `is_empty_string` function**: Changed the implementation from a `matches!` macro to a `match` expression that handles both string literals and f-strings: ```rust fn is_empty_string(expr: &Expr) -> bool { match expr { Expr::StringLiteral(ast::ExprStringLiteral { value, .. }) => value.is_empty(), Expr::FString(f_string) => is_empty_f_string(f_string), _ => false, } } ``` The fix leverages the existing `is_empty_f_string` helper function which properly handles the complexity of f-strings, including nested f-strings and interpolated expressions. This ensures the detection is accurate and consistent with how empty strings are detected elsewhere in the codebase. --- .../resources/test/fixtures/refurb/FURB105.py | 4 + .../rules/refurb/rules/print_empty_string.rs | 14 ++-- ...es__refurb__tests__FURB105_FURB105.py.snap | 76 +++++++++++++++++-- 3 files changed, 78 insertions(+), 16 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py b/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py index 86a6584f3a..aca23813b3 100644 --- a/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py +++ b/crates/ruff_linter/resources/test/fixtures/refurb/FURB105.py @@ -19,6 +19,9 @@ print("", *args, sep="") print("", **kwargs) print(sep="\t") print(sep=print(1)) +print(f"") +print(f"", sep=",") +print(f"", end="bar") # OK. 
@@ -33,3 +36,4 @@ print("foo", "", sep=",") print("foo", "", "bar", "", sep=",") print("", "", **kwargs) print(*args, sep=",") +print(f"foo") diff --git a/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs b/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs index 2b8ecfab45..6c010da313 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/print_empty_string.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::helpers::contains_effect; +use ruff_python_ast::helpers::{contains_effect, is_empty_f_string}; use ruff_python_ast::{self as ast, Expr}; use ruff_python_codegen::Generator; use ruff_python_semantic::SemanticModel; @@ -194,13 +194,11 @@ pub(crate) fn print_empty_string(checker: &Checker, call: &ast::ExprCall) { /// Check if an expression is a constant empty string. fn is_empty_string(expr: &Expr) -> bool { - matches!( - expr, - Expr::StringLiteral(ast::ExprStringLiteral { - value, - .. - }) if value.is_empty() - ) + match expr { + Expr::StringLiteral(ast::ExprStringLiteral { value, .. 
}) => value.is_empty(), + Expr::FString(f_string) => is_empty_f_string(f_string), + _ => false, + } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap index e258e208cb..d875fcff56 100644 --- a/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap +++ b/crates/ruff_linter/src/rules/refurb/snapshots/ruff_linter__rules__refurb__tests__FURB105_FURB105.py.snap @@ -317,7 +317,7 @@ help: Remove empty string 19 + print(**kwargs) 20 | print(sep="\t") 21 | print(sep=print(1)) -22 | +22 | print(f"") FURB105 [*] Unnecessary separator passed to `print` --> FURB105.py:20:1 @@ -327,6 +327,7 @@ FURB105 [*] Unnecessary separator passed to `print` 20 | print(sep="\t") | ^^^^^^^^^^^^^^^ 21 | print(sep=print(1)) +22 | print(f"") | help: Remove separator 17 | print("", *args) @@ -335,8 +336,8 @@ help: Remove separator - print(sep="\t") 20 + print() 21 | print(sep=print(1)) -22 | -23 | # OK. +22 | print(f"") +23 | print(f"", sep=",") FURB105 [*] Unnecessary separator passed to `print` --> FURB105.py:21:1 @@ -345,8 +346,8 @@ FURB105 [*] Unnecessary separator passed to `print` 20 | print(sep="\t") 21 | print(sep=print(1)) | ^^^^^^^^^^^^^^^^^^^ -22 | -23 | # OK. +22 | print(f"") +23 | print(f"", sep=",") | help: Remove separator 18 | print("", *args, sep="") @@ -354,7 +355,66 @@ help: Remove separator 20 | print(sep="\t") - print(sep=print(1)) 21 + print() -22 | -23 | # OK. 
-24 | +22 | print(f"") +23 | print(f"", sep=",") +24 | print(f"", end="bar") note: This is an unsafe fix and may change runtime behavior + +FURB105 [*] Unnecessary empty string passed to `print` + --> FURB105.py:22:1 + | +20 | print(sep="\t") +21 | print(sep=print(1)) +22 | print(f"") + | ^^^^^^^^^^ +23 | print(f"", sep=",") +24 | print(f"", end="bar") + | +help: Remove empty string +19 | print("", **kwargs) +20 | print(sep="\t") +21 | print(sep=print(1)) + - print(f"") +22 + print() +23 | print(f"", sep=",") +24 | print(f"", end="bar") +25 | + +FURB105 [*] Unnecessary empty string and separator passed to `print` + --> FURB105.py:23:1 + | +21 | print(sep=print(1)) +22 | print(f"") +23 | print(f"", sep=",") + | ^^^^^^^^^^^^^^^^^^^ +24 | print(f"", end="bar") + | +help: Remove empty string and separator +20 | print(sep="\t") +21 | print(sep=print(1)) +22 | print(f"") + - print(f"", sep=",") +23 + print() +24 | print(f"", end="bar") +25 | +26 | # OK. + +FURB105 [*] Unnecessary empty string passed to `print` + --> FURB105.py:24:1 + | +22 | print(f"") +23 | print(f"", sep=",") +24 | print(f"", end="bar") + | ^^^^^^^^^^^^^^^^^^^^^ +25 | +26 | # OK. + | +help: Remove empty string +21 | print(sep=print(1)) +22 | print(f"") +23 | print(f"", sep=",") + - print(f"", end="bar") +24 + print(end="bar") +25 | +26 | # OK. 
+27 | From f63a9f233469984408f9a855d158e5243fbd515f Mon Sep 17 00:00:00 2001 From: justin Date: Mon, 10 Nov 2025 12:53:08 -0500 Subject: [PATCH 142/180] [ty] Fix incorrect inference of `enum.auto()` for enums with non-`int` mixins, and imprecise inference of `enum.auto()` for single-member enums (#20541) Co-authored-by: Alex Waygood --- .../resources/mdtest/enums.md | 56 +++++++++++++++++++ crates/ty_python_semantic/src/types.rs | 10 ++++ crates/ty_python_semantic/src/types/enums.rs | 45 ++++++++++++--- 3 files changed, 104 insertions(+), 7 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/enums.md b/crates/ty_python_semantic/resources/mdtest/enums.md index c526e11124..bdd2a92995 100644 --- a/crates/ty_python_semantic/resources/mdtest/enums.md +++ b/crates/ty_python_semantic/resources/mdtest/enums.md @@ -320,6 +320,11 @@ reveal_type(enum_members(Answer)) reveal_type(Answer.YES.value) # revealed: Literal[1] reveal_type(Answer.NO.value) # revealed: Literal[2] + +class SingleMember(Enum): + SINGLE = auto() + +reveal_type(SingleMember.SINGLE.value) # revealed: Literal[1] ``` Usages of `auto()` can be combined with manual value assignments: @@ -348,6 +353,11 @@ class Answer(StrEnum): reveal_type(Answer.YES.value) # revealed: Literal["yes"] reveal_type(Answer.NO.value) # revealed: Literal["no"] + +class SingleMember(StrEnum): + SINGLE = auto() + +reveal_type(SingleMember.SINGLE.value) # revealed: Literal["single"] ``` Using `auto()` with `IntEnum` also works as expected: @@ -363,6 +373,52 @@ reveal_type(Answer.YES.value) # revealed: Literal[1] reveal_type(Answer.NO.value) # revealed: Literal[2] ``` +As does using `auto()` for other enums that use `int` as a mixin: + +```py +from enum import Enum, auto + +class Answer(int, Enum): + YES = auto() + NO = auto() + +reveal_type(Answer.YES.value) # revealed: Literal[1] +reveal_type(Answer.NO.value) # revealed: Literal[2] +``` + +It's [hard to 
predict](https://github.com/astral-sh/ruff/pull/20541#discussion_r2381878613) what the +effect of using `auto()` will be for an arbitrary non-integer mixin, so for anything that isn't a +`StrEnum` and has a non-`int` mixin, we simply fallback to typeshed's annotation of `Any` for the +`value` property: + +```python +from enum import Enum, auto + +class A(str, Enum): + X = auto() + Y = auto() + +reveal_type(A.X.value) # revealed: Any + +class B(bytes, Enum): + X = auto() + Y = auto() + +reveal_type(B.X.value) # revealed: Any + +class C(tuple, Enum): + X = auto() + Y = auto() + +reveal_type(C.X.value) # revealed: Any + +class D(float, Enum): + X = auto() + Y = auto() + +reveal_type(D.X.value) # revealed: Any +``` + Combining aliases with `auto()`: ```py diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index e3326791ac..2c6f449f23 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -4365,6 +4365,16 @@ impl<'db> Type<'db> { Place::bound(todo_type!("ParamSpecArgs / ParamSpecKwargs")).into() } + Type::NominalInstance(instance) + if matches!(name_str, "value" | "_value_") + && is_single_member_enum(db, instance.class(db).class_literal(db).0) => + { + enum_metadata(db, instance.class(db).class_literal(db).0) + .and_then(|metadata| metadata.members.get_index(0).map(|(_, v)| *v)) + .map_or(Place::Undefined, Place::bound) + .into() + } + Type::NominalInstance(..) | Type::ProtocolInstance(..) | Type::BooleanLiteral(..) 
diff --git a/crates/ty_python_semantic/src/types/enums.rs b/crates/ty_python_semantic/src/types/enums.rs index 671b919929..dbde339221 100644 --- a/crates/ty_python_semantic/src/types/enums.rs +++ b/crates/ty_python_semantic/src/types/enums.rs @@ -6,7 +6,7 @@ use crate::{ place::{Place, PlaceAndQualifiers, place_from_bindings, place_from_declarations}, semantic_index::{place_table, use_def_map}, types::{ - ClassLiteral, DynamicType, EnumLiteralType, KnownClass, MemberLookupPolicy, + ClassBase, ClassLiteral, DynamicType, EnumLiteralType, KnownClass, MemberLookupPolicy, StringLiteralType, Type, TypeQualifiers, }, }; @@ -68,9 +68,6 @@ pub(crate) fn enum_metadata<'db>( return None; } - let is_str_enum = - Type::ClassLiteral(class).is_subtype_of(db, KnownClass::StrEnum.to_subclass_of(db)); - let scope_id = class.body_scope(db); let use_def_map = use_def_map(db, scope_id); let table = place_table(db, scope_id); @@ -141,14 +138,48 @@ pub(crate) fn enum_metadata<'db>( // enum.auto Some(KnownClass::Auto) => { auto_counter += 1; - Some(if is_str_enum { + + // `StrEnum`s have different `auto()` behaviour to enums inheriting from `(str, Enum)` + let auto_value_ty = if Type::ClassLiteral(class) + .is_subtype_of(db, KnownClass::StrEnum.to_subclass_of(db)) + { Type::StringLiteral(StringLiteralType::new( db, name.to_lowercase().as_str(), )) } else { - Type::IntLiteral(auto_counter) - }) + let custom_mixins: smallvec::SmallVec<[Option; 1]> = + class + .iter_mro(db, None) + .skip(1) + .filter_map(ClassBase::into_class) + .filter(|class| { + !Type::from(*class).is_subtype_of( + db, + KnownClass::Enum.to_subclass_of(db), + ) + }) + .map(|class| class.known(db)) + .filter(|class| { + !matches!(class, Some(KnownClass::Object)) + }) + .collect(); + + // `IntEnum`s have the same `auto()` behaviour to enums inheriting from `(int, Enum)`, + // and `IntEnum`s also have `int` in their MROs, so both cases are handled here. 
+ // + // In general, the `auto()` behaviour for enums with non-`int` mixins is hard to predict, + // so we fall back to `Any` in those cases. + if matches!( + custom_mixins.as_slice(), + [] | [Some(KnownClass::Int)] + ) { + Type::IntLiteral(auto_counter) + } else { + Type::any() + } + }; + Some(auto_value_ty) } _ => None, From deeda5690617162bf7c7364a2cb3faa69e880a19 Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Mon, 10 Nov 2025 13:29:35 -0500 Subject: [PATCH 143/180] [`configuration`] Fix unclear error messages for line-length values exceeding `u16::MAX` (#21329) Co-authored-by: Micha Reiser --- crates/ruff_linter/src/line_width.rs | 18 +++++---- crates/ruff_workspace/src/pyproject.rs | 56 ++++++++++++++++++++++++++ 2 files changed, 67 insertions(+), 7 deletions(-) diff --git a/crates/ruff_linter/src/line_width.rs b/crates/ruff_linter/src/line_width.rs index c8cf857621..80915c9f59 100644 --- a/crates/ruff_linter/src/line_width.rs +++ b/crates/ruff_linter/src/line_width.rs @@ -51,13 +51,17 @@ impl<'de> serde::Deserialize<'de> for LineLength { where D: serde::Deserializer<'de>, { - let value = u16::deserialize(deserializer)?; - Self::try_from(value).map_err(|_| { - serde::de::Error::custom(format!( - "line-length must be between 1 and {} (got {value})", - Self::MAX, - )) - }) + let value = i64::deserialize(deserializer)?; + + u16::try_from(value) + .ok() + .and_then(|u16_value| Self::try_from(u16_value).ok()) + .ok_or_else(|| { + serde::de::Error::custom(format!( + "line-length must be between 1 and {} (got {value})", + Self::MAX, + )) + }) } } diff --git a/crates/ruff_workspace/src/pyproject.rs b/crates/ruff_workspace/src/pyproject.rs index 9cfa01a35d..53649a31e8 100644 --- a/crates/ruff_workspace/src/pyproject.rs +++ b/crates/ruff_workspace/src/pyproject.rs @@ -468,6 +468,62 @@ line-length = 500 "line-length must be between 1 and 320 (got 500)" ); + // Test value at u16::MAX boundary (65535) - should show range 
error
+    let invalid_line_length_65535 = toml::from_str::(
+        r"
+[tool.ruff]
+line-length = 65535
+",
+    )
+    .expect_err("Deserialization should have failed for line-length at u16::MAX");
+
+    assert_eq!(
+        invalid_line_length_65535.message(),
+        "line-length must be between 1 and 320 (got 65535)"
+    );
+
+    // Test value exceeding u16::MAX (65536) - should show clear error
+    let invalid_line_length_65536 = toml::from_str::(
+        r"
+[tool.ruff]
+line-length = 65536
+",
+    )
+    .expect_err("Deserialization should have failed for line-length exceeding u16::MAX");
+
+    assert_eq!(
+        invalid_line_length_65536.message(),
+        "line-length must be between 1 and 320 (got 65536)"
+    );
+
+    // Test value far exceeding u16::MAX (99_999) - should show clear error
+    let invalid_line_length_99999 = toml::from_str::(
+        r"
+[tool.ruff]
+line-length = 99_999
+",
+    )
+    .expect_err("Deserialization should have failed for line-length far exceeding u16::MAX");
+
+    assert_eq!(
+        invalid_line_length_99999.message(),
+        "line-length must be between 1 and 320 (got 99999)"
+    );
+
+    // Test negative value - should show clear error
+    let invalid_line_length_negative = toml::from_str::(
+        r"
+[tool.ruff]
+line-length = -5
+",
+    )
+    .expect_err("Deserialization should have failed for negative line-length");
+
+    assert_eq!(
+        invalid_line_length_negative.message(),
+        "line-length must be between 1 and 320 (got -5)"
+    );
+
     Ok(())
 }

From d258302b08303bf3e7d14add020c97b1cc56a9aa Mon Sep 17 00:00:00 2001
From: Aria Desires
Date: Mon, 10 Nov 2025 14:51:14 -0500
Subject: [PATCH 144/180] [ty] suppress some trivial expr inlay hints (#21367)

I'm not 100% sold on this implementation, but it's a strict improvement and it adds a ton of snapshot tests for future iteration. 
Part of https://github.com/astral-sh/ty/issues/494 --- crates/ty_ide/src/inlay_hints.rs | 429 ++++++++++++++++++++-- crates/ty_server/tests/e2e/inlay_hints.rs | 8 +- 2 files changed, 405 insertions(+), 32 deletions(-) diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs index 790ba064aa..5bacaa04cb 100644 --- a/crates/ty_ide/src/inlay_hints.rs +++ b/crates/ty_ide/src/inlay_hints.rs @@ -231,7 +231,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { match stmt { Stmt::Assign(assign) => { - self.in_assignment = true; + self.in_assignment = !type_hint_is_excessive_for_expr(&assign.value); for target in &assign.targets { self.visit_expr(target); } @@ -324,6 +324,32 @@ fn arg_matches_name(arg_or_keyword: &ArgOrKeyword, name: &str) -> bool { } } +/// Given an expression that's the RHS of an assignment, would it be excessive to +/// emit an inlay type hint for the variable assigned to it? +/// +/// This is used to suppress inlay hints for things like `x = 1`, `x, y = (1, 2)`, etc. +fn type_hint_is_excessive_for_expr(expr: &Expr) -> bool { + match expr { + // A tuple of all literals is excessive to typehint + Expr::Tuple(expr_tuple) => expr_tuple.elts.iter().all(type_hint_is_excessive_for_expr), + + // Various Literal[...] 
types which are always excessive to hint + | Expr::BytesLiteral(_) + | Expr::NumberLiteral(_) + | Expr::BooleanLiteral(_) + | Expr::StringLiteral(_) + // `None` isn't terribly verbose, but still redundant + | Expr::NoneLiteral(_) + // This one expands to `str` which isn't verbose but is redundant + | Expr::FString(_) + // This one expands to `Template` which isn't verbose but is redundant + | Expr::TString(_)=> true, + + // Everything else is reasonable + _ => false, + } +} + #[cfg(test)] mod tests { use super::*; @@ -415,47 +441,183 @@ mod tests { #[test] fn test_assign_statement() { - let test = inlay_hint_test("x = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x = 1 + y = x + z = i(1) + w = z + ", + ); assert_snapshot!(test.inlay_hints(), @r" - x[: Literal[1]] = 1 + def i(x: int, /) -> int: + return x + + x = 1 + y[: Literal[1]] = x + z[: int] = i(1) + w[: int] = z "); } #[test] - fn test_tuple_assignment() { - let test = inlay_hint_test("x, y = (1, 'abc')"); + fn test_unpacked_tuple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = (1, 'abc') + x2, y2 = (x1, y1) + x3, y3 = (i(1), s('abc')) + x4, y4 = (x3, y3) + ", + ); assert_snapshot!(test.inlay_hints(), @r#" - x[: Literal[1]], y[: Literal["abc"]] = (1, 'abc') + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = (1, 'abc') + x2[: Literal[1]], y2[: Literal["abc"]] = (x1, y1) + x3[: int], y3[: str] = (i(1), s('abc')) + x4[: int], y4[: str] = (x3, y3) + "#); + } + + #[test] + fn test_multiple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = 1, 'abc' + x2, y2 = x1, y1 + x3, y3 = i(1), s('abc') + x4, y4 = x3, y3 + ", + ); + + assert_snapshot!(test.inlay_hints(), @r#" + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, y1 = 1, 'abc' + x2[: 
Literal[1]], y2[: Literal["abc"]] = x1, y1 + x3[: int], y3[: str] = i(1), s('abc') + x4[: int], y4[: str] = x3, y3 + "#); + } + + #[test] + fn test_tuple_assignment() { + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x = (1, 'abc') + y = x + z = (i(1), s('abc')) + w = z + ", + ); + + assert_snapshot!(test.inlay_hints(), @r#" + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x = (1, 'abc') + y[: tuple[Literal[1], Literal["abc"]]] = x + z[: tuple[int, str]] = (i(1), s('abc')) + w[: tuple[int, str]] = z "#); } #[test] fn test_nested_tuple_assignment() { - let test = inlay_hint_test("x, (y, z) = (1, ('abc', 2))"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, (y1, z1) = (1, ('abc', 2)) + x2, (y2, z2) = (x1, (y1, z1)) + x3, (y3, z3) = (i(1), (s('abc'), i(2))) + x4, (y4, z4) = (x3, (y3, z3))", + ); assert_snapshot!(test.inlay_hints(), @r#" - x[: Literal[1]], (y[: Literal["abc"]], z[: Literal[2]]) = (1, ('abc', 2)) + def i(x: int, /) -> int: + return x + def s(x: str, /) -> str: + return x + + x1, (y1, z1) = (1, ('abc', 2)) + x2[: Literal[1]], (y2[: Literal["abc"]], z2[: Literal[2]]) = (x1, (y1, z1)) + x3[: int], (y3[: str], z3[: int]) = (i(1), (s('abc'), i(2))) + x4[: int], (y4[: str], z4[: int]) = (x3, (y3, z3)) "#); } #[test] fn test_assign_statement_with_type_annotation() { - let test = inlay_hint_test("x: int = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x: int = 1 + y = x + z: int = i(1) + w = z", + ); assert_snapshot!(test.inlay_hints(), @r" + def i(x: int, /) -> int: + return x + x: int = 1 + y[: Literal[1]] = x + z: int = i(1) + w[: int] = z "); } #[test] fn test_assign_statement_out_of_range() { - let test = inlay_hint_test("x = 1\ny = 2"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + x = i(1) + z = x", + ); 
assert_snapshot!(test.inlay_hints(), @r" - x[: Literal[1]] = 1 - y = 2 + def i(x: int, /) -> int: + return x + x[: int] = i(1) + z = x "); } @@ -465,28 +627,236 @@ mod tests { " class A: def __init__(self, y): - self.x = 1 + self.x = int(1) self.y = y a = A(2) - a.y = 3 + a.y = int(3) ", ); assert_snapshot!(test.inlay_hints(), @r" class A: def __init__(self, y): - self.x[: Literal[1]] = 1 + self.x[: int] = int(1) self.y[: Unknown] = y a[: A] = A([y=]2) - a.y[: Literal[3]] = 3 + a.y[: int] = int(3) "); } + #[test] + fn test_many_literals() { + let test = inlay_hint_test( + r#" + a = 1 + b = 1.0 + c = True + d = None + e = "hello" + f = 'there' + g = f"{e} {f}" + h = t"wow %d" + i = b'\x00' + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a = 1 + b = 1.0 + c = True + d = None + e = "hello" + f = 'there' + g = f"{e} {f}" + h = t"wow %d" + i = b'\x00' + "#); + } + + #[test] + fn test_many_literals_tuple() { + let test = inlay_hint_test( + r#" + a = (1, 2) + b = (1.0, 2.0) + c = (True, False) + d = (None, None) + e = ("hel", "lo") + f = ('the', 're') + g = (f"{ft}", f"{ft}") + h = (t"wow %d", t"wow %d") + i = (b'\x01', b'\x02') + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a = (1, 2) + b = (1.0, 2.0) + c = (True, False) + d = (None, None) + e = ("hel", "lo") + f = ('the', 're') + g = (f"{ft}", f"{ft}") + h = (t"wow %d", t"wow %d") + i = (b'\x01', b'\x02') + "#); + } + + #[test] + fn test_many_literals_unpacked_tuple() { + let test = inlay_hint_test( + r#" + a1, a2 = (1, 2) + b1, b2 = (1.0, 2.0) + c1, c2 = (True, False) + d1, d2 = (None, None) + e1, e2 = ("hel", "lo") + f1, f2 = ('the', 're') + g1, g2 = (f"{ft}", f"{ft}") + h1, h2 = (t"wow %d", t"wow %d") + i1, i2 = (b'\x01', b'\x02') + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a1, a2 = (1, 2) + b1, b2 = (1.0, 2.0) + c1, c2 = (True, False) + d1, d2 = (None, None) + e1, e2 = ("hel", "lo") + f1, f2 = ('the', 're') + g1, g2 = (f"{ft}", f"{ft}") + h1, h2 = (t"wow %d", t"wow %d") + i1, i2 
= (b'\x01', b'\x02') + "#); + } + + #[test] + fn test_many_literals_multiple() { + let test = inlay_hint_test( + r#" + a1, a2 = 1, 2 + b1, b2 = 1.0, 2.0 + c1, c2 = True, False + d1, d2 = None, None + e1, e2 = "hel", "lo" + f1, f2 = 'the', 're' + g1, g2 = f"{ft}", f"{ft}" + h1, h2 = t"wow %d", t"wow %d" + i1, i2 = b'\x01', b'\x02' + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a1, a2 = 1, 2 + b1, b2 = 1.0, 2.0 + c1, c2 = True, False + d1, d2 = None, None + e1, e2 = "hel", "lo" + f1, f2 = 'the', 're' + g1, g2 = f"{ft}", f"{ft}" + h1, h2 = t"wow %d", t"wow %d" + i1, i2 = b'\x01', b'\x02' + "#); + } + + #[test] + fn test_many_literals_list() { + let test = inlay_hint_test( + r#" + a = [1, 2] + b = [1.0, 2.0] + c = [True, False] + d = [None, None] + e = ["hel", "lo"] + f = ['the', 're'] + g = [f"{ft}", f"{ft}"] + h = [t"wow %d", t"wow %d"] + i = [b'\x01', b'\x02'] + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + a[: list[Unknown | int]] = [1, 2] + b[: list[Unknown | float]] = [1.0, 2.0] + c[: list[Unknown | bool]] = [True, False] + d[: list[Unknown | None]] = [None, None] + e[: list[Unknown | str]] = ["hel", "lo"] + f[: list[Unknown | str]] = ['the', 're'] + g[: list[Unknown | str]] = [f"{ft}", f"{ft}"] + h[: list[Unknown | Template]] = [t"wow %d", t"wow %d"] + i[: list[Unknown | bytes]] = [b'\x01', b'\x02'] + "#); + } + + #[test] + fn test_simple_init_call() { + let test = inlay_hint_test( + r#" + class MyClass: + def __init__(self): + self.x: int = 1 + + x = MyClass() + y = (MyClass(), MyClass()) + a, b = MyClass(), MyClass() + c, d = (MyClass(), MyClass()) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r" + class MyClass: + def __init__(self): + self.x: int = 1 + + x[: MyClass] = MyClass() + y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) + a[: MyClass], b[: MyClass] = MyClass(), MyClass() + c[: MyClass], d[: MyClass] = (MyClass(), MyClass()) + "); + } + + #[test] + fn test_generic_init_call() { + let test = inlay_hint_test( + r#" + 
class MyClass[T, U]: + def __init__(self, x: list[T], y: tuple[U, U]): + self.x = x + self.y = y + + x = MyClass([42], ("a", "b")) + y = (MyClass([42], ("a", "b")), MyClass([42], ("a", "b"))) + a, b = MyClass([42], ("a", "b")), MyClass([42], ("a", "b")) + c, d = (MyClass([42], ("a", "b")), MyClass([42], ("a", "b"))) + "#, + ); + + assert_snapshot!(test.inlay_hints(), @r#" + class MyClass[T, U]: + def __init__(self, x: list[T], y: tuple[U, U]): + self.x[: list[T@MyClass]] = x + self.y[: tuple[U@MyClass, U@MyClass]] = y + + x[: MyClass[Unknown | int, str]] = MyClass([x=][42], [y=]("a", "b")) + y[: tuple[MyClass[Unknown | int, str], MyClass[Unknown | int, str]]] = (MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", "b"))) + a[: MyClass[Unknown | int, str]], b[: MyClass[Unknown | int, str]] = MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", "b")) + c[: MyClass[Unknown | int, str]], d[: MyClass[Unknown | int, str]] = (MyClass([x=][42], [y=]("a", "b")), MyClass([x=][42], [y=]("a", "b"))) + "#); + } + #[test] fn test_disabled_variable_types() { - let test = inlay_hint_test("x = 1"); + let test = inlay_hint_test( + " + def i(x: int, /) -> int: + return x + + x = i(1) + ", + ); assert_snapshot!( test.inlay_hints_with_settings(&InlayHintSettings { @@ -494,7 +864,10 @@ mod tests { ..Default::default() }), @r" - x = 1 + def i(x: int, /) -> int: + return x + + x = i(1) " ); } @@ -526,8 +899,8 @@ mod tests { assert_snapshot!(test.inlay_hints(), @r" def foo(x: int): pass - x[: Literal[1]] = 1 - y[: Literal[2]] = 2 + x = 1 + y = 2 foo(x) foo([x=]y) "); @@ -539,7 +912,7 @@ mod tests { " def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): self.x: int = 1 self.y: int = 2 val = MyClass() @@ -551,7 +924,7 @@ mod tests { assert_snapshot!(test.inlay_hints(), @r" def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): self.x: int = 1 self.y: int = 2 val[: MyClass] = MyClass() @@ -568,7 +941,7 @@ mod tests { " def 
foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): self.x: int = 1 self.y: int = 2 x = MyClass() @@ -580,7 +953,7 @@ mod tests { assert_snapshot!(test.inlay_hints(), @r" def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): self.x: int = 1 self.y: int = 2 x[: MyClass] = MyClass() @@ -596,7 +969,7 @@ mod tests { " def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): def x() -> int: return 1 def y() -> int: @@ -610,7 +983,7 @@ mod tests { assert_snapshot!(test.inlay_hints(), @r" def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): def x() -> int: return 1 def y() -> int: @@ -630,7 +1003,7 @@ mod tests { def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): def x() -> List[int]: return 1 def y() -> List[int]: @@ -646,7 +1019,7 @@ mod tests { def foo(x: int): pass class MyClass: - def __init__(): + def __init__(self): def x() -> List[int]: return 1 def y() -> List[int]: diff --git a/crates/ty_server/tests/e2e/inlay_hints.rs b/crates/ty_server/tests/e2e/inlay_hints.rs index 3dbbbee994..ea7c833b5f 100644 --- a/crates/ty_server/tests/e2e/inlay_hints.rs +++ b/crates/ty_server/tests/e2e/inlay_hints.rs @@ -17,7 +17,7 @@ x = 1 def foo(a: int) -> int: return a + 1 -foo(1) +y = foo(1) "; let mut server = TestServerBuilder::new()? 
@@ -39,7 +39,7 @@ foo(1) [ { "position": { - "line": 0, + "line": 5, "character": 1 }, "label": [ @@ -47,7 +47,7 @@ foo(1) "value": ": " }, { - "value": "Literal[1]" + "value": "int" } ], "kind": 1 @@ -55,7 +55,7 @@ foo(1) { "position": { "line": 5, - "character": 4 + "character": 8 }, "label": [ { From 98869f0307660648f6c3c8f5c6457acc6ea41c81 Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Mon, 10 Nov 2025 16:29:05 -0500 Subject: [PATCH 145/180] [ty] Improve generic call expression inference (#21210) ## Summary Implements https://github.com/astral-sh/ty/issues/1356 and https://github.com/astral-sh/ty/issues/136#issuecomment-3413669994. --- .../mdtest/assignment/annotations.md | 274 +++++++++++++- .../resources/mdtest/bidirectional.md | 4 +- .../resources/mdtest/dataclasses/fields.md | 3 +- crates/ty_python_semantic/src/types.rs | 26 +- .../ty_python_semantic/src/types/call/bind.rs | 98 +++-- crates/ty_python_semantic/src/types/class.rs | 2 +- .../ty_python_semantic/src/types/generics.rs | 62 +++- .../src/types/infer/builder.rs | 339 ++++++++++++++---- 8 files changed, 655 insertions(+), 153 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md index ceb588d7fe..d97092720b 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md @@ -417,6 +417,8 @@ reveal_type(x) # revealed: Literal[1] python-version = "3.12" ``` +`generic_list.py`: + ```py from typing import Literal @@ -427,14 +429,13 @@ a = f("a") reveal_type(a) # revealed: list[Literal["a"]] b: list[int | Literal["a"]] = f("a") -reveal_type(b) # revealed: list[Literal["a"] | int] +reveal_type(b) # revealed: list[int | Literal["a"]] c: list[int | str] = f("a") -reveal_type(c) # revealed: list[str | int] +reveal_type(c) # revealed: list[int | str] d: list[int | tuple[int, int]] = f((1, 2)) -# TODO: We 
could avoid reordering the union elements here. -reveal_type(d) # revealed: list[tuple[int, int] | int] +reveal_type(d) # revealed: list[int | tuple[int, int]] e: list[int] = f(True) reveal_type(e) # revealed: list[int] @@ -455,10 +456,218 @@ j: int | str = f2(True) reveal_type(j) # revealed: Literal[True] ``` -Types are not widened unnecessarily: +A function's arguments are also inferred using the type context: + +`typed_dict.py`: ```py -def id[T](x: T) -> T: +from typing import TypedDict + +class TD(TypedDict): + x: int + +def f[T](x: list[T]) -> T: + return x[0] + +a: TD = f([{"x": 0}, {"x": 1}]) +reveal_type(a) # revealed: TD + +b: TD | None = f([{"x": 0}, {"x": 1}]) +reveal_type(b) # revealed: TD + +# error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor" +# error: [invalid-key] "Invalid key for TypedDict `TD`: Unknown key "y"" +# error: [invalid-assignment] "Object of type `Unknown | dict[Unknown | str, Unknown | int]` is not assignable to `TD`" +c: TD = f([{"y": 0}, {"x": 1}]) + +# error: [missing-typed-dict-key] "Missing required key 'x' in TypedDict `TD` constructor" +# error: [invalid-key] "Invalid key for TypedDict `TD`: Unknown key "y"" +# error: [invalid-assignment] "Object of type `Unknown | dict[Unknown | str, Unknown | int]` is not assignable to `TD | None`" +c: TD | None = f([{"y": 0}, {"x": 1}]) +``` + +But not in a way that leads to assignability errors: + +`dict_any.py`: + +```py +from typing import TypedDict, Any + +class TD(TypedDict, total=False): + x: str + +class TD2(TypedDict): + x: str + +def f(self, dt: dict[str, Any], key: str): + # TODO: This should not error once typed dict assignability is implemented. + # error: [invalid-assignment] + x1: TD = dt.get(key, {}) + reveal_type(x1) # revealed: TD + + x2: TD = dt.get(key, {"x": 0}) + reveal_type(x2) # revealed: Any + + x3: TD | None = dt.get(key, {}) + # TODO: This should reveal `Any` once typed dict assignability is implemented. 
+ reveal_type(x3) # revealed: Any | None + + x4: TD | None = dt.get(key, {"x": 0}) + reveal_type(x4) # revealed: Any + + x5: TD2 = dt.get(key, {}) + reveal_type(x5) # revealed: Any + + x6: TD2 = dt.get(key, {"x": 0}) + reveal_type(x6) # revealed: Any + + x7: TD2 | None = dt.get(key, {}) + reveal_type(x7) # revealed: Any + + x8: TD2 | None = dt.get(key, {"x": 0}) + reveal_type(x8) # revealed: Any +``` + +## Prefer the declared type of generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Any + +def f[T](x: T) -> list[T]: + return [x] + +def f2[T](x: T) -> list[T] | None: + return [x] + +def f3[T](x: T) -> list[T] | dict[T, T]: + return [x] + +a = f(1) +reveal_type(a) # revealed: list[Literal[1]] + +b: list[Any] = f(1) +reveal_type(b) # revealed: list[Any] + +c: list[Any] = [1] +reveal_type(c) # revealed: list[Any] + +d: list[Any] | None = f(1) +reveal_type(d) # revealed: list[Any] + +e: list[Any] | None = [1] +reveal_type(e) # revealed: list[Any] + +f: list[Any] | None = f2(1) +# TODO: Better constraint solver. +reveal_type(f) # revealed: list[Literal[1]] | None + +g: list[Any] | dict[Any, Any] = f3(1) +# TODO: Better constraint solver. 
+reveal_type(g) # revealed: list[Literal[1]] | dict[Literal[1], Literal[1]] +``` + +We currently prefer the generic declared type regardless of its variance: + +```py +class Bivariant[T]: + pass + +class Covariant[T]: + def pop(self) -> T: + raise NotImplementedError + +class Contravariant[T]: + def push(self, value: T) -> None: + pass + +class Invariant[T]: + x: T + +def bivariant[T](x: T) -> Bivariant[T]: + return Bivariant() + +def covariant[T](x: T) -> Covariant[T]: + return Covariant() + +def contravariant[T](x: T) -> Contravariant[T]: + return Contravariant() + +def invariant[T](x: T) -> Invariant[T]: + return Invariant() + +x1 = bivariant(1) +x2 = covariant(1) +x3 = contravariant(1) +x4 = invariant(1) + +reveal_type(x1) # revealed: Bivariant[Literal[1]] +reveal_type(x2) # revealed: Covariant[Literal[1]] +reveal_type(x3) # revealed: Contravariant[Literal[1]] +reveal_type(x4) # revealed: Invariant[Literal[1]] + +x5: Bivariant[Any] = bivariant(1) +x6: Covariant[Any] = covariant(1) +x7: Contravariant[Any] = contravariant(1) +x8: Invariant[Any] = invariant(1) + +# TODO: This could reveal `Bivariant[Any]`. 
+reveal_type(x5) # revealed: Bivariant[Literal[1]] +reveal_type(x6) # revealed: Covariant[Any] +reveal_type(x7) # revealed: Contravariant[Any] +reveal_type(x8) # revealed: Invariant[Any] +``` + +## Narrow generic unions + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import reveal_type, TypedDict + +def identity[T](x: T) -> T: + return x + +def _(narrow: dict[str, str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: dict[str, str] + +def _(narrow: list[str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: list[str] + +def _(narrow: list[str] | dict[str, str], target: list[str] | dict[str, str] | None): + target = identity(narrow) + reveal_type(target) # revealed: list[str] | dict[str, str] + +class TD(TypedDict): + x: int + +def _(target: list[TD] | dict[str, TD] | None): + target = identity([{"x": 1}]) + reveal_type(target) # revealed: list[TD] + +def _(target: list[TD] | dict[str, TD] | None): + target = identity({"x": {"x": 1}}) + reveal_type(target) # revealed: dict[str, TD] +``` + +## Prefer the inferred type of non-generic classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +def identity[T](x: T) -> T: return x def lst[T](x: T) -> list[T]: @@ -466,20 +675,18 @@ def lst[T](x: T) -> list[T]: def _(i: int): a: int | None = i - b: int | None = id(i) - c: int | str | None = id(i) + b: int | None = identity(i) + c: int | str | None = identity(i) reveal_type(a) # revealed: int reveal_type(b) # revealed: int reveal_type(c) # revealed: int a: list[int | None] | None = [i] - b: list[int | None] | None = id([i]) - c: list[int | None] | int | None = id([i]) + b: list[int | None] | None = identity([i]) + c: list[int | None] | int | None = identity([i]) reveal_type(a) # revealed: list[int | None] - # TODO: these should reveal `list[int | None]` - # we currently do not use the call expression annotation as 
type context for argument inference
-    reveal_type(b)  # revealed: list[Unknown | int]
-    reveal_type(c)  # revealed: list[Unknown | int]
+    reveal_type(b)  # revealed: list[int | None]
+    reveal_type(c)  # revealed: list[int | None]
 
     a: list[int | None] | None = [i]
     b: list[int | None] | None = lst(i)
@@ -489,9 +696,44 @@ def _(i: int):
     reveal_type(c)  # revealed: list[int | None]
 
     a: list | None = []
-    b: list | None = id([])
-    c: list | int | None = id([])
+    b: list | None = identity([])
+    c: list | int | None = identity([])
     reveal_type(a)  # revealed: list[Unknown]
     reveal_type(b)  # revealed: list[Unknown]
     reveal_type(c)  # revealed: list[Unknown]
+
+def f[T](x: list[T]) -> T:
+    return x[0]
+
+def _(a: int, b: str, c: int | str):
+    x1: int = f(lst(a))
+    reveal_type(x1)  # revealed: int
+
+    x2: int | str = f(lst(a))
+    reveal_type(x2)  # revealed: int
+
+    x3: int | None = f(lst(a))
+    reveal_type(x3)  # revealed: int
+
+    x4: str = f(lst(b))
+    reveal_type(x4)  # revealed: str
+
+    x5: int | str = f(lst(b))
+    reveal_type(x5)  # revealed: str
+
+    x6: str | None = f(lst(b))
+    reveal_type(x6)  # revealed: str
+
+    x7: int | str = f(lst(c))
+    reveal_type(x7)  # revealed: int | str
+
+    x8: int | str = f(lst(c))
+    reveal_type(x8)  # revealed: int | str
+
+    # TODO: Ideally this would reveal `int | str`. This is a known limitation of our
+    # call inference solver, and would require an extra inference attempt without type
+    # context, or with type context of subsets of the union, both of which are impractical
+    # for performance reasons.
+ x9: int | str | None = f(lst(c)) + reveal_type(x9) # revealed: int | str | None ``` diff --git a/crates/ty_python_semantic/resources/mdtest/bidirectional.md b/crates/ty_python_semantic/resources/mdtest/bidirectional.md index 6b90873728..1211f92fe5 100644 --- a/crates/ty_python_semantic/resources/mdtest/bidirectional.md +++ b/crates/ty_python_semantic/resources/mdtest/bidirectional.md @@ -50,8 +50,8 @@ def _(l: list[int] | None = None): def f[T](x: T, cond: bool) -> T | list[T]: return x if cond else [x] -# TODO: no error -# error: [invalid-assignment] "Object of type `Literal[1] | list[Literal[1]]` is not assignable to `int | list[int]`" +# TODO: Better constraint solver. +# error: [invalid-assignment] l5: int | list[int] = f(1, True) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md index f091a1c991..28a69081e5 100644 --- a/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md +++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/fields.md @@ -37,7 +37,7 @@ class Data: content: list[int] = field(default_factory=list) timestamp: datetime = field(default_factory=datetime.now, init=False) -# revealed: (self: Data, content: list[int] = Unknown) -> None +# revealed: (self: Data, content: list[int] = list[int]) -> None reveal_type(Data.__init__) data = Data([1, 2, 3]) @@ -63,7 +63,6 @@ class Person: age: int | None = field(default=None, kw_only=True) role: str = field(default="user", kw_only=True) -# TODO: this would ideally show a default value of `None` for `age` # revealed: (self: Person, name: str, *, age: int | None = None, role: str = Literal["user"]) -> None reveal_type(Person.__init__) diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 2c6f449f23..d49cf0087d 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -885,20 +885,31 @@ impl<'db> Type<'db> { } 
} - // If the type is a specialized instance of the given `KnownClass`, returns the specialization. + /// If the type is a specialized instance of the given `KnownClass`, returns the specialization. pub(crate) fn known_specialization( &self, db: &'db dyn Db, known_class: KnownClass, ) -> Option> { let class_literal = known_class.try_to_class_literal(db)?; - self.specialization_of(db, Some(class_literal)) + self.specialization_of(db, class_literal) } - // If the type is a specialized instance of the given class, returns the specialization. - // - // If no class is provided, returns the specialization of any class instance. + /// If this type is a class instance, returns its specialization. + pub(crate) fn class_specialization(self, db: &'db dyn Db) -> Option> { + self.specialization_of_optional(db, None) + } + + /// If the type is a specialized instance of the given class, returns the specialization. pub(crate) fn specialization_of( + self, + db: &'db dyn Db, + expected_class: ClassLiteral<'_>, + ) -> Option> { + self.specialization_of_optional(db, Some(expected_class)) + } + + fn specialization_of_optional( self, db: &'db dyn Db, expected_class: Option>, @@ -5588,7 +5599,7 @@ impl<'db> Type<'db> { ) -> Result, CallError<'db>> { self.bindings(db) .match_parameters(db, argument_types) - .check_types(db, argument_types, &TypeContext::default(), &[]) + .check_types(db, argument_types, TypeContext::default(), &[]) } /// Look up a dunder method on the meta-type of `self` and call it. 
@@ -5640,7 +5651,8 @@ impl<'db> Type<'db> { let bindings = dunder_callable .bindings(db) .match_parameters(db, argument_types) - .check_types(db, argument_types, &tcx, &[])?; + .check_types(db, argument_types, tcx, &[])?; + if boundness == Definedness::PossiblyUndefined { return Err(CallDunderError::PossiblyUnbound(Box::new(bindings))); } diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 60868cfc3f..db1d06c32e 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -35,11 +35,11 @@ use crate::types::generics::{ use crate::types::signatures::{Parameter, ParameterForm, ParameterKind, Parameters}; use crate::types::tuple::{TupleLength, TupleType}; use crate::types::{ - BoundMethodType, ClassLiteral, DataclassFlags, DataclassParams, FieldInstance, - KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, NominalInstanceType, - PropertyInstanceType, SpecialFormType, TrackedConstraintSet, TypeAliasType, TypeContext, - UnionBuilder, UnionType, WrapperDescriptorKind, enums, ide_support, infer_isolated_expression, - todo_type, + BoundMethodType, BoundTypeVarIdentity, ClassLiteral, DataclassFlags, DataclassParams, + FieldInstance, KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, + NominalInstanceType, PropertyInstanceType, SpecialFormType, TrackedConstraintSet, + TypeAliasType, TypeContext, UnionBuilder, UnionType, WrapperDescriptorKind, enums, ide_support, + infer_isolated_expression, todo_type, }; use ruff_db::diagnostic::{Annotation, Diagnostic, SubDiagnostic, SubDiagnosticSeverity}; use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; @@ -48,7 +48,7 @@ use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; /// compatible with _all_ of the types in the union for the call to be valid. /// /// It's guaranteed that the wrapped bindings have no errors. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct Bindings<'db> { /// The type that is (hopefully) callable. callable_type: Type<'db>, @@ -150,9 +150,27 @@ impl<'db> Bindings<'db> { mut self, db: &'db dyn Db, argument_types: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, dataclass_field_specifiers: &[Type<'db>], ) -> Result> { + match self.check_types_impl( + db, + argument_types, + call_expression_tcx, + dataclass_field_specifiers, + ) { + Ok(()) => Ok(self), + Err(err) => Err(CallError(err, Box::new(self))), + } + } + + pub(crate) fn check_types_impl( + &mut self, + db: &'db dyn Db, + argument_types: &CallArguments<'_, 'db>, + call_expression_tcx: TypeContext<'db>, + dataclass_field_specifiers: &[Type<'db>], + ) -> Result<(), CallErrorKind> { for element in &mut self.elements { if let Some(mut updated_argument_forms) = element.check_types(db, argument_types, call_expression_tcx) @@ -197,16 +215,13 @@ impl<'db> Bindings<'db> { } if all_ok { - Ok(self) + Ok(()) } else if any_binding_error { - Err(CallError(CallErrorKind::BindingError, Box::new(self))) + Err(CallErrorKind::BindingError) } else if all_not_callable { - Err(CallError(CallErrorKind::NotCallable, Box::new(self))) + Err(CallErrorKind::NotCallable) } else { - Err(CallError( - CallErrorKind::PossiblyNotCallable, - Box::new(self), - )) + Err(CallErrorKind::PossiblyNotCallable) } } @@ -1365,7 +1380,7 @@ impl<'db> From> for Bindings<'db> { /// If the arguments cannot be matched to formal parameters, we store information about the /// specific errors that occurred when trying to match them up. If the callable has multiple /// overloads, we store this error information for each overload. -#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct CallableBinding<'db> { /// The type that is (hopefully) callable. 
pub(crate) callable_type: Type<'db>, @@ -1486,7 +1501,7 @@ impl<'db> CallableBinding<'db> { &mut self, db: &'db dyn Db, argument_types: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, ) -> Option { // If this callable is a bound method, prepend the self instance onto the arguments list // before checking. @@ -2267,7 +2282,7 @@ pub(crate) enum MatchingOverloadIndex { Multiple(Vec), } -#[derive(Default, Debug)] +#[derive(Default, Debug, Clone)] struct ArgumentForms { values: Vec>, conflicting: Vec, @@ -2672,7 +2687,7 @@ struct ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], - call_expression_tcx: &'a TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, @@ -2688,7 +2703,7 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], - call_expression_tcx: &'a TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, ) -> Self { @@ -2738,9 +2753,21 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { return; }; + let return_with_tcx = self + .signature + .return_ty + .zip(self.call_expression_tcx.annotation); + self.inferable_typevars = generic_context.inferable_typevars(self.db); let mut builder = SpecializationBuilder::new(self.db, self.inferable_typevars); + // Prefer the declared type of generic classes. 
+ let preferred_type_mappings = return_with_tcx.and_then(|(return_ty, tcx)| { + tcx.class_specialization(self.db)?; + builder.infer(return_ty, tcx).ok()?; + Some(builder.type_mappings().clone()) + }); + let parameters = self.signature.parameters(); for (argument_index, adjusted_argument_index, _, argument_type) in self.enumerate_argument_types() @@ -2753,9 +2780,21 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { continue; }; - if let Err(error) = builder.infer( + let filter = |declared_ty: BoundTypeVarIdentity<'_>, inferred_ty: Type<'_>| { + // Avoid widening the inferred type if it is already assignable to the + // preferred declared type. + preferred_type_mappings + .as_ref() + .and_then(|types| types.get(&declared_ty)) + .is_none_or(|preferred_ty| { + !inferred_ty.is_assignable_to(self.db, *preferred_ty) + }) + }; + + if let Err(error) = builder.infer_filter( expected_type, variadic_argument_type.unwrap_or(argument_type), + filter, ) { self.errors.push(BindingError::SpecializationError { error, @@ -2765,15 +2804,14 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { } } - // Build the specialization first without inferring the type context. - let isolated_specialization = builder.build(generic_context, *self.call_expression_tcx); + // Build the specialization first without inferring the complete type context. + let isolated_specialization = builder.build(generic_context, self.call_expression_tcx); let isolated_return_ty = self .return_ty .apply_specialization(self.db, isolated_specialization); let mut try_infer_tcx = || { - let return_ty = self.signature.return_ty?; - let call_expression_tcx = self.call_expression_tcx.annotation?; + let (return_ty, call_expression_tcx) = return_with_tcx?; // A type variable is not a useful type-context for expression inference, and applying it // to the return type can lead to confusing unions in nested generic calls. 
@@ -2781,8 +2819,8 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { return None; } - // If the return type is already assignable to the annotated type, we can ignore the - // type context and prefer the narrower inferred type. + // If the return type is already assignable to the annotated type, we ignore the rest of + // the type context and prefer the narrower inferred type. if isolated_return_ty.is_assignable_to(self.db, call_expression_tcx) { return None; } @@ -2791,8 +2829,8 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { // annotated assignment, to closer match the order of any unions written in the type annotation. builder.infer(return_ty, call_expression_tcx).ok()?; - // Otherwise, build the specialization again after inferring the type context. - let specialization = builder.build(generic_context, *self.call_expression_tcx); + // Otherwise, build the specialization again after inferring the complete type context. + let specialization = builder.build(generic_context, self.call_expression_tcx); let return_ty = return_ty.apply_specialization(self.db, specialization); Some((Some(specialization), return_ty)) @@ -3051,7 +3089,7 @@ impl<'db> MatchedArgument<'db> { pub(crate) struct UnknownParameterNameError; /// Binding information for one of the overloads of a callable. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub(crate) struct Binding<'db> { pub(crate) signature: Signature<'db>, @@ -3150,7 +3188,7 @@ impl<'db> Binding<'db> { &mut self, db: &'db dyn Db, arguments: &CallArguments<'_, 'db>, - call_expression_tcx: &TypeContext<'db>, + call_expression_tcx: TypeContext<'db>, ) { let mut checker = ArgumentTypeChecker::new( db, diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 862ed4b974..18d3d4b900 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -258,7 +258,7 @@ impl<'db> GenericAlias<'db> { ) -> Self { let tcx = tcx .annotation - .and_then(|ty| ty.specialization_of(db, Some(self.origin(db)))) + .and_then(|ty| ty.specialization_of(db, self.origin(db))) .map(|specialization| specialization.types(db)) .unwrap_or(&[]); diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 555ab47f01..11b708cf69 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -1,4 +1,5 @@ use std::cell::RefCell; +use std::collections::hash_map::Entry; use std::fmt::Display; use itertools::Itertools; @@ -1315,6 +1316,11 @@ impl<'db> SpecializationBuilder<'db> { } } + /// Returns the current set of type mappings for this specialization. 
+ pub(crate) fn type_mappings(&self) -> &FxHashMap, Type<'db>> { + &self.types + } + pub(crate) fn build( &mut self, generic_context: GenericContext<'db>, @@ -1322,7 +1328,7 @@ impl<'db> SpecializationBuilder<'db> { ) -> Specialization<'db> { let tcx_specialization = tcx .annotation - .and_then(|annotation| annotation.specialization_of(self.db, None)); + .and_then(|annotation| annotation.class_specialization(self.db)); let types = (generic_context.variables_inner(self.db).iter()).map(|(identity, variable)| { @@ -1345,19 +1351,43 @@ impl<'db> SpecializationBuilder<'db> { generic_context.specialize_partial(self.db, types) } - fn add_type_mapping(&mut self, bound_typevar: BoundTypeVarInstance<'db>, ty: Type<'db>) { - self.types - .entry(bound_typevar.identity(self.db)) - .and_modify(|existing| { - *existing = UnionType::from_elements(self.db, [*existing, ty]); - }) - .or_insert(ty); + fn add_type_mapping( + &mut self, + bound_typevar: BoundTypeVarInstance<'db>, + ty: Type<'db>, + filter: impl Fn(BoundTypeVarIdentity<'db>, Type<'db>) -> bool, + ) { + let identity = bound_typevar.identity(self.db); + match self.types.entry(identity) { + Entry::Occupied(mut entry) => { + if filter(identity, ty) { + *entry.get_mut() = UnionType::from_elements(self.db, [*entry.get(), ty]); + } + } + Entry::Vacant(entry) => { + entry.insert(ty); + } + } } + /// Infer type mappings for the specialization based on a given type and its declared type. pub(crate) fn infer( &mut self, formal: Type<'db>, actual: Type<'db>, + ) -> Result<(), SpecializationError<'db>> { + self.infer_filter(formal, actual, |_, _| true) + } + + /// Infer type mappings for the specialization based on a given type and its declared type. + /// + /// The filter predicate is provided with a type variable and the type being mapped to it. Type + /// mappings to which the predicate returns `false` will be ignored. 
+ pub(crate) fn infer_filter( + &mut self, + formal: Type<'db>, + actual: Type<'db>, + filter: impl Fn(BoundTypeVarIdentity<'db>, Type<'db>) -> bool, ) -> Result<(), SpecializationError<'db>> { if formal == actual { return Ok(()); @@ -1391,8 +1421,8 @@ impl<'db> SpecializationBuilder<'db> { // Remove the union elements from `actual` that are not related to `formal`, and vice // versa. // - // For example, if `formal` is `list[T]` and `actual` is `list[int] | None`, we want to specialize `T` - // to `int`, and so ignore the `None`. + // For example, if `formal` is `list[T]` and `actual` is `list[int] | None`, we want to + // specialize `T` to `int`, and so ignore the `None`. let actual = actual.filter_disjoint_elements(self.db, formal, self.inferable); let formal = formal.filter_disjoint_elements(self.db, actual, self.inferable); @@ -1440,7 +1470,7 @@ impl<'db> SpecializationBuilder<'db> { if remaining_actual.is_never() { return Ok(()); } - self.add_type_mapping(*formal_bound_typevar, remaining_actual); + self.add_type_mapping(*formal_bound_typevar, remaining_actual, filter); } (Type::Union(formal), _) => { // Second, if the formal is a union, and precisely one union element _is_ a typevar (not @@ -1450,7 +1480,7 @@ impl<'db> SpecializationBuilder<'db> { let bound_typevars = (formal.elements(self.db).iter()).filter_map(|ty| ty.as_typevar()); if let Ok(bound_typevar) = bound_typevars.exactly_one() { - self.add_type_mapping(bound_typevar, actual); + self.add_type_mapping(bound_typevar, actual, filter); } } @@ -1478,13 +1508,13 @@ impl<'db> SpecializationBuilder<'db> { argument: ty, }); } - self.add_type_mapping(bound_typevar, ty); + self.add_type_mapping(bound_typevar, ty, filter); } Some(TypeVarBoundOrConstraints::Constraints(constraints)) => { // Prefer an exact match first. 
for constraint in constraints.elements(self.db) { if ty == *constraint { - self.add_type_mapping(bound_typevar, ty); + self.add_type_mapping(bound_typevar, ty, filter); return Ok(()); } } @@ -1494,7 +1524,7 @@ impl<'db> SpecializationBuilder<'db> { .when_assignable_to(self.db, *constraint, self.inferable) .is_always_satisfied(self.db) { - self.add_type_mapping(bound_typevar, *constraint); + self.add_type_mapping(bound_typevar, *constraint, filter); return Ok(()); } } @@ -1504,7 +1534,7 @@ impl<'db> SpecializationBuilder<'db> { }); } _ => { - self.add_type_mapping(bound_typevar, ty); + self.add_type_mapping(bound_typevar, ty, filter); } } } diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 37cb64ff0b..74e8aca604 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -1,4 +1,4 @@ -use std::{iter, mem}; +use std::iter; use itertools::{Either, Itertools}; use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity}; @@ -211,6 +211,7 @@ const NUM_FIELD_SPECIFIERS_INLINE: usize = 1; /// don't infer its types more than once. 
pub(super) struct TypeInferenceBuilder<'db, 'ast> { context: InferContext<'db, 'ast>, + index: &'db SemanticIndex<'db>, region: InferenceRegion<'db>, @@ -349,16 +350,19 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { assert_eq!(self.scope, inference.scope); self.expressions.extend(inference.expressions.iter()); - self.declarations.extend(inference.declarations()); + self.declarations + .extend(inference.declarations(), self.multi_inference_state); if !matches!(self.region, InferenceRegion::Scope(..)) { - self.bindings.extend(inference.bindings()); + self.bindings + .extend(inference.bindings(), self.multi_inference_state); } if let Some(extra) = &inference.extra { self.extend_cycle_recovery(extra.cycle_recovery); self.context.extend(&extra.diagnostics); - self.deferred.extend(extra.deferred.iter().copied()); + self.deferred + .extend(extra.deferred.iter().copied(), self.multi_inference_state); } } @@ -377,7 +381,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.extend_cycle_recovery(extra.cycle_recovery); if !matches!(self.region, InferenceRegion::Scope(..)) { - self.bindings.extend(extra.bindings.iter().copied()); + self.bindings + .extend(extra.bindings.iter().copied(), self.multi_inference_state); } } } @@ -398,6 +403,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.scope } + /// Set the multi-inference state, returning the previous value. + fn set_multi_inference_state(&mut self, state: MultiInferenceState) -> MultiInferenceState { + std::mem::replace(&mut self.multi_inference_state, state) + } + /// Are we currently inferring types in file with deferred types? /// This is true for stub files, for files with `__future__.annotations`, and /// by default for all source files in Python 3.14 and later. 
@@ -1637,7 +1647,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - self.bindings.insert(binding, bound_ty); + self.bindings + .insert(binding, bound_ty, self.multi_inference_state); inferred_ty } @@ -1704,7 +1715,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } TypeAndQualifiers::declared(Type::unknown()) }; - self.declarations.insert(declaration, ty); + self.declarations + .insert(declaration, ty, self.multi_inference_state); } fn add_declaration_with_binding( @@ -1778,8 +1790,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } }; - self.declarations.insert(definition, declared_ty); - self.bindings.insert(definition, inferred_ty); + self.declarations + .insert(definition, declared_ty, self.multi_inference_state); + self.bindings + .insert(definition, inferred_ty, self.multi_inference_state); } fn add_unknown_declaration_with_binding( @@ -2198,7 +2212,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // `infer_function_type_params`, rather than here. if type_params.is_none() { if self.defer_annotations() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } else { let previous_typevar_binding_context = self.typevar_binding_context.replace(definition); @@ -2756,7 +2770,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Inference of bases deferred in stubs, or if any are string literals. 
if self.in_stub() || class_node.bases().iter().any(contains_string_literal) { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } else { let previous_typevar_binding_context = self.typevar_binding_context.replace(definition); @@ -3126,7 +3140,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { None => None, }; if bound_or_constraint.is_some() || default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = TypeVarIdentity::new(self.db(), &name.id, Some(definition), TypeVarKind::Pep695); @@ -3190,7 +3204,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { default, } = node; if default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = TypeVarIdentity::new( self.db(), @@ -3680,10 +3694,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Overwrite the previously inferred value, preferring later inferences, which are // likely more precise. Note that we still ensure each inference is assignable to // its declared type, so this mainly affects the IDE hover type. - let prev_multi_inference_state = mem::replace( - &mut builder.multi_inference_state, - MultiInferenceState::Overwrite, - ); + let prev_multi_inference_state = + builder.set_multi_inference_state(MultiInferenceState::Overwrite); // If we are inferring the argument multiple times, silence diagnostics to avoid duplicated warnings. 
let was_in_multi_inference = if let Some(first_tcx) = first_tcx { @@ -4625,7 +4637,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } if default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = @@ -4867,7 +4879,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { }; if bound_or_constraints.is_some() || default.is_some() { - self.deferred.insert(definition); + self.deferred.insert(definition, self.multi_inference_state); } let identity = TypeVarIdentity::new(db, target_name, Some(definition), TypeVarKind::Legacy); @@ -5961,27 +5973,156 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - /// Infer the argument types for multiple potential bindings and overloads. - fn infer_all_argument_types<'a>( + fn infer_and_check_argument_types( &mut self, ast_arguments: &ast::Arguments, - arguments: &mut CallArguments<'a, 'db>, - bindings: &Bindings<'db>, - ) { - debug_assert!( - ast_arguments.len() == arguments.len() - && arguments.len() == bindings.argument_forms().len() + argument_types: &mut CallArguments<'_, 'db>, + bindings: &mut Bindings<'db>, + call_expression_tcx: TypeContext<'db>, + ) -> Result<(), CallErrorKind> { + let db = self.db(); + + // If the type context is a union, attempt to narrow to a specific element. + let narrow_targets: &[_] = match call_expression_tcx.annotation { + // TODO: We could theoretically attempt to narrow to every element of + // the power set of this union. However, this leads to an exponential + // explosion of inference attempts, and is rarely needed in practice. + Some(Type::Union(union)) => union.elements(db), + _ => &[], + }; + + // We silence diagnostics until we successfully narrow to a specific type. 
+        let mut speculated_bindings = bindings.clone();
+        let was_in_multi_inference = self.context.set_multi_inference(true);
+
+        let mut try_narrow = |narrowed_ty| {
+            let narrowed_tcx = TypeContext::new(Some(narrowed_ty));
+
+            // Attempt to infer the argument types using the narrowed type context.
+            self.infer_all_argument_types(
+                ast_arguments,
+                argument_types,
+                bindings,
+                narrowed_tcx,
+                MultiInferenceState::Ignore,
+            );
+
+            // Ensure the argument types match their annotated types.
+            if speculated_bindings
+                .check_types_impl(
+                    db,
+                    argument_types,
+                    narrowed_tcx,
+                    &self.dataclass_field_specifiers,
+                )
+                .is_err()
+            {
+                return None;
+            }
+
+            // Ensure the inferred return type is assignable to the (narrowed) declared type.
+            //
+            // TODO: Checking assignability against the full declared type could help avoid
+            // cases where the constraint solver is not smart enough to solve complex unions.
+            // We should revisit this after the new constraint solver is implemented.
+            if !speculated_bindings
+                .return_type(db)
+                .is_assignable_to(db, narrowed_ty)
+            {
+                return None;
+            }
+
+            // Successfully narrowed to an element of the union.
+            //
+            // If necessary, infer the argument types again with diagnostics enabled.
+            if !was_in_multi_inference {
+                self.context.set_multi_inference(was_in_multi_inference);
+
+                self.infer_all_argument_types(
+                    ast_arguments,
+                    argument_types,
+                    bindings,
+                    narrowed_tcx,
+                    MultiInferenceState::Intersect,
+                );
+            }
+
+            Some(bindings.check_types_impl(
+                db,
+                argument_types,
+                narrowed_tcx,
+                &self.dataclass_field_specifiers,
+            ))
+        };
+
+        // Prefer the declared type of generic classes.
+        for narrowed_ty in narrow_targets
+            .iter()
+            .filter(|ty| ty.class_specialization(db).is_some())
+        {
+            if let Some(result) = try_narrow(*narrowed_ty) {
+                return result;
+            }
+        }
+
+        // Try the remaining elements of the union.
+        //
+        // TODO: We could also attempt an inference without type context, but this
+        // leads to similar performance issues.
+ for narrowed_ty in narrow_targets + .iter() + .filter(|ty| ty.class_specialization(db).is_none()) + { + if let Some(result) = try_narrow(*narrowed_ty) { + return result; + } + } + + // Re-enable diagnostics, and infer against the entire union as a fallback. + self.context.set_multi_inference(was_in_multi_inference); + + self.infer_all_argument_types( + ast_arguments, + argument_types, + bindings, + call_expression_tcx, + MultiInferenceState::Intersect, ); + bindings.check_types_impl( + db, + argument_types, + call_expression_tcx, + &self.dataclass_field_specifiers, + ) + } + + /// Infer the argument types for all bindings. + /// + /// Note that this method may infer the type of a given argument expression multiple times with + /// distinct type context. The provided `MultiInferenceState` can be used to dictate multi-inference + /// behavior. + fn infer_all_argument_types( + &mut self, + ast_arguments: &ast::Arguments, + arguments_types: &mut CallArguments<'_, 'db>, + bindings: &Bindings<'db>, + call_expression_tcx: TypeContext<'db>, + multi_inference_state: MultiInferenceState, + ) { + debug_assert_eq!(ast_arguments.len(), arguments_types.len()); + debug_assert_eq!(arguments_types.len(), bindings.argument_forms().len()); + + let db = self.db(); let iter = itertools::izip!( 0.., - arguments.iter_mut(), + arguments_types.iter_mut(), bindings.argument_forms().iter().copied(), ast_arguments.arguments_source_order() ); let overloads_with_binding = bindings - .into_iter() + .iter() .filter_map(|binding| { match binding.matching_overload_index() { MatchingOverloadIndex::Single(_) | MatchingOverloadIndex::Multiple(_) => { @@ -6000,7 +6141,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { }, } }) - .flatten(); + .flatten() + .collect::>(); + + let old_multi_inference_state = self.set_multi_inference_state(multi_inference_state); for (argument_index, (_, argument_type), argument_form, ast_argument) in iter { let ast_argument = match ast_argument { @@ -6022,7 +6166,6 
@@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } // Retrieve the parameter type for the current argument in a given overload and its binding. - let db = self.db(); let parameter_type = |overload: &Binding<'db>, binding: &CallableBinding<'db>| { let argument_index = if binding.bound_type.is_some() { argument_index + 1 @@ -6035,10 +6178,25 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { return None; }; - let parameter_type = + let mut parameter_type = overload.signature.parameters()[*parameter_index].annotated_type()?; - // TODO: For now, skip any parameter annotations that mention any typevars. There + // If this is a generic call, attempt to specialize the parameter type using the + // declared type context, if provided. + if let Some(generic_context) = overload.signature.generic_context + && let Some(return_ty) = overload.signature.return_ty + && let Some(declared_return_ty) = call_expression_tcx.annotation + { + let mut builder = + SpecializationBuilder::new(db, generic_context.inferable_typevars(db)); + + let _ = builder.infer(return_ty, declared_return_ty); + let specialization = builder.build(generic_context, call_expression_tcx); + + parameter_type = parameter_type.apply_specialization(db, specialization); + } + + // TODO: For now, skip any parameter annotations that still mention any typevars. There // are two issues: // // First, if we include those typevars in the type context that we use to infer the @@ -6069,26 +6227,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // If there is only a single binding and overload, we can infer the argument directly with // the unique parameter type annotation. 
- if let Ok((overload, binding)) = overloads_with_binding.clone().exactly_one() { - self.infer_expression_impl( + if let Ok((overload, binding)) = overloads_with_binding.iter().exactly_one() { + *argument_type = Some(self.infer_expression( ast_argument, TypeContext::new(parameter_type(overload, binding)), - ); + )); } else { - // Otherwise, each type is a valid independent inference of the given argument, and we may - // require different permutations of argument types to correctly perform argument expansion - // during overload evaluation, so we take the intersection of all the types we inferred for - // each argument. - // - // Note that this applies to all nested expressions within each argument. - let old_multi_inference_state = mem::replace( - &mut self.multi_inference_state, - MultiInferenceState::Intersect, - ); - // We perform inference once without any type context, emitting any diagnostics that are unrelated // to bidirectional type inference. - self.infer_expression_impl(ast_argument, TypeContext::default()); + *argument_type = Some(self.infer_expression(ast_argument, TypeContext::default())); // We then silence any diagnostics emitted during multi-inference, as the type context is only // used as a hint to infer a more assignable argument type, and should not lead to diagnostics @@ -6097,24 +6244,28 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Infer the type of each argument once with each distinct parameter type as type context. 
let parameter_types = overloads_with_binding - .clone() + .iter() .filter_map(|(overload, binding)| parameter_type(overload, binding)) .collect::>(); for parameter_type in parameter_types { - self.infer_expression_impl( - ast_argument, - TypeContext::new(Some(parameter_type)), - ); + let inferred_ty = + self.infer_expression(ast_argument, TypeContext::new(Some(parameter_type))); + + // Each type is a valid independent inference of the given argument, and we may require different + // permutations of argument types to correctly perform argument expansion during overload evaluation, + // so we take the intersection of all the types we inferred for each argument. + *argument_type = argument_type + .map(|current| IntersectionType::from_elements(db, [inferred_ty, current])) + .or(Some(inferred_ty)); } - // Restore the multi-inference state. - self.multi_inference_state = old_multi_inference_state; + // Re-enable diagnostics. self.context.set_multi_inference(was_in_multi_inference); } - - *argument_type = self.try_expression_type(ast_argument); } + + self.set_multi_inference_state(old_multi_inference_state); } fn infer_argument_type( @@ -6275,6 +6426,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let db = self.db(); match self.multi_inference_state { + MultiInferenceState::Ignore => {} + MultiInferenceState::Panic => { let previous = self.expressions.insert(expression.into(), ty); assert_eq!(previous, None); @@ -6593,7 +6746,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } validate_typed_dict_dict_literal(&self.context, typed_dict, dict, dict.into(), |expr| { - self.expression_type(expr) + item_types + .get(&expr.node_index().load()) + .copied() + .unwrap_or(Type::unknown()) }) .ok() .map(|_| Type::TypedDict(typed_dict)) @@ -7356,7 +7512,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let infer_call_arguments = |bindings: Option>| { if let Some(bindings) = bindings { let bindings = bindings.match_parameters(self.db(), &call_arguments); - 
self.infer_all_argument_types(arguments, &mut call_arguments, &bindings); + self.infer_all_argument_types( + arguments, + &mut call_arguments, + &bindings, + tcx, + MultiInferenceState::Intersect, + ); } else { let argument_forms = vec![Some(ParameterForm::Value); call_arguments.len()]; self.infer_argument_types(arguments, &mut call_arguments, &argument_forms); @@ -7374,10 +7536,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - let bindings = callable_type + let mut bindings = callable_type .bindings(self.db()) .match_parameters(self.db(), &call_arguments); - self.infer_all_argument_types(arguments, &mut call_arguments, &bindings); + + let bindings_result = + self.infer_and_check_argument_types(arguments, &mut call_arguments, &mut bindings, tcx); // Validate `TypedDict` constructor calls after argument type inference if let Some(class_literal) = callable_type.as_class_literal() { @@ -7395,14 +7559,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - let mut bindings = match bindings.check_types( - self.db(), - &call_arguments, - &tcx, - &self.dataclass_field_specifiers[..], - ) { - Ok(bindings) => bindings, - Err(CallError(_, bindings)) => { + let mut bindings = match bindings_result { + Ok(()) => bindings, + Err(_) => { bindings.report_diagnostics(&self.context, call_expression.into()); return bindings.return_type(self.db()); } @@ -10100,8 +10259,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .check_types( self.db(), &call_argument_types, - &TypeContext::default(), - &self.dataclass_field_specifiers[..], + TypeContext::default(), + &self.dataclass_field_specifiers, ) { Ok(bindings) => bindings, Err(CallError(_, bindings)) => { @@ -10833,8 +10992,14 @@ enum MultiInferenceState { Panic, /// Overwrite the previously inferred value. + /// + /// Note that `Overwrite` does not interact well with nested inferences: + /// it overwrites values that were written with `MultiInferenceState::Intersect`. 
Overwrite, + /// Ignore the newly inferred value. + Ignore, + /// Store the intersection of all types inferred for the expression. Intersect, } @@ -11078,7 +11243,11 @@ where self.0.iter().map(|(k, v)| (k, v)) } - fn insert(&mut self, key: K, value: V) { + fn insert(&mut self, key: K, value: V, multi_inference_state: MultiInferenceState) { + if matches!(multi_inference_state, MultiInferenceState::Ignore) { + return; + } + debug_assert!( !self.0.iter().any(|(existing, _)| existing == &key), "An existing entry already exists for key {key:?}", @@ -11092,17 +11261,21 @@ where } } -impl Extend<(K, V)> for VecMap +impl VecMap where K: Eq, K: std::fmt::Debug, V: std::fmt::Debug, { #[inline] - fn extend>(&mut self, iter: T) { + fn extend>( + &mut self, + iter: T, + multi_inference_state: MultiInferenceState, + ) { if cfg!(debug_assertions) { for (key, value) in iter { - self.insert(key, value); + self.insert(key, value, multi_inference_state); } } else { self.0.extend(iter); @@ -11140,7 +11313,11 @@ where V: Eq, V: std::fmt::Debug, { - fn insert(&mut self, value: V) { + fn insert(&mut self, value: V, multi_inference_state: MultiInferenceState) { + if matches!(multi_inference_state, MultiInferenceState::Ignore) { + return; + } + debug_assert!( !self.0.iter().any(|existing| existing == &value), "An existing entry already exists for {value:?}", @@ -11150,16 +11327,20 @@ where } } -impl Extend for VecSet +impl VecSet where V: Eq, V: std::fmt::Debug, { #[inline] - fn extend>(&mut self, iter: T) { + fn extend>( + &mut self, + iter: T, + multi_inference_state: MultiInferenceState, + ) { if cfg!(debug_assertions) { for value in iter { - self.insert(value); + self.insert(value, multi_inference_state); } } else { self.0.extend(iter); From 3656b448775d2bb69b03b33a49a8f61714d28db8 Mon Sep 17 00:00:00 2001 From: Ibraheem Ahmed Date: Mon, 10 Nov 2025 16:49:48 -0500 Subject: [PATCH 146/180] [ty] Use type context for inference of generic constructors (#20933) ## Summary Resolves 
https://github.com/astral-sh/ty/issues/1228. This PR is stacked on https://github.com/astral-sh/ruff/pull/21210. --- .../mdtest/assignment/annotations.md | 59 +++++++++++++++++++ .../resources/mdtest/narrow/assignment.md | 2 +- crates/ty_python_semantic/src/types.rs | 16 +++++ .../ty_python_semantic/src/types/call/bind.rs | 13 +++- .../src/types/infer/builder.rs | 10 +++- 5 files changed, 93 insertions(+), 7 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md index d97092720b..3865572726 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md @@ -310,6 +310,65 @@ reveal_type(s) # revealed: list[Literal[1]] reveal_type(s) # revealed: list[Literal[1]] ``` +## Generic constructor annotations are understood + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Any + +class X[T]: + def __init__(self, value: T): + self.value = value + +a: X[int] = X(1) +reveal_type(a) # revealed: X[int] + +b: X[int | None] = X(1) +reveal_type(b) # revealed: X[int | None] + +c: X[int | None] | None = X(1) +reveal_type(c) # revealed: X[int | None] + +def _[T](a: X[T]): + b: X[T | int] = X(a.value) + reveal_type(b) # revealed: X[T@_ | int] + +d: X[Any] = X(1) +reveal_type(d) # revealed: X[Any] + +def _(flag: bool): + # TODO: Handle unions correctly. + # error: [invalid-assignment] "Object of type `X[int]` is not assignable to `X[int | None]`" + a: X[int | None] = X(1) if flag else X(2) + reveal_type(a) # revealed: X[int | None] +``` + +```py +from dataclasses import dataclass + +@dataclass +class Y[T]: + value: T + +y1: Y[Any] = Y(value=1) +# TODO: This should reveal `Y[Any]`. 
+reveal_type(y1) # revealed: Y[int] +``` + +```py +class Z[T]: + def __new__(cls, value: T): + return super().__new__(cls) + +z1: Z[Any] = Z(1) +# TODO: This should reveal `Z[Any]`. +reveal_type(z1) # revealed: Z[int] +``` + ## PEP-604 annotations are supported ```py diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md b/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md index f692c59835..5786b465cd 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/assignment.md @@ -206,7 +206,7 @@ dd: defaultdict[int, int] = defaultdict(int) dd[0] = 0 cm: ChainMap[int, int] = ChainMap({1: 1}, {0: 0}) cm[0] = 0 -reveal_type(cm) # revealed: ChainMap[Unknown | int, Unknown | int] +reveal_type(cm) # revealed: ChainMap[int | Unknown, int | Unknown] reveal_type(l[0]) # revealed: Literal[0] reveal_type(d[0]) # revealed: Literal[0] diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index d49cf0087d..c9641c4e34 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1111,6 +1111,22 @@ impl<'db> Type<'db> { } } + /// If the type is a generic class constructor, returns the class instance type. + pub(crate) fn synthesized_constructor_return_ty(self, db: &'db dyn Db) -> Option> { + // TODO: This does not correctly handle unions or intersections. It also does not handle + // constructors that are not represented as bound methods, e.g. `__new__`, or synthesized + // dataclass initializers. 
+ if let Type::BoundMethod(method) = self + && let Type::NominalInstance(instance) = method.self_instance(db) + && method.function(db).name(db).as_str() == "__init__" + { + let class_ty = instance.class_literal(db).identity_specialization(db); + Some(Type::instance(db, class_ty)) + } else { + None + } + } + pub const fn is_property_instance(&self) -> bool { matches!(self, Type::PropertyInstance(..)) } diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index db1d06c32e..423783b420 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -2687,6 +2687,7 @@ struct ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], + callable_type: Type<'db>, call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, @@ -2703,6 +2704,7 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { arguments: &'a CallArguments<'a, 'db>, argument_matches: &'a [MatchedArgument<'db>], parameter_tys: &'a mut [Option>], + callable_type: Type<'db>, call_expression_tcx: TypeContext<'db>, return_ty: Type<'db>, errors: &'a mut Vec>, @@ -2713,6 +2715,7 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { arguments, argument_matches, parameter_tys, + callable_type, call_expression_tcx, return_ty, errors, @@ -2754,8 +2757,9 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { }; let return_with_tcx = self - .signature - .return_ty + .callable_type + .synthesized_constructor_return_ty(self.db) + .or(self.signature.return_ty) .zip(self.call_expression_tcx.annotation); self.inferable_typevars = generic_context.inferable_typevars(self.db); @@ -2763,7 +2767,9 @@ impl<'a, 'db> ArgumentTypeChecker<'a, 'db> { // Prefer the declared type of generic classes. 
let preferred_type_mappings = return_with_tcx.and_then(|(return_ty, tcx)| { - tcx.class_specialization(self.db)?; + tcx.filter_union(self.db, |ty| ty.class_specialization(self.db).is_some()) + .class_specialization(self.db)?; + builder.infer(return_ty, tcx).ok()?; Some(builder.type_mappings().clone()) }); @@ -3196,6 +3202,7 @@ impl<'db> Binding<'db> { arguments, &self.argument_matches, &mut self.parameter_tys, + self.callable_type, call_expression_tcx, self.return_ty, &mut self.errors, diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 74e8aca604..c227412752 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -6025,9 +6025,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // TODO: Checking assignability against the full declared type could help avoid // cases where the constraint solver is not smart enough to solve complex unions. // We should see revisit this after the new constraint solver is implemented. - if !speculated_bindings - .return_type(db) - .is_assignable_to(db, narrowed_ty) + if speculated_bindings + .callable_type() + .synthesized_constructor_return_ty(db) + .is_none() + && !speculated_bindings + .return_type(db) + .is_assignable_to(db, narrowed_ty) { return None; } From 039a69fa8c3aca9c548f5711a4d9e33ea4397e6f Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 10 Nov 2025 16:56:25 -0500 Subject: [PATCH 147/180] [ty] supress inlay hints for `+1` and `-1` (#21368) It's everyone's favourite language corner case! Also having kicked the tires on it, I'm pretty happy to call this (in conjunction with #21367): Fixes https://github.com/astral-sh/ty/issues/494 There's cases where you can make noisy Literal hints appear, so we can always iterate on it, but this handles like, 98% of the cases in the wild, which is great. 
--------- Co-authored-by: David Peter --- crates/ty_ide/src/inlay_hints.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs index 5bacaa04cb..353f761e90 100644 --- a/crates/ty_ide/src/inlay_hints.rs +++ b/crates/ty_ide/src/inlay_hints.rs @@ -4,7 +4,7 @@ use crate::Db; use ruff_db::files::File; use ruff_db::parsed::parsed_module; use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor, TraversalSignal}; -use ruff_python_ast::{AnyNodeRef, ArgOrKeyword, Expr, Stmt}; +use ruff_python_ast::{AnyNodeRef, ArgOrKeyword, Expr, ExprUnaryOp, Stmt, UnaryOp}; use ruff_text_size::{Ranged, TextRange, TextSize}; use ty_python_semantic::types::Type; use ty_python_semantic::types::ide_support::inlay_hint_function_argument_details; @@ -345,6 +345,9 @@ fn type_hint_is_excessive_for_expr(expr: &Expr) -> bool { // This one expands to `Template` which isn't verbose but is redundant | Expr::TString(_)=> true, + // You too `+1 and `-1`, get back here + Expr::UnaryOp(ExprUnaryOp { op: UnaryOp::UAdd | UnaryOp::USub, operand, .. 
}) => matches!(**operand, Expr::NumberLiteral(_)), + // Everything else is reasonable _ => false, } @@ -659,6 +662,8 @@ mod tests { g = f"{e} {f}" h = t"wow %d" i = b'\x00' + j = +1 + k = -1.0 "#, ); @@ -672,6 +677,8 @@ mod tests { g = f"{e} {f}" h = t"wow %d" i = b'\x00' + j = +1 + k = -1.0 "#); } @@ -688,6 +695,8 @@ mod tests { g = (f"{ft}", f"{ft}") h = (t"wow %d", t"wow %d") i = (b'\x01', b'\x02') + j = (+1, +2.0) + k = (-1, -2.0) "#, ); @@ -701,6 +710,8 @@ mod tests { g = (f"{ft}", f"{ft}") h = (t"wow %d", t"wow %d") i = (b'\x01', b'\x02') + j = (+1, +2.0) + k = (-1, -2.0) "#); } @@ -717,6 +728,8 @@ mod tests { g1, g2 = (f"{ft}", f"{ft}") h1, h2 = (t"wow %d", t"wow %d") i1, i2 = (b'\x01', b'\x02') + j1, j2 = (+1, +2.0) + k1, k2 = (-1, -2.0) "#, ); @@ -730,6 +743,8 @@ mod tests { g1, g2 = (f"{ft}", f"{ft}") h1, h2 = (t"wow %d", t"wow %d") i1, i2 = (b'\x01', b'\x02') + j1, j2 = (+1, +2.0) + k1, k2 = (-1, -2.0) "#); } @@ -746,6 +761,8 @@ mod tests { g1, g2 = f"{ft}", f"{ft}" h1, h2 = t"wow %d", t"wow %d" i1, i2 = b'\x01', b'\x02' + j1, j2 = +1, +2.0 + k1, k2 = -1, -2.0 "#, ); @@ -759,6 +776,8 @@ mod tests { g1, g2 = f"{ft}", f"{ft}" h1, h2 = t"wow %d", t"wow %d" i1, i2 = b'\x01', b'\x02' + j1, j2 = +1, +2.0 + k1, k2 = -1, -2.0 "#); } @@ -775,6 +794,8 @@ mod tests { g = [f"{ft}", f"{ft}"] h = [t"wow %d", t"wow %d"] i = [b'\x01', b'\x02'] + j = [+1, +2.0] + k = [-1, -2.0] "#, ); @@ -788,6 +809,8 @@ mod tests { g[: list[Unknown | str]] = [f"{ft}", f"{ft}"] h[: list[Unknown | Template]] = [t"wow %d", t"wow %d"] i[: list[Unknown | bytes]] = [b'\x01', b'\x02'] + j[: list[Unknown | int | float]] = [+1, +2.0] + k[: list[Unknown | int | float]] = [-1, -2.0] "#); } From 5f3e086ee4daadd4c37ef4607183c8cfcb23d1ab Mon Sep 17 00:00:00 2001 From: Jack O'Connor Date: Thu, 23 Oct 2025 10:10:10 -0700 Subject: [PATCH 148/180] [ty] implement `typing.NewType` by adding `Type::NewTypeInstance` --- crates/ty/docs/rules.md | 164 +++++--- crates/ty_ide/src/completion.rs | 3 +- 
crates/ty_ide/src/goto.rs | 15 +- .../resources/mdtest/annotations/new_types.md | 382 +++++++++++++++++- .../resources/mdtest/class/super.md | 6 +- ..._base_of_a_`NewTy…_(9847ea9eddc316b4).snap | 58 +++ ...Trying_to_subclass_a…_(fd3c73e2a9f04).snap | 37 ++ ...licit_Super_Objec…_(b753048091f275c0).snap | 152 +++---- crates/ty_python_semantic/src/types.rs | 151 ++++++- .../src/types/bound_super.rs | 3 + crates/ty_python_semantic/src/types/class.rs | 8 + .../src/types/class_base.rs | 12 +- .../src/types/definition.rs | 7 +- .../src/types/diagnostic.rs | 44 ++ .../ty_python_semantic/src/types/display.rs | 1 + .../ty_python_semantic/src/types/function.rs | 5 + .../src/types/ide_support.rs | 4 + .../src/types/infer/builder.rs | 176 +++++++- .../types/infer/builder/type_expression.rs | 10 + crates/ty_python_semantic/src/types/narrow.rs | 3 +- .../ty_python_semantic/src/types/newtype.rs | 266 ++++++++++++ .../src/types/type_ordering.rs | 4 + .../ty_python_semantic/src/types/visitor.rs | 12 + .../e2e__commands__debug_command.snap | 1 + ty.schema.json | 10 + 25 files changed, 1343 insertions(+), 191 deletions(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap create mode 100644 crates/ty_python_semantic/src/types/newtype.rs diff --git a/crates/ty/docs/rules.md b/crates/ty/docs/rules.md index 951c364462..58b1db584c 100644 --- a/crates/ty/docs/rules.md +++ b/crates/ty/docs/rules.md @@ -39,7 +39,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime. 
Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -95,7 +95,7 @@ f(int) # error Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -126,7 +126,7 @@ a = 1 Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -158,7 +158,7 @@ class C(A, B): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -190,7 +190,7 @@ class B(A): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -217,7 +217,7 @@ class B(A, A): ... Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -329,7 +329,7 @@ def test(): -> "Literal[5]": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -359,7 +359,7 @@ class C(A, B): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -385,7 +385,7 @@ t[3] # IndexError: tuple index out of range Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -474,7 +474,7 @@ an atypical memory layout. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -501,7 +501,7 @@ func("foo") # error: [invalid-argument-type] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -529,7 +529,7 @@ a: int = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -563,7 +563,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -599,7 +599,7 @@ asyncio.run(main()) Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -623,7 +623,7 @@ class A(42): ... 
# error: [invalid-base] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -650,7 +650,7 @@ with 1: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -679,7 +679,7 @@ a: str Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -723,7 +723,7 @@ except ZeroDivisionError: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -756,7 +756,7 @@ class C[U](Generic[T]): ... Default level: error · Added in 0.0.1-alpha.17 · Related issues · -View source +View source @@ -795,7 +795,7 @@ carol = Person(name="Carol", age=25) # typo! Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -830,7 +830,7 @@ def f(t: TypeVar("U")): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -864,7 +864,7 @@ class B(metaclass=f): ... Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -890,13 +890,43 @@ in a class's bases list. TypeError: can only inherit from a NamedTuple type and Generic ``` +## `invalid-newtype` + + +Default level: error · +Preview (since 1.0.0) · +Related issues · +View source + + + +**What it does** + +Checks for the creation of invalid `NewType`s + +**Why is this bad?** + +There are several requirements that you must follow when creating a `NewType`. + +**Examples** + +```python +from typing import NewType + +def get_name() -> str: ... + +Foo = NewType("Foo", int) # okay +Bar = NewType(get_name(), int) # error: The first argument to `NewType` must be a string literal +Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType` +``` + ## `invalid-overload` Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -946,7 +976,7 @@ def foo(x: int) -> int: ... 
Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -972,7 +1002,7 @@ def f(a: int = ''): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1003,7 +1033,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1037,7 +1067,7 @@ TypeError: Protocols can only inherit from other protocols, got Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1086,7 +1116,7 @@ def g(): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1111,7 +1141,7 @@ def func() -> int: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1169,7 +1199,7 @@ TODO #14889 Default level: error · Added in 0.0.1-alpha.6 · Related issues · -View source +View source @@ -1196,7 +1226,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1226,7 +1256,7 @@ TYPE_CHECKING = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1256,7 +1286,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1290,7 +1320,7 @@ f(10) # Error Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1324,7 +1354,7 @@ class C: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1359,7 +1389,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1384,7 +1414,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x' Default level: error 
· Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1417,7 +1447,7 @@ alice["age"] # KeyError Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1446,7 +1476,7 @@ func("string") # error: [no-matching-overload] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1470,7 +1500,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1496,7 +1526,7 @@ for i in 34: # TypeError: 'int' object is not iterable Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1523,7 +1553,7 @@ f(1, x=2) # Error raised here Default level: error · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -1581,7 +1611,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1611,7 +1641,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1640,7 +1670,7 @@ class B(A): ... 
# Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1667,7 +1697,7 @@ f("foo") # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1695,7 +1725,7 @@ def _(x: int): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1741,7 +1771,7 @@ class A: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1768,7 +1798,7 @@ f(x=1, y=2) # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1796,7 +1826,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1821,7 +1851,7 @@ import foo # ModuleNotFoundError: No module named 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1846,7 +1876,7 @@ print(x) # NameError: name 'x' is not defined Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1883,7 +1913,7 @@ b1 < b2 < b1 # exception raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1911,7 +1941,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1936,7 +1966,7 @@ l[1:10:0] # ValueError: slice step cannot be zero Default level: warn · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1977,7 +2007,7 @@ class SubProto(BaseProto, Protocol): Default level: warn · Added in 0.0.1-alpha.16 · Related issues · -View source +View source @@ -2065,7 +2095,7 @@ a = 20 / 0 # type: ignore Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2093,7 +2123,7 @@ A.c # AttributeError: type object 'A' has 
no attribute 'c' Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2125,7 +2155,7 @@ A()[0] # TypeError: 'A' object is not subscriptable Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2157,7 +2187,7 @@ from module import a # ImportError: cannot import name 'a' from 'module' Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2184,7 +2214,7 @@ cast(int, f()) # Redundant Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2208,7 +2238,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined Default level: warn · Added in 0.0.1-alpha.15 · Related issues · -View source +View source @@ -2266,7 +2296,7 @@ def g(): Default level: warn · Added in 0.0.1-alpha.7 · Related issues · -View source +View source @@ -2305,7 +2335,7 @@ class D(C): ... # error: [unsupported-base] Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2368,7 +2398,7 @@ def foo(x: int | str) -> int | str: Default level: ignore · Preview (since 0.0.1-alpha.1) · Related issues · -View source +View source @@ -2392,7 +2422,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime. 
Default level: ignore · Added in 0.0.1-alpha.1 · Related issues · -View source +View source diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 4ea6cc18d9..e2b03c393e 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -127,7 +127,8 @@ impl<'db> Completion<'db> { Type::NominalInstance(_) | Type::PropertyInstance(_) | Type::BoundSuper(_) - | Type::TypedDict(_) => CompletionKind::Struct, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => CompletionKind::Struct, Type::IntLiteral(_) | Type::BooleanLiteral(_) | Type::TypeIs(_) diff --git a/crates/ty_ide/src/goto.rs b/crates/ty_ide/src/goto.rs index d7a7091f94..094d2008d2 100644 --- a/crates/ty_ide/src/goto.rs +++ b/crates/ty_ide/src/goto.rs @@ -209,16 +209,11 @@ impl<'db> DefinitionsOrTargets<'db> { ty_python_semantic::types::TypeDefinition::Module(module) => { ResolvedDefinition::Module(module.file(db)?) } - ty_python_semantic::types::TypeDefinition::Class(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::Function(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::TypeVar(definition) => { - ResolvedDefinition::Definition(definition) - } - ty_python_semantic::types::TypeDefinition::TypeAlias(definition) => { + ty_python_semantic::types::TypeDefinition::Class(definition) + | ty_python_semantic::types::TypeDefinition::Function(definition) + | ty_python_semantic::types::TypeDefinition::TypeVar(definition) + | ty_python_semantic::types::TypeDefinition::TypeAlias(definition) + | ty_python_semantic::types::TypeDefinition::NewType(definition) => { ResolvedDefinition::Definition(definition) } }; diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md index 5dc14964cc..7a6e47ed32 100644 --- 
a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md @@ -1,7 +1,5 @@ # NewType -Currently, ty doesn't support `typing.NewType` in type annotations. - ## Valid forms ```py @@ -12,13 +10,389 @@ X = GenericAlias(type, ()) A = NewType("A", int) # TODO: typeshed for `typing.GenericAlias` uses `type` for the first argument. `NewType` should be special-cased # to be compatible with `type` -# error: [invalid-argument-type] "Argument to function `__new__` is incorrect: Expected `type`, found `NewType`" +# error: [invalid-argument-type] "Argument to function `__new__` is incorrect: Expected `type`, found ``" B = GenericAlias(A, ()) def _( a: A, b: B, ): - reveal_type(a) # revealed: @Todo(Support for `typing.NewType` instances in type expressions) + reveal_type(a) # revealed: A reveal_type(b) # revealed: @Todo(Support for `typing.GenericAlias` instances in type expressions) ``` + +## Subtyping + +The basic purpose of `NewType` is that it acts like a subtype of its base, but not the exact same +type (i.e. not an alias). + +```py +from typing_extensions import NewType +from ty_extensions import static_assert, is_subtype_of, is_equivalent_to + +Foo = NewType("Foo", int) +Bar = NewType("Bar", Foo) + +static_assert(is_subtype_of(Foo, int)) +static_assert(not is_equivalent_to(Foo, int)) + +static_assert(is_subtype_of(Bar, Foo)) +static_assert(is_subtype_of(Bar, int)) +static_assert(not is_equivalent_to(Bar, Foo)) + +Foo(42) +Foo(Foo(42)) # allowed: `Foo` is a subtype of `int`. +Foo(Bar(Foo(42))) # allowed: `Bar` is a subtype of `int`. +Foo(True) # allowed: `bool` is a subtype of `int`. +Foo("forty-two") # error: [invalid-argument-type] "Argument is incorrect: Expected `int`, found `Literal["forty-two"]`" + +def f(_: int): ... +def g(_: Foo): ... +def h(_: Bar): ... 
+ +f(42) +f(Foo(42)) +f(Bar(Foo(42))) + +g(42) # error: [invalid-argument-type] "Argument to function `g` is incorrect: Expected `Foo`, found `Literal[42]`" +g(Foo(42)) +g(Bar(Foo(42))) + +h(42) # error: [invalid-argument-type] "Argument to function `h` is incorrect: Expected `Bar`, found `Literal[42]`" +h(Foo(42)) # error: [invalid-argument-type] "Argument to function `h` is incorrect: Expected `Bar`, found `Foo`" +h(Bar(Foo(42))) +``` + +## Member and method lookup work + +```py +from typing_extensions import NewType + +class Foo: + foo_member: str = "hello" + def foo_method(self) -> int: + return 42 + +Bar = NewType("Bar", Foo) +Baz = NewType("Baz", Bar) +baz = Baz(Bar(Foo())) +reveal_type(baz.foo_member) # revealed: str +reveal_type(baz.foo_method()) # revealed: int +``` + +We also infer member access on the `NewType` pseudo-type itself correctly: + +```py +reveal_type(Bar.__supertype__) # revealed: type | NewType +reveal_type(Baz.__supertype__) # revealed: type | NewType +``` + +## `NewType` wrapper functions are `Callable` + +```py +from collections.abc import Callable +from typing_extensions import NewType +from ty_extensions import CallableTypeOf + +Foo = NewType("Foo", int) + +def _(obj: CallableTypeOf[Foo]): + reveal_type(obj) # revealed: (int, /) -> Foo + +def f(_: Callable[[int], Foo]): ... + +f(Foo) +map(Foo, [1, 2, 3]) + +def g(_: Callable[[str], Foo]): ... + +g(Foo) # error: [invalid-argument-type] +``` + +## `NewType` instances are `Callable` if the base type is + +```py +from typing import NewType, Callable, Any +from ty_extensions import CallableTypeOf + +N = NewType("N", int) +i = N(42) + +y: Callable[..., Any] = i # error: [invalid-assignment] "Object of type `N` is not assignable to `(...) 
-> Any`" + +# error: [invalid-type-form] "Expected the first argument to `ty_extensions.CallableTypeOf` to be a callable object, but got an object of type `N`" +def f(x: CallableTypeOf[i]): + reveal_type(x) # revealed: Unknown + +class SomethingCallable: + def __call__(self, a: str) -> bytes: + raise NotImplementedError + +N2 = NewType("N2", SomethingCallable) +j = N2(SomethingCallable()) + +z: Callable[[str], bytes] = j # fine + +def g(x: CallableTypeOf[j]): + reveal_type(x) # revealed: (a: str) -> bytes +``` + +## The name must be a string literal + +```py +from typing_extensions import NewType + +def _(name: str) -> None: + _ = NewType(name, int) # error: [invalid-newtype] "The first argument to `NewType` must be a string literal" +``` + +However, the literal doesn't necessarily need to be inline, as long as we infer it: + +```py +name = "Foo" +Foo = NewType(name, int) +reveal_type(Foo) # revealed: +``` + +## The second argument must be a class type or another newtype + +Other typing constructs like `Union` are not allowed. + +```py +from typing_extensions import NewType + +# error: [invalid-newtype] "invalid base for `typing.NewType`" +Foo = NewType("Foo", int | str) +``` + +We don't emit the "invalid base" diagnostic for `Unknown`, because that typically results from other +errors that already have a diagnostic, and there's no need to pile on. 
For example, this mistake +gives you an "Int literals are not allowed" error, and we'd rather not see an "invalid base" error +on top of that: + +```py +# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression" +Foo = NewType("Foo", 42) +``` + +## A `NewType` definition must be a simple variable assignment + +```py +from typing import NewType + +N: NewType = NewType("N", int) # error: [invalid-newtype] "A `NewType` definition must be a simple variable assignment" +``` + +## Newtypes can be cyclic in various ways + +Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's +important that we don't go into infinite recursive loops and crash with a stack overflow. In fact, +this is *why* base type evaluation is deferred; otherwise Salsa itself would crash. + +```py +from typing_extensions import NewType, reveal_type, cast + +# Define a directly cyclic newtype. +A = NewType("A", "A") +reveal_type(A) # revealed: + +# Typechecking still works. We can't construct an `A` "honestly", but we can `cast` into one. +a: A +a = 42 # error: [invalid-assignment] "Object of type `Literal[42]` is not assignable to `A`" +a = A(42) # error: [invalid-argument-type] "Argument is incorrect: Expected `A`, found `Literal[42]`" +a = cast(A, 42) +reveal_type(a) # revealed: A + +# A newtype cycle might involve more than one step. +B = NewType("B", "C") +C = NewType("C", "B") +reveal_type(B) # revealed: +reveal_type(C) # revealed: +b: B = cast(B, 42) +c: C = C(b) +reveal_type(b) # revealed: B +reveal_type(c) # revealed: C +# Cyclic types behave in surprising ways. These assignments are legal, even though B and C aren't +# the same type, because each of them is a subtype of the other. +b = c +c = b + +# Another newtype could inherit from a cyclic one. 
+D = NewType("D", C) +reveal_type(D) # revealed: +d: D +d = D(42) # error: [invalid-argument-type] "Argument is incorrect: Expected `C`, found `Literal[42]`" +d = D(c) +d = D(b) # Allowed, the same surprise as above. B and C are subtypes of each other. +reveal_type(d) # revealed: D +``` + +Normal classes can't inherit from newtypes, but generic classes can be parametrized with them, so we +also need to detect "ordinary" type cycles that happen to involve a newtype. + +```py +E = NewType("E", list["E"]) +reveal_type(E) # revealed: +e: E = E([]) +reveal_type(e) # revealed: E +reveal_type(E(E(E(E(E([])))))) # revealed: E +reveal_type(E([E([E([]), E([E([])])]), E([])])) # revealed: E +E(["foo"]) # error: [invalid-argument-type] +E(E(E(["foo"]))) # error: [invalid-argument-type] +``` + +## `NewType` wrapping preserves singleton-ness and single-valued-ness + +```py +from typing_extensions import NewType +from ty_extensions import is_singleton, is_single_valued, static_assert +from types import EllipsisType + +A = NewType("A", EllipsisType) +static_assert(is_singleton(A)) +static_assert(is_single_valued(A)) +reveal_type(type(A(...)) is EllipsisType) # revealed: Literal[True] +# TODO: This should be `Literal[True]` also. +reveal_type(A(...) is ...) 
# revealed: bool + +B = NewType("B", int) +static_assert(not is_singleton(B)) +static_assert(not is_single_valued(B)) +``` + +## `NewType`s of tuples can be iterated/unpacked + +```py +from typing import NewType + +N = NewType("N", tuple[int, str]) + +a, b = N((1, "foo")) + +reveal_type(a) # revealed: int +reveal_type(b) # revealed: str +``` + +## `isinstance` of a `NewType` instance and its base class is inferred as `Literal[True]` + +```py +from typing import NewType + +N = NewType("N", int) + +def f(x: N): + reveal_type(isinstance(x, int)) # revealed: Literal[True] +``` + +However, a `NewType` isn't a real class, so it isn't a valid second argument to `isinstance`: + +```py +def f(x: N): + # error: [invalid-argument-type] "Argument to function `isinstance` is incorrect" + reveal_type(isinstance(x, N)) # revealed: bool +``` + +Because of that, we don't generate any narrowing constraints for it: + +```py +def f(x: N | str): + if isinstance(x, N): # error: [invalid-argument-type] + reveal_type(x) # revealed: N | str + else: + reveal_type(x) # revealed: N | str +``` + +## Trying to subclass a `NewType` produces an error matching CPython + + + +```py +from typing import NewType + +X = NewType("X", int) + +class Foo(X): ... # error: [invalid-base] +``` + +## Don't narrow `NewType`-wrapped `Enum`s inside of match arms + +`Literal[Foo.X]` is actually disjoint from `N` here: + +```py +from enum import Enum +from typing import NewType + +class Foo(Enum): + X = 0 + Y = 1 + +N = NewType("N", Foo) + +def f(x: N): + match x: + case Foo.X: + reveal_type(x) # revealed: N + case Foo.Y: + reveal_type(x) # revealed: N + case _: + reveal_type(x) # revealed: N +``` + +## We don't support `NewType` on Python 3.9 + +We implement `typing.NewType` as a `KnownClass`, but in Python 3.9 it's actually a function, so all +we get is the `Any` annotations from typeshed. However, `typing_extensions.NewType` is always a +class. 
This could be improved in the future, but Python 3.9 is now end-of-life, so it's not +high-priority. + +```toml +[environment] +python-version = "3.9" +``` + +```py +from typing import NewType + +Foo = NewType("Foo", int) +reveal_type(Foo) # revealed: Any +reveal_type(Foo(42)) # revealed: Any + +from typing_extensions import NewType + +Bar = NewType("Bar", int) +reveal_type(Bar) # revealed: +reveal_type(Bar(42)) # revealed: Bar +``` + +## The base of a `NewType` can't be a protocol class or a `TypedDict` + + + +```py +from typing import NewType, Protocol, TypedDict + +class Id(Protocol): + code: int + +UserId = NewType("UserId", Id) # error: [invalid-newtype] + +class Foo(TypedDict): + a: int + +Bar = NewType("Bar", Foo) # error: [invalid-newtype] +``` + +## TODO: A `NewType` cannot be generic + +```py +from typing import Any, NewType, TypeVar + +# All of these are allowed. +A = NewType("A", list) +B = NewType("B", list[int]) +B = NewType("B", list[Any]) + +# But a free typevar is not allowed. +T = TypeVar("T") +C = NewType("C", list[T]) # TODO: should be "error: [invalid-newtype]" +``` diff --git a/crates/ty_python_semantic/resources/mdtest/class/super.md b/crates/ty_python_semantic/resources/mdtest/class/super.md index 5d4a4249b7..80a4bc9806 100644 --- a/crates/ty_python_semantic/resources/mdtest/class/super.md +++ b/crates/ty_python_semantic/resources/mdtest/class/super.md @@ -66,7 +66,7 @@ synthesized `Protocol`s that cannot be upcast to, or interpreted as, a non-`obje ```py import types -from typing_extensions import Callable, TypeIs, Literal, TypedDict +from typing_extensions import Callable, TypeIs, Literal, NewType, TypedDict def f(): ... 
@@ -81,6 +81,8 @@ class SomeTypedDict(TypedDict): x: int y: bytes +N = NewType("N", int) + # revealed: , FunctionType> reveal_type(super(object, f)) # revealed: , WrapperDescriptorType> @@ -95,6 +97,8 @@ reveal_type(super(object, Alias)) reveal_type(super(object, Foo().method)) # revealed: , property> reveal_type(super(object, Foo.some_property)) +# revealed: , int> +reveal_type(super(object, N(42))) def g(x: object) -> TypeIs[list[object]]: return isinstance(x, list) diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap new file mode 100644 index 0000000000..ee47accc9d --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_The_base_of_a_`NewTy…_(9847ea9eddc316b4).snap @@ -0,0 +1,58 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: new_types.md - NewType - The base of a `NewType` can't be a protocol class or a `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import NewType, Protocol, TypedDict + 2 | + 3 | class Id(Protocol): + 4 | code: int + 5 | + 6 | UserId = NewType("UserId", Id) # error: [invalid-newtype] + 7 | + 8 | class Foo(TypedDict): + 9 | a: int +10 | +11 | Bar = NewType("Bar", Foo) # error: [invalid-newtype] +``` + +# Diagnostics + +``` +error[invalid-newtype]: invalid base for `typing.NewType` + --> src/mdtest_snippet.py:6:28 + | +4 | code: int +5 | +6 | UserId = NewType("UserId", Id) # error: [invalid-newtype] + | ^^ type `Id` +7 | +8 | class Foo(TypedDict): + | +info: The base of a `NewType` is not allowed to be a protocol class. 
+info: rule `invalid-newtype` is enabled by default + +``` + +``` +error[invalid-newtype]: invalid base for `typing.NewType` + --> src/mdtest_snippet.py:11:22 + | + 9 | a: int +10 | +11 | Bar = NewType("Bar", Foo) # error: [invalid-newtype] + | ^^^ type `Foo` + | +info: The base of a `NewType` is not allowed to be a `TypedDict`. +info: rule `invalid-newtype` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap new file mode 100644 index 0000000000..9e4eac091a --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/new_types.md_-_NewType_-_Trying_to_subclass_a…_(fd3c73e2a9f04).snap @@ -0,0 +1,37 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: new_types.md - NewType - Trying to subclass a `NewType` produces an error matching CPython +mdtest path: crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import NewType +2 | +3 | X = NewType("X", int) +4 | +5 | class Foo(X): ... # error: [invalid-base] +``` + +# Diagnostics + +``` +error[invalid-base]: Cannot subclass an instance of NewType + --> src/mdtest_snippet.py:5:11 + | +3 | X = NewType("X", int) +4 | +5 | class Foo(X): ... 
# error: [invalid-base] + | ^ + | +info: Perhaps you were looking for: `Foo = NewType('Foo', X)` +info: Definition of class `Foo` will raise `TypeError` at runtime +info: rule `invalid-base` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap index 245c95d394..4a1c008f9f 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Explicit_Super_Objec…_(b753048091f275c0).snap @@ -46,7 +46,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/class/super.md 32 | reveal_type(super(C, C()).aa) # revealed: int 33 | reveal_type(super(C, C()).bb) # revealed: int 34 | import types - 35 | from typing_extensions import Callable, TypeIs, Literal, TypedDict + 35 | from typing_extensions import Callable, TypeIs, Literal, NewType, TypedDict 36 | 37 | def f(): ... 
38 | @@ -61,59 +61,63 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/class/super.md 47 | x: int 48 | y: bytes 49 | - 50 | # revealed: , FunctionType> - 51 | reveal_type(super(object, f)) - 52 | # revealed: , WrapperDescriptorType> - 53 | reveal_type(super(object, types.FunctionType.__get__)) - 54 | # revealed: , GenericAlias> - 55 | reveal_type(super(object, Foo[int])) - 56 | # revealed: , _SpecialForm> - 57 | reveal_type(super(object, Literal)) - 58 | # revealed: , TypeAliasType> - 59 | reveal_type(super(object, Alias)) - 60 | # revealed: , MethodType> - 61 | reveal_type(super(object, Foo().method)) - 62 | # revealed: , property> - 63 | reveal_type(super(object, Foo.some_property)) - 64 | - 65 | def g(x: object) -> TypeIs[list[object]]: - 66 | return isinstance(x, list) - 67 | - 68 | def _(x: object, y: SomeTypedDict, z: Callable[[int, str], bool]): - 69 | if hasattr(x, "bar"): - 70 | # revealed: - 71 | reveal_type(x) - 72 | # error: [invalid-super-argument] - 73 | # revealed: Unknown - 74 | reveal_type(super(object, x)) - 75 | - 76 | # error: [invalid-super-argument] - 77 | # revealed: Unknown - 78 | reveal_type(super(object, z)) + 50 | N = NewType("N", int) + 51 | + 52 | # revealed: , FunctionType> + 53 | reveal_type(super(object, f)) + 54 | # revealed: , WrapperDescriptorType> + 55 | reveal_type(super(object, types.FunctionType.__get__)) + 56 | # revealed: , GenericAlias> + 57 | reveal_type(super(object, Foo[int])) + 58 | # revealed: , _SpecialForm> + 59 | reveal_type(super(object, Literal)) + 60 | # revealed: , TypeAliasType> + 61 | reveal_type(super(object, Alias)) + 62 | # revealed: , MethodType> + 63 | reveal_type(super(object, Foo().method)) + 64 | # revealed: , property> + 65 | reveal_type(super(object, Foo.some_property)) + 66 | # revealed: , int> + 67 | reveal_type(super(object, N(42))) + 68 | + 69 | def g(x: object) -> TypeIs[list[object]]: + 70 | return isinstance(x, list) + 71 | + 72 | def _(x: object, y: SomeTypedDict, z: Callable[[int, 
str], bool]): + 73 | if hasattr(x, "bar"): + 74 | # revealed: + 75 | reveal_type(x) + 76 | # error: [invalid-super-argument] + 77 | # revealed: Unknown + 78 | reveal_type(super(object, x)) 79 | - 80 | is_list = g(x) - 81 | # revealed: TypeIs[list[object] @ x] - 82 | reveal_type(is_list) - 83 | # revealed: , bool> - 84 | reveal_type(super(object, is_list)) - 85 | - 86 | # revealed: , dict[Literal["x", "y"], int | bytes]> - 87 | reveal_type(super(object, y)) - 88 | - 89 | # The first argument to `super()` must be an actual class object; - 90 | # instances of `GenericAlias` are not accepted at runtime: - 91 | # - 92 | # error: [invalid-super-argument] - 93 | # revealed: Unknown - 94 | reveal_type(super(list[int], [])) - 95 | class Super: - 96 | def method(self) -> int: - 97 | return 42 - 98 | - 99 | class Sub(Super): -100 | def method(self: Sub) -> int: -101 | # revealed: , Sub> -102 | return reveal_type(super(self.__class__, self)).method() + 80 | # error: [invalid-super-argument] + 81 | # revealed: Unknown + 82 | reveal_type(super(object, z)) + 83 | + 84 | is_list = g(x) + 85 | # revealed: TypeIs[list[object] @ x] + 86 | reveal_type(is_list) + 87 | # revealed: , bool> + 88 | reveal_type(super(object, is_list)) + 89 | + 90 | # revealed: , dict[Literal["x", "y"], int | bytes]> + 91 | reveal_type(super(object, y)) + 92 | + 93 | # The first argument to `super()` must be an actual class object; + 94 | # instances of `GenericAlias` are not accepted at runtime: + 95 | # + 96 | # error: [invalid-super-argument] + 97 | # revealed: Unknown + 98 | reveal_type(super(list[int], [])) + 99 | class Super: +100 | def method(self) -> int: +101 | return 42 +102 | +103 | class Sub(Super): +104 | def method(self: Sub) -> int: +105 | # revealed: , Sub> +106 | return reveal_type(super(self.__class__, self)).method() ``` # Diagnostics @@ -206,14 +210,14 @@ info: rule `unresolved-attribute` is enabled by default ``` error[invalid-super-argument]: `` is an abstract/structural type in 
`super(, )` call - --> src/mdtest_snippet.py:74:21 + --> src/mdtest_snippet.py:78:21 | -72 | # error: [invalid-super-argument] -73 | # revealed: Unknown -74 | reveal_type(super(object, x)) +76 | # error: [invalid-super-argument] +77 | # revealed: Unknown +78 | reveal_type(super(object, x)) | ^^^^^^^^^^^^^^^^ -75 | -76 | # error: [invalid-super-argument] +79 | +80 | # error: [invalid-super-argument] | info: rule `invalid-super-argument` is enabled by default @@ -221,14 +225,14 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `(int, str, /) -> bool` is an abstract/structural type in `super(, (int, str, /) -> bool)` call - --> src/mdtest_snippet.py:78:17 + --> src/mdtest_snippet.py:82:17 | -76 | # error: [invalid-super-argument] -77 | # revealed: Unknown -78 | reveal_type(super(object, z)) +80 | # error: [invalid-super-argument] +81 | # revealed: Unknown +82 | reveal_type(super(object, z)) | ^^^^^^^^^^^^^^^^ -79 | -80 | is_list = g(x) +83 | +84 | is_list = g(x) | info: rule `invalid-super-argument` is enabled by default @@ -236,15 +240,15 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `types.GenericAlias` instance `list[int]` is not a valid class - --> src/mdtest_snippet.py:94:13 - | -92 | # error: [invalid-super-argument] -93 | # revealed: Unknown -94 | reveal_type(super(list[int], [])) - | ^^^^^^^^^^^^^^^^^^^^ -95 | class Super: -96 | def method(self) -> int: - | + --> src/mdtest_snippet.py:98:13 + | + 96 | # error: [invalid-super-argument] + 97 | # revealed: Unknown + 98 | reveal_type(super(list[int], [])) + | ^^^^^^^^^^^^^^^^^^^^ + 99 | class Super: +100 | def method(self) -> int: + | info: rule `invalid-super-argument` is enabled by default ``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index c9641c4e34..0d0879f348 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -66,6 
+66,7 @@ use crate::types::generics::{ use crate::types::infer::infer_unpack_types; use crate::types::mro::{Mro, MroError, MroIterator}; pub(crate) use crate::types::narrow::infer_narrowing_constraint; +use crate::types::newtype::NewType; use crate::types::signatures::{ParameterForm, walk_signature}; use crate::types::tuple::{TupleSpec, TupleSpecBuilder}; pub(crate) use crate::types::typed_dict::{TypedDictParams, TypedDictType, walk_typed_dict_type}; @@ -98,6 +99,7 @@ mod instance; mod member; mod mro; mod narrow; +mod newtype; mod protocol_class; mod signatures; mod special_form; @@ -783,6 +785,13 @@ pub enum Type<'db> { TypedDict(TypedDictType<'db>), /// An aliased type (lazily not-yet-unpacked to its value type). TypeAlias(TypeAliasType<'db>), + /// The set of Python objects that belong to a `typing.NewType` subtype. Note that + /// `typing.NewType` itself is a `Type::ClassLiteral` with `KnownClass::NewType`, and the + /// identity callables it returns (which behave like subtypes in type expressions) are of + /// `Type::KnownInstance` with `KnownInstanceType::NewType`. This `Type` refers to the objects + /// wrapped/returned by a specific one of those identity callables, or by another that inherits + /// from it. 
+ NewTypeInstance(NewType<'db>), } #[salsa::tracked] @@ -1420,6 +1429,13 @@ impl<'db> Type<'db> { self } Type::TypeAlias(alias) => alias.value_type(db).normalized_impl(db, visitor), + Type::NewTypeInstance(newtype) => { + visitor.visit(self, || { + Type::NewTypeInstance(newtype.map_base_class_type(db, |class_type| { + class_type.normalized_impl(db, visitor) + })) + }) + } Type::LiteralString | Type::AlwaysFalsy | Type::AlwaysTruthy @@ -1482,7 +1498,8 @@ impl<'db> Type<'db> { | Type::BoundSuper(_) | Type::TypeIs(_) | Type::TypedDict(_) - | Type::TypeAlias(_) => false, + | Type::TypeAlias(_) + | Type::NewTypeInstance(_) => false, } } @@ -1520,6 +1537,10 @@ impl<'db> Type<'db> { Type::GenericAlias(alias) => Some(ClassType::Generic(alias).into_callable(db)), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).try_upcast_to_callable(db) + } + // TODO: This is unsound so in future we can consider an opt-in option to disable it. Type::SubclassOf(subclass_of_ty) => match subclass_of_ty.subclass_of() { SubclassOfInner::Class(class) => Some(class.into_callable(db)), @@ -1549,6 +1570,15 @@ impl<'db> Type<'db> { false, ))), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => Some(CallableType::single( + db, + Signature::new( + Parameters::new([Parameter::positional_only(None) + .with_annotated_type(newtype.base(db).instance_type(db))]), + Some(Type::NewTypeInstance(newtype)), + ), + )), + Type::Never | Type::DataclassTransformer(_) | Type::AlwaysTruthy @@ -2429,6 +2459,22 @@ impl<'db> Type<'db> { }) } + (Type::NewTypeInstance(self_newtype), Type::NewTypeInstance(target_newtype)) => { + self_newtype.has_relation_to_impl(db, target_newtype) + } + + ( + Type::NewTypeInstance(self_newtype), + Type::NominalInstance(target_nominal_instance), + ) => self_newtype.base_class_type(db).has_relation_to_impl( + db, + target_nominal_instance.class(db), + inferable, + relation, + relation_visitor, + disjointness_visitor, + ), + 
(Type::PropertyInstance(_), _) => { KnownClass::Property.to_instance(db).has_relation_to_impl( db, @@ -2448,14 +2494,15 @@ impl<'db> Type<'db> { disjointness_visitor, ), - // Other than the special cases enumerated above, `Instance` types and typevars are - // never subtypes of any other variants + // Other than the special cases enumerated above, nominal-instance types, + // newtype-instance types, and typevars are never subtypes of any other variants (Type::TypeVar(bound_typevar), _) => { // All inferable cases should have been handled above assert!(!bound_typevar.is_inferable(db, inferable)); ConstraintSet::from(false) } (Type::NominalInstance(_), _) => ConstraintSet::from(false), + (Type::NewTypeInstance(_), _) => ConstraintSet::from(false), } } @@ -2529,6 +2576,10 @@ impl<'db> Type<'db> { }) } + (Type::NewTypeInstance(self_newtype), Type::NewTypeInstance(other_newtype)) => { + ConstraintSet::from(self_newtype.is_equivalent_to_impl(db, other_newtype)) + } + (Type::NominalInstance(first), Type::NominalInstance(second)) => { first.is_equivalent_to_impl(db, second, inferable, visitor) } @@ -3288,6 +3339,19 @@ impl<'db> Type<'db> { ) }), + (Type::NewTypeInstance(left), Type::NewTypeInstance(right)) => { + left.is_disjoint_from_impl(db, right) + } + (Type::NewTypeInstance(newtype), other) | (other, Type::NewTypeInstance(newtype)) => { + Type::instance(db, newtype.base_class_type(db)).is_disjoint_from_impl( + db, + other, + inferable, + disjointness_visitor, + relation_visitor, + ) + } + (Type::PropertyInstance(_), other) | (other, Type::PropertyInstance(_)) => { KnownClass::Property.to_instance(db).is_disjoint_from_impl( db, @@ -3432,6 +3496,9 @@ impl<'db> Type<'db> { Type::TypeIs(type_is) => type_is.is_bound(db), Type::TypedDict(_) => false, Type::TypeAlias(alias) => alias.value_type(db).is_singleton(db), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).is_singleton(db) + } } } @@ -3482,6 +3549,9 @@ impl<'db> Type<'db> { } 
Type::NominalInstance(instance) => instance.is_single_valued(db), + Type::NewTypeInstance(newtype) => { + Type::instance(db, newtype.base_class_type(db)).is_single_valued(db) + } Type::BoundSuper(_) => { // At runtime two super instances never compare equal, even if their arguments are identical. @@ -3645,7 +3715,8 @@ impl<'db> Type<'db> { | Type::ProtocolInstance(_) | Type::PropertyInstance(_) | Type::TypeIs(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } @@ -3732,6 +3803,7 @@ impl<'db> Type<'db> { Type::Dynamic(_) | Type::Never => Place::bound(self).into(), Type::NominalInstance(instance) => instance.class(db).instance_member(db, name), + Type::NewTypeInstance(newtype) => newtype.base_class_type(db).instance_member(db, name), Type::ProtocolInstance(protocol) => protocol.instance_member(db, name), @@ -4404,6 +4476,7 @@ impl<'db> Type<'db> { Type::NominalInstance(..) | Type::ProtocolInstance(..) + | Type::NewTypeInstance(..) | Type::BooleanLiteral(..) | Type::IntLiteral(..) | Type::StringLiteral(..) @@ -4842,6 +4915,8 @@ impl<'db> Type<'db> { .value_type(db) .try_bool_impl(db, allow_short_circuit, visitor) })?, + Type::NewTypeInstance(newtype) => Type::instance(db, newtype.base_class_type(db)) + .try_bool_impl(db, allow_short_circuit, visitor)?, }; Ok(truthiness) @@ -5528,7 +5603,7 @@ impl<'db> Type<'db> { SubclassOfInner::Class(class) => Type::from(class).bindings(db), }, - Type::NominalInstance(_) | Type::ProtocolInstance(_) => { + Type::NominalInstance(_) | Type::ProtocolInstance(_) | Type::NewTypeInstance(_) => { // Note that for objects that have a (possibly not callable!) `__call__` attribute, // we will get the signature of the `__call__` attribute, but will pass in the type // of the original object as the "callable type". 
That ensures that we get errors @@ -5581,6 +5656,16 @@ impl<'db> Type<'db> { Type::EnumLiteral(enum_literal) => enum_literal.enum_class_instance(db).bindings(db), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => Binding::single( + self, + Signature::new( + Parameters::new([Parameter::positional_only(None) + .with_annotated_type(newtype.base(db).instance_type(db))]), + Some(Type::NewTypeInstance(newtype)), + ), + ) + .into(), + Type::KnownInstance(known_instance) => { known_instance.instance_fallback(db).bindings(db) } @@ -5716,6 +5801,7 @@ impl<'db> Type<'db> { match ty { Type::NominalInstance(nominal) => nominal.tuple_spec(db), + Type::NewTypeInstance(newtype) => non_async_special_case(db, Type::instance(db, newtype.base_class_type(db))), Type::GenericAlias(alias) if alias.origin(db).is_tuple(db) => { Some(Cow::Owned(TupleSpec::homogeneous(todo_type!( "*tuple[] annotations" @@ -6346,6 +6432,9 @@ impl<'db> Type<'db> { Type::ClassLiteral(class) => Some(Type::instance(db, class.default_specialization(db))), Type::GenericAlias(alias) => Some(Type::instance(db, ClassType::from(alias))), Type::SubclassOf(subclass_of_ty) => Some(subclass_of_ty.to_instance(db)), + Type::KnownInstance(KnownInstanceType::NewType(newtype)) => { + Some(Type::NewTypeInstance(newtype)) + } Type::Union(union) => union.to_instance(db), // If there is no bound or constraints on a typevar `T`, `T: object` implicitly, which // has no instance type. 
Otherwise, synthesize a typevar with bound or constraints @@ -6376,7 +6465,8 @@ impl<'db> Type<'db> { | Type::AlwaysTruthy | Type::AlwaysFalsy | Type::TypeIs(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } @@ -6455,6 +6545,7 @@ impl<'db> Type<'db> { Type::KnownInstance(known_instance) => match known_instance { KnownInstanceType::TypeAliasType(alias) => Ok(Type::TypeAlias(*alias)), + KnownInstanceType::NewType(newtype) => Ok(Type::NewTypeInstance(*newtype)), KnownInstanceType::TypeVar(typevar) => { let index = semantic_index(db, scope_id.file(db)); Ok(bind_typevar( @@ -6669,9 +6760,6 @@ impl<'db> Type<'db> { Some(KnownClass::TypeVarTuple) => Ok(todo_type!( "Support for `typing.TypeVarTuple` instances in type expressions" )), - Some(KnownClass::NewType) => Ok(todo_type!( - "Support for `typing.NewType` instances in type expressions" - )), Some(KnownClass::GenericAlias) => Ok(todo_type!( "Support for `typing.GenericAlias` instances in type expressions" )), @@ -6690,6 +6778,13 @@ impl<'db> Type<'db> { .value_type(db) .in_type_expression(db, scope_id, typevar_binding_context) } + + Type::NewTypeInstance(_) => Err(InvalidTypeExpressionError { + invalid_expressions: smallvec::smallvec_inline![ + InvalidTypeExpression::InvalidType(*self, scope_id) + ], + fallback_type: Type::unknown(), + }), } } @@ -6764,6 +6859,7 @@ impl<'db> Type<'db> { // understand a more specific meta type in order to correctly handle `__getitem__`. 
Type::TypedDict(typed_dict) => SubclassOfType::from(db, typed_dict.defining_class()), Type::TypeAlias(alias) => alias.value_type(db).to_meta_type(db), + Type::NewTypeInstance(newtype) => Type::from(newtype.base_class_type(db)), } } @@ -6873,8 +6969,8 @@ impl<'db> Type<'db> { | TypeMapping::ReplaceParameterDefaults | TypeMapping::BindLegacyTypevars(_) => self, TypeMapping::Materialize(materialization_kind) => { - Type::TypeVar(bound_typevar.materialize_impl(db, *materialization_kind, visitor)) - } + Type::TypeVar(bound_typevar.materialize_impl(db, *materialization_kind, visitor)) + } } Type::KnownInstance(KnownInstanceType::TypeVar(typevar)) => match type_mapping { @@ -6909,6 +7005,12 @@ impl<'db> Type<'db> { instance.apply_type_mapping_impl(db, type_mapping, tcx, visitor) }, + Type::NewTypeInstance(newtype) => visitor.visit(self, || { + Type::NewTypeInstance(newtype.map_base_class_type(db, |class_type| { + class_type.apply_type_mapping_impl(db, type_mapping, tcx, visitor) + })) + }), + Type::ProtocolInstance(instance) => { // TODO: Add tests for materialization once subtyping/assignability is implemented for // protocols. It _might_ require changing the logic here because: @@ -7150,6 +7252,12 @@ impl<'db> Type<'db> { instance.find_legacy_typevars_impl(db, binding_context, typevars, visitor); } + Type::NewTypeInstance(_) => { + // A newtype can never be constructed from an unspecialized generic class, so it is + // impossible that we could ever find any legacy typevars in a newtype instance or + // its underlying class. 
+ } + Type::SubclassOf(subclass_of) => { subclass_of.find_legacy_typevars_impl(db, binding_context, typevars, visitor); } @@ -7305,6 +7413,7 @@ impl<'db> Type<'db> { }, Self::TypeAlias(alias) => alias.value_type(db).definition(db), + Self::NewTypeInstance(newtype) => Some(TypeDefinition::NewType(newtype.definition(db))), Self::StringLiteral(_) | Self::BooleanLiteral(_) @@ -7528,7 +7637,8 @@ impl<'db> VarianceInferable<'db> for Type<'db> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypedDict(_) - | Type::TypeAlias(_) => TypeVarVariance::Bivariant, + | Type::TypeAlias(_) + | Type::NewTypeInstance(_) => TypeVarVariance::Bivariant, }; tracing::trace!( @@ -7726,6 +7836,10 @@ pub enum KnownInstanceType<'db> { /// A single instance of `typing.Annotated` Annotated(InternedType<'db>), + + /// An identity callable created with `typing.NewType(name, base)`, which behaves like a + /// subtype of `base` in type expressions. See the `struct NewType` payload for an example. + NewType(NewType<'db>), } fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -7760,6 +7874,11 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( KnownInstanceType::Literal(ty) | KnownInstanceType::Annotated(ty) => { visitor.visit_type(db, ty.inner(db)); } + KnownInstanceType::NewType(newtype) => { + if let ClassType::Generic(generic_alias) = newtype.base_class_type(db) { + visitor.visit_generic_alias_type(db, generic_alias); + } + } } } @@ -7799,6 +7918,10 @@ impl<'db> KnownInstanceType<'db> { Self::UnionType(list) => Self::UnionType(list.normalized_impl(db, visitor)), Self::Literal(ty) => Self::Literal(ty.normalized_impl(db, visitor)), Self::Annotated(ty) => Self::Annotated(ty.normalized_impl(db, visitor)), + Self::NewType(newtype) => Self::NewType( + newtype + .map_base_class_type(db, |class_type| class_type.normalized_impl(db, visitor)), + ), } } @@ -7819,6 +7942,7 @@ impl<'db> KnownInstanceType<'db> { Self::UnionType(_) => KnownClass::UnionType, 
Self::Literal(_) => KnownClass::GenericAlias, Self::Annotated(_) => KnownClass::GenericAlias, + Self::NewType(_) => KnownClass::NewType, } } @@ -7903,6 +8027,9 @@ impl<'db> KnownInstanceType<'db> { KnownInstanceType::Annotated(_) => { f.write_str("") } + KnownInstanceType::NewType(declaration) => { + write!(f, "", declaration.name(self.db)) + } } } } diff --git a/crates/ty_python_semantic/src/types/bound_super.rs b/crates/ty_python_semantic/src/types/bound_super.rs index 011318db51..24d6573c28 100644 --- a/crates/ty_python_semantic/src/types/bound_super.rs +++ b/crates/ty_python_semantic/src/types/bound_super.rs @@ -404,6 +404,9 @@ impl<'db> BoundSuperType<'db> { .to_specialized_instance(db, [key_builder.build(), value_builder.build()]), ); } + Type::NewTypeInstance(newtype) => { + return delegate_to(Type::instance(db, newtype.base_class_type(db))); + } Type::Callable(callable) if callable.is_function_like(db) => { return delegate_to(KnownClass::FunctionType.to_instance(db)); } diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 18d3d4b900..ce6fe0d19b 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -358,6 +358,14 @@ pub enum ClassType<'db> { #[salsa::tracked] impl<'db> ClassType<'db> { + /// Return a `ClassType` representing the class `builtins.object` + pub(super) fn object(db: &'db dyn Db) -> Self { + KnownClass::Object + .to_class_literal(db) + .to_class_type(db) + .unwrap() + } + pub(super) const fn is_generic(self) -> bool { matches!(self, Self::Generic(_)) } diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs index 87417f8314..9cc09acc0f 100644 --- a/crates/ty_python_semantic/src/types/class_base.rs +++ b/crates/ty_python_semantic/src/types/class_base.rs @@ -137,6 +137,12 @@ impl<'db> ClassBase<'db> { Type::TypeAlias(alias) => Self::try_from_type(db, alias.value_type(db), 
subclass), + Type::NewTypeInstance(newtype) => ClassBase::try_from_type( + db, + Type::instance(db, newtype.base_class_type(db)), + subclass, + ), + Type::PropertyInstance(_) | Type::BooleanLiteral(_) | Type::FunctionLiteral(_) @@ -169,7 +175,11 @@ impl<'db> ClassBase<'db> { | KnownInstanceType::Field(_) | KnownInstanceType::ConstraintSet(_) | KnownInstanceType::UnionType(_) - | KnownInstanceType::Literal(_) => None, + | KnownInstanceType::Literal(_) + // A class inheriting from a newtype would make intuitive sense, but newtype + // wrappers are just identity callables at runtime, so this sort of inheritance + // doesn't work and isn't allowed. + | KnownInstanceType::NewType(_) => None, KnownInstanceType::Annotated(ty) => Self::try_from_type(db, ty.inner(db), subclass), }, diff --git a/crates/ty_python_semantic/src/types/definition.rs b/crates/ty_python_semantic/src/types/definition.rs index f98d47ba93..9095dcea44 100644 --- a/crates/ty_python_semantic/src/types/definition.rs +++ b/crates/ty_python_semantic/src/types/definition.rs @@ -12,6 +12,7 @@ pub enum TypeDefinition<'db> { Function(Definition<'db>), TypeVar(Definition<'db>), TypeAlias(Definition<'db>), + NewType(Definition<'db>), } impl TypeDefinition<'_> { @@ -21,7 +22,8 @@ impl TypeDefinition<'_> { Self::Class(definition) | Self::Function(definition) | Self::TypeVar(definition) - | Self::TypeAlias(definition) => { + | Self::TypeAlias(definition) + | Self::NewType(definition) => { let module = parsed_module(db, definition.file(db)).load(db); Some(definition.focus_range(db, &module)) } @@ -38,7 +40,8 @@ impl TypeDefinition<'_> { Self::Class(definition) | Self::Function(definition) | Self::TypeVar(definition) - | Self::TypeAlias(definition) => { + | Self::TypeAlias(definition) + | Self::NewType(definition) => { let module = parsed_module(db, definition.file(db)).load(db); Some(definition.full_range(db, &module)) } diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs 
b/crates/ty_python_semantic/src/types/diagnostic.rs index 33efcc74fd..ccb0c82472 100644 --- a/crates/ty_python_semantic/src/types/diagnostic.rs +++ b/crates/ty_python_semantic/src/types/diagnostic.rs @@ -12,6 +12,7 @@ use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::semantic_index::place::{PlaceTable, ScopedPlaceId}; use crate::semantic_index::{global_scope, place_table}; use crate::suppression::FileSuppressionId; +use crate::types::KnownInstanceType; use crate::types::call::CallError; use crate::types::class::{DisjointBase, DisjointBaseKind, Field}; use crate::types::function::KnownFunction; @@ -65,6 +66,7 @@ pub(crate) fn register_lints(registry: &mut LintRegistryBuilder) { registry.register_lint(&INVALID_LEGACY_TYPE_VARIABLE); registry.register_lint(&INVALID_PARAMSPEC); registry.register_lint(&INVALID_TYPE_ALIAS_TYPE); + registry.register_lint(&INVALID_NEWTYPE); registry.register_lint(&INVALID_METACLASS); registry.register_lint(&INVALID_OVERLOAD); registry.register_lint(&USELESS_OVERLOAD_BODY); @@ -926,6 +928,30 @@ declare_lint! { } } +declare_lint! { + /// ## What it does + /// Checks for the creation of invalid `NewType`s + /// + /// ## Why is this bad? + /// There are several requirements that you must follow when creating a `NewType`. + /// + /// ## Examples + /// ```python + /// from typing import NewType + /// + /// def get_name() -> str: ... + /// + /// Foo = NewType("Foo", int) # okay + /// Bar = NewType(get_name(), int) # error: The first argument to `NewType` must be a string literal + /// Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType` + /// ``` + pub(crate) static INVALID_NEWTYPE = { + summary: "detects invalid NewType definitions", + status: LintStatus::preview("1.0.0"), + default_level: Level::Error, + } +} + declare_lint! { /// ## What it does /// Checks for arguments to `metaclass=` that are invalid. 
@@ -2898,6 +2924,24 @@ pub(crate) fn report_invalid_or_unsupported_base( return; } + if let Type::KnownInstance(KnownInstanceType::NewType(newtype)) = base_type { + let Some(builder) = context.report_lint(&INVALID_BASE, base_node) else { + return; + }; + let mut diagnostic = builder.into_diagnostic("Cannot subclass an instance of NewType"); + diagnostic.info(format_args!( + "Perhaps you were looking for: `{} = NewType('{}', {})`", + class.name(context.db()), + class.name(context.db()), + newtype.name(context.db()), + )); + diagnostic.info(format_args!( + "Definition of class `{}` will raise `TypeError` at runtime", + class.name(context.db()) + )); + return; + } + let tuple_of_types = Type::homogeneous_tuple(db, instance_of_type); let explain_mro_entries = |diagnostic: &mut LintDiagnosticGuard| { diff --git a/crates/ty_python_semantic/src/types/display.rs b/crates/ty_python_semantic/src/types/display.rs index 42e2373134..b8a8a05ac4 100644 --- a/crates/ty_python_semantic/src/types/display.rs +++ b/crates/ty_python_semantic/src/types/display.rs @@ -618,6 +618,7 @@ impl Display for DisplayRepresentation<'_> { .fmt(f), } } + Type::NewTypeInstance(newtype) => f.write_str(newtype.name(self.db)), } } } diff --git a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 2462748d03..98a86f48df 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -1101,6 +1101,11 @@ fn is_instance_truthiness<'db>( Type::NominalInstance(..) => always_true_if(is_instance(&ty)), + Type::NewTypeInstance(newtype) => always_true_if(is_instance(&Type::instance( + db, + newtype.base_class_type(db), + ))), + Type::BooleanLiteral(..) | Type::BytesLiteral(..) | Type::IntLiteral(..) 
diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index 94cb15993c..cd29efaeac 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -128,6 +128,10 @@ impl<'db> AllMembers<'db> { } } + Type::NewTypeInstance(newtype) => { + self.extend_with_type(db, Type::instance(db, newtype.base_class_type(db))); + } + Type::ClassLiteral(class_literal) if class_literal.is_typed_dict(db) => { self.extend_with_type(db, KnownClass::TypedDictFallback.to_class_literal(db)); } diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index c227412752..2cb4bf3274 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -59,8 +59,8 @@ use crate::types::diagnostic::{ DIVISION_BY_ZERO, DUPLICATE_KW_ONLY, INCONSISTENT_MRO, INVALID_ARGUMENT_TYPE, INVALID_ASSIGNMENT, INVALID_ATTRIBUTE_ACCESS, INVALID_BASE, INVALID_DECLARATION, INVALID_GENERIC_CLASS, INVALID_KEY, INVALID_LEGACY_TYPE_VARIABLE, INVALID_METACLASS, - INVALID_NAMED_TUPLE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, INVALID_PARAMSPEC, - INVALID_PROTOCOL, INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL, + INVALID_NAMED_TUPLE, INVALID_NEWTYPE, INVALID_OVERLOAD, INVALID_PARAMETER_DEFAULT, + INVALID_PARAMSPEC, INVALID_PROTOCOL, INVALID_TYPE_FORM, INVALID_TYPE_GUARD_CALL, INVALID_TYPE_VARIABLE_CONSTRAINTS, IncompatibleBases, NON_SUBSCRIPTABLE, POSSIBLY_MISSING_IMPLICIT_CALL, POSSIBLY_MISSING_IMPORT, SUBCLASS_OF_FINAL_CLASS, UNDEFINED_REVEAL, UNRESOLVED_ATTRIBUTE, UNRESOLVED_GLOBAL, UNRESOLVED_IMPORT, @@ -90,6 +90,7 @@ use crate::types::generics::{ use crate::types::infer::nearest_enclosing_function; use crate::types::instance::SliceLiteral; use crate::types::mro::MroErrorKind; +use crate::types::newtype::NewType; use crate::types::signatures::Signature; use 
crate::types::subclass_of::SubclassOfInner; use crate::types::tuple::{Tuple, TupleLength, TupleSpec, TupleType}; @@ -3884,7 +3885,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::AlwaysTruthy | Type::AlwaysFalsy | Type::TypeIs(_) - | Type::TypedDict(_) => { + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => { // TODO: We could use the annotated parameter type of `__setattr__` as type context here. // However, we would still have to perform the first inference without type context. let value_ty = infer_value_ty(self, TypeContext::default()); @@ -4454,6 +4456,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Some(KnownClass::ParamSpec) => { self.infer_paramspec(target, call_expr, definition) } + Some(KnownClass::NewType) => { + self.infer_newtype_expression(target, call_expr, definition) + } Some(_) | None => { self.infer_call_expression_impl(call_expr, callable_type, tcx) } @@ -4892,14 +4897,114 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ))) } + fn infer_newtype_expression( + &mut self, + target: &ast::Expr, + call_expr: &ast::ExprCall, + definition: Definition<'db>, + ) -> Type<'db> { + fn error<'db>( + context: &InferContext<'db, '_>, + message: impl std::fmt::Display, + node: impl Ranged, + ) -> Type<'db> { + if let Some(builder) = context.report_lint(&INVALID_NEWTYPE, node) { + builder.into_diagnostic(message); + } + Type::unknown() + } + + let db = self.db(); + let arguments = &call_expr.arguments; + + if !arguments.keywords.is_empty() { + return error( + &self.context, + "Keyword arguments are not supported in `NewType` creation", + call_expr, + ); + } + + if let Some(starred) = arguments.args.iter().find(|arg| arg.is_starred_expr()) { + return error( + &self.context, + "Starred arguments are not supported in `NewType` creation", + starred, + ); + } + + if arguments.args.len() != 2 { + return error( + &self.context, + format!( + "Wrong number of arguments in `NewType` creation, expected 2, found {}", + arguments.args.len() + ), 
+ call_expr, + ); + } + + let name_param_ty = self.infer_expression(&arguments.args[0], TypeContext::default()); + + let Some(name) = name_param_ty.as_string_literal().map(|name| name.value(db)) else { + return error( + &self.context, + "The first argument to `NewType` must be a string literal", + call_expr, + ); + }; + + let ast::Expr::Name(ast::ExprName { + id: target_name, .. + }) = target + else { + return error( + &self.context, + "A `NewType` definition must be a simple variable assignment", + target, + ); + }; + + if name != target_name { + return error( + &self.context, + format_args!( + "The name of a `NewType` (`{name}`) must match \ + the name of the variable it is assigned to (`{target_name}`)" + ), + target, + ); + } + + // Inference of `tp` must be deferred, to avoid cycles. + self.deferred.insert(definition, self.multi_inference_state); + + Type::KnownInstance(KnownInstanceType::NewType(NewType::new( + db, + ast::name::Name::from(name), + definition, + None, + ))) + } + fn infer_assignment_deferred(&mut self, value: &ast::Expr) { - // Infer deferred bounds/constraints/defaults of a legacy TypeVar / ParamSpec. + // Infer deferred bounds/constraints/defaults of a legacy TypeVar / ParamSpec / NewType. let ast::Expr::Call(ast::ExprCall { func, arguments, .. 
}) = value else { return; }; + let func_ty = self + .try_expression_type(func) + .unwrap_or_else(|| self.infer_expression(func, TypeContext::default())); + let known_class = func_ty + .as_class_literal() + .and_then(|cls| cls.known(self.db())); + if let Some(KnownClass::NewType) = known_class { + self.infer_newtype_assignment_deferred(arguments); + return; + } for arg in arguments.args.iter().skip(1) { self.infer_type_expression(arg); } @@ -4907,12 +5012,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.infer_type_expression(&bound.value); } if let Some(default) = arguments.find_keyword("default") { - let func_ty = self - .try_expression_type(func) - .unwrap_or_else(|| self.infer_expression(func, TypeContext::default())); - if func_ty.as_class_literal().is_some_and(|class_literal| { - class_literal.is_known(self.db(), KnownClass::ParamSpec) - }) { + if let Some(KnownClass::ParamSpec) = known_class { self.infer_paramspec_default(&default.value); } else { self.infer_type_expression(&default.value); @@ -4920,6 +5020,34 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } + // Infer the deferred base type of a NewType. + fn infer_newtype_assignment_deferred(&mut self, arguments: &ast::Arguments) { + match self.infer_type_expression(&arguments.args[1]) { + Type::NominalInstance(_) | Type::NewTypeInstance(_) => {} + // `Unknown` is likely to be the result of an unresolved import or a typo, which will + // already get a diagnostic, so don't pile on an extra diagnostic here. 
+ Type::Dynamic(DynamicType::Unknown) => {} + other_type => { + if let Some(builder) = self + .context + .report_lint(&INVALID_NEWTYPE, &arguments.args[1]) + { + let mut diag = builder.into_diagnostic("invalid base for `typing.NewType`"); + diag.set_primary_message(format!("type `{}`", other_type.display(self.db()))); + if matches!(other_type, Type::ProtocolInstance(_)) { + diag.info("The base of a `NewType` is not allowed to be a protocol class."); + } else if matches!(other_type, Type::TypedDict(_)) { + diag.info("The base of a `NewType` is not allowed to be a `TypedDict`."); + } else { + diag.info( + "The base of a `NewType` must be a class type or another `NewType`.", + ); + } + } + } + } + } + fn infer_annotated_assignment_statement(&mut self, assignment: &ast::StmtAnnAssign) { if assignment.target.is_name_expr() { self.infer_definition(assignment); @@ -7483,11 +7611,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .to_class_type(self.db()) .is_none_or(|enum_class| !class.is_subclass_of(self.db(), enum_class)) { - // Inference of correctly-placed `TypeVar` and `ParamSpec` definitions is done in - // `TypeInferenceBuilder::infer_legacy_typevar` and - // `TypeInferenceBuilder::infer_paramspec`, and doesn't use the full - // call-binding machinery. If we reach here, it means that someone is trying to - // instantiate a `typing.TypeVar` and `typing.ParamSpec` in an invalid context. + // Inference of correctly-placed `TypeVar`, `ParamSpec`, and `NewType` definitions + // is done in `infer_legacy_typevar`, `infer_paramspec`, and + // `infer_newtype_expression`, and doesn't use the full call-binding machinery. If + // we reach here, it means that someone is trying to instantiate one of these in an + // invalid context. 
match class.known(self.db()) { Some(KnownClass::TypeVar | KnownClass::ExtensionsTypeVar) => { if let Some(builder) = self @@ -7509,6 +7637,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } } + Some(KnownClass::NewType) => { + if let Some(builder) = + self.context.report_lint(&INVALID_NEWTYPE, call_expression) + { + builder.into_diagnostic( + "A `NewType` definition must be a simple variable assignment", + ); + } + } _ => {} } @@ -8577,7 +8714,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), ) => { let unary_dunder_method = match op { ast::UnaryOp::Invert => "__invert__", @@ -9025,7 +9163,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), Type::FunctionLiteral(_) | Type::BooleanLiteral(_) | Type::Callable(..) 
@@ -9054,7 +9193,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::BoundSuper(_) | Type::TypeVar(_) | Type::TypeIs(_) - | Type::TypedDict(_), + | Type::TypedDict(_) + | Type::NewTypeInstance(_), op, ) => Type::try_call_bin_op(self.db(), left_ty, op, right_ty) .map(|outcome| outcome.return_type(self.db())) diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index 7694839c15..9fc1f35b2a 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -828,6 +828,16 @@ impl<'db> TypeInferenceBuilder<'db, '_> { self.infer_type_expression(slice); todo_type!("Generic specialization of typing.Annotated") } + KnownInstanceType::NewType(newtype) => { + self.infer_type_expression(&subscript.slice); + if let Some(builder) = self.context.report_lint(&INVALID_TYPE_FORM, subscript) { + builder.into_diagnostic(format_args!( + "`{}` is a `NewType` and cannot be specialized", + newtype.name(self.db()) + )); + } + Type::unknown() + } }, Type::Dynamic(DynamicType::Todo(_)) => { self.infer_type_expression(slice); diff --git a/crates/ty_python_semantic/src/types/narrow.rs b/crates/ty_python_semantic/src/types/narrow.rs index 2e81c92448..8dc6f2f626 100644 --- a/crates/ty_python_semantic/src/types/narrow.rs +++ b/crates/ty_python_semantic/src/types/narrow.rs @@ -252,7 +252,8 @@ impl ClassInfoConstraintFunction { | Type::TypeIs(_) | Type::WrapperDescriptor(_) | Type::DataclassTransformer(_) - | Type::TypedDict(_) => None, + | Type::TypedDict(_) + | Type::NewTypeInstance(_) => None, } } } diff --git a/crates/ty_python_semantic/src/types/newtype.rs b/crates/ty_python_semantic/src/types/newtype.rs new file mode 100644 index 0000000000..fe08fa7bee --- /dev/null +++ b/crates/ty_python_semantic/src/types/newtype.rs @@ -0,0 +1,266 @@ +use std::collections::BTreeSet; + +use crate::Db; +use 
crate::semantic_index::definition::{Definition, DefinitionKind}; +use crate::types::constraints::ConstraintSet; +use crate::types::{ClassType, Type, definition_expression_type, visitor}; +use ruff_db::parsed::parsed_module; +use ruff_python_ast as ast; + +/// A `typing.NewType` declaration, either from the perspective of the +/// identity-callable-that-acts-like-a-subtype-in-type-expressions returned by the call to +/// `typing.NewType(...)`, or from the perspective of instances of that subtype returned by the +/// identity callable. For example: +/// +/// ```py +/// import typing +/// Foo = typing.NewType("Foo", int) +/// x = Foo(42) +/// ``` +/// +/// The revealed types there are: +/// - `typing.NewType`: `Type::ClassLiteral(ClassLiteral)` with `KnownClass::NewType`. +/// - `Foo`: `Type::KnownInstance(KnownInstanceType::NewType(NewType { .. }))` +/// - `x`: `Type::NewTypeInstance(NewType { .. })` +/// +/// # Ordering +/// Ordering is based on the newtype's salsa-assigned id and not on its values. +/// The id may change between runs, or when the newtype was garbage collected and recreated. +#[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] +#[derive(PartialOrd, Ord)] +pub struct NewType<'db> { + /// The name of this NewType (e.g. `"Foo"`) + #[returns(ref)] + pub name: ast::name::Name, + + /// The binding where this NewType is first created. + pub definition: Definition<'db>, + + // The base type of this NewType, if it's eagerly specified. This is typically `None` when a + // `NewType` is first encountered, because the base type is lazy/deferred to avoid panics in + // the recursive case. This becomes `Some` when a `NewType` is modified by methods like + // `.normalize()`. Callers should use the `base` method instead of accessing this field + // directly. 
+ eager_base: Option>, +} + +impl get_size2::GetSize for NewType<'_> {} + +#[salsa::tracked] +impl<'db> NewType<'db> { + pub fn base(self, db: &'db dyn Db) -> NewTypeBase<'db> { + match self.eager_base(db) { + Some(base) => base, + None => self.lazy_base(db), + } + } + + #[salsa::tracked( + cycle_initial=lazy_base_cycle_initial, + heap_size=ruff_memory_usage::heap_size + )] + fn lazy_base(self, db: &'db dyn Db) -> NewTypeBase<'db> { + // `TypeInferenceBuilder` emits diagnostics for invalid `NewType` definitions that show up + // in assignments, but invalid definitions still get here, and also `NewType` might show up + // in places that aren't definitions at all. Fall back to `object` in all error cases. + let object_fallback = NewTypeBase::ClassType(ClassType::object(db)); + let definition = self.definition(db); + let module = parsed_module(db, definition.file(db)).load(db); + let DefinitionKind::Assignment(assignment) = definition.kind(db) else { + return object_fallback; + }; + let Some(call_expr) = assignment.value(&module).as_call_expr() else { + return object_fallback; + }; + let Some(second_arg) = call_expr.arguments.args.get(1) else { + return object_fallback; + }; + match definition_expression_type(db, definition, second_arg) { + Type::NominalInstance(nominal_instance_type) => { + NewTypeBase::ClassType(nominal_instance_type.class(db)) + } + Type::NewTypeInstance(newtype) => NewTypeBase::NewType(newtype), + // This branch includes bases that are other typing constructs besides classes and + // other newtypes, for example unions. `NewType("Foo", int | str)` is not allowed. + _ => object_fallback, + } + } + + fn iter_bases(self, db: &'db dyn Db) -> NewTypeBaseIter<'db> { + NewTypeBaseIter { + current: Some(self), + seen_before: BTreeSet::new(), + db, + } + } + + // Walk the `NewTypeBase` chain to find the underlying `ClassType`. There might not be a + // `ClassType` if this `NewType` is cyclical, and we fall back to `object` in that case. 
+ pub fn base_class_type(self, db: &'db dyn Db) -> ClassType<'db> { + for base in self.iter_bases(db) { + if let NewTypeBase::ClassType(class_type) = base { + return class_type; + } + } + ClassType::object(db) + } + + pub(crate) fn is_equivalent_to_impl(self, db: &'db dyn Db, other: Self) -> bool { + // Two instances of the "same" `NewType` won't compare == if one of them has an eagerly + // evaluated base (or a normalized base, etc.) and the other doesn't, so we only check for + // equality of the `definition`. + self.definition(db) == other.definition(db) + } + + // Since a regular class can't inherit from a newtype, the only way for one newtype to be a + // subtype of another is to have the other in its chain of newtype bases. Once we reach the + // base class, we don't have to keep looking. + pub(crate) fn has_relation_to_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> { + if self.is_equivalent_to_impl(db, other) { + return ConstraintSet::from(true); + } + for base in self.iter_bases(db) { + if let NewTypeBase::NewType(base_newtype) = base { + if base_newtype.is_equivalent_to_impl(db, other) { + return ConstraintSet::from(true); + } + } + } + ConstraintSet::from(false) + } + + pub(crate) fn is_disjoint_from_impl(self, db: &'db dyn Db, other: Self) -> ConstraintSet<'db> { + // Two NewTypes are disjoint if they're not equal and neither inherits from the other. + // NewTypes have single inheritance, and a regular class can't inherit from a NewType, so + // it's not possible for some third type to multiply-inherit from both. + let mut self_not_subtype_of_other = self.has_relation_to_impl(db, other).negate(db); + let other_not_subtype_of_self = other.has_relation_to_impl(db, self).negate(db); + self_not_subtype_of_other.intersect(db, other_not_subtype_of_self) + } + + /// Create a new `NewType` by mapping the underlying `ClassType`. This descends through any + /// number of nested `NewType` layers and rebuilds the whole chain. 
In the rare case of cyclic + /// `NewType`s with no underlying `ClassType`, this has no effect and does not call `f`. + pub(crate) fn map_base_class_type( + self, + db: &'db dyn Db, + f: impl FnOnce(ClassType<'db>) -> ClassType<'db>, + ) -> Self { + // Modifying the base class type requires unwrapping and re-wrapping however many base + // newtypes there are between here and there. Normally recursion would be natural for this, + // but the bases iterator does cycle detection, and I think using that with a stack is a + // little cleaner than conjuring up yet another `CycleDetector` visitor and yet another + // layer of "*_impl" nesting. Also if there is no base class type, returning `self` + // unmodified seems more correct than injecting some default type like `object` into the + // cycle, which is what `CycleDetector` would do if we used it here. + let mut inner_newtype_stack = Vec::new(); + for base in self.iter_bases(db) { + match base { + // Build up the stack of intermediate newtypes that we'll need to re-wrap after + // we've mapped the `ClassType`. + NewTypeBase::NewType(base_newtype) => inner_newtype_stack.push(base_newtype), + // We've reached the `ClassType`. + NewTypeBase::ClassType(base_class_type) => { + // Call `f`. + let mut mapped_base = NewTypeBase::ClassType(f(base_class_type)); + // Re-wrap the mapped base class in however many newtypes we unwrapped. + for inner_newtype in inner_newtype_stack.into_iter().rev() { + mapped_base = NewTypeBase::NewType(NewType::new( + db, + inner_newtype.name(db).clone(), + inner_newtype.definition(db), + Some(mapped_base), + )); + } + return NewType::new( + db, + self.name(db).clone(), + self.definition(db), + Some(mapped_base), + ); + } + } + } + // If we get here, there is no `ClassType` (because this newtype is cyclic), and we don't + // call `f` at all. 
+ self + } +} + +pub(crate) fn walk_newtype_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( + db: &'db dyn Db, + newtype: NewType<'db>, + visitor: &V, +) { + visitor.visit_type(db, newtype.base(db).instance_type(db)); +} + +/// `typing.NewType` typically wraps a class type, but it can also wrap another newtype. +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, get_size2::GetSize, salsa::Update)] +pub enum NewTypeBase<'db> { + ClassType(ClassType<'db>), + NewType(NewType<'db>), +} + +impl<'db> NewTypeBase<'db> { + pub fn instance_type(self, db: &'db dyn Db) -> Type<'db> { + match self { + NewTypeBase::ClassType(class_type) => Type::instance(db, class_type), + NewTypeBase::NewType(newtype) => Type::NewTypeInstance(newtype), + } + } +} + +/// An iterator over the transitive bases of a `NewType`. In the most common case, e.g. +/// `Foo = NewType("Foo", int)`, this yields the one `NewTypeBase::ClassType` (e.g. `int`). For +/// newtypes that wrap other newtypes, this iterator yields the `NewTypeBase::NewType`s (not +/// including `self`) before finally yielding the `NewTypeBase::ClassType`. In the pathological +/// case of cyclic newtypes like `Foo = NewType("Foo", "Foo")`, this iterator yields the unique +/// `NewTypeBase::NewType`s (not including `self`), detects the cycle, and then stops. +/// +/// Note that this does *not* detect indirect cycles that go through a proper class, like this: +/// ```py +/// Foo = NewType("Foo", list["Foo"]) +/// ``` +/// As far as this iterator is concerned, that's the "common case", and it yields the one +/// `NewTypeBase::ClassType` for `list[Foo]`. Functions like `normalize` that continue recursing +/// over the base class need to pass down a cycle-detecting visitor as usual. 
+struct NewTypeBaseIter<'db> { + current: Option>, + seen_before: BTreeSet>, + db: &'db dyn Db, +} + +impl<'db> Iterator for NewTypeBaseIter<'db> { + type Item = NewTypeBase<'db>; + + fn next(&mut self) -> Option { + let current = self.current?; + match current.base(self.db) { + NewTypeBase::ClassType(base_class_type) => { + self.current = None; + Some(NewTypeBase::ClassType(base_class_type)) + } + NewTypeBase::NewType(base_newtype) => { + // Doing the insertion only in this branch avoids allocating in the common case. + self.seen_before.insert(current); + if self.seen_before.contains(&base_newtype) { + // Cycle detected. Stop iterating. + self.current = None; + None + } else { + self.current = Some(base_newtype); + Some(NewTypeBase::NewType(base_newtype)) + } + } + } + } +} + +fn lazy_base_cycle_initial<'db>( + db: &'db dyn Db, + _id: salsa::Id, + _self: NewType<'db>, +) -> NewTypeBase<'db> { + NewTypeBase::ClassType(ClassType::object(db)) +} diff --git a/crates/ty_python_semantic/src/types/type_ordering.rs b/crates/ty_python_semantic/src/types/type_ordering.rs index f6797f87d9..946b6173a2 100644 --- a/crates/ty_python_semantic/src/types/type_ordering.rs +++ b/crates/ty_python_semantic/src/types/type_ordering.rs @@ -213,6 +213,10 @@ pub(super) fn union_or_intersection_elements_ordering<'db>( (Type::TypedDict(_), _) => Ordering::Less, (_, Type::TypedDict(_)) => Ordering::Greater, + (Type::NewTypeInstance(left), Type::NewTypeInstance(right)) => left.cmp(right), + (Type::NewTypeInstance(_), _) => Ordering::Less, + (_, Type::NewTypeInstance(_)) => Ordering::Greater, + (Type::Union(_), _) | (_, Type::Union(_)) => { unreachable!("our type representation does not permit nested unions"); } diff --git a/crates/ty_python_semantic/src/types/visitor.rs b/crates/ty_python_semantic/src/types/visitor.rs index dd1ddfdfe5..7692c205ff 100644 --- a/crates/ty_python_semantic/src/types/visitor.rs +++ b/crates/ty_python_semantic/src/types/visitor.rs @@ -11,6 +11,7 @@ use crate::{ 
class::walk_generic_alias, function::{FunctionType, walk_function_type}, instance::{walk_nominal_instance_type, walk_protocol_instance_type}, + newtype::{NewType, walk_newtype_instance_type}, subclass_of::walk_subclass_of_type, walk_bound_method_type, walk_bound_type_var_type, walk_callable_type, walk_intersection_type, walk_known_instance_type, walk_method_wrapper_type, @@ -109,6 +110,10 @@ pub(crate) trait TypeVisitor<'db> { fn visit_typed_dict_type(&self, db: &'db dyn Db, typed_dict: TypedDictType<'db>) { walk_typed_dict_type(db, typed_dict, self); } + + fn visit_newtype_instance_type(&self, db: &'db dyn Db, newtype: NewType<'db>) { + walk_newtype_instance_type(db, newtype, self); + } } /// Enumeration of types that may contain other types, such as unions, intersections, and generics. @@ -131,6 +136,7 @@ pub(super) enum NonAtomicType<'db> { ProtocolInstance(ProtocolInstanceType<'db>), TypedDict(TypedDictType<'db>), TypeAlias(TypeAliasType<'db>), + NewTypeInstance(NewType<'db>), } pub(super) enum TypeKind<'db> { @@ -198,6 +204,9 @@ impl<'db> From> for TypeKind<'db> { TypeKind::NonAtomic(NonAtomicType::TypedDict(typed_dict)) } Type::TypeAlias(alias) => TypeKind::NonAtomic(NonAtomicType::TypeAlias(alias)), + Type::NewTypeInstance(newtype) => { + TypeKind::NonAtomic(NonAtomicType::NewTypeInstance(newtype)) + } } } } @@ -239,6 +248,9 @@ pub(super) fn walk_non_atomic_type<'db, V: TypeVisitor<'db> + ?Sized>( NonAtomicType::TypeAlias(alias) => { visitor.visit_type_alias_type(db, alias); } + NonAtomicType::NewTypeInstance(newtype) => { + visitor.visit_newtype_instance_type(db, newtype); + } } } diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap index 7373c4cf25..ab96ebf4e8 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap @@ -59,6 +59,7 @@ Settings: Settings 
{ "invalid-legacy-type-variable": Error (Default), "invalid-metaclass": Error (Default), "invalid-named-tuple": Error (Default), + "invalid-newtype": Error (Default), "invalid-overload": Error (Default), "invalid-parameter-default": Error (Default), "invalid-paramspec": Error (Default), diff --git a/ty.schema.json b/ty.schema.json index cae55e4a1b..3026102a5f 100644 --- a/ty.schema.json +++ b/ty.schema.json @@ -623,6 +623,16 @@ } ] }, + "invalid-newtype": { + "title": "detects invalid NewType definitions", + "description": "## What it does\nChecks for the creation of invalid `NewType`s\n\n## Why is this bad?\nThere are several requirements that you must follow when creating a `NewType`.\n\n## Examples\n```python\nfrom typing import NewType\n\ndef get_name() -> str: ...\n\nFoo = NewType(\"Foo\", int) # okay\nBar = NewType(get_name(), int) # error: The first argument to `NewType` must be a string literal\nBaz = NewType(\"Baz\", int | str) # error: invalid base for `typing.NewType`\n```", + "default": "error", + "oneOf": [ + { + "$ref": "#/definitions/Level" + } + ] + }, "invalid-overload": { "title": "detects invalid `@overload` usages", "description": "## What it does\nChecks for various invalid `@overload` usages.\n\n## Why is this bad?\nThe `@overload` decorator is used to define functions and methods that accepts different\ncombinations of arguments and return different types based on the arguments passed. This is\nmainly beneficial for type checkers. 
But, if the `@overload` usage is invalid, the type\nchecker may not be able to provide correct type information.\n\n## Example\n\nDefining only one overload:\n\n```py\nfrom typing import overload\n\n@overload\ndef foo(x: int) -> int: ...\ndef foo(x: int | None) -> int | None:\n return x\n```\n\nOr, not providing an implementation for the overloaded definition:\n\n```py\nfrom typing import overload\n\n@overload\ndef foo() -> None: ...\n@overload\ndef foo(x: int) -> int: ...\n```\n\n## References\n- [Python documentation: `@overload`](https://docs.python.org/3/library/typing.html#typing.overload)", From 1fd852fb3f7bc2b4aff4ffdaacd005abaf64e1dd Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Mon, 10 Nov 2025 18:04:41 -0500 Subject: [PATCH 149/180] [`ruff`] Ignore `str()` when not used for simple conversion (`RUF065`) (#21330) ## Summary Fixed RUF065 (`logging-eager-conversion`) to only flag `str()` calls when they perform a simple conversion that can be safely removed. The rule now ignores `str()` calls with no arguments, multiple arguments, starred arguments, or keyword unpacking, preventing false positives. Fixes #21315 ## Problem Analysis The RUF065 rule was incorrectly flagging all `str()` calls in logging statements, even when `str()` was performing actual conversion work beyond simple type coercion. Specifically, the rule flagged: - `str()` with no arguments - which returns an empty string - `str(b"data", "utf-8")` with multiple arguments - which performs encoding conversion - `str(*args)` with starred arguments - which unpacks arguments - `str(**kwargs)` with keyword unpacking - which passes keyword arguments These cases cannot be safely removed because `str()` is doing meaningful work (encoding conversion, argument unpacking, etc.), not just redundant type conversion. The root cause was that the rule only checked if the function was `str()` without validating the call signature. 
It didn't distinguish between simple `str(value)` conversions (which can be removed) and more complex `str()` calls that perform actual work. ## Approach The fix adds validation to the `str()` detection logic in `logging_eager_conversion.rs`: 1. **Check argument count**: Only flag `str()` calls with exactly one positional argument (`str_call_args.args.len() == 1`) 2. **Check for starred arguments**: Ensure the single argument is not starred (`!str_call_args.args[0].is_starred_expr()`) 3. **Check for keyword arguments**: Ensure there are no keyword arguments (`str_call_args.keywords.is_empty()`) This ensures the rule only flags cases like `str(value)` where `str()` is truly redundant and can be removed, while ignoring cases where `str()` performs actual conversion work. The fix maintains backward compatibility - all existing valid test cases continue to be flagged correctly, while the new edge cases are properly ignored. --------- Co-authored-by: Brent Westbrook --- .../fixtures/ruff/{RUF065.py => RUF065_0.py} | 0 .../resources/test/fixtures/ruff/RUF065_1.py | 18 ++++++++ crates/ruff_linter/src/rules/ruff/mod.rs | 3 +- .../ruff/rules/logging_eager_conversion.rs | 14 +++++- ...les__ruff__tests__RUF065_RUF065_0.py.snap} | 44 +++++++++---------- ...ules__ruff__tests__RUF065_RUF065_1.py.snap | 10 +++++ crates/ruff_python_ast/src/nodes.rs | 7 +++ 7 files changed, 71 insertions(+), 25 deletions(-) rename crates/ruff_linter/resources/test/fixtures/ruff/{RUF065.py => RUF065_0.py} (100%) create mode 100644 crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py rename crates/ruff_linter/src/rules/ruff/snapshots/{ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap => ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap} (91%) create mode 100644 crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF065.py 
b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_0.py similarity index 100% rename from crates/ruff_linter/resources/test/fixtures/ruff/RUF065.py rename to crates/ruff_linter/resources/test/fixtures/ruff/RUF065_0.py diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py new file mode 100644 index 0000000000..048a5be48c --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/ruff/RUF065_1.py @@ -0,0 +1,18 @@ +import logging + +# Test cases for str() that should NOT be flagged (issue #21315) +# str() with no arguments - should not be flagged +logging.warning("%s", str()) + +# str() with multiple arguments - should not be flagged +logging.warning("%s", str(b"\xe2\x9a\xa0", "utf-8")) + +# str() with starred arguments - should not be flagged +logging.warning("%s", str(*(b"\xf0\x9f\x9a\xa7", "utf-8"))) + +# str() with keyword unpacking - should not be flagged +logging.warning("%s", str(**{"object": b"\xf0\x9f\x9a\xa8", "encoding": "utf-8"})) + +# str() with single keyword argument - should be flagged (equivalent to str("!")) +logging.warning("%s", str(object="!")) + diff --git a/crates/ruff_linter/src/rules/ruff/mod.rs b/crates/ruff_linter/src/rules/ruff/mod.rs index 7cdc557841..b07107587c 100644 --- a/crates/ruff_linter/src/rules/ruff/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/mod.rs @@ -112,7 +112,8 @@ mod tests { #[test_case(Rule::LegacyFormPytestRaises, Path::new("RUF061_warns.py"))] #[test_case(Rule::LegacyFormPytestRaises, Path::new("RUF061_deprecated_call.py"))] #[test_case(Rule::NonOctalPermissions, Path::new("RUF064.py"))] - #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065.py"))] + #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_0.py"))] + #[test_case(Rule::LoggingEagerConversion, Path::new("RUF065_1.py"))] #[test_case(Rule::RedirectedNOQA, Path::new("RUF101_0.py"))] #[test_case(Rule::RedirectedNOQA, Path::new("RUF101_1.py"))] 
#[test_case(Rule::InvalidRuleCode, Path::new("RUF102.py"))] diff --git a/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs b/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs index 2b09c8a1e0..32c4755229 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/logging_eager_conversion.rs @@ -138,7 +138,12 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall) .zip(call.arguments.args.iter().skip(msg_pos + 1)) { // Check if the argument is a call to eagerly format a value - if let Expr::Call(ast::ExprCall { func, .. }) = arg { + if let Expr::Call(ast::ExprCall { + func, + arguments: str_call_args, + .. + }) = arg + { let CFormatType::String(format_conversion) = spec.format_type else { continue; }; @@ -146,8 +151,13 @@ pub(crate) fn logging_eager_conversion(checker: &Checker, call: &ast::ExprCall) // Check for various eager conversion patterns match format_conversion { // %s with str() - remove str() call + // Only flag if str() has exactly one argument (positional or keyword) that is not unpacked FormatConversion::Str - if checker.semantic().match_builtin_expr(func.as_ref(), "str") => + if checker.semantic().match_builtin_expr(func.as_ref(), "str") + && str_call_args.len() == 1 + && str_call_args + .find_argument("object", 0) + .is_some_and(|arg| !arg.is_variadic()) => { checker.report_diagnostic( LoggingEagerConversion { diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap similarity index 91% rename from crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap rename to crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap index 9f96c36307..9ac438216a 100644 --- 
a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065.py.snap +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_0.py.snap @@ -2,7 +2,7 @@ source: crates/ruff_linter/src/rules/ruff/mod.rs --- RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:4:26 + --> RUF065_0.py:4:26 | 3 | # %s + str() 4 | logging.info("Hello %s", str("World!")) @@ -11,7 +11,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:5:39 + --> RUF065_0.py:5:39 | 3 | # %s + str() 4 | logging.info("Hello %s", str("World!")) @@ -22,7 +22,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:8:26 + --> RUF065_0.py:8:26 | 7 | # %s + repr() 8 | logging.info("Hello %s", repr("World!")) @@ -31,7 +31,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:9:39 + --> RUF065_0.py:9:39 | 7 | # %s + repr() 8 | logging.info("Hello %s", repr("World!")) @@ -42,7 +42,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:22:18 + --> RUF065_0.py:22:18 | 21 | # %s + str() 22 | info("Hello %s", str("World!")) @@ -51,7 +51,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `str()` conversion when formatting with `%s` - --> RUF065.py:23:31 + --> RUF065_0.py:23:31 | 21 | # %s + str() 22 | info("Hello %s", str("World!")) @@ -62,7 +62,7 @@ RUF065 Unnecessary `str()` conversion when formatting with `%s` | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. 
Use `%r` instead of `%s` - --> RUF065.py:26:18 + --> RUF065_0.py:26:18 | 25 | # %s + repr() 26 | info("Hello %s", repr("World!")) @@ -71,7 +71,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:27:31 + --> RUF065_0.py:27:31 | 25 | # %s + repr() 26 | info("Hello %s", repr("World!")) @@ -82,7 +82,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:44:32 + --> RUF065_0.py:44:32 | 42 | logging.warning("Value: %r", repr(42)) 43 | logging.error("Error: %r", repr([1, 2, 3])) @@ -92,7 +92,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` instead of `%s` - --> RUF065.py:45:30 + --> RUF065_0.py:45:30 | 43 | logging.error("Error: %r", repr([1, 2, 3])) 44 | logging.info("Debug info: %s", repr("test\nstring")) @@ -103,7 +103,7 @@ RUF065 Unnecessary `repr()` conversion when formatting with `%s`. Use `%r` inste | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:48:27 + --> RUF065_0.py:48:27 | 47 | # %s + ascii() 48 | logging.info("ASCII: %s", ascii("Hello\nWorld")) @@ -112,7 +112,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:49:30 + --> RUF065_0.py:49:30 | 47 | # %s + ascii() 48 | logging.info("ASCII: %s", ascii("Hello\nWorld")) @@ -123,7 +123,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. 
Use `%#o` instead of `%s` - --> RUF065.py:52:27 + --> RUF065_0.py:52:27 | 51 | # %s + oct() 52 | logging.info("Octal: %s", oct(42)) @@ -132,7 +132,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` instead of `%s` - --> RUF065.py:53:30 + --> RUF065_0.py:53:30 | 51 | # %s + oct() 52 | logging.info("Octal: %s", oct(42)) @@ -143,7 +143,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:56:25 + --> RUF065_0.py:56:25 | 55 | # %s + hex() 56 | logging.info("Hex: %s", hex(42)) @@ -152,7 +152,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:57:28 + --> RUF065_0.py:57:28 | 55 | # %s + hex() 56 | logging.info("Hex: %s", hex(42)) @@ -161,7 +161,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:63:19 + --> RUF065_0.py:63:19 | 61 | from logging import info, log 62 | @@ -171,7 +171,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` instead of `%s` - --> RUF065.py:64:32 + --> RUF065_0.py:64:32 | 63 | info("ASCII: %s", ascii("Hello\nWorld")) 64 | log(logging.INFO, "ASCII: %s", ascii("test")) @@ -181,7 +181,7 @@ RUF065 Unnecessary `ascii()` conversion when formatting with `%s`. Use `%a` inst | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. 
Use `%#o` instead of `%s` - --> RUF065.py:66:19 + --> RUF065_0.py:66:19 | 64 | log(logging.INFO, "ASCII: %s", ascii("test")) 65 | @@ -191,7 +191,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` instead of `%s` - --> RUF065.py:67:32 + --> RUF065_0.py:67:32 | 66 | info("Octal: %s", oct(42)) 67 | log(logging.INFO, "Octal: %s", oct(255)) @@ -201,7 +201,7 @@ RUF065 Unnecessary `oct()` conversion when formatting with `%s`. Use `%#o` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:69:17 + --> RUF065_0.py:69:17 | 67 | log(logging.INFO, "Octal: %s", oct(255)) 68 | @@ -211,7 +211,7 @@ RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` inste | RUF065 Unnecessary `hex()` conversion when formatting with `%s`. Use `%#x` instead of `%s` - --> RUF065.py:70:30 + --> RUF065_0.py:70:30 | 69 | info("Hex: %s", hex(42)) 70 | log(logging.INFO, "Hex: %s", hex(255)) diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap new file mode 100644 index 0000000000..56fa9ec243 --- /dev/null +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__RUF065_RUF065_1.py.snap @@ -0,0 +1,10 @@ +--- +source: crates/ruff_linter/src/rules/ruff/mod.rs +--- +RUF065 Unnecessary `str()` conversion when formatting with `%s` + --> RUF065_1.py:17:23 + | +16 | # str() with single keyword argument - should be flagged (equivalent to str("!")) +17 | logging.warning("%s", str(object="!")) + | ^^^^^^^^^^^^^^^ + | diff --git a/crates/ruff_python_ast/src/nodes.rs b/crates/ruff_python_ast/src/nodes.rs index 5cb58e7f05..b57b86aaf2 100644 --- a/crates/ruff_python_ast/src/nodes.rs +++ b/crates/ruff_python_ast/src/nodes.rs @@ -3269,6 
+3269,13 @@ impl<'a> ArgOrKeyword<'a> { ArgOrKeyword::Keyword(keyword) => &keyword.value, } } + + pub const fn is_variadic(self) -> bool { + match self { + ArgOrKeyword::Arg(expr) => expr.is_starred_expr(), + ArgOrKeyword::Keyword(keyword) => keyword.arg.is_none(), + } + } } impl<'a> From<&'a Expr> for ArgOrKeyword<'a> { From 2bc6c78e264101b41929a7f539d8b4e4d3d8e593 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 10 Nov 2025 18:04:56 -0500 Subject: [PATCH 150/180] [ty] introduce local variables for `from` imports of submodules in `__init__.py(i)` (#21173) This rips out the previous implementation in favour of a new implementation with 3 rules: - **froms are locals**: a `from..import` can only define locals, it does not have global side-effects. Specifically any submodule attribute `a` that's implicitly introduced by either `from .a import b` or `from . import a as b` (in an `__init__.py(i)`) is a local and not a global. If you do such an import at the top of a file you won't notice this. However if you do such an import in a function, that means it will only be function-scoped (so you'll need to do it in every function that wants to access it, making your code less sensitive to execution order). - **first from first serve**: only the *first* `from..import` in an `__init__.py(i)` that imports a particular direct submodule of the current package introduces that submodule as a local. Subsequent imports of the submodule will not introduce that local. This reflects the fact that in actual python only the first import of a submodule (in the entire execution of the program) introduces it as an attribute of the package. By "first" we mean "the first time in this scope (or any parent scope)". This pairs well with the fact that we are specifically introducing a local (as long as you don't accidentally shadow or overwrite the local). - **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a` (equivalent to `from . import a as a`). 
This is required to properly handle many stubs in the wild. Currently it must be *exactly* `from . import ...`. This implementation is intentionally limited/conservative (notably, often requiring a from import to be relative). I'm going to file a ton of followups for improvements so that their impact can be evaluated separately. Fixes https://github.com/astral-sh/ty/issues/133 --- crates/ruff_db/src/files.rs | 6 + .../mdtest/import/nonstandard_conventions.md | 487 +++++++++++++++--- .../type_properties/is_equivalent_to.md | 26 +- .../ty_python_semantic/src/semantic_index.rs | 75 +-- .../src/semantic_index/builder.rs | 69 ++- .../src/semantic_index/definition.rs | 54 +- crates/ty_python_semantic/src/types.rs | 28 +- .../src/types/ide_support.rs | 5 +- .../src/types/infer/builder.rs | 101 ++++ 9 files changed, 658 insertions(+), 193 deletions(-) diff --git a/crates/ruff_db/src/files.rs b/crates/ruff_db/src/files.rs index 4d57162c7c..1c322419e0 100644 --- a/crates/ruff_db/src/files.rs +++ b/crates/ruff_db/src/files.rs @@ -475,6 +475,12 @@ impl File { self.path(db).as_str().ends_with("__init__.pyi") } + /// Returns `true` if the file is an `__init__.py` or `__init__.pyi` + pub fn is_package(self, db: &dyn Db) -> bool { + let path = self.path(db).as_str(); + path.ends_with("__init__.pyi") || path.ends_with("__init__.py") + } + pub fn source_type(self, db: &dyn Db) -> PySourceType { match self.path(db) { FilePath::System(path) => path diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md index 848eaae387..934f9ad918 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -1,39 +1,39 @@ # Nonstandard Import Conventions This document covers ty-specific extensions to the -[standard import
conventions](https://typing.python.org/en/latest/spec/distributing.html#import-conventions). +[standard import conventions](https://typing.python.org/en/latest/spec/distributing.html#import-conventions), +and other intentional deviations from actual python semantics. -It's a common idiom for a package's `__init__.py(i)` to include several imports like -`from . import mysubmodule`, with the intent that the `mypackage.mysubmodule` attribute should work -for anyone who only imports `mypackage`. +This file currently covers the following details: -In the context of a `.py` we handle this well through our general attempts to faithfully implement -import side-effects. However for `.pyi` files we are expected to apply -[a more strict set of rules](https://typing.python.org/en/latest/spec/distributing.html#import-conventions) -to encourage intentional API design. Although `.pyi` files are explicitly designed to work with -typecheckers, which ostensibly should all enforce these strict rules, every typechecker has its own -defacto "extensions" to them and so a few idioms like `from . import mysubmodule` have found their -way into `.pyi` files too. +- **froms are locals**: a `from..import` can only define locals, it does not have global + side-effects. Specifically any submodule attribute `a` that's implicitly introduced by either + `from .a import b` or `from . import a as b` (in an `__init__.py(i)`) is a local and not a + global. If you do such an import at the top of a file you won't notice this. However if you do + such an import in a function, that means it will only be function-scoped (so you'll need to do + it in every function that wants to access it, making your code less sensitive to execution + order). -Thus for the sake of compatibility, we need to define our own "extensions". 
Any extensions we define -here have several competing concerns: +- **first from first serve**: only the *first* `from..import` in an `__init__.py(i)` that imports a + particular direct submodule of the current package introduces that submodule as a local. + Subsequent imports of the submodule will not introduce that local. This reflects the fact that + in actual python only the first import of a submodule (in the entire execution of the program) + introduces it as an attribute of the package. By "first" we mean "the first time in this scope + (or any parent scope)". This pairs well with the fact that we are specifically introducing a + local (as long as you don't accidentally shadow or overwrite the local). -- Extensions should ideally be kept narrow to continue to encourage explicit API design -- Extensions should be easy to explain, document, and understand -- Extensions should ideally still be a subset of runtime behaviour (if it works in a stub, it works - at runtime) -- Extensions should ideally not make `.pyi` files more permissive than `.py` files (if it works in a - stub, it works in an impl) +- **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a` + (equivalent to `from . import a as a`). This is required to properly handle many stubs in the + wild. Currently it must be *exactly* `from . import ...`. -To that end we define the following extension: - -> If an `__init__.pyi` for `mypackage` contains a `from...import` targetting a direct submodule of -> `mypackage`, then that submodule should be available as an attribute of `mypackage`. +Note: almost all tests in here have a stub and non-stub version, because we're interested in both +defining symbols *at all* and re-exporting them. ## Relative `from` Import of Direct Submodule in `__init__` -The `from . import submodule` idiom in an `__init__.pyi` is fairly explicit and we should definitely -support it. +We consider the `from . 
import submodule` idiom in an `__init__.pyi` an explicit re-export. + +### In Stub `mypackage/__init__.pyi`: @@ -63,7 +63,7 @@ reveal_type(mypackage.imported.X) # revealed: int reveal_type(mypackage.fails.Y) # revealed: Unknown ``` -## Relative `from` Import of Direct Submodule in `__init__` (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -95,8 +95,11 @@ reveal_type(mypackage.fails.Y) # revealed: Unknown ## Absolute `from` Import of Direct Submodule in `__init__` -If an absolute `from...import` happens to import a submodule, it works just as well as a relative -one. +If an absolute `from...import` happens to import a submodule (i.e. it's equivalent to +`from . import y`) we do not treat it as a re-export. We could, but we don't. (This is an arbitrary +decision and can be changed!) + +### In Stub `mypackage/__init__.pyi`: @@ -121,12 +124,14 @@ Y: int = 47 ```py import mypackage -reveal_type(mypackage.imported.X) # revealed: int +# TODO: this could work and would be nice to have? +# error: "has no member `imported`" +reveal_type(mypackage.imported.X) # revealed: Unknown # error: "has no member `fails`" reveal_type(mypackage.fails.Y) # revealed: Unknown ``` -## Absolute `from` Import of Direct Submodule in `__init__` (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -159,7 +164,9 @@ reveal_type(mypackage.fails.Y) # revealed: Unknown ## Import of Direct Submodule in `__init__` An `import` that happens to import a submodule does not expose the submodule as an attribute. (This -is an arbitrary decision and can be changed easily!) +is an arbitrary decision and can be changed!) + +### In Stub `mypackage/__init__.pyi`: @@ -178,12 +185,12 @@ X: int = 42 ```py import mypackage -# TODO: this is probably safe to allow, as it's an unambiguous import of a submodule +# TODO: this could work and would be nice to have? 
# error: "has no member `imported`" reveal_type(mypackage.imported.X) # revealed: Unknown ``` -## Import of Direct Submodule in `__init__` (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -202,15 +209,17 @@ X: int = 42 ```py import mypackage -# TODO: this is probably safe to allow, as it's an unambiguous import of a submodule +# TODO: this could work and would be nice to have # error: "has no member `imported`" reveal_type(mypackage.imported.X) # revealed: Unknown ``` ## Relative `from` Import of Nested Submodule in `__init__` -`from .submodule import nested` in an `__init__.pyi` is currently not supported as a way to expose -`mypackage.submodule` or `mypackage.submodule.nested` but it could be. +`from .submodule import nested` in an `__init__.pyi` does not re-export `mypackage.submodule`, +`mypackage.submodule.nested`, or `nested`. + +### In Stub `mypackage/__init__.pyi`: @@ -234,16 +243,21 @@ X: int = 42 ```py import mypackage -# TODO: this would be nice to allow # error: "has no member `submodule`" reveal_type(mypackage.submodule) # revealed: Unknown # error: "has no member `submodule`" reveal_type(mypackage.submodule.nested) # revealed: Unknown # error: "has no member `submodule`" reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested.X) # revealed: Unknown ``` -## Relative `from` Import of Nested Submodule in `__init__` (Non-Stub Check) +### In Non-Stub + +`from .submodule import nested` in an `__init__.py` exposes `mypackage.submodule` and `nested`. 
`mypackage/__init__.py`: @@ -267,19 +281,22 @@ X: int = 42 ```py import mypackage +reveal_type(mypackage.submodule) # revealed: # TODO: this would be nice to support -# error: "has no member `submodule`" -reveal_type(mypackage.submodule) # revealed: Unknown -# error: "has no member `submodule`" +# error: "has no member `nested`" reveal_type(mypackage.submodule.nested) # revealed: Unknown -# error: "has no member `submodule`" +# error: "has no member `nested`" reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +reveal_type(mypackage.nested) # revealed: +reveal_type(mypackage.nested.X) # revealed: int ``` ## Absolute `from` Import of Nested Submodule in `__init__` -`from mypackage.submodule import nested` in an `__init__.pyi` is currently not supported as a way to -expose `mypackage.submodule` or `mypackage.submodule.nested` but it could be. +`from mypackage.submodule import nested` in an `__init__.pyi` does not re-export +`mypackage.submodule`, `mypackage.submodule.nested`, or `nested`. + +### In Stub `mypackage/__init__.pyi`: @@ -303,16 +320,22 @@ X: int = 42 ```py import mypackage -# TODO: this would be nice to support +# TODO: this could work and would be nice to have # error: "has no member `submodule`" reveal_type(mypackage.submodule) # revealed: Unknown # error: "has no member `submodule`" reveal_type(mypackage.submodule.nested) # revealed: Unknown # error: "has no member `submodule`" reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested) # revealed: Unknown +# error: "has no member `nested`" +reveal_type(mypackage.nested.X) # revealed: Unknown ``` -## Absolute `from` Import of Nested Submodule in `__init__` (Non-Stub Check) +### In Non-Stub + +`from mypackage.submodule import nested` in an `__init__.py` only creates `nested`. 
`mypackage/__init__.py`: @@ -343,12 +366,16 @@ reveal_type(mypackage.submodule) # revealed: Unknown reveal_type(mypackage.submodule.nested) # revealed: Unknown # error: "has no member `submodule`" reveal_type(mypackage.submodule.nested.X) # revealed: Unknown +reveal_type(mypackage.nested) # revealed: +reveal_type(mypackage.nested.X) # revealed: int ``` ## Import of Nested Submodule in `__init__` -`import mypackage.submodule.nested` in an `__init__.pyi` is currently not supported as a way to -expose `mypackage.submodule` or `mypackage.submodule.nested` but it could be. +`import mypackage.submodule.nested` in an `__init__.pyi` does not re-export `mypackage.submodule` or +`mypackage.submodule.nested`. + +### In Stub `mypackage/__init__.pyi`: @@ -372,7 +399,6 @@ X: int = 42 ```py import mypackage -# TODO: this would be nice to support, and is probably safe to do as it's unambiguous # error: "has no member `submodule`" reveal_type(mypackage.submodule) # revealed: Unknown # error: "has no member `submodule`" @@ -381,7 +407,10 @@ reveal_type(mypackage.submodule.nested) # revealed: Unknown reveal_type(mypackage.submodule.nested.X) # revealed: Unknown ``` -## Import of Nested Submodule in `__init__` (Non-Stub Check) +### In Non-Stub + +`import mypackage.submodule.nested` in an `__init__.py` does not define `mypackage.submodule` or +`mypackage.submodule.nested` outside the package. `mypackage/__init__.py`: @@ -405,7 +434,7 @@ X: int = 42 ```py import mypackage -# TODO: this would be nice to support, and is probably safe to do as it's unambiguous +# TODO: this would be nice to support # error: "has no member `submodule`" reveal_type(mypackage.submodule) # revealed: Unknown # error: "has no member `submodule`" @@ -418,6 +447,8 @@ reveal_type(mypackage.submodule.nested.X) # revealed: Unknown Renaming the submodule to something else disables the `__init__.pyi` idiom. 
+### In Stub + `mypackage/__init__.pyi`: ```pyi @@ -441,7 +472,7 @@ reveal_type(mypackage.imported.X) # revealed: Unknown reveal_type(mypackage.imported_m.X) # revealed: Unknown ``` -## Relative `from` Import of Direct Submodule in `__init__`, Mismatched Alias (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -471,6 +502,8 @@ reveal_type(mypackage.imported_m.X) # revealed: int The `__init__.pyi` idiom should definitely always work if the submodule is renamed to itself, as this is the re-export idiom. +### In Stub + `mypackage/__init__.pyi`: ```pyi @@ -491,7 +524,7 @@ import mypackage reveal_type(mypackage.imported.X) # revealed: int ``` -## Relative `from` Import of Direct Submodule in `__init__`, Matched Alias (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -518,6 +551,8 @@ reveal_type(mypackage.imported.X) # revealed: int Even if the `__init__` idiom is in effect, star imports do not pick it up. (This is an arbitrary decision that mostly fell out of the implementation details and can be changed!) +### In Stub + `mypackage/__init__.pyi`: ```pyi @@ -536,13 +571,13 @@ X: int = 42 ```py from mypackage import * -# TODO: this would be nice to support (available_submodule_attributes isn't visible to `*` imports) +# TODO: this would be nice to support # error: "`imported` used when not defined" reveal_type(imported.X) # revealed: Unknown reveal_type(Z) # revealed: int ``` -## Star Import Unaffected (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -569,9 +604,10 @@ reveal_type(Z) # revealed: int ## `from` Import of Non-Submodule -A from import that terminates in a non-submodule should not expose the intermediate submodules as -attributes. This is an arbitrary decision but on balance probably safe and correct, as otherwise it -would be hard for a stub author to be intentional about the submodules being exposed as attributes. 
+A `from` import that imports a non-submodule isn't currently a special case here (various +proposed/tested approaches did treat this specially). + +### In Stub `mypackage/__init__.pyi`: @@ -590,11 +626,11 @@ X: int = 42 ```py import mypackage -# error: "has no member `imported`" +# error: "no member `imported`" reveal_type(mypackage.imported.X) # revealed: Unknown ``` -## `from` Import of Non-Submodule (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -613,9 +649,7 @@ X: int = 42 ```py import mypackage -# TODO: this would be nice to support, as it works at runtime -# error: "has no member `imported`" -reveal_type(mypackage.imported.X) # revealed: Unknown +reveal_type(mypackage.imported.X) # revealed: int ``` ## `from` Import of Other Package's Submodule @@ -623,6 +657,8 @@ reveal_type(mypackage.imported.X) # revealed: Unknown `from mypackage import submodule` from outside the package is not modeled as a side-effect on `mypackage`, even in the importing file (this could be changed!). +### In Stub + `mypackage/__init__.pyi`: ```pyi @@ -641,12 +677,13 @@ import mypackage from mypackage import imported # TODO: this would be nice to support, but it's dangerous with available_submodule_attributes +# for details, see: https://github.com/astral-sh/ty/issues/1488 reveal_type(imported.X) # revealed: int # error: "has no member `imported`" reveal_type(mypackage.imported.X) # revealed: Unknown ``` -## `from` Import of Other Package's Submodule (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -676,6 +713,8 @@ reveal_type(mypackage.imported.X) # revealed: Unknown `from . import submodule` from a sibling module is not modeled as a side-effect on `mypackage` or a re-export from `submodule`. 
+### In Stub + `mypackage/__init__.pyi`: ```pyi @@ -707,7 +746,7 @@ reveal_type(imported.fails.Y) # revealed: Unknown reveal_type(mypackage.fails.Y) # revealed: Unknown ``` -## `from` Import of Sibling Module (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -752,9 +791,11 @@ Can easily result in the typechecker getting "confused" and thinking imports of top-level package are referring to the subpackage and not the function/class. This issue can be found with the `lobpcg` function in `scipy.sparse.linalg`. -This kind of failure mode is why the rule is restricted to *direct* submodule imports, as anything -more powerful than that in the current implementation strategy quickly gets the functions and -submodules mixed up. +We avoid this by ensuring that the imported name (the right-hand `funcmod` in +`from .funcmod import funcmod`) overwrites the submodule attribute (the left-hand `funcmod`), as it +does at runtime. + +### In Stub `mypackage/__init__.pyi`: @@ -788,7 +829,7 @@ from mypackage import funcmod x = funcmod(1) ``` -## Fractal Re-export Nameclash Problems (Non-Stub Check) +### In Non-Stub `mypackage/__init__.py`: @@ -822,3 +863,311 @@ from mypackage import funcmod x = funcmod(1) ``` + +## Re-export Nameclash Problems In Functions + +`from` imports in an `__init__.py` at file scope should be visible to functions defined in the file: + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod + +funcmod(1) + +def run(): + funcmod(2) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## Re-export Nameclash Problems In Try-Blocks + +`from` imports in an `__init__.py` at file scope in a `try` block should be visible to functions +defined in the `try` block (regression test for a bug): + +`mypackage/__init__.py`: + +```py +try: + from .funcmod import funcmod + + funcmod(1) + + def run(): + # TODO: this is a bug in how we analyze try-blocks + # error: [call-non-callable] + funcmod(2) + +finally: + x = 1 +``` + 
+`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## RHS `from` Imports In Functions + +If a `from` import occurs in a function, the RHS symbols should only be visible in that function. + +`mypackage/__init__.py`: + +```py +def run1(): + from .funcmod import funcmod + + funcmod(1) + +def run2(): + from .funcmod import funcmod + + funcmod(2) + +def run3(): + # error: [unresolved-reference] + funcmod(3) + +# error: [unresolved-reference] +funcmod(4) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports In Functions + +If a `from` import occurs in a function, LHS symbols should only be visible in that function. This +very blatantly is not runtime-accurate, but exists to try to force you to write "obviously +deterministically correct" imports instead of relying on execution order. + +`mypackage/__init__.py`: + +```py +def run1(): + from .funcmod import other + + funcmod.funcmod(1) + +def run2(): + from .funcmod import other + + # TODO: this is just a bug! We only register the first + # import of `funcmod` in the entire file, and not per-scope! + # error: [unresolved-reference] + funcmod.funcmod(2) + +def run3(): + # error: [unresolved-reference] + funcmod.funcmod(3) + +# error: [unresolved-reference] +funcmod.funcmod(4) +``` + +`mypackage/funcmod.py`: + +```py +other: int = 1 + +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports Overwrite Locals + +The LHS of a `from..import` introduces a local symbol that overwrites any local with the same name. +This reflects actual runtime behaviour, although we're kinda assuming it hasn't been imported +already. 
+ +`mypackage/__init__.py`: + +```py +funcmod = 0 +from .funcmod import funcmod + +funcmod(1) +``` + +`mypackage/funcmod.py`: + +```py +def funcmod(x: int) -> int: + return x +``` + +## LHS `from` Imports Overwritten By Local Function + +The LHS of a `from..import` introduces a local symbol that can be overwritten by defining a function +(or class) with the same name. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import other + +def funcmod(x: int) -> int: ... +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +def other(x: int) -> int: ... +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import other + +def funcmod(x: int) -> int: + return x +``` + +`mypackage/funcmod/__init__.py`: + +```py +def other(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +## LHS `from` Imports Overwritten By Local Assignment + +The LHS of a `from..import` introduces a local symbol that can be overwritten by assigning to it. + +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import other + +funcmod = other +```
+ +### In Stub + +`mypackage/__init__.pyi`: + +```pyi +from .funcmod import funcmod as funcmod +from .funcmod import other +``` + +`mypackage/funcmod/__init__.pyi`: + +```pyi +def other(x: int) -> int: ... +def funcmod(x: int) -> int: ... +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` + +### In Non-Stub + +`mypackage/__init__.py`: + +```py +from .funcmod import funcmod +from .funcmod import other +``` + +`mypackage/funcmod/__init__.py`: + +```py +def other(x: int) -> int: + return x + +def funcmod(x: int) -> int: + return x +``` + +`main.py`: + +```py +from mypackage import funcmod + +x = funcmod(1) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md index 41c7f562bb..fe846ee213 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_equivalent_to.md @@ -607,23 +607,33 @@ module: `module2.py`: ```py -import importlib -import importlib.abc +import imported +import imported.abc +``` + +`imported/__init__.pyi`: + +```pyi +``` + +`imported/abc.pyi`: + +```pyi ``` `main2.py`: ```py -import importlib -from module2 import importlib as other_importlib +import imported +from module2 import imported as other_imported from ty_extensions import TypeOf, static_assert, is_equivalent_to -# error: [unresolved-attribute] "Module `importlib` has no member `abc`" -reveal_type(importlib.abc) # revealed: Unknown +# error: [unresolved-attribute] "Module `imported` has no member `abc`" +reveal_type(imported.abc) # revealed: Unknown -reveal_type(other_importlib.abc) # revealed: +reveal_type(other_imported.abc) # revealed: -static_assert(not is_equivalent_to(TypeOf[importlib], TypeOf[other_importlib])) +static_assert(not is_equivalent_to(TypeOf[imported], TypeOf[other_imported])) ``` [materializations]: 
https://typing.python.org/en/latest/spec/glossary.html#term-materialize diff --git a/crates/ty_python_semantic/src/semantic_index.rs b/crates/ty_python_semantic/src/semantic_index.rs index a654873db3..4d31de2cb9 100644 --- a/crates/ty_python_semantic/src/semantic_index.rs +++ b/crates/ty_python_semantic/src/semantic_index.rs @@ -6,12 +6,12 @@ use ruff_db::parsed::parsed_module; use ruff_index::{IndexSlice, IndexVec}; use ruff_python_ast::NodeIndex; -use ruff_python_ast::name::Name; use ruff_python_parser::semantic_errors::SemanticSyntaxError; use rustc_hash::{FxHashMap, FxHashSet}; use salsa::Update; use salsa::plumbing::AsId; +use crate::Db; use crate::module_name::ModuleName; use crate::node_key::NodeKey; use crate::semantic_index::ast_ids::AstIds; @@ -28,7 +28,6 @@ use crate::semantic_index::scope::{ use crate::semantic_index::symbol::ScopedSymbolId; use crate::semantic_index::use_def::{EnclosingSnapshotKey, ScopedEnclosingSnapshotId, UseDefMap}; use crate::semantic_model::HasTrackedScope; -use crate::{Db, Module, resolve_module}; pub mod ast_ids; mod builder; @@ -84,65 +83,6 @@ pub(crate) fn imported_modules<'db>(db: &'db dyn Db, file: File) -> Arc( - db: &'db dyn Db, - importing_module: Module<'db>, -) -> Box<[ModuleName]> { - let Some(file) = importing_module.file(db) else { - return Box::default(); - }; - if !file.is_package_stub(db) { - return Box::default(); - } - semantic_index(db, file) - .maybe_imported_modules - .iter() - .filter_map(|import| { - let mut submodule = ModuleName::from_identifier_parts( - db, - file, - import.from_module.as_deref(), - import.level, - ) - .ok()?; - // We only actually care if this is a direct submodule of the package - // so this part should actually be exactly the importing module. 
- let importing_module_name = importing_module.name(db); - if importing_module_name != &submodule { - return None; - } - submodule.extend(&ModuleName::new(import.submodule.as_str())?); - // Throw out the result if this doesn't resolve to an actual module. - // This is quite expensive, but we've gone through a lot of hoops to - // get here so it won't happen too much. - resolve_module(db, &submodule)?; - // Return only the relative part - submodule.relative_to(importing_module_name) - }) - .collect() -} - /// Returns the use-def map for a specific `scope`. /// /// Using [`use_def_map`] over [`semantic_index`] has the advantage that @@ -284,9 +224,6 @@ pub(crate) struct SemanticIndex<'db> { /// The set of modules that are imported anywhere within this file. imported_modules: Arc>, - /// `from...import` statements within this file that might import a submodule. - maybe_imported_modules: FxHashSet, - /// Flags about the global scope (code usage impacting inference) has_future_annotations: bool, @@ -300,16 +237,6 @@ pub(crate) struct SemanticIndex<'db> { generator_functions: FxHashSet, } -/// A `from...import` that may be an import of a module -/// -/// Later analysis will determine if it is. -#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, get_size2::GetSize)] -pub(crate) struct MaybeModuleImport { - level: u32, - from_module: Option, - submodule: Name, -} - impl<'db> SemanticIndex<'db> { /// Returns the place table for a specific scope. 
/// diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index 6affc11424..fe0ca550b3 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -26,8 +26,8 @@ use crate::semantic_index::definition::{ AnnotatedAssignmentDefinitionNodeRef, AssignmentDefinitionNodeRef, ComprehensionDefinitionNodeRef, Definition, DefinitionCategory, DefinitionNodeKey, DefinitionNodeRef, Definitions, ExceptHandlerDefinitionNodeRef, ForStmtDefinitionNodeRef, - ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, MatchPatternDefinitionNodeRef, - StarImportDefinitionNodeRef, WithItemDefinitionNodeRef, + ImportDefinitionNodeRef, ImportFromDefinitionNodeRef, ImportFromSubmoduleDefinitionNodeRef, + MatchPatternDefinitionNodeRef, StarImportDefinitionNodeRef, WithItemDefinitionNodeRef, }; use crate::semantic_index::expression::{Expression, ExpressionKind}; use crate::semantic_index::place::{PlaceExpr, PlaceTableBuilder, ScopedPlaceId}; @@ -47,9 +47,7 @@ use crate::semantic_index::symbol::{ScopedSymbolId, Symbol}; use crate::semantic_index::use_def::{ EnclosingSnapshotKey, FlowSnapshot, ScopedEnclosingSnapshotId, UseDefMapBuilder, }; -use crate::semantic_index::{ - ExpressionsScopeMap, MaybeModuleImport, SemanticIndex, VisibleAncestorsIter, -}; +use crate::semantic_index::{ExpressionsScopeMap, SemanticIndex, VisibleAncestorsIter}; use crate::semantic_model::HasTrackedScope; use crate::unpack::{EvaluationMode, Unpack, UnpackKind, UnpackPosition, UnpackValue}; use crate::{Db, Program}; @@ -113,7 +111,7 @@ pub(super) struct SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap>, expressions_by_node: FxHashMap>, imported_modules: FxHashSet, - maybe_imported_modules: FxHashSet, + seen_submodule_imports: FxHashSet, /// Hashset of all [`FileScopeId`]s that correspond to [generator functions]. 
/// /// [generator functions]: https://docs.python.org/3/glossary.html#term-generator @@ -151,7 +149,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { definitions_by_node: FxHashMap::default(), expressions_by_node: FxHashMap::default(), - maybe_imported_modules: FxHashSet::default(), + seen_submodule_imports: FxHashSet::default(), imported_modules: FxHashSet::default(), generator_functions: FxHashSet::default(), @@ -1266,7 +1264,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { self.scopes_by_node.shrink_to_fit(); self.generator_functions.shrink_to_fit(); self.enclosing_snapshots.shrink_to_fit(); - self.maybe_imported_modules.shrink_to_fit(); SemanticIndex { place_tables, @@ -1279,7 +1276,6 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { scopes_by_node: self.scopes_by_node, use_def_maps, imported_modules: Arc::new(self.imported_modules), - maybe_imported_modules: self.maybe_imported_modules, has_future_annotations: self.has_future_annotations, enclosing_snapshots: self.enclosing_snapshots, semantic_syntax_errors: self.semantic_syntax_errors.into_inner(), @@ -1453,6 +1449,43 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { self.current_use_def_map_mut() .record_node_reachability(NodeKey::from_node(node)); + // If we see: + // + // * `from .x.y import z` (must be relative!) + // * And we are in an `__init__.py(i)` (hereafter `thispackage`) + // * And this is the first time we've seen `from .x` in this module + // + // We introduce a local definition `x = ` that occurs + // before the `z = ...` declaration the import introduces. This models the fact + // that the *first* time that you import 'thispackage.x' the python runtime creates + // `x` as a variable in the global scope of `thispackage`. + // + // This is not a perfect simulation of actual runtime behaviour for *various* + // reasons but it works well for most practical purposes. 
In particular it's nice + // that `x` can be freely overwritten, and that we don't assume that an import + // in one function is visible in another function. + // + // TODO: Also support `from thispackage.x.y import z`? + // TODO: `seen_submodule_imports` should be per-scope and not per-file + // (if two functions import `.x`, they both should believe `x` is defined) + if node.level == 1 + && let Some(submodule) = &node.module + && let Some(parsed_submodule) = ModuleName::new(submodule.as_str()) + && let Some(direct_submodule) = parsed_submodule.components().next() + && self.file.is_package(self.db) + && !self.seen_submodule_imports.contains(direct_submodule) + { + self.seen_submodule_imports + .insert(direct_submodule.to_owned()); + + let direct_submodule_name = Name::new(direct_submodule); + let symbol = self.add_symbol(direct_submodule_name); + self.add_definition( + symbol.into(), + ImportFromSubmoduleDefinitionNodeRef { node, submodule }, + ); + } + let mut found_star = false; for (alias_index, alias) in node.names.iter().enumerate() { if &alias.name == "*" { @@ -1559,20 +1592,18 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { } let (symbol_name, is_reexported) = if let Some(asname) = &alias.asname { + // It's re-exported if it's `from ... import x as x` (&asname.id, asname.id == alias.name.id) } else { - (&alias.name.id, false) + // It's re-exported if it's `from . 
import x` in an `__init__.pyi` + ( + &alias.name.id, + node.level == 1 + && node.module.is_none() + && self.file.is_package(self.db), + ) }; - // If there's no alias or a redundant alias, record this as a potential import of a submodule - if alias.asname.is_none() || is_reexported { - self.maybe_imported_modules.insert(MaybeModuleImport { - level: node.level, - from_module: node.module.clone().map(Into::into), - submodule: alias.name.clone().into(), - }); - } - // Look for imports `from __future__ import annotations`, ignore `as ...` // We intentionally don't enforce the rules about location of `__future__` // imports here, we assume the user's intent was to apply the `__future__` diff --git a/crates/ty_python_semantic/src/semantic_index/definition.rs b/crates/ty_python_semantic/src/semantic_index/definition.rs index 81af22d314..db5d519560 100644 --- a/crates/ty_python_semantic/src/semantic_index/definition.rs +++ b/crates/ty_python_semantic/src/semantic_index/definition.rs @@ -3,6 +3,7 @@ use std::ops::Deref; use ruff_db::files::{File, FileRange}; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; use ruff_python_ast as ast; +use ruff_python_ast::name::Name; use ruff_text_size::{Ranged, TextRange}; use crate::Db; @@ -209,6 +210,7 @@ impl<'db> DefinitionState<'db> { pub(crate) enum DefinitionNodeRef<'ast, 'db> { Import(ImportDefinitionNodeRef<'ast>), ImportFrom(ImportFromDefinitionNodeRef<'ast>), + ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef<'ast>), ImportStar(StarImportDefinitionNodeRef<'ast>), For(ForStmtDefinitionNodeRef<'ast, 'db>), Function(&'ast ast::StmtFunctionDef), @@ -290,6 +292,12 @@ impl<'ast> From> for DefinitionNodeRef<'ast, ' } } +impl<'ast> From> for DefinitionNodeRef<'ast, '_> { + fn from(node_ref: ImportFromSubmoduleDefinitionNodeRef<'ast>) -> Self { + Self::ImportFromSubmodule(node_ref) + } +} + impl<'ast, 'db> From> for DefinitionNodeRef<'ast, 'db> { fn from(value: ForStmtDefinitionNodeRef<'ast, 'db>) -> Self { Self::For(value) 
@@ -357,7 +365,11 @@ pub(crate) struct ImportFromDefinitionNodeRef<'ast> { pub(crate) alias_index: usize, pub(crate) is_reexported: bool, } - +#[derive(Copy, Clone, Debug)] +pub(crate) struct ImportFromSubmoduleDefinitionNodeRef<'ast> { + pub(crate) node: &'ast ast::StmtImportFrom, + pub(crate) submodule: &'ast ast::Identifier, +} #[derive(Copy, Clone, Debug)] pub(crate) struct AssignmentDefinitionNodeRef<'ast, 'db> { pub(crate) unpack: Option<(UnpackPosition, Unpack<'db>)>, @@ -427,7 +439,6 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported, }), - DefinitionNodeRef::ImportFrom(ImportFromDefinitionNodeRef { node, alias_index, @@ -437,6 +448,13 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported, }), + DefinitionNodeRef::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { + node, + submodule, + }) => DefinitionKind::ImportFromSubmodule(ImportFromSubmoduleDefinitionKind { + node: AstNodeRef::new(parsed, node), + submodule: submodule.as_str().into(), + }), DefinitionNodeRef::ImportStar(star_import) => { let StarImportDefinitionNodeRef { node, symbol_id } = star_import; DefinitionKind::StarImport(StarImportDefinitionKind { @@ -562,7 +580,10 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported: _, }) => (&node.names[alias_index]).into(), - + Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { + node, + submodule: _, + }) => node.into(), // INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`, // we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list. 
Self::ImportStar(StarImportDefinitionNodeRef { node, symbol_id: _ }) => node @@ -661,6 +682,7 @@ impl DefinitionCategory { pub enum DefinitionKind<'db> { Import(ImportDefinitionKind), ImportFrom(ImportFromDefinitionKind), + ImportFromSubmodule(ImportFromSubmoduleDefinitionKind), StarImport(StarImportDefinitionKind), Function(AstNodeRef), Class(AstNodeRef), @@ -687,6 +709,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.is_reexported(), DefinitionKind::ImportFrom(import) => import.is_reexported(), + DefinitionKind::ImportFromSubmodule(_) => false, _ => true, } } @@ -704,6 +727,7 @@ impl DefinitionKind<'_> { DefinitionKind::Import(_) | DefinitionKind::ImportFrom(_) | DefinitionKind::StarImport(_) + | DefinitionKind::ImportFromSubmodule(_) ) } @@ -719,6 +743,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.alias(module).range(), DefinitionKind::ImportFrom(import) => import.alias(module).range(), + DefinitionKind::ImportFromSubmodule(import) => import.import(module).range(), DefinitionKind::StarImport(import) => import.alias(module).range(), DefinitionKind::Function(function) => function.node(module).name.range(), DefinitionKind::Class(class) => class.node(module).name.range(), @@ -756,6 +781,7 @@ impl DefinitionKind<'_> { match self { DefinitionKind::Import(import) => import.alias(module).range(), DefinitionKind::ImportFrom(import) => import.alias(module).range(), + DefinitionKind::ImportFromSubmodule(import) => import.import(module).range(), DefinitionKind::StarImport(import) => import.import(module).range(), DefinitionKind::Function(function) => function.node(module).range(), DefinitionKind::Class(class) => class.node(module).range(), @@ -846,6 +872,7 @@ impl DefinitionKind<'_> { | DefinitionKind::Comprehension(_) | DefinitionKind::WithItem(_) | DefinitionKind::MatchPattern(_) + | DefinitionKind::ImportFromSubmodule(_) | DefinitionKind::ExceptHandler(_) => DefinitionCategory::Binding, } } @@ 
-991,6 +1018,21 @@ impl ImportFromDefinitionKind { self.is_reexported } } +#[derive(Clone, Debug, get_size2::GetSize)] +pub struct ImportFromSubmoduleDefinitionKind { + node: AstNodeRef, + submodule: Name, +} + +impl ImportFromSubmoduleDefinitionKind { + pub fn import<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::StmtImportFrom { + self.node.node(module) + } + + pub(crate) fn submodule(&self) -> &Name { + &self.submodule + } +} #[derive(Clone, Debug, get_size2::GetSize)] pub struct AssignmentDefinitionKind<'db> { @@ -1121,6 +1163,12 @@ impl From<&ast::Alias> for DefinitionNodeKey { } } +impl From<&ast::StmtImportFrom> for DefinitionNodeKey { + fn from(node: &ast::StmtImportFrom) -> Self { + Self(NodeKey::from_node(node)) + } +} + impl From<&ast::StmtFunctionDef> for DefinitionNodeKey { fn from(node: &ast::StmtFunctionDef) -> Self { Self(NodeKey::from_node(node)) diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 0d0879f348..2b3bf1c0d9 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -39,9 +39,7 @@ use crate::place::{ use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::semantic_index::place::ScopedPlaceId; use crate::semantic_index::scope::ScopeId; -use crate::semantic_index::{ - imported_modules, imported_relative_submodules_of_stub_package, place_table, semantic_index, -}; +use crate::semantic_index::{imported_modules, place_table, semantic_index}; use crate::suppression::check_suppressions; use crate::types::bound_super::BoundSuperType; use crate::types::call::{Binding, Bindings, CallArguments, CallableBinding}; @@ -11302,29 +11300,23 @@ impl<'db> ModuleLiteralType<'db> { /// /// # Rules /// - /// We have two rules for whether a submodule attribute is defined: + /// Because of the excessive power and danger of this method, we currently have only one rule: /// - /// * If the importing file include `import x.y` then `x.y` is 
defined in the importing file. - /// This is an easy rule to justify because `import` can only ever import a module, and so + /// * If the importing file includes `import x.y` then `x.y` is defined in the importing file. + /// This is an easy rule to justify because `import` can only ever import a module, and the + /// only reason to do it is to explicitly introduce those submodules and attributes, so it /// *should* shadow any non-submodule of the same name. /// - /// * If the module is an `__init__.pyi` for `mypackage`, and it contains a `from...import` - /// that normalizes to `from mypackage import submodule`, then `mypackage.submodule` is - /// defined in all files. This supports the `from . import submodule` idiom. Critically, - /// we do *not* allow `from mypackage.nested import submodule` to affect `mypackage`. - /// The idea here is that `from mypackage import submodule` *from mypackage itself* can - /// only ever reasonably be an import of a submodule. It doesn't make any sense to import - /// a function or class from yourself! (You *can* do it but... why? Don't? Please?) + /// `from x.y import z` instances are currently ignored because the `x.y` part may not be a + /// side-effect the user actually cares about, and the `z` component may not be a submodule. + /// + /// We instead prefer handling most other import effects as definitions in the scope of + /// the current file (i.e. [`crate::semantic_index::definition::ImportFromDefinitionNodeRef`]). 
fn available_submodule_attributes(&self, db: &'db dyn Db) -> impl Iterator { self.importing_file(db) .into_iter() .flat_map(|file| imported_modules(db, file)) .filter_map(|submodule_name| submodule_name.relative_to(self.module(db).name(db))) - .chain( - imported_relative_submodules_of_stub_package(db, self.module(db)) - .iter() - .cloned(), - ) .filter_map(|relative_submodule| relative_submodule.components().next().map(Name::from)) } diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index cd29efaeac..4b42458c4d 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -1285,7 +1285,7 @@ mod resolve_definition { let file = definition.file(db); let module = parsed_module(db, file).load(db); let import_node = import_from_def.import(&module); - let alias = import_from_def.alias(&module); + let name = &import_from_def.alias(&module).name; // For `ImportFrom`, we need to resolve the original imported symbol name // (alias.name), not the local alias (symbol_name) @@ -1293,7 +1293,7 @@ mod resolve_definition { db, file, import_node, - &alias.name, + name, visited, alias_resolution, ) @@ -1625,6 +1625,7 @@ mod resolve_definition { DefinitionKind::TypeAlias(_) | DefinitionKind::Import(_) | DefinitionKind::ImportFrom(_) + | DefinitionKind::ImportFromSubmodule(_) | DefinitionKind::StarImport(_) | DefinitionKind::NamedExpression(_) | DefinitionKind::Assignment(_) diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 2cb4bf3274..806c5e5844 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -4,6 +4,7 @@ use itertools::{Either, Itertools}; use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity}; use ruff_db::files::File; use ruff_db::parsed::ParsedModuleRef; +use ruff_python_ast::name::Name; use 
ruff_python_ast::visitor::{Visitor, walk_expr}; use ruff_python_ast::{ self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion, @@ -1214,6 +1215,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { definition, ); } + DefinitionKind::ImportFromSubmodule(import_from) => { + self.infer_import_from_submodule_definition( + import_from.import(self.module()), + import_from.submodule(), + definition, + ); + } DefinitionKind::StarImport(import) => { self.infer_import_from_definition( import.import(self.module()), @@ -5893,6 +5901,99 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } + /// Infer the implicit local definition `x = ` that + /// `from .x.y import z` can introduce in an `__init__.py(i)`. + /// + /// For the definition `z`, see [`TypeInferenceBuilder::infer_import_from_definition`]. + fn infer_import_from_submodule_definition( + &mut self, + import_from: &ast::StmtImportFrom, + submodule: &Name, + definition: Definition<'db>, + ) { + // Although the *actual* runtime semantic of this kind of statement is to + // introduce a variable in the global scope of this module, we want to + // encourage users to write code that doesn't have dependence on execution-order. + // + // By introducing it as a local variable in the scope the import occurs in, + // we effectively require the developer to either do the import at the start of + // the file where it belongs, or to repeat the import in every function that + // wants to use it, which "definitely" works. + // + // (It doesn't actually "definitely" work because only the first import of `thispackage.x` + // will ever set `x`, and any subsequent overwrites of it will permanently clobber it. + // Also, a local variable `x` in a function should always shadow the submodule because + // the submodule is defined at file-scope. However, both of these issues are much more + // narrow, so this approach seems to work well in practice!) 
+
+        // Get this package's module by resolving `.`
+        let Ok(module_name) = ModuleName::from_identifier_parts(self.db(), self.file(), None, 1)
+        else {
+            self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
+            return;
+        };
+
+        let Some(module) = resolve_module(self.db(), &module_name) else {
+            self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
+            return;
+        };
+
+        // Now construct the submodule `.x`
+        assert!(
+            !submodule.is_empty(),
+            "ImportFromSubmoduleDefinitionKind constructed with empty module"
+        );
+        let name = submodule
+            .split_once('.')
+            .map(|(first, _)| first)
+            .unwrap_or(submodule.as_str());
+        let full_submodule_name = ModuleName::new(name).map(|final_part| {
+            let mut ret = module_name.clone();
+            ret.extend(&final_part);
+            ret
+        });
+        // And try to import it
+        if let Some(submodule_type) = full_submodule_name
+            .as_ref()
+            .and_then(|submodule_name| self.module_type_from_name(submodule_name))
+        {
+            // Success, introduce a binding!
+            //
+            // We explicitly don't introduce a *declaration* because it's actually ok
+            // (and fairly common) to overwrite this import with a function or class
+            // and we don't want it to be a type error to do so.
+ self.add_binding(import_from.into(), definition, |_, _| submodule_type); + return; + } + + // That didn't work, try to produce diagnostics + self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); + + if !self.is_reachable(import_from) { + return; + } + + let Some(builder) = self + .context + .report_lint(&UNRESOLVED_IMPORT, AnyNodeRef::StmtImportFrom(import_from)) + else { + return; + }; + + let diagnostic = builder.into_diagnostic(format_args!( + "Module `{module_name}` has no submodule `{name}`" + )); + + if let Some(full_submodule_name) = full_submodule_name { + hint_if_stdlib_submodule_exists_on_other_versions( + self.db(), + diagnostic, + &full_submodule_name, + module, + ); + } + } + fn infer_return_statement(&mut self, ret: &ast::StmtReturn) { let tcx = if ret.value.is_some() { nearest_enclosing_function(self.db(), self.index, self.scope()) From 9ce3230add53403638dcce63ed435b7c1295619b Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Mon, 10 Nov 2025 18:59:48 -0500 Subject: [PATCH 151/180] [ty] Make implicit submodule imports only occur in global scope (#21370) This loses any ability to have "per-function" implicit submodule imports, to avoid the "ok but now we need per-scope imports" and "ok but this should actually introduce a global that only exists during this function" problems. A simple and clean implementation with no weird corners. 
Fixes https://github.com/astral-sh/ty/issues/1482 --- .../mdtest/import/nonstandard_conventions.md | 22 +++++++++---------- .../src/semantic_index/builder.rs | 6 ++--- .../src/types/infer/builder.rs | 17 +++----------- 3 files changed, 16 insertions(+), 29 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md index 934f9ad918..120ea0746d 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -9,18 +9,16 @@ This file currently covers the following details: - **froms are locals**: a `from..import` can only define locals, it does not have global side-effects. Specifically any submodule attribute `a` that's implicitly introduced by either `from .a import b` or `from . import a as b` (in an `__init__.py(i)`) is a local and not a - global. If you do such an import at the top of a file you won't notice this. However if you do - such an import in a function, that means it will only be function-scoped (so you'll need to do - it in every function that wants to access it, making your code less sensitive to execution - order). + global. However we only introduce this symbol if the `from..import` is in global-scope. This + means imports at the start of a file work as you'd expect, while imports in a function don't + introduce submodule attributes. - **first from first serve**: only the *first* `from..import` in an `__init__.py(i)` that imports a particular direct submodule of the current package introduces that submodule as a local. Subsequent imports of the submodule will not introduce that local. This reflects the fact that in actual python only the first import of a submodule (in the entire execution of the program) - introduces it as an attribute of the package. 
By "first" we mean "the first time in this scope - (or any parent scope)". This pairs well with the fact that we are specifically introducing a - local (as long as you don't accidentally shadow or overwrite the local). + introduces it as an attribute of the package. By "first" we mean "the first time in global + scope". - **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a` (equivalent to `from . import a as a`). This is required to properly handle many stubs in the @@ -949,9 +947,8 @@ def funcmod(x: int) -> int: ## LHS `from` Imports In Functions -If a `from` import occurs in a function, LHS symbols should only be visible in that function. This -very blatantly is not runtime-accurate, but exists to try to force you to write "obviously -deterministically correct" imports instead of relying on execution order. +If a `from` import occurs in a function, we simply ignore its LHS effects to avoid modeling +execution-order-specific behaviour (and to discourage people writing code that has it). `mypackage/__init__.py`: @@ -959,13 +956,14 @@ deterministically correct" imports instead of relying on execution order. def run1(): from .funcmod import other + # TODO: this would be nice to support + # error: [unresolved-reference] funcmod.funcmod(1) def run2(): from .funcmod import other - # TODO: this is just a bug! We only register the first - # import of `funcmod` in the entire file, and not per-scope! + # TODO: this would be nice to support # error: [unresolved-reference] funcmod.funcmod(2) diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index fe0ca550b3..8029a775fe 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -1454,6 +1454,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // * `from .x.y import z` (must be relative!) 
// * And we are in an `__init__.py(i)` (hereafter `thispackage`) // * And this is the first time we've seen `from .x` in this module + // * And we're in the global scope // // We introduce a local definition `x = ` that occurs // before the `z = ...` declaration the import introduces. This models the fact @@ -1466,9 +1467,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // in one function is visible in another function. // // TODO: Also support `from thispackage.x.y import z`? - // TODO: `seen_submodule_imports` should be per-scope and not per-file - // (if two functions import `.x`, they both should believe `x` is defined) - if node.level == 1 + if self.current_scope() == FileScopeId::global() + && node.level == 1 && let Some(submodule) = &node.module && let Some(parsed_submodule) = ModuleName::new(submodule.as_str()) && let Some(direct_submodule) = parsed_submodule.components().next() diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 806c5e5844..086e3f8f15 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -5911,20 +5911,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { submodule: &Name, definition: Definition<'db>, ) { - // Although the *actual* runtime semantic of this kind of statement is to - // introduce a variable in the global scope of this module, we want to - // encourage users to write code that doesn't have dependence on execution-order. - // - // By introducing it as a local variable in the scope the import occurs in, - // we effectively require the developer to either do the import at the start of - // the file where it belongs, or to repeat the import in every function that - // wants to use it, which "definitely" works. 
- // - // (It doesn't actually "definitely" work because only the first import of `thispackage.x` - // will ever set `x`, and any subsequent overwrites of it will permanently clobber it. - // Also, a local variable `x` in a function should always shadow the submodule because - // the submodule is defined at file-scope. However, both of these issues are much more - // narrow, so this approach seems to work well in practice!) + // The runtime semantic of this kind of statement is to introduce a variable in the global + // scope of this module, so we do just that. (Actually we introduce a local variable, but + // this type of Definition is only created when a `from..import` is in global scope.) // Get this package's module by resolving `.` let Ok(module_name) = ModuleName::from_identifier_parts(self.db(), self.file(), None, 1) From 33b942c7ad4e3c71b832dbb234adbb8b3ee9b0be Mon Sep 17 00:00:00 2001 From: Douglas Creager Date: Mon, 10 Nov 2025 19:46:49 -0500 Subject: [PATCH 152/180] [ty] Handle annotated `self` parameter in constructor of non-invariant generic classes (#21325) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This manifested as an error when inferring the type of a PEP-695 generic class via its constructor parameters: ```py class D[T, U]: @overload def __init__(self: "D[str, U]", u: U) -> None: ... @overload def __init__(self, t: T, u: U) -> None: ... def __init__(self, *args) -> None: ... # revealed: D[Unknown, str] # SHOULD BE: D[str, str] reveal_type(D("string")) ``` This manifested because `D` is inferred to be bivariant in both `T` and `U`. We weren't seeing this in the equivalent example for legacy typevars, since those default to invariant. (This issue also showed up for _covariant_ typevars, so this issue was not limited to bivariance.) 
The underlying cause was because of a heuristic that we have in our current constraint solver, which attempts to handle situations like this: ```py def f[T](t: T | None): ... f(None) ``` Here, the `None` argument matches the non-typevar union element, so this argument should not add any constraints on what `T` can specialize to. Our previous heuristic would check for this by seeing if the argument type is a subtype of the parameter annotation as a whole — even if it isn't a union! That would cause us to erroneously ignore the `self` parameter in our constructor call, since bivariant classes are equivalent to each other, regardless of their specializations. The quick fix is to move this heuristic "down a level", so that we only apply it when the parameter annotation is a union. This heuristic should go away completely :crossed_fingers: with the new constraint solver. --- .../mdtest/assignment/annotations.md | 3 +- .../mdtest/generics/legacy/classes.md | 16 ++++-- .../mdtest/generics/pep695/classes.md | 15 ++++-- .../ty_python_semantic/src/types/generics.rs | 53 +++++++++---------- 4 files changed, 50 insertions(+), 37 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md index 3865572726..043380338b 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/annotations.md @@ -674,8 +674,7 @@ x6: Covariant[Any] = covariant(1) x7: Contravariant[Any] = contravariant(1) x8: Invariant[Any] = invariant(1) -# TODO: This could reveal `Bivariant[Any]`. 
-reveal_type(x5) # revealed: Bivariant[Literal[1]] +reveal_type(x5) # revealed: Bivariant[Any] reveal_type(x6) # revealed: Covariant[Any] reveal_type(x7) # revealed: Contravariant[Any] reveal_type(x8) # revealed: Invariant[Any] diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md index a1f47c3b11..7ba6803dda 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md @@ -436,9 +436,7 @@ def test_seq(x: Sequence[T]) -> Sequence[T]: def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: tuple[()]): reveal_type(test_seq(t1)) # revealed: Sequence[int | float | complex | list[int]] reveal_type(test_seq(t2)) # revealed: Sequence[int | str] - - # TODO: this should be `Sequence[Never]` - reveal_type(test_seq(t3)) # revealed: Sequence[Unknown] + reveal_type(test_seq(t3)) # revealed: Sequence[Never] ``` ### `__init__` is itself generic @@ -466,6 +464,7 @@ wrong_innards: C[int] = C("five", 1) from typing_extensions import overload, Generic, TypeVar T = TypeVar("T") +U = TypeVar("U") class C(Generic[T]): @overload @@ -497,6 +496,17 @@ C[int](12) C[None]("string") # error: [no-matching-overload] C[None](b"bytes") # error: [no-matching-overload] C[None](12) + +class D(Generic[T, U]): + @overload + def __init__(self: "D[str, U]", u: U) -> None: ... + @overload + def __init__(self, t: T, u: U) -> None: ... + def __init__(self, *args) -> None: ... 
+ +reveal_type(D("string")) # revealed: D[str, str] +reveal_type(D(1)) # revealed: D[str, int] +reveal_type(D(1, "string")) # revealed: D[int, str] ``` ### Synthesized methods with dataclasses diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md index 30a9ee88ae..a01b468ad0 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/classes.md @@ -375,9 +375,7 @@ def test_seq[T](x: Sequence[T]) -> Sequence[T]: def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: tuple[()]): reveal_type(test_seq(t1)) # revealed: Sequence[int | float | complex | list[int]] reveal_type(test_seq(t2)) # revealed: Sequence[int | str] - - # TODO: this should be `Sequence[Never]` - reveal_type(test_seq(t3)) # revealed: Sequence[Unknown] + reveal_type(test_seq(t3)) # revealed: Sequence[Never] ``` ### `__init__` is itself generic @@ -436,6 +434,17 @@ C[int](12) C[None]("string") # error: [no-matching-overload] C[None](b"bytes") # error: [no-matching-overload] C[None](12) + +class D[T, U]: + @overload + def __init__(self: "D[str, U]", u: U) -> None: ... + @overload + def __init__(self, t: T, u: U) -> None: ... + def __init__(self, *args) -> None: ... + +reveal_type(D("string")) # revealed: D[str, str] +reveal_type(D(1)) # revealed: D[str, int] +reveal_type(D(1, "string")) # revealed: D[int, str] ``` ### Synthesized methods with dataclasses diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 11b708cf69..8be33c95fc 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -1393,31 +1393,6 @@ impl<'db> SpecializationBuilder<'db> { return Ok(()); } - // If the actual type is a subtype of the formal type, then return without adding any new - // type mappings. 
(Note that if the formal type contains any typevars, this check will - // fail, since no non-typevar types are assignable to a typevar. Also note that we are - // checking _subtyping_, not _assignability_, so that we do specialize typevars to dynamic - // argument types; and we have a special case for `Never`, which is a subtype of all types, - // but which we also do want as a specialization candidate.) - // - // In particular, this handles a case like - // - // ```py - // def f[T](t: T | None): ... - // - // f(None) - // ``` - // - // without specializing `T` to `None`. - if !matches!(formal, Type::ProtocolInstance(_)) - && !actual.is_never() - && actual - .when_subtype_of(self.db, formal, self.inferable) - .is_always_satisfied(self.db) - { - return Ok(()); - } - // Remove the union elements from `actual` that are not related to `formal`, and vice // versa. // @@ -1473,10 +1448,30 @@ impl<'db> SpecializationBuilder<'db> { self.add_type_mapping(*formal_bound_typevar, remaining_actual, filter); } (Type::Union(formal), _) => { - // Second, if the formal is a union, and precisely one union element _is_ a typevar (not - // _contains_ a typevar), then we add a mapping between that typevar and the actual - // type. (Note that we've already handled above the case where the actual is - // assignable to any _non-typevar_ union element.) + // Second, if the formal is a union, and precisely one union element is assignable + // from the actual type, then we don't add any type mapping. This handles a case like + // + // ```py + // def f[T](t: T | None): ... + // + // f(None) + // ``` + // + // without specializing `T` to `None`. + // + // Otherwise, if precisely one union element _is_ a typevar (not _contains_ a + // typevar), then we add a mapping between that typevar and the actual type. 
+ if !actual.is_never() { + let assignable_elements = (formal.elements(self.db).iter()).filter(|ty| { + actual + .when_subtype_of(self.db, **ty, self.inferable) + .is_always_satisfied(self.db) + }); + if assignable_elements.exactly_one().is_ok() { + return Ok(()); + } + } + let bound_typevars = (formal.elements(self.db).iter()).filter_map(|ty| ty.as_typevar()); if let Ok(bound_typevar) = bound_typevars.exactly_one() { From 36cce347fd490a16eca9d7e4b03e71941913d816 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Tue, 11 Nov 2025 10:43:37 +0100 Subject: [PATCH 153/180] Reduce notebook memory footprint (#21319) --- crates/ruff_db/src/diagnostic/render/full.rs | 14 ++-- crates/ruff_notebook/src/index.rs | 57 +++++++++---- crates/ruff_notebook/src/notebook.rs | 84 +++++++++----------- 3 files changed, 84 insertions(+), 71 deletions(-) diff --git a/crates/ruff_db/src/diagnostic/render/full.rs b/crates/ruff_db/src/diagnostic/render/full.rs index c87413a84e..0784297755 100644 --- a/crates/ruff_db/src/diagnostic/render/full.rs +++ b/crates/ruff_db/src/diagnostic/render/full.rs @@ -112,16 +112,16 @@ impl std::fmt::Display for Diff<'_> { // `None`, indicating a regular script file, all the lines will be in one "cell" under the // `None` key. 
let cells = if let Some(notebook_index) = &self.notebook_index { - let mut last_cell = OneIndexed::MIN; + let mut last_cell_index = OneIndexed::MIN; let mut cells: Vec<(Option, TextSize)> = Vec::new(); - for (row, cell) in notebook_index.iter() { - if cell != last_cell { - let offset = source_code.line_start(row); - cells.push((Some(last_cell), offset)); - last_cell = cell; + for cell in notebook_index.iter() { + if cell.cell_index() != last_cell_index { + let offset = source_code.line_start(cell.start_row()); + cells.push((Some(last_cell_index), offset)); + last_cell_index = cell.cell_index(); } } - cells.push((Some(last_cell), source_text.text_len())); + cells.push((Some(last_cell_index), source_text.text_len())); cells } else { vec![(None, source_text.text_len())] diff --git a/crates/ruff_notebook/src/index.rs b/crates/ruff_notebook/src/index.rs index eff605aa6d..951914e74a 100644 --- a/crates/ruff_notebook/src/index.rs +++ b/crates/ruff_notebook/src/index.rs @@ -8,37 +8,40 @@ use ruff_source_file::{LineColumn, OneIndexed, SourceLocation}; /// [`ruff_text_size::TextSize`] to jupyter notebook cell/row/column. #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct NotebookIndex { - /// Enter a row (1-based), get back the cell (1-based) - pub(super) row_to_cell: Vec, - /// Enter a row (1-based), get back the row in cell (1-based) - pub(super) row_to_row_in_cell: Vec, + /// Stores the starting row and the absolute cell index for every Python (valid) cell. + /// + /// The index in this vector corresponds to the Python cell index (valid cell index). 
+    pub(super) cell_starts: Vec<CellStart>,
 }
 
 impl NotebookIndex {
-    pub fn new(row_to_cell: Vec<OneIndexed>, row_to_row_in_cell: Vec<OneIndexed>) -> Self {
-        Self {
-            row_to_cell,
-            row_to_row_in_cell,
+    fn find_cell(&self, row: OneIndexed) -> Option<CellStart> {
+        match self
+            .cell_starts
+            .binary_search_by_key(&row, |start| start.start_row)
+        {
+            Ok(cell_index) => Some(self.cell_starts[cell_index]),
+            Err(insertion_point) => Some(self.cell_starts[insertion_point.checked_sub(1)?]),
         }
     }
 
-    /// Returns the cell number (1-based) for the given row (1-based).
+    /// Returns the (raw) cell number (1-based) for the given row (1-based).
     pub fn cell(&self, row: OneIndexed) -> Option<OneIndexed> {
-        self.row_to_cell.get(row.to_zero_indexed()).copied()
+        self.find_cell(row).map(|start| start.raw_cell_index)
     }
 
     /// Returns the row number (1-based) in the cell (1-based) for the
     /// given row (1-based).
     pub fn cell_row(&self, row: OneIndexed) -> Option<OneIndexed> {
-        self.row_to_row_in_cell.get(row.to_zero_indexed()).copied()
+        self.find_cell(row)
+            .map(|start| OneIndexed::from_zero_indexed(row.get() - start.start_row.get()))
     }
 
-    /// Returns an iterator over the row:cell-number pairs (both 1-based).
-    pub fn iter(&self) -> impl Iterator<Item = (OneIndexed, OneIndexed)> {
-        self.row_to_cell
-            .iter()
-            .enumerate()
-            .map(|(row, cell)| (OneIndexed::from_zero_indexed(row), *cell))
+    /// Returns an iterator over the starting rows of each cell (1-based).
+    ///
+    /// This yields one entry per Python cell (skipping over Markdown cells).
+    pub fn iter(&self) -> impl Iterator<Item = CellStart> + '_ {
+        self.cell_starts.iter().copied()
    }
 
     /// Translates the given [`LineColumn`] based on the indexing table.
@@ -67,3 +70,23 @@ impl NotebookIndex {
         }
     }
 }
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
+pub struct CellStart {
+    /// The row in the concatenated notebook source code at which
+    /// this cell starts.
+    pub(super) start_row: OneIndexed,
+
+    /// The absolute index of this cell in the notebook. 
+ pub(super) raw_cell_index: OneIndexed, +} + +impl CellStart { + pub fn start_row(&self) -> OneIndexed { + self.start_row + } + + pub fn cell_index(&self) -> OneIndexed { + self.raw_cell_index + } +} diff --git a/crates/ruff_notebook/src/notebook.rs b/crates/ruff_notebook/src/notebook.rs index 124202e927..4dc01971fc 100644 --- a/crates/ruff_notebook/src/notebook.rs +++ b/crates/ruff_notebook/src/notebook.rs @@ -18,7 +18,7 @@ use ruff_text_size::TextSize; use crate::cell::CellOffsets; use crate::index::NotebookIndex; use crate::schema::{Cell, RawNotebook, SortAlphabetically, SourceValue}; -use crate::{CellMetadata, RawNotebookMetadata, schema}; +use crate::{CellMetadata, CellStart, RawNotebookMetadata, schema}; /// Run round-trip source code generation on a given Jupyter notebook file path. pub fn round_trip(path: &Path) -> anyhow::Result { @@ -320,11 +320,19 @@ impl Notebook { /// The index building is expensive as it needs to go through the content of /// every valid code cell. fn build_index(&self) -> NotebookIndex { - let mut row_to_cell = Vec::new(); - let mut row_to_row_in_cell = Vec::new(); + let mut cell_starts = Vec::with_capacity(self.valid_code_cells.len()); + + let mut current_row = OneIndexed::MIN; for &cell_index in &self.valid_code_cells { - let line_count = match &self.raw.cells[cell_index as usize].source() { + let raw_cell_index = cell_index as usize; + // Record the starting row of this cell + cell_starts.push(CellStart { + start_row: current_row, + raw_cell_index: OneIndexed::from_zero_indexed(raw_cell_index), + }); + + let line_count = match &self.raw.cells[raw_cell_index].source() { SourceValue::String(string) => { if string.is_empty() { 1 @@ -342,17 +350,11 @@ impl Notebook { } } }; - row_to_cell.extend(std::iter::repeat_n( - OneIndexed::from_zero_indexed(cell_index as usize), - line_count, - )); - row_to_row_in_cell.extend((0..line_count).map(OneIndexed::from_zero_indexed)); + + current_row = current_row.saturating_add(line_count); } - 
NotebookIndex { - row_to_cell, - row_to_row_in_cell, - } + NotebookIndex { cell_starts } } /// Return the notebook content. @@ -456,7 +458,7 @@ mod tests { use ruff_source_file::OneIndexed; - use crate::{Cell, Notebook, NotebookError, NotebookIndex}; + use crate::{Cell, CellStart, Notebook, NotebookError, NotebookIndex}; /// Construct a path to a Jupyter notebook in the `resources/test/fixtures/jupyter` directory. fn notebook_path(path: impl AsRef) -> std::path::PathBuf { @@ -548,39 +550,27 @@ print("after empty cells") assert_eq!( notebook.index(), &NotebookIndex { - row_to_cell: vec![ - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(6), - OneIndexed::from_zero_indexed(6), - OneIndexed::from_zero_indexed(7) - ], - row_to_row_in_cell: vec![ - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(3), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(5), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(2), - OneIndexed::from_zero_indexed(3), - OneIndexed::from_zero_indexed(4), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(0), - OneIndexed::from_zero_indexed(1), - OneIndexed::from_zero_indexed(0) + cell_starts: vec![ + CellStart { + start_row: OneIndexed::MIN, + raw_cell_index: OneIndexed::MIN + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(6), + raw_cell_index: OneIndexed::from_zero_indexed(2) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(11), + 
raw_cell_index: OneIndexed::from_zero_indexed(4) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(12), + raw_cell_index: OneIndexed::from_zero_indexed(6) + }, + CellStart { + start_row: OneIndexed::from_zero_indexed(14), + raw_cell_index: OneIndexed::from_zero_indexed(7) + } ], } ); From 7b237d316f4099c75811527cb13dd8699ec23d1c Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Tue, 11 Nov 2025 14:03:46 +0100 Subject: [PATCH 154/180] Add option to provide a reason to `--add-noqa` (#21294) Co-authored-by: Claude --- crates/ruff/src/args.rs | 9 ++++- crates/ruff/src/commands/add_noqa.rs | 10 ++++- crates/ruff/src/lib.rs | 12 +++++- crates/ruff/tests/cli/lint.rs | 58 ++++++++++++++++++++++++++++ crates/ruff_linter/src/linter.rs | 2 + crates/ruff_linter/src/noqa.rs | 23 ++++++++++- docs/configuration.md | 5 ++- 7 files changed, 110 insertions(+), 9 deletions(-) diff --git a/crates/ruff/src/args.rs b/crates/ruff/src/args.rs index e1c114a66e..f1d38336f2 100644 --- a/crates/ruff/src/args.rs +++ b/crates/ruff/src/args.rs @@ -415,8 +415,13 @@ pub struct CheckCommand { )] pub statistics: bool, /// Enable automatic additions of `noqa` directives to failing lines. + /// Optionally provide a reason to append after the codes. #[arg( long, + value_name = "REASON", + default_missing_value = "", + num_args = 0..=1, + require_equals = true, // conflicts_with = "add_noqa", conflicts_with = "show_files", conflicts_with = "show_settings", @@ -428,7 +433,7 @@ pub struct CheckCommand { conflicts_with = "fix", conflicts_with = "diff", )] - pub add_noqa: bool, + pub add_noqa: Option, /// See the files Ruff will be run against with the current settings. #[arg( long, @@ -1057,7 +1062,7 @@ Possible choices: /// etc.). 
#[expect(clippy::struct_excessive_bools)] pub struct CheckArguments { - pub add_noqa: bool, + pub add_noqa: Option, pub diff: bool, pub exit_non_zero_on_fix: bool, pub exit_zero: bool, diff --git a/crates/ruff/src/commands/add_noqa.rs b/crates/ruff/src/commands/add_noqa.rs index d5eaeb0170..ff6a07c758 100644 --- a/crates/ruff/src/commands/add_noqa.rs +++ b/crates/ruff/src/commands/add_noqa.rs @@ -21,6 +21,7 @@ pub(crate) fn add_noqa( files: &[PathBuf], pyproject_config: &PyprojectConfig, config_arguments: &ConfigArguments, + reason: Option<&str>, ) -> Result { // Collect all the files to check. let start = Instant::now(); @@ -76,7 +77,14 @@ pub(crate) fn add_noqa( return None; } }; - match add_noqa_to_path(path, package, &source_kind, source_type, &settings.linter) { + match add_noqa_to_path( + path, + package, + &source_kind, + source_type, + &settings.linter, + reason, + ) { Ok(count) => Some(count), Err(e) => { error!("Failed to add noqa to {}: {e}", path.display()); diff --git a/crates/ruff/src/lib.rs b/crates/ruff/src/lib.rs index 3bd457de8c..3ea0d94fad 100644 --- a/crates/ruff/src/lib.rs +++ b/crates/ruff/src/lib.rs @@ -319,12 +319,20 @@ pub fn check(args: CheckCommand, global_options: GlobalConfigArgs) -> Result cannot contain newline characters" + )); + } + + let reason_opt = (!reason.is_empty()).then_some(reason.as_str()); + let modifications = - commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments)?; + commands::add_noqa::add_noqa(&files, &pyproject_config, &config_arguments, reason_opt)?; if modifications > 0 && config_arguments.log_level >= LogLevel::Default { let s = if modifications == 1 { "" } else { "s" }; #[expect(clippy::print_stderr)] diff --git a/crates/ruff/tests/cli/lint.rs b/crates/ruff/tests/cli/lint.rs index ebd202b052..25500ed346 100644 --- a/crates/ruff/tests/cli/lint.rs +++ b/crates/ruff/tests/cli/lint.rs @@ -1760,6 +1760,64 @@ from foo import ( # noqa: F401 Ok(()) } +#[test] +fn add_noqa_with_reason() -> Result<()> 
{ + let fixture = CliTest::new()?; + fixture.write_file( + "test.py", + r#"import os + +def foo(): + x = 1 +"#, + )?; + + assert_cmd_snapshot!(fixture + .check_command() + .arg("--add-noqa=TODO: fix") + .arg("--select=F401,F841") + .arg("test.py"), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Added 2 noqa directives. + "); + + let content = fs::read_to_string(fixture.root().join("test.py"))?; + insta::assert_snapshot!(content, @r" +import os # noqa: F401 TODO: fix + +def foo(): + x = 1 # noqa: F841 TODO: fix +"); + + Ok(()) +} + +#[test] +fn add_noqa_with_newline_in_reason() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file("test.py", "import os\n")?; + + assert_cmd_snapshot!(fixture + .check_command() + .arg("--add-noqa=line1\nline2") + .arg("--select=F401") + .arg("test.py"), @r###" + success: false + exit_code: 2 + ----- stdout ----- + + ----- stderr ----- + ruff failed + Cause: --add-noqa cannot contain newline characters + "###); + + Ok(()) +} + /// Infer `3.11` from `requires-python` in `pyproject.toml`. #[test] fn requires_python() -> Result<()> { diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index 2e4f284bee..3ec070dd26 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -377,6 +377,7 @@ pub fn add_noqa_to_path( source_kind: &SourceKind, source_type: PySourceType, settings: &LinterSettings, + reason: Option<&str>, ) -> Result { // Parse once. 
let target_version = settings.resolve_target_version(path); @@ -425,6 +426,7 @@ pub fn add_noqa_to_path( &settings.external, &directives.noqa_line_for, stylist.line_ending(), + reason, ) } diff --git a/crates/ruff_linter/src/noqa.rs b/crates/ruff_linter/src/noqa.rs index 606ac5ad3b..da9535817e 100644 --- a/crates/ruff_linter/src/noqa.rs +++ b/crates/ruff_linter/src/noqa.rs @@ -39,7 +39,7 @@ pub fn generate_noqa_edits( let exemption = FileExemption::from(&file_directives); let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); - build_noqa_edits_by_diagnostic(comments, locator, line_ending) + build_noqa_edits_by_diagnostic(comments, locator, line_ending, None) } /// A directive to ignore a set of rules either for a given line of Python source code or an entire file (e.g., @@ -715,6 +715,7 @@ impl Display for LexicalError { impl Error for LexicalError {} /// Adds noqa comments to suppress all messages of a file. 
+#[expect(clippy::too_many_arguments)] pub(crate) fn add_noqa( path: &Path, diagnostics: &[Diagnostic], @@ -723,6 +724,7 @@ pub(crate) fn add_noqa( external: &[String], noqa_line_for: &NoqaMapping, line_ending: LineEnding, + reason: Option<&str>, ) -> Result { let (count, output) = add_noqa_inner( path, @@ -732,12 +734,14 @@ pub(crate) fn add_noqa( external, noqa_line_for, line_ending, + reason, ); fs::write(path, output)?; Ok(count) } +#[expect(clippy::too_many_arguments)] fn add_noqa_inner( path: &Path, diagnostics: &[Diagnostic], @@ -746,6 +750,7 @@ fn add_noqa_inner( external: &[String], noqa_line_for: &NoqaMapping, line_ending: LineEnding, + reason: Option<&str>, ) -> (usize, String) { let mut count = 0; @@ -757,7 +762,7 @@ fn add_noqa_inner( let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); - let edits = build_noqa_edits_by_line(comments, locator, line_ending); + let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason); let contents = locator.contents(); @@ -783,6 +788,7 @@ fn build_noqa_edits_by_diagnostic( comments: Vec>, locator: &Locator, line_ending: LineEnding, + reason: Option<&str>, ) -> Vec> { let mut edits = Vec::default(); for comment in comments { @@ -794,6 +800,7 @@ fn build_noqa_edits_by_diagnostic( FxHashSet::from_iter([comment.code]), locator, line_ending, + reason, ) { edits.push(Some(noqa_edit.into_edit())); } @@ -808,6 +815,7 @@ fn build_noqa_edits_by_line<'a>( comments: Vec>>, locator: &Locator, line_ending: LineEnding, + reason: Option<&'a str>, ) -> BTreeMap> { let mut comments_by_line = BTreeMap::default(); for comment in comments.into_iter().flatten() { @@ -831,6 +839,7 @@ fn build_noqa_edits_by_line<'a>( .collect(), locator, line_ending, + reason, ) { edits.insert(offset, edit); } @@ -927,6 +936,7 @@ struct NoqaEdit<'a> { noqa_codes: FxHashSet<&'a SecondaryCode>, codes: Option<&'a Codes<'a>>, line_ending: LineEnding, + reason: Option<&'a str>, } impl NoqaEdit<'_> { @@ 
-954,6 +964,9 @@ impl NoqaEdit<'_> { push_codes(writer, self.noqa_codes.iter().sorted_unstable()); } } + if let Some(reason) = self.reason { + write!(writer, " {reason}").unwrap(); + } write!(writer, "{}", self.line_ending.as_str()).unwrap(); } } @@ -970,6 +983,7 @@ fn generate_noqa_edit<'a>( noqa_codes: FxHashSet<&'a SecondaryCode>, locator: &Locator, line_ending: LineEnding, + reason: Option<&'a str>, ) -> Option> { let line_range = locator.full_line_range(offset); @@ -999,6 +1013,7 @@ fn generate_noqa_edit<'a>( noqa_codes, codes, line_ending, + reason, }) } @@ -2832,6 +2847,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 0); assert_eq!(output, format!("{contents}")); @@ -2855,6 +2871,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: F841\n"); @@ -2885,6 +2902,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: E741, F841\n"); @@ -2915,6 +2933,7 @@ mod tests { &[], &noqa_line_for, LineEnding::Lf, + None, ); assert_eq!(count, 0); assert_eq!(output, "x = 1 # noqa"); diff --git a/docs/configuration.md b/docs/configuration.md index 8d3297fbca..7a5f62fc60 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -618,8 +618,9 @@ Options: notebooks, use `--extension ipy:ipynb` --statistics Show counts for every rule with at least one violation - --add-noqa - Enable automatic additions of `noqa` directives to failing lines + --add-noqa[=] + Enable automatic additions of `noqa` directives to failing lines. 
+ Optionally provide a reason to append after the codes --show-files See the files Ruff will be run against with the current settings --show-settings From 44b0c9ebac8b63d39632233959ff3ec2694c3ef6 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 11 Nov 2025 14:33:43 +0000 Subject: [PATCH 155/180] [ty] Allow PEP-604 unions in stubs and `TYPE_CHECKING` blocks prior to 3.10 (#21379) --- .../resources/mdtest/implicit_type_aliases.md | 48 +++++++++++++++++++ .../src/types/infer/builder.rs | 30 +++++++----- 2 files changed, 65 insertions(+), 13 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md index 690225d9b2..aae10661b4 100644 --- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md +++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md @@ -272,6 +272,54 @@ def g( ): ... ``` +## `|` unions in stubs and `TYPE_CHECKING` blocks + +In runtime contexts, `|` unions are only permitted on Python 3.10+. But in suites of code that are +never executed at runtime (stub files, `if TYPE_CHECKING` blocks, and stringified annotations), they +are permitted even if the target version is set to Python 3.9 or earlier. 
+ +```toml +[environment] +python-version = "3.9" +``` + +`bar.pyi`: + +```pyi +Z = int | str +GLOBAL_CONSTANT: Z +``` + +`foo.py`: + +```py +from typing import TYPE_CHECKING +from bar import GLOBAL_CONSTANT + +reveal_type(GLOBAL_CONSTANT) # revealed: int | str + +if TYPE_CHECKING: + class ItsQuiteCloudyInManchester: + X = int | str + + def f(obj: X): + reveal_type(obj) # revealed: int | str + + # TODO: we currently only understand code as being inside a `TYPE_CHECKING` block + # if a whole *scope* is inside the `if TYPE_CHECKING` block + # (like the `ItsQuiteCloudyInManchester` class above); this is a false-positive + Y = int | str # error: [unsupported-operator] + + def g(obj: Y): + # TODO: should be `int | str` + reveal_type(obj) # revealed: Unknown + +Y = list["int | str"] + +def g(obj: Y): + reveal_type(obj) # revealed: list[int | str] +``` + ## Generic types Implicit type aliases can also refer to generic types: diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 086e3f8f15..b4f7a42099 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -8899,6 +8899,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { emitted_division_by_zero_diagnostic = self.check_division_by_zero(node, op, left_ty); } + let pep_604_unions_allowed = || { + Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 + || self.file().is_stub(self.db()) + || self.scope().scope(self.db()).in_type_checking_block() + }; + match (left_ty, right_ty, op) { (Type::Union(lhs_union), rhs, _) => lhs_union.try_map(self.db(), |lhs_element| { self.infer_binary_expression_type( @@ -9160,7 +9166,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | KnownInstanceType::Annotated(_), ), ast::Operator::BitOr, - ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => { + ) if pep_604_unions_allowed() => { if 
left_ty.is_equivalent_to(self.db(), right_ty) { Some(left_ty) } else { @@ -9186,7 +9192,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { | Type::KnownInstance(..) | Type::SpecialForm(..), ast::Operator::BitOr, - ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 + ) if pep_604_unions_allowed() && instance.has_known_class(self.db(), KnownClass::NoneType) => { Some(Type::KnownInstance(KnownInstanceType::UnionType( @@ -9210,17 +9216,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { _, Type::ClassLiteral(..) | Type::GenericAlias(..) | Type::SubclassOf(..), ast::Operator::BitOr, - ) if Program::get(self.db()).python_version(self.db()) >= PythonVersion::PY310 => { - Type::try_call_bin_op_with_policy( - self.db(), - left_ty, - ast::Operator::BitOr, - right_ty, - MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK, - ) - .ok() - .map(|binding| binding.return_type(self.db())) - } + ) if pep_604_unions_allowed() => Type::try_call_bin_op_with_policy( + self.db(), + left_ty, + ast::Operator::BitOr, + right_ty, + MemberLookupPolicy::META_CLASS_NO_TYPE_FALLBACK, + ) + .ok() + .map(|binding| binding.return_type(self.db())), // We've handled all of the special cases that we support for literals, so we need to // fall back on looking for dunder methods on one of the operand types. From bd8812127daa556bd86fa81c9a79f5f49a2feaa8 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Tue, 11 Nov 2025 13:04:42 -0500 Subject: [PATCH 156/180] [ty] support absolute `from` imports introducing local submodules in `__init__.py` files (#21372) By resolving `.` and the LHS of the from import during semantic indexing, we can check if the LHS is a submodule of `.`, and handle `from whatever.thispackage.x.y import z` exactly like we do `from .x.y import z`. 
Fixes https://github.com/astral-sh/ty/issues/1484 --- .../mdtest/import/nonstandard_conventions.md | 9 +- crates/ty_python_semantic/src/module_name.rs | 11 +++ .../src/semantic_index/builder.rs | 22 +++-- .../src/semantic_index/definition.rs | 14 +-- .../src/types/infer/builder.rs | 91 ++++++++++--------- 5 files changed, 79 insertions(+), 68 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md index 120ea0746d..44163c17b4 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -333,7 +333,7 @@ reveal_type(mypackage.nested.X) # revealed: Unknown ### In Non-Stub -`from mypackage.submodule import nested` in an `__init__.py` only creates `nested`. +`from mypackage.submodule import nested` in an `__init__.py` creates both `submodule` and `nested`. 
`mypackage/__init__.py`: @@ -357,12 +357,11 @@ X: int = 42 ```py import mypackage +reveal_type(mypackage.submodule) # revealed: # TODO: this would be nice to support -# error: "has no member `submodule`" -reveal_type(mypackage.submodule) # revealed: Unknown -# error: "has no member `submodule`" +# error: "has no member `nested`" reveal_type(mypackage.submodule.nested) # revealed: Unknown -# error: "has no member `submodule`" +# error: "has no member `nested`" reveal_type(mypackage.submodule.nested.X) # revealed: Unknown reveal_type(mypackage.nested) # revealed: reveal_type(mypackage.nested.X) # revealed: int diff --git a/crates/ty_python_semantic/src/module_name.rs b/crates/ty_python_semantic/src/module_name.rs index e1aa4509bd..b257d0a6df 100644 --- a/crates/ty_python_semantic/src/module_name.rs +++ b/crates/ty_python_semantic/src/module_name.rs @@ -295,6 +295,7 @@ impl ModuleName { Self::from_identifier_parts(db, importing_file, module.as_deref(), *level) } + /// Computes the absolute module name from the LHS components of `from LHS import RHS` pub(crate) fn from_identifier_parts( db: &dyn Db, importing_file: File, @@ -309,6 +310,16 @@ impl ModuleName { .ok_or(ModuleNameResolutionError::InvalidSyntax) } } + + /// Computes the absolute module name for the package this file belongs to. + /// + /// i.e. this resolves `.` + pub(crate) fn package_for_file( + db: &dyn Db, + importing_file: File, + ) -> Result { + Self::from_identifier_parts(db, importing_file, None, 1) + } } impl Deref for ModuleName { diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index 8029a775fe..d7784c2cf1 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -1451,7 +1451,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // If we see: // - // * `from .x.y import z` (must be relative!) 
+ // * `from .x.y import z` (or `from whatever.thispackage.x.y`) // * And we are in an `__init__.py(i)` (hereafter `thispackage`) // * And this is the first time we've seen `from .x` in this module // * And we're in the global scope @@ -1465,14 +1465,18 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // reasons but it works well for most practical purposes. In particular it's nice // that `x` can be freely overwritten, and that we don't assume that an import // in one function is visible in another function. - // - // TODO: Also support `from thispackage.x.y import z`? - if self.current_scope() == FileScopeId::global() - && node.level == 1 - && let Some(submodule) = &node.module - && let Some(parsed_submodule) = ModuleName::new(submodule.as_str()) - && let Some(direct_submodule) = parsed_submodule.components().next() + if node.module.is_some() + && self.current_scope().is_global() && self.file.is_package(self.db) + && let Ok(module_name) = ModuleName::from_identifier_parts( + self.db, + self.file, + node.module.as_deref(), + node.level, + ) + && let Ok(thispackage) = ModuleName::package_for_file(self.db, self.file) + && let Some(relative_submodule) = module_name.relative_to(&thispackage) + && let Some(direct_submodule) = relative_submodule.components().next() && !self.seen_submodule_imports.contains(direct_submodule) { self.seen_submodule_imports @@ -1482,7 +1486,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { let symbol = self.add_symbol(direct_submodule_name); self.add_definition( symbol.into(), - ImportFromSubmoduleDefinitionNodeRef { node, submodule }, + ImportFromSubmoduleDefinitionNodeRef { node }, ); } diff --git a/crates/ty_python_semantic/src/semantic_index/definition.rs b/crates/ty_python_semantic/src/semantic_index/definition.rs index db5d519560..85a7ff6aed 100644 --- a/crates/ty_python_semantic/src/semantic_index/definition.rs +++ b/crates/ty_python_semantic/src/semantic_index/definition.rs @@ -3,7 +3,6 @@ use 
std::ops::Deref; use ruff_db::files::{File, FileRange}; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; use ruff_python_ast as ast; -use ruff_python_ast::name::Name; use ruff_text_size::{Ranged, TextRange}; use crate::Db; @@ -368,7 +367,6 @@ pub(crate) struct ImportFromDefinitionNodeRef<'ast> { #[derive(Copy, Clone, Debug)] pub(crate) struct ImportFromSubmoduleDefinitionNodeRef<'ast> { pub(crate) node: &'ast ast::StmtImportFrom, - pub(crate) submodule: &'ast ast::Identifier, } #[derive(Copy, Clone, Debug)] pub(crate) struct AssignmentDefinitionNodeRef<'ast, 'db> { @@ -450,10 +448,8 @@ impl<'db> DefinitionNodeRef<'_, 'db> { }), DefinitionNodeRef::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { node, - submodule, }) => DefinitionKind::ImportFromSubmodule(ImportFromSubmoduleDefinitionKind { node: AstNodeRef::new(parsed, node), - submodule: submodule.as_str().into(), }), DefinitionNodeRef::ImportStar(star_import) => { let StarImportDefinitionNodeRef { node, symbol_id } = star_import; @@ -580,10 +576,7 @@ impl<'db> DefinitionNodeRef<'_, 'db> { alias_index, is_reexported: _, }) => (&node.names[alias_index]).into(), - Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { - node, - submodule: _, - }) => node.into(), + Self::ImportFromSubmodule(ImportFromSubmoduleDefinitionNodeRef { node }) => node.into(), // INVARIANT: for an invalid-syntax statement such as `from foo import *, bar, *`, // we only create a `StarImportDefinitionKind` for the *first* `*` alias in the names list. 
Self::ImportStar(StarImportDefinitionNodeRef { node, symbol_id: _ }) => node @@ -1021,17 +1014,12 @@ impl ImportFromDefinitionKind { #[derive(Clone, Debug, get_size2::GetSize)] pub struct ImportFromSubmoduleDefinitionKind { node: AstNodeRef, - submodule: Name, } impl ImportFromSubmoduleDefinitionKind { pub fn import<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::StmtImportFrom { self.node.node(module) } - - pub(crate) fn submodule(&self) -> &Name { - &self.submodule - } } #[derive(Clone, Debug, get_size2::GetSize)] diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index b4f7a42099..e775ca1993 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -4,7 +4,6 @@ use itertools::{Either, Itertools}; use ruff_db::diagnostic::{Annotation, DiagnosticId, Severity}; use ruff_db::files::File; use ruff_db::parsed::ParsedModuleRef; -use ruff_python_ast::name::Name; use ruff_python_ast::visitor::{Visitor, walk_expr}; use ruff_python_ast::{ self as ast, AnyNodeRef, ExprContext, HasNodeIndex, NodeIndex, PythonVersion, @@ -1218,7 +1217,6 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { DefinitionKind::ImportFromSubmodule(import_from) => { self.infer_import_from_submodule_definition( import_from.import(self.module()), - import_from.submodule(), definition, ); } @@ -5901,51 +5899,64 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - /// Infer the implicit local definition `x = ` that - /// `from .x.y import z` can introduce in an `__init__.py(i)`. + /// Infer the implicit local definition `x = ` that + /// `from .x.y import z` or `from whatever.thispackage.x.y` can introduce in `__init__.py(i)`. /// /// For the definition `z`, see [`TypeInferenceBuilder::infer_import_from_definition`]. 
+    ///
+    /// The runtime semantic of this kind of statement is to introduce a variable in the global
+    /// scope of this module *the first time it's imported in the entire program*. This
+    /// implementation just blindly introduces a local variable wherever the `from..import` is
+    /// (if the imports actually resolve).
+    ///
+    /// That gap between the semantics and implementation is currently the responsibility of the
+    /// code that actually creates these kinds of Definitions (so blindly introducing a local
+    /// is all we need to be doing here).
     fn infer_import_from_submodule_definition(
         &mut self,
         import_from: &ast::StmtImportFrom,
-        submodule: &Name,
         definition: Definition<'db>,
     ) {
-        // The runtime semantic of this kind of statement is to introduce a variable in the global
-        // scope of this module, so we do just that. (Actually we introduce a local variable, but
-        // this type of Definition is only created when a `from..import` is in global scope.)
-
-        // Get this package's module by resolving `.`
-        let Ok(module_name) = ModuleName::from_identifier_parts(self.db(), self.file(), None, 1)
-        else {
+        // Get this package's absolute module name by resolving `.`, and make sure it exists
+        let Ok(thispackage_name) = ModuleName::package_for_file(self.db(), self.file()) else {
+            self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
+            return;
+        };
+        let Some(module) = resolve_module(self.db(), &thispackage_name) else {
             self.add_binding(import_from.into(), definition, |_, _| Type::unknown());
             return;
         };
 
-        let Some(module) = resolve_module(self.db(), &module_name) else {
+        // We have `from whatever.thispackage.x.y ...` or `from .x.y ...`
+        // and we want to extract `x` (to ultimately construct `whatever.thispackage.x`):
+
+        // First we normalize to `whatever.thispackage.x.y`
+        let Some(final_part) = ModuleName::from_identifier_parts(
+            self.db(),
+            self.file(),
+            import_from.module.as_deref(),
+            import_from.level,
+        )
+        .ok()
+        // 
`whatever.thispackage.x.y` => `x.y` + .and_then(|submodule_name| submodule_name.relative_to(&thispackage_name)) + // `x.y` => `x` + .and_then(|relative_submodule_name| { + relative_submodule_name + .components() + .next() + .and_then(ModuleName::new) + }) else { self.add_binding(import_from.into(), definition, |_, _| Type::unknown()); return; }; - // Now construct the submodule `.x` - assert!( - !submodule.is_empty(), - "ImportFromSubmoduleDefinitionKind constructed with empty module" - ); - let name = submodule - .split_once('.') - .map(|(first, _)| first) - .unwrap_or(submodule.as_str()); - let full_submodule_name = ModuleName::new(name).map(|final_part| { - let mut ret = module_name.clone(); - ret.extend(&final_part); - ret - }); - // And try to import it - if let Some(submodule_type) = full_submodule_name - .as_ref() - .and_then(|submodule_name| self.module_type_from_name(submodule_name)) - { + // `x` => `whatever.thispackage.x` + let mut full_submodule_name = thispackage_name.clone(); + full_submodule_name.extend(&final_part); + + // Try to actually resolve the import `whatever.thispackage.x` + if let Some(submodule_type) = self.module_type_from_name(&full_submodule_name) { // Success, introduce a binding! 
// // We explicitly don't introduce a *declaration* because it's actual ok @@ -5970,17 +5981,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { }; let diagnostic = builder.into_diagnostic(format_args!( - "Module `{module_name}` has no submodule `{name}`" + "Module `{thispackage_name}` has no submodule `{final_part}`" )); - if let Some(full_submodule_name) = full_submodule_name { - hint_if_stdlib_submodule_exists_on_other_versions( - self.db(), - diagnostic, - &full_submodule_name, - module, - ); - } + hint_if_stdlib_submodule_exists_on_other_versions( + self.db(), + diagnostic, + &full_submodule_name, + module, + ); } fn infer_return_statement(&mut self, ret: &ast::StmtReturn) { From 03bd0619e9adf90c9aee976b0fac10089da777c3 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 11 Nov 2025 19:30:01 +0000 Subject: [PATCH 157/180] [ty] Silence false-positive diagnostics when using `typing.Dict` or `typing.Callable` as the second argument to `isinstance()` (#21386) --- .../resources/mdtest/call/builtins.md | 35 +++++++++++++ crates/ty_python_semantic/src/types.rs | 7 +++ .../ty_python_semantic/src/types/call/bind.rs | 25 +++++++++ .../ty_python_semantic/src/types/function.rs | 15 +++++- .../src/types/special_form.rs | 52 +++++++++++++++++++ 5 files changed, 132 insertions(+), 2 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/call/builtins.md b/crates/ty_python_semantic/resources/mdtest/call/builtins.md index 0eac021d1a..8de3e77d77 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/builtins.md +++ b/crates/ty_python_semantic/resources/mdtest/call/builtins.md @@ -162,3 +162,38 @@ def _(x: A | B, y: list[int]): reveal_type(x) # revealed: B & ~A reveal_type(isinstance(x, B)) # revealed: Literal[True] ``` + +Certain special forms in the typing module are not instances of `type`, so are strictly-speaking +disallowed as the second argument to `isinstance()` according to typeshed's annotations. 
However, at +runtime they work fine as the second argument, and we implement that special case in ty: + +```py +import typing as t + +# no errors emitted for any of these: +isinstance("", t.Dict) +isinstance("", t.List) +isinstance("", t.Set) +isinstance("", t.FrozenSet) +isinstance("", t.Tuple) +isinstance("", t.ChainMap) +isinstance("", t.Counter) +isinstance("", t.Deque) +isinstance("", t.OrderedDict) +isinstance("", t.Callable) +isinstance("", t.Type) +isinstance("", t.Callable | t.Deque) + +# `Any` is valid in `issubclass()` calls but not `isinstance()` calls +issubclass(list, t.Any) +issubclass(list, t.Any | t.Dict) +``` + +But for other special forms that are not permitted as the second argument, we still emit an error: + +```py +isinstance("", t.TypeGuard) # error: [invalid-argument-type] +isinstance("", t.ClassVar) # error: [invalid-argument-type] +isinstance("", t.Final) # error: [invalid-argument-type] +isinstance("", t.Any) # error: [invalid-argument-type] +``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 2b3bf1c0d9..7f950f7b77 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1028,6 +1028,13 @@ impl<'db> Type<'db> { any_over_type(db, self, &|ty| matches!(ty, Type::TypeVar(_)), false) } + pub(crate) const fn as_special_form(self) -> Option { + match self { + Type::SpecialForm(special_form) => Some(special_form), + _ => None, + } + } + pub(crate) const fn as_class_literal(self) -> Option> { match self { Type::ClassLiteral(class_type) => Some(class_type), diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 423783b420..ef2f892200 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -3647,6 +3647,31 @@ impl<'db> BindingError<'db> { expected_ty, provided_ty, } => { + // Certain special forms in the typing module are aliases 
for classes + // elsewhere in the standard library. These special forms are not instances of `type`, + // and you cannot use them in place of their aliased classes in *all* situations: + // for example, `dict()` succeeds at runtime, but `typing.Dict()` fails. However, + // they *can* all be used as the second argument to `isinstance` and `issubclass`. + // We model that specific aspect of their behaviour here. + // + // This is implemented as a special case in call-binding machinery because overriding + // typeshed's signatures for `isinstance()` and `issubclass()` would be complex and + // error-prone, due to the fact that they are annotated with recursive type aliases. + if parameter.index == 1 + && *argument_index == Some(1) + && matches!( + callable_ty + .as_function_literal() + .and_then(|function| function.known(context.db())), + Some(KnownFunction::IsInstance | KnownFunction::IsSubclass) + ) + && provided_ty + .as_special_form() + .is_some_and(SpecialFormType::is_valid_isinstance_target) + { + return; + } + // TODO: Ideally we would not emit diagnostics for `TypedDict` literal arguments // here (see `diagnostic::is_invalid_typed_dict_literal`). 
However, we may have // silenced diagnostics during overload evaluation, and rely on the assignability diff --git a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 98a86f48df..737a5218e4 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -1764,6 +1764,7 @@ impl KnownFunction { Type::KnownInstance(KnownInstanceType::UnionType(_)) => { fn find_invalid_elements<'db>( db: &'db dyn Db, + function: KnownFunction, ty: Type<'db>, invalid_elements: &mut Vec>, ) { @@ -1771,9 +1772,19 @@ impl KnownFunction { Type::ClassLiteral(_) => {} Type::NominalInstance(instance) if instance.has_known_class(db, KnownClass::NoneType) => {} + Type::SpecialForm(special_form) + if special_form.is_valid_isinstance_target() => {} + // `Any` can be used in `issubclass()` calls but not `isinstance()` calls + Type::SpecialForm(SpecialFormType::Any) + if function == KnownFunction::IsSubclass => {} Type::KnownInstance(KnownInstanceType::UnionType(union)) => { for element in union.elements(db) { - find_invalid_elements(db, *element, invalid_elements); + find_invalid_elements( + db, + function, + *element, + invalid_elements, + ); } } _ => invalid_elements.push(ty), @@ -1781,7 +1792,7 @@ impl KnownFunction { } let mut invalid_elements = vec![]; - find_invalid_elements(db, *second_argument, &mut invalid_elements); + find_invalid_elements(db, self, *second_argument, &mut invalid_elements); let Some((first_invalid_element, other_invalid_elements)) = invalid_elements.split_first() diff --git a/crates/ty_python_semantic/src/types/special_form.rs b/crates/ty_python_semantic/src/types/special_form.rs index 721def0dee..c8e37d5143 100644 --- a/crates/ty_python_semantic/src/types/special_form.rs +++ b/crates/ty_python_semantic/src/types/special_form.rs @@ -328,6 +328,58 @@ impl SpecialFormType { } } + /// Return `true` if this special form is valid as the second argument + /// to 
`issubclass()` and `isinstance()` calls. + pub(super) const fn is_valid_isinstance_target(self) -> bool { + match self { + Self::Callable + | Self::ChainMap + | Self::Counter + | Self::DefaultDict + | Self::Deque + | Self::FrozenSet + | Self::Dict + | Self::List + | Self::OrderedDict + | Self::Set + | Self::Tuple + | Self::Type + | Self::Protocol + | Self::Generic => true, + + Self::AlwaysFalsy + | Self::AlwaysTruthy + | Self::Annotated + | Self::Bottom + | Self::CallableTypeOf + | Self::ClassVar + | Self::Concatenate + | Self::Final + | Self::Intersection + | Self::Literal + | Self::LiteralString + | Self::Never + | Self::NoReturn + | Self::Not + | Self::ReadOnly + | Self::Required + | Self::TypeAlias + | Self::TypeGuard + | Self::NamedTuple + | Self::NotRequired + | Self::Optional + | Self::Top + | Self::TypeIs + | Self::TypedDict + | Self::TypingSelf + | Self::Union + | Self::Unknown + | Self::TypeOf + | Self::Any // can be used in `issubclass()` but not `isinstance()`. + | Self::Unpack => false, + } + } + /// Return the repr of the symbol at runtime pub(super) const fn repr(self) -> &'static str { match self { From e4374f14ed12873541d9a9ccca7eea92456684f6 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Tue, 11 Nov 2025 14:41:14 -0500 Subject: [PATCH 158/180] [ty] Consider `from thispackage import y` to re-export `y` in `__init__.pyi` (#21387) Fixes https://github.com/astral-sh/ty/issues/1487 This one is a true extension of non-standard semantics, and is therefore a certified Hot Take we might conclude is simply a Bad Take (let's see what ecosystem tests say...). 
--- .../mdtest/import/nonstandard_conventions.md | 12 ++--- .../src/semantic_index/builder.rs | 45 ++++++++++--------- 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md index 44163c17b4..ce5c995014 100644 --- a/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md +++ b/crates/ty_python_semantic/resources/mdtest/import/nonstandard_conventions.md @@ -22,7 +22,10 @@ This file currently covers the following details: - **dot re-exports**: `from . import a` in an `__init__.pyi` is considered a re-export of `a` (equivalent to `from . import a as a`). This is required to properly handle many stubs in the - wild. Currently it must be *exactly* `from . import ...`. + wild. Equivalent imports like `from whatever.thispackage import a` also introduce a re-export + (this has essentially zero ecosystem impact, we just felt it was more consistent). The only way + to opt out of this is to rename the import to something else (`from . import a as b`). + `from .a import b` and equivalent does *not* introduce a re-export. Note: almost all tests in here have a stub and non-stub version, because we're interested in both defining symbols *at all* and re-exporting them. @@ -94,8 +97,7 @@ reveal_type(mypackage.fails.Y) # revealed: Unknown ## Absolute `from` Import of Direct Submodule in `__init__` If an absolute `from...import` happens to import a submodule (i.e. it's equivalent to -`from . import y`) we do not treat it as a re-export. We could, but we don't. (This is an arbitrary -decision and can be changed!) +`from . import y`) we also treat it as a re-export. ### In Stub @@ -122,9 +124,7 @@ Y: int = 47 ```py import mypackage -# TODO: this could work and would be nice to have? 
-# error: "has no member `imported`" -reveal_type(mypackage.imported.X) # revealed: Unknown +reveal_type(mypackage.imported.X) # revealed: int # error: "has no member `fails`" reveal_type(mypackage.fails.Y) # revealed: Unknown ``` diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index d7784c2cf1..dc3acb1434 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -1465,9 +1465,8 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // reasons but it works well for most practical purposes. In particular it's nice // that `x` can be freely overwritten, and that we don't assume that an import // in one function is visible in another function. - if node.module.is_some() - && self.current_scope().is_global() - && self.file.is_package(self.db) + let mut is_self_import = false; + if self.file.is_package(self.db) && let Ok(module_name) = ModuleName::from_identifier_parts( self.db, self.file, @@ -1475,19 +1474,26 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { node.level, ) && let Ok(thispackage) = ModuleName::package_for_file(self.db, self.file) - && let Some(relative_submodule) = module_name.relative_to(&thispackage) - && let Some(direct_submodule) = relative_submodule.components().next() - && !self.seen_submodule_imports.contains(direct_submodule) { - self.seen_submodule_imports - .insert(direct_submodule.to_owned()); + // Record whether this is equivalent to `from . 
import ...` + is_self_import = module_name == thispackage; - let direct_submodule_name = Name::new(direct_submodule); - let symbol = self.add_symbol(direct_submodule_name); - self.add_definition( - symbol.into(), - ImportFromSubmoduleDefinitionNodeRef { node }, - ); + if node.module.is_some() + && let Some(relative_submodule) = module_name.relative_to(&thispackage) + && let Some(direct_submodule) = relative_submodule.components().next() + && !self.seen_submodule_imports.contains(direct_submodule) + && self.current_scope().is_global() + { + self.seen_submodule_imports + .insert(direct_submodule.to_owned()); + + let direct_submodule_name = Name::new(direct_submodule); + let symbol = self.add_symbol(direct_submodule_name); + self.add_definition( + symbol.into(), + ImportFromSubmoduleDefinitionNodeRef { node }, + ); + } } let mut found_star = false; @@ -1599,13 +1605,10 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { // It's re-exported if it's `from ... import x as x` (&asname.id, asname.id == alias.name.id) } else { - // It's re-exported if it's `from . import x` in an `__init__.pyi` - ( - &alias.name.id, - node.level == 1 - && node.module.is_none() - && self.file.is_package(self.db), - ) + // As a non-standard rule to handle stubs in the wild, we consider + // `from . import x` and `from whatever.thispackage import x` in an + // `__init__.pyi` to re-export `x` (as long as it wasn't renamed) + (&alias.name.id, is_self_import) }; // Look for imports `from __future__ import annotations`, ignore `as ...` From 4373974dd942100495daf4489a58677a754996a9 Mon Sep 17 00:00:00 2001 From: Mahmoud Saada Date: Tue, 11 Nov 2025 15:54:05 -0500 Subject: [PATCH 159/180] [ty] Fix false positive for Final attribute assignment in __init__ (#21158) ## Summary Fixes https://github.com/astral-sh/ty/issues/1409 This PR allows `Final` instance attributes to be initialized in `__init__` methods, as mandated by the Python typing specification (PEP 591). 
Previously, ty incorrectly prevented this initialization, causing false positive errors. The fix checks if we're inside an `__init__` method before rejecting Final attribute assignments, allowing assignments during instance initialization while still preventing reassignment elsewhere. ## Test Plan - Added new test coverage in `final.md` for the reported issue with `Self` annotations - Updated existing tests that were incorrectly expecting errors - All 278 mdtest tests pass - Manually tested with real-world code examples --------- Co-authored-by: Carl Meyer --- .../resources/mdtest/type_qualifiers/final.md | 138 +++++++++++++++++- .../src/types/infer/builder.rs | 78 ++++++++-- 2 files changed, 196 insertions(+), 20 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md b/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md index 29e3d72ec3..1c42ded723 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md +++ b/crates/ty_python_semantic/resources/mdtest/type_qualifiers/final.md @@ -88,8 +88,6 @@ class C: self.FINAL_C: Final[int] = 1 self.FINAL_D: Final = 1 self.FINAL_E: Final - # TODO: Should not be an error - # error: [invalid-assignment] "Cannot assign to final attribute `FINAL_E` on type `Self@__init__`" self.FINAL_E = 1 reveal_type(C.FINAL_A) # revealed: int @@ -186,7 +184,6 @@ class C(metaclass=Meta): self.INSTANCE_FINAL_A: Final[int] = 1 self.INSTANCE_FINAL_B: Final = 1 self.INSTANCE_FINAL_C: Final[int] - # error: [invalid-assignment] "Cannot assign to final attribute `INSTANCE_FINAL_C` on type `Self@__init__`" self.INSTANCE_FINAL_C = 1 # error: [invalid-assignment] "Cannot assign to final attribute `META_FINAL_A` on type ``" @@ -282,8 +279,6 @@ class C: def __init__(self): self.LEGAL_H: Final[int] = 1 self.LEGAL_I: Final[int] - # TODO: Should not be an error - # error: [invalid-assignment] self.LEGAL_I = 1 # error: [invalid-type-form] "`Final` is not allowed in function parameter 
annotations" @@ -392,15 +387,142 @@ class C: # TODO: This should be an error NO_ASSIGNMENT_B: Final[int] - # This is okay. `DEFINED_IN_INIT` is defined in `__init__`. DEFINED_IN_INIT: Final[int] def __init__(self): - # TODO: should not be an error - # error: [invalid-assignment] self.DEFINED_IN_INIT = 1 ``` +## Final attributes with Self annotation in `__init__` + +Issue #1409: Final instance attributes should be assignable in `__init__` even when using `Self` +type annotation. + +```toml +[environment] +python-version = "3.11" +``` + +```py +from typing import Final, Self + +class ClassA: + ID4: Final[int] # OK because initialized in __init__ + + def __init__(self: Self): + self.ID4 = 1 # Should be OK + + def other_method(self: Self): + # error: [invalid-assignment] "Cannot assign to final attribute `ID4` on type `Self@other_method`" + self.ID4 = 2 # Should still error outside __init__ + +class ClassB: + ID5: Final[int] + + def __init__(self): # Without Self annotation + self.ID5 = 1 # Should also be OK + +reveal_type(ClassA().ID4) # revealed: int +reveal_type(ClassB().ID5) # revealed: int +``` + +## Reassignment to Final in `__init__` + +Per PEP 591 and the typing conformance suite, Final attributes can be assigned in `__init__`. +Multiple assignments within `__init__` are allowed (matching mypy and pyright behavior). However, +assignment in `__init__` is not allowed if the attribute already has a value at class level. 
+ +```py +from typing import Final + +# Case 1: Declared in class, assigned once in __init__ - ALLOWED +class DeclaredAssignedInInit: + attr1: Final[int] + + def __init__(self): + self.attr1 = 1 # OK: First and only assignment + +# Case 2: Declared and assigned in class body - ALLOWED (no __init__ assignment) +class DeclaredAndAssignedInClass: + attr2: Final[int] = 10 + +# Case 3: Reassignment when already assigned in class body +class ReassignmentFromClass: + attr3: Final[int] = 10 + + def __init__(self): + # error: [invalid-assignment] + self.attr3 = 20 # Error: already assigned in class body + +# Case 4: Multiple assignments within __init__ itself +# Per conformance suite and PEP 591, all assignments in __init__ are allowed +class MultipleAssignmentsInInit: + attr4: Final[int] + + def __init__(self): + self.attr4 = 1 # OK: Assignment in __init__ + self.attr4 = 2 # OK: Multiple assignments in __init__ are allowed + +class ConditionalAssignment: + X: Final[int] + + def __init__(self, cond: bool): + if cond: + self.X = 42 # OK: Assignment in __init__ + else: + self.X = 56 # OK: Multiple assignments in __init__ are allowed + +# Case 5: Declaration and assignment in __init__ - ALLOWED +class DeclareAndAssignInInit: + def __init__(self): + self.attr5: Final[int] = 1 # OK: Declare and assign in __init__ + +# Case 6: Assignment outside __init__ should still fail +class AssignmentOutsideInit: + attr6: Final[int] + + def other_method(self): + # error: [invalid-assignment] "Cannot assign to final attribute `attr6`" + self.attr6 = 1 # Error: Not in __init__ +``` + +## Final assignment restrictions in `__init__` + +`__init__` can only assign Final attributes on the class it's defining, and only to the first +parameter (`self`). 
+ +```py +from typing import Final + +class C: + x: Final[int] = 100 + +# Assignment from standalone function (even named __init__) +def _(c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not in C.__init__ + +def __init__(c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not a method + +# Assignment from another class's __init__ +class A: + def __init__(self, c: C): + # error: [invalid-assignment] "Cannot assign to final attribute `x`" + c.x = 1 # Error: Not C's __init__ + +# Assignment to non-self parameter in __init__ +class D: + y: Final[int] + + def __init__(self, other: "D"): + self.y = 1 # OK: Assigning to self + # TODO: Should error - assigning to non-self parameter + # Requires tracking which parameter the base expression refers to + other.y = 2 +``` + ## Full diagnostics diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index e775ca1993..dea82603f4 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -3741,23 +3741,77 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { assignable }; + let emit_invalid_final = |builder: &Self| { + if emit_diagnostics { + if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) { + builder.into_diagnostic(format_args!( + "Cannot assign to final attribute `{attribute}` on type `{}`", + object_ty.display(db) + )); + } + } + }; + // Return true (and emit a diagnostic) if this is an invalid assignment to a `Final` attribute. + // Per PEP 591 and the typing conformance suite, Final instance attributes can be assigned + // in __init__ methods. Multiple assignments within __init__ are allowed (matching mypy + // and pyright behavior), as long as the attribute doesn't have a class-level value. 
let invalid_assignment_to_final = |builder: &Self, qualifiers: TypeQualifiers| -> bool { - if qualifiers.contains(TypeQualifiers::FINAL) { - if emit_diagnostics { - if let Some(builder) = builder.context.report_lint(&INVALID_ASSIGNMENT, target) - { - builder.into_diagnostic(format_args!( - "Cannot assign to final attribute `{attribute}` \ - on type `{}`", - object_ty.display(db) - )); + // Check if it's a Final attribute + if !qualifiers.contains(TypeQualifiers::FINAL) { + return false; + } + + // Check if we're in an __init__ method (where Final attributes can be initialized). + let is_in_init = builder + .current_function_definition() + .is_some_and(|func| func.name.id == "__init__"); + + // Not in __init__ - always disallow + if !is_in_init { + emit_invalid_final(builder); + return true; + } + + // We're in __init__ - verify we're in a method of the class being mutated + let Some(class_ty) = builder.class_context_of_current_method() else { + // Not a method (standalone function named __init__) + emit_invalid_final(builder); + return true; + }; + + // Check that object_ty is an instance of the class we're in + if !object_ty.is_subtype_of(builder.db(), Type::instance(builder.db(), class_ty)) { + // Assigning to a different class's Final attribute + emit_invalid_final(builder); + return true; + } + + // Check if class-level attribute already has a value + { + let class_definition = class_ty.class_literal(db).0; + let class_scope_id = class_definition.body_scope(db).file_scope_id(db); + let place_table = builder.index.place_table(class_scope_id); + + if let Some(symbol) = place_table.symbol_by_name(attribute) { + if symbol.is_bound() { + if emit_diagnostics { + if let Some(diag_builder) = + builder.context.report_lint(&INVALID_ASSIGNMENT, target) + { + diag_builder.into_diagnostic(format_args!( + "Cannot assign to final attribute `{attribute}` in `__init__` \ + because it already has a value at class level" + )); + } + } + return true; } } - true - } else { - false 
} + + // In __init__ and no class-level value - allow + false }; match object_ty { From 43297d34558e695998245731696bb616ae22b904 Mon Sep 17 00:00:00 2001 From: Alex Waygood Date: Tue, 11 Nov 2025 21:09:24 +0000 Subject: [PATCH 160/180] [ty] Support `isinstance()` and `issubclass()` narrowing when the second argument is a `typing.py` stdlib alias (#21391) ## Summary A followup to https://github.com/astral-sh/ruff/pull/21386 ## Test Plan New mdtests added --- .../resources/mdtest/narrow/isinstance.md | 19 +++++++ crates/ty_python_semantic/src/types/narrow.rs | 19 +++++-- .../src/types/special_form.rs | 55 +++++++++++++++++++ 3 files changed, 89 insertions(+), 4 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md index 48df6acd30..b7e49971a0 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/isinstance.md @@ -147,6 +147,25 @@ def _(x: int | str | bytes): reveal_type(x) # revealed: (int & Unknown) | (str & Unknown) | (bytes & Unknown) ``` +## `classinfo` is a `typing.py` special form + +Certain special forms in `typing.py` are aliases to classes elsewhere in the standard library; these +can be used in `isinstance()` and `issubclass()` checks. 
We support narrowing using them: + +```py +import typing as t + +def f(x: dict[str, int] | list[str], y: object): + if isinstance(x, t.Dict): + reveal_type(x) # revealed: dict[str, int] + else: + reveal_type(x) # revealed: list[str] + + if isinstance(y, t.Callable): + # TODO: a better top-materialization for `Callable`s (https://github.com/astral-sh/ty/issues/1426) + reveal_type(y) # revealed: () -> object +``` + ## Class types ```py diff --git a/crates/ty_python_semantic/src/types/narrow.rs b/crates/ty_python_semantic/src/types/narrow.rs index 8dc6f2f626..e89822b182 100644 --- a/crates/ty_python_semantic/src/types/narrow.rs +++ b/crates/ty_python_semantic/src/types/narrow.rs @@ -11,9 +11,9 @@ use crate::types::enums::{enum_member_literals, enum_metadata}; use crate::types::function::KnownFunction; use crate::types::infer::infer_same_file_expression_type; use crate::types::{ - ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType, SpecialFormType, - SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext, TypeVarBoundOrConstraints, - UnionBuilder, infer_expression_types, + CallableType, ClassLiteral, ClassType, IntersectionBuilder, KnownClass, KnownInstanceType, + SpecialFormType, SubclassOfInner, SubclassOfType, Truthiness, Type, TypeContext, + TypeVarBoundOrConstraints, UnionBuilder, infer_expression_types, }; use ruff_db::parsed::{ParsedModuleRef, parsed_module}; @@ -229,6 +229,18 @@ impl ClassInfoConstraintFunction { ) } + // We don't have a good meta-type for `Callable`s right now, + // so only apply `isinstance()` narrowing, not `issubclass()` + Type::SpecialForm(SpecialFormType::Callable) + if self == ClassInfoConstraintFunction::IsInstance => + { + Some(CallableType::unknown(db).top_materialization(db)) + } + + Type::SpecialForm(special_form) => special_form + .aliased_stdlib_class() + .and_then(|class| self.generate_constraint(db, class.to_class_literal(db))), + Type::AlwaysFalsy | Type::AlwaysTruthy | Type::BooleanLiteral(_) 
@@ -244,7 +256,6 @@ impl ClassInfoConstraintFunction { | Type::FunctionLiteral(_) | Type::ProtocolInstance(_) | Type::PropertyInstance(_) - | Type::SpecialForm(_) | Type::LiteralString | Type::StringLiteral(_) | Type::IntLiteral(_) diff --git a/crates/ty_python_semantic/src/types/special_form.rs b/crates/ty_python_semantic/src/types/special_form.rs index c8e37d5143..54d9640b87 100644 --- a/crates/ty_python_semantic/src/types/special_form.rs +++ b/crates/ty_python_semantic/src/types/special_form.rs @@ -328,6 +328,61 @@ impl SpecialFormType { } } + /// Return `Some(KnownClass)` if this special form is an alias + /// to a standard library class. + pub(super) const fn aliased_stdlib_class(self) -> Option { + match self { + Self::List => Some(KnownClass::List), + Self::Dict => Some(KnownClass::Dict), + Self::Set => Some(KnownClass::Set), + Self::FrozenSet => Some(KnownClass::FrozenSet), + Self::ChainMap => Some(KnownClass::ChainMap), + Self::Counter => Some(KnownClass::Counter), + Self::DefaultDict => Some(KnownClass::DefaultDict), + Self::Deque => Some(KnownClass::Deque), + Self::OrderedDict => Some(KnownClass::OrderedDict), + Self::Tuple => Some(KnownClass::Tuple), + Self::Type => Some(KnownClass::Type), + + Self::AlwaysFalsy + | Self::AlwaysTruthy + | Self::Annotated + | Self::Bottom + | Self::CallableTypeOf + | Self::ClassVar + | Self::Concatenate + | Self::Final + | Self::Intersection + | Self::Literal + | Self::LiteralString + | Self::Never + | Self::NoReturn + | Self::Not + | Self::ReadOnly + | Self::Required + | Self::TypeAlias + | Self::TypeGuard + | Self::NamedTuple + | Self::NotRequired + | Self::Optional + | Self::Top + | Self::TypeIs + | Self::TypedDict + | Self::TypingSelf + | Self::Union + | Self::Unknown + | Self::TypeOf + | Self::Any + // `typing.Callable` is an alias to `collections.abc.Callable`, + // but they're both the same `SpecialFormType` in our model, + // and neither is a class in typeshed (even though the `collections.abc` one is at runtime) 
+ | Self::Callable + | Self::Protocol + | Self::Generic + | Self::Unpack => None, + } + } + /// Return `true` if this special form is valid as the second argument /// to `issubclass()` and `isinstance()` calls. pub(super) const fn is_valid_isinstance_target(self) -> bool { From ec48a47a8897a8bbbe254c594422edcfb8a813d7 Mon Sep 17 00:00:00 2001 From: Andrew Gallant Date: Tue, 11 Nov 2025 10:16:19 -0500 Subject: [PATCH 161/180] [ty] Add `from im` completion evaluation task Ideally this would have been added as part of #21291, but I forgot. --- crates/ty_completion_eval/completion-evaluation-tasks.csv | 1 + .../truth/import-keyword-completion/completion.toml | 2 ++ .../truth/import-keyword-completion/main.py | 1 + .../truth/import-keyword-completion/pyproject.toml | 5 +++++ .../truth/import-keyword-completion/uv.lock | 8 ++++++++ 5 files changed, 17 insertions(+) create mode 100644 crates/ty_completion_eval/truth/import-keyword-completion/completion.toml create mode 100644 crates/ty_completion_eval/truth/import-keyword-completion/main.py create mode 100644 crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml create mode 100644 crates/ty_completion_eval/truth/import-keyword-completion/uv.lock diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index 92d1a3f03d..d93c11d457 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -10,6 +10,7 @@ import-deprioritizes-type_check_only,main.py,1,1 import-deprioritizes-type_check_only,main.py,2,1 import-deprioritizes-type_check_only,main.py,3,2 import-deprioritizes-type_check_only,main.py,4,3 +import-keyword-completion,main.py,0,1 internal-typeshed-hidden,main.py,0,5 none-completion,main.py,0,11 numpy-array,main.py,0, diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml 
b/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml new file mode 100644 index 0000000000..1c3c4b8ea4 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/completion.toml @@ -0,0 +1,2 @@ +[settings] +auto-import = false diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/main.py b/crates/ty_completion_eval/truth/import-keyword-completion/main.py new file mode 100644 index 0000000000..beb6aff8e4 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/main.py @@ -0,0 +1 @@ +from collections im diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml b/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml new file mode 100644 index 0000000000..cd277d8097 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/pyproject.toml @@ -0,0 +1,5 @@ +[project] +name = "test" +version = "0.1.0" +requires-python = ">=3.13" +dependencies = [] diff --git a/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock b/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock new file mode 100644 index 0000000000..a4937d10d3 --- /dev/null +++ b/crates/ty_completion_eval/truth/import-keyword-completion/uv.lock @@ -0,0 +1,8 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "test" +version = "0.1.0" +source = { virtual = "." 
} From cd7354a5c641aa240edbe290d948daa826b7db33 Mon Sep 17 00:00:00 2001 From: Andrew Gallant Date: Tue, 11 Nov 2025 10:23:28 -0500 Subject: [PATCH 162/180] [ty] Add completion evaluation task for general keyword completions --- crates/ty_completion_eval/completion-evaluation-tasks.csv | 1 + .../truth/pass-keyword-completion/completion.toml | 2 ++ .../truth/pass-keyword-completion/main.py | 3 +++ .../truth/pass-keyword-completion/pyproject.toml | 5 +++++ .../truth/pass-keyword-completion/uv.lock | 8 ++++++++ 5 files changed, 19 insertions(+) create mode 100644 crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml create mode 100644 crates/ty_completion_eval/truth/pass-keyword-completion/main.py create mode 100644 crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml create mode 100644 crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index d93c11d457..6190d33465 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -17,6 +17,7 @@ numpy-array,main.py,0, numpy-array,main.py,1,1 object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,1,1 +pass-keyword-completion,main.py,0, raise-uses-base-exception,main.py,0,2 scope-existing-over-new-import,main.py,0,1 scope-prioritize-closer,main.py,0,2 diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml b/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml new file mode 100644 index 0000000000..1c3c4b8ea4 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/completion.toml @@ -0,0 +1,2 @@ +[settings] +auto-import = false diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/main.py b/crates/ty_completion_eval/truth/pass-keyword-completion/main.py new file mode 
100644 index 0000000000..48bd0f44a3 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/main.py @@ -0,0 +1,3 @@ +match x: + case int(): + pa diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml b/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml new file mode 100644 index 0000000000..cd277d8097 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/pyproject.toml @@ -0,0 +1,5 @@ +[project] +name = "test" +version = "0.1.0" +requires-python = ">=3.13" +dependencies = [] diff --git a/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock b/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock new file mode 100644 index 0000000000..a4937d10d3 --- /dev/null +++ b/crates/ty_completion_eval/truth/pass-keyword-completion/uv.lock @@ -0,0 +1,8 @@ +version = 1 +revision = 3 +requires-python = ">=3.13" + +[[package]] +name = "test" +version = "0.1.0" +source = { virtual = "." } From 1bbe4f0d5ee122f6a4e1a3f8698d3ba1900f1b19 Mon Sep 17 00:00:00 2001 From: Andrew Gallant Date: Tue, 11 Nov 2025 10:09:50 -0500 Subject: [PATCH 163/180] [ty] Add more keyword completions to scope completions This should round out the rest of the set. I think I had hesitated doing this before because some of these don't make sense in every context. But I think identifying the correct context for every keyword could be quite difficult. And at the very least, I think offering these as a choice---even if they aren't always correct---is better than not doing it at all. 
--- .../completion-evaluation-tasks.csv | 4 +- crates/ty_ide/src/completion.rs | 611 ++++++++++++++---- 2 files changed, 471 insertions(+), 144 deletions(-) diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index 6190d33465..effd41aceb 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -17,7 +17,7 @@ numpy-array,main.py,0, numpy-array,main.py,1,1 object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,1,1 -pass-keyword-completion,main.py,0, +pass-keyword-completion,main.py,0,7 raise-uses-base-exception,main.py,0,2 scope-existing-over-new-import,main.py,0,1 scope-prioritize-closer,main.py,0,2 @@ -25,4 +25,4 @@ scope-simple-long-identifier,main.py,0,1 tstring-completions,main.py,0,1 ty-extensions-lower-stdlib,main.py,0,8 type-var-typing-over-ast,main.py,0,3 -type-var-typing-over-ast,main.py,1,277 +type-var-typing-over-ast,main.py,1,279 diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index e2b03c393e..c6f76b2eef 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -175,6 +175,20 @@ impl<'db> Completion<'db> { documentation: None, } } + + fn value_keyword(name: &str, ty: Type<'db>) -> Completion<'db> { + Completion { + name: name.into(), + insert: None, + ty: Some(ty), + kind: Some(CompletionKind::Keyword), + module_name: None, + import: None, + builtin: true, + is_type_check_only: false, + documentation: None, + } + } } /// The "kind" of a completion. 
@@ -275,7 +289,7 @@ pub fn completion<'db>( .collect(); if scoped.is_some() { - add_keyword_value_completions(db, &typed_query, &mut completions); + add_keyword_completions(db, &typed_query, &mut completions); } if settings.auto_import { if let Some(scoped) = scoped { @@ -294,35 +308,45 @@ pub fn completion<'db>( completions } -/// Adds a subset of completions derived from keywords. +/// Adds completions derived from keywords. /// -/// Note that at present, these should only be added to "scoped" -/// completions. i.e., This will include `None`, `True`, `False`, etc. +/// This should generally only be used when offering "scoped" completions. +/// This will include keywords corresponding to Python values (like `None`) +/// and general language keywords (like `raise`). +fn add_keyword_completions<'db>( db: &'db dyn Db, query: &QueryPattern, completions: &mut Vec>, ) { - let keywords = [ + let keyword_values = [ ("None", Type::none(db)), ("True", Type::BooleanLiteral(true)), ("False", Type::BooleanLiteral(false)), ]; - for (name, ty) in keywords { + for (name, ty) in keyword_values { if !query.is_match_symbol_name(name) { continue; } - completions.push(Completion { - name: ast::name::Name::new(name), - insert: None, - ty: Some(ty), - kind: None, - module_name: None, - import: None, - is_type_check_only: false, - builtin: true, - documentation: None, - }); + completions.push(Completion::value_keyword(name, ty)); + } + + // Note that we specifically omit the `type` keyword here, since + // it will be included via `builtins`. This does make its sorting + // priority slightly different than other keywords, but it's not + // clear (to me, AG) if that's an issue or not. Since the builtin + // completion has an actual type associated with it, we use that + // instead of a keyword completion. 
+ let keywords = [ + "and", "as", "assert", "async", "await", "break", "class", "continue", "def", "del", + "elif", "else", "except", "finally", "for", "from", "global", "if", "import", "in", "is", + "lambda", "nonlocal", "not", "or", "pass", "raise", "return", "try", "while", "with", + "yield", "case", "match", + ]; + for name in keywords { + if !query.is_match_symbol_name(name) { + continue; + } + completions.push(Completion::keyword(name)); } } @@ -1079,7 +1103,7 @@ mod tests { ); assert_snapshot!( - test.skip_builtins().build().snapshot(), + test.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1101,6 +1125,73 @@ mod tests { // See `private_symbols_in_stub` for more comprehensive testing private of symbol filtering. } + #[test] + fn keywords() { + let test = completion_test_builder( + "\ + +", + ); + + assert_snapshot!( + test.skip_builtins().build().snapshot(), + @r" + and + as + assert + async + await + break + case + class + continue + def + del + elif + else + except + finally + for + from + global + if + import + in + is + lambda + match + nonlocal + not + or + pass + raise + return + try + while + with + yield + ", + ); + } + + #[test] + fn type_keyword_dedup() { + let test = completion_test_builder( + "\ +type +", + ); + + assert_snapshot!( + test.type_signatures().build().snapshot(), + @r" + TypeError :: + type :: + _NotImplementedType :: + ", + ); + } + #[test] fn builtins_not_included_object_attr() { let builder = completion_test_builder( @@ -1133,7 +1224,7 @@ import re ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"re"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"re"); } #[test] @@ -1146,7 +1237,7 @@ from os import path ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"path"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"path"); } // N.B. We don't currently explore module APIs. 
This @@ -1288,7 +1379,7 @@ f ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1302,7 +1393,7 @@ g ); assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1317,7 +1408,7 @@ def foo(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1333,7 +1424,7 @@ f ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1347,7 +1438,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1362,7 +1453,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1396,7 +1487,7 @@ def foo(): // matches the current cursor's indentation. This seems fraught // however. It's not clear to me that we can always assume a // correspondence between scopes and indentation level. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1412,7 +1503,7 @@ def foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1428,7 +1519,7 @@ def foo(): f", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo "); @@ -1446,7 +1537,7 @@ def frob(): ... 
", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo frob @@ -1465,7 +1556,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1483,7 +1574,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo foofoo foofoofoo @@ -1511,7 +1602,7 @@ def foo(): // account for the indented whitespace, or some other technique // needs to be used to get the scope containing `foofoo` but not // `foofoofoo`. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1527,7 +1618,7 @@ def foo(): ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo "); } @@ -1545,7 +1636,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1565,7 +1656,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1586,7 +1677,7 @@ def frob(): ... ); // FIXME: Should include `foofoo` (but not `foofoofoo`). 
- assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo frob "); @@ -1621,7 +1712,7 @@ def frob(): ... // the list comprehension is not yet valid and so we do not // detect this as a definition of `bar`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1634,7 +1725,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1645,7 +1736,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1656,7 +1747,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1667,7 +1758,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1678,7 +1769,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1689,7 +1780,7 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -1712,7 +1803,7 @@ def frob(): ... // The `lambda_blank1` test works because there are expressions // on either side of . 
assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1727,7 +1818,7 @@ def frob(): ... // FIXME: Should include `foo`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1742,7 +1833,7 @@ def frob(): ... // FIXME: Should include `foo`. assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -1758,7 +1849,7 @@ class Foo: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar frob "); @@ -1774,7 +1865,7 @@ class Foo: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"bar"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"bar"); } #[test] @@ -1794,7 +1885,7 @@ class Foo: // // These don't work for similar reasons as other // tests above with the inside of whitespace. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Foo "); } @@ -1813,7 +1904,7 @@ class Foo: // FIXME: Should include `bar`, `quux` and `frob`. // (Unclear if `Foo` should be included, but a false // positive isn't the end of the world.) - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Foo "); } @@ -1829,7 +1920,7 @@ class Foo(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1846,7 +1937,7 @@ class Bar: ... 
", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1863,7 +1954,7 @@ class Bar: ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1878,7 +1969,7 @@ class Bar: ... class Foo(", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" Bar Foo "); @@ -1899,7 +1990,8 @@ quux. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" bar :: Unknown | Literal[2] baz :: Unknown | Literal[3] foo :: Unknown | Literal[1] @@ -1944,7 +2036,8 @@ quux.b ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" bar :: Unknown | Literal[2] baz :: Unknown | Literal[3] __getattribute__ :: bound method Quux.__getattribute__(name: str, /) -> Any @@ -1968,7 +2061,8 @@ C. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" meta_attr :: int mro :: bound method .mro() -> list[type] __annotate__ :: @Todo | None @@ -2039,7 +2133,8 @@ Meta. // just redact them. 
---AG filters => [(r"(?m)\s*__(annotations|new|annotate)__.+$", "")]}, { - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" meta_attr :: property mro :: def mro(self) -> list[type] __base__ :: type | None @@ -2100,7 +2195,7 @@ class Quux: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar baz foo @@ -2161,7 +2256,8 @@ Quux. ", ); - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" mro :: bound method .mro() -> list[type] some_attribute :: int some_class_method :: bound method .some_class_method() -> int @@ -2233,7 +2329,8 @@ Answer. // rendered differently in release mode. filters => [(r"(?m)\s*__(call|reduce_ex|annotate|signature)__.+$", "")]}, { - assert_snapshot!(builder.skip_builtins().type_signatures().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r" NO :: Literal[Answer.NO] YES :: Literal[Answer.YES] mro :: bound method .mro() -> list[type] @@ -2318,7 +2415,7 @@ bar(o ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); } #[test] @@ -2333,7 +2430,7 @@ bar( ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar foo "); @@ -2352,7 +2449,7 @@ class C: ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo self "); @@ -2369,7 +2466,7 @@ class C: ", ); - 
assert_snapshot!(builder.skip_builtins().build().snapshot(), @"C"); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"C"); } #[test] @@ -2386,7 +2483,7 @@ class C: // FIXME: Should NOT include `foo` here, since // that is only a method that can be called on // `self`. - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" foo self "); @@ -2402,7 +2499,10 @@ class ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"classy_variable_name"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"classy_variable_name", + ); } #[test] @@ -2415,7 +2515,10 @@ print(f\"{some ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"some_symbol"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"some_symbol", + ); } #[test] @@ -2429,7 +2532,10 @@ hidden_ ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2449,7 +2555,7 @@ if sys.platform == \"not-my-current-platform\": // currently make no effort to provide a good IDE experience within sections that // are unreachable assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -2555,7 +2661,10 @@ A(). 
", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2776,7 +2885,10 @@ q.foo.xyz ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2787,7 +2899,7 @@ q.foo.xyz ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" __annotations__ __class__ __delattr__ @@ -2822,7 +2934,10 @@ class Foo: ... ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -2867,7 +2982,10 @@ def m ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2879,7 +2997,10 @@ def m(): pass ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2892,9 +3013,7 @@ def m(): pass ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r" - m - "); + assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r"m"); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2906,7 +3025,10 @@ class M ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2918,7 +3040,10 @@ Fo = float ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @"Fo"); + assert_snapshot!( + 
builder.skip_keywords().skip_builtins().build().snapshot(), + @"Fo", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2934,7 +3059,14 @@ import fo // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. - assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2950,7 +3082,14 @@ import foo as ba // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. - assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2966,7 +3105,14 @@ from fo import wat // which is kind of annoying. So just assert that it // runs without panicking and produces some non-empty // output. 
- assert!(!builder.skip_builtins().build().completions().is_empty()); + assert!( + !builder + .skip_keywords() + .skip_builtins() + .build() + .completions() + .is_empty() + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2978,7 +3124,10 @@ from foo import wa ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -2990,7 +3139,10 @@ from foo import wat as ba ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } // Ref: https://github.com/astral-sh/ty/issues/572 @@ -3006,7 +3158,7 @@ except Type: ); assert_snapshot!( - builder.skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -3021,7 +3173,10 @@ def _(): ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3041,7 +3196,10 @@ f = Foo() "#, ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @r"", + ); } #[test] @@ -3227,7 +3385,10 @@ foo = 1 from ? import ", ); - assert_snapshot!(builder.skip_builtins().build().snapshot(), @r""); + assert_snapshot!( + builder.skip_keywords().skip_builtins().build().snapshot(), + @r"", + ); } #[test] @@ -3621,7 +3782,7 @@ from os. 
); assert_snapshot!( - test.skip_builtins().build().snapshot(), + test.skip_keywords().skip_builtins().build().snapshot(), @"", ); } @@ -3650,7 +3811,10 @@ zqzqzq = 1 ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3661,7 +3825,10 @@ zqzqzq = 1 print(\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3670,7 +3837,10 @@ class Foo: print(\"Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3681,7 +3851,10 @@ zqzqzq = 1 print(\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3690,7 +3863,10 @@ class Foo: print(\"Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3701,7 +3877,10 @@ zqzqzq = 1 print('zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3710,7 +3889,10 @@ class Foo: print('Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3721,7 +3903,10 @@ zqzqzq = 1 print('zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = 
completion_test_builder( "\ @@ -3730,7 +3915,10 @@ class Foo: print('Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3741,7 +3929,10 @@ zqzqzq = 1 print(\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3750,7 +3941,10 @@ class Foo: print(\"\"\"Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3761,7 +3955,10 @@ zqzqzq = 1 print(\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3770,7 +3967,10 @@ class Foo: print(\"\"\"Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3781,7 +3981,10 @@ zqzqzq = 1 print('''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3790,7 +3993,10 @@ class Foo: print('''Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3801,7 +4007,10 @@ zqzqzq = 1 print('''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3810,7 +4019,10 @@ class Foo: 
print('''Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3821,7 +4033,10 @@ zqzqzq = 1 print(f\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3830,7 +4045,10 @@ class Foo: print(f\"{Foo} and Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3841,7 +4059,10 @@ zqzqzq = 1 print(f\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3850,7 +4071,10 @@ class Foo: print(f\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3861,7 +4085,10 @@ zqzqzq = 1 print(f'zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3870,7 +4097,10 @@ class Foo: print(f'{Foo} and Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3881,7 +4111,10 @@ zqzqzq = 1 print(f'zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3890,7 +4123,10 @@ class Foo: print(f'{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3901,7 +4137,10 @@ zqzqzq = 1 print(f\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3910,7 +4149,10 @@ class Foo: print(f\"\"\"{Foo} and Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3921,7 +4163,10 @@ zqzqzq = 1 print(f\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3930,7 +4175,10 @@ class Foo: print(f\"\"\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3941,7 +4189,10 @@ zqzqzq = 1 print(f'''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3950,7 +4201,10 @@ class Foo: print(f'''{Foo} and Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3961,7 +4215,10 @@ zqzqzq = 1 print(f'''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3970,7 +4227,10 @@ class Foo: print(f'''{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -3981,7 +4241,10 @@ zqzqzq = 1 print(t\"zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -3990,7 +4253,10 @@ class Foo: print(t\"{Foo} and Foo.zqzq\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4001,7 +4267,10 @@ zqzqzq = 1 print(t\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4010,7 +4279,10 @@ class Foo: print(t\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4021,7 +4293,10 @@ zqzqzq = 1 print(t'zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4030,7 +4305,10 @@ class Foo: print(t'{Foo} and Foo.zqzq') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4041,7 +4319,10 @@ zqzqzq = 1 print(t'zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4050,7 +4331,10 @@ class Foo: print(t'{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4061,7 +4345,10 @@ zqzqzq = 1 print(t\"\"\"zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4070,7 +4357,10 @@ class Foo: print(t\"\"\"{Foo} and Foo.zqzq\"\"\") ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4081,7 +4371,10 @@ zqzqzq = 1 print(t\"\"\"zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4090,7 +4383,10 @@ class Foo: print(t\"\"\"{Foo} and Foo.zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4101,7 +4397,10 @@ zqzqzq = 1 print(t'''zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4110,7 +4409,10 @@ class Foo: print(t'''{Foo} and Foo.zqzq''') ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4121,7 +4423,10 @@ zqzqzq = 1 print(t'''zqzq ", ); - assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); let test = completion_test_builder( "\ @@ -4130,7 +4435,10 @@ class Foo: print(t'''{Foo} and Foo.zqzq ", ); - 
assert_snapshot!(test.skip_builtins().build().snapshot(), @""); + assert_snapshot!( + test.skip_keywords().skip_builtins().build().snapshot(), + @"", + ); } #[test] @@ -4463,10 +4771,12 @@ type /// of completions from a cursor position in source code. Most of /// the options involve some kind of filtering or adjustment to /// apply to the snapshots, depending on what one wants to test. + #[expect(clippy::struct_excessive_bools)] // free the bools! struct CompletionTestBuilder { cursor_test: CursorTest, settings: CompletionSettings, skip_builtins: bool, + skip_keywords: bool, type_signatures: bool, module_names: bool, // This doesn't seem like a "very complex" type to me... ---AG @@ -4486,6 +4796,7 @@ type let filtered = original .iter() .filter(|c| !self.skip_builtins || !c.builtin) + .filter(|c| !self.skip_keywords || c.kind != Some(CompletionKind::Keyword)) .filter(|c| { self.predicate .as_ref() @@ -4526,6 +4837,21 @@ type self } + /// When set, keywords from completions are skipped. This + /// is useful in tests to reduce noise for scope based + /// completions. + /// + /// Not enabled by default. + /// + /// Note that, at time of writing (2025-11-11), keywords are + /// *also* considered builtins. So `skip_builtins()` will also + /// skip keywords. But this may not always be true. And one + /// might want to skip keywords but *not* builtins. + fn skip_keywords(mut self) -> CompletionTestBuilder { + self.skip_keywords = true; + self + } + /// When set, type signatures of each completion item are /// included in the snapshot. 
This is useful when one wants /// to specifically test types, but it usually best to leave @@ -4645,6 +4971,7 @@ type cursor_test: self.build(), settings: CompletionSettings::default(), skip_builtins: false, + skip_keywords: false, type_signatures: false, module_names: false, predicate: None, From 164c2a6cc63edeffcd7949a168f2322986de7c00 Mon Sep 17 00:00:00 2001 From: Andrew Gallant Date: Tue, 11 Nov 2025 10:47:34 -0500 Subject: [PATCH 164/180] [ty] Sort keyword completions above everything else It looks like VS Code does this forcefully. As in, I don't think we can override it. It also seems like a plausibly good idea. But by us doing it too, it makes our completion evaluation framework match real world conditions. (To the extent that "VS Code" and "real world conditions" are the same. Which... they aren't. But it's close, since VS Code is so popular.) --- .../completion-evaluation-tasks.csv | 4 ++-- crates/ty_ide/src/completion.rs | 11 ++++++++++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/crates/ty_completion_eval/completion-evaluation-tasks.csv b/crates/ty_completion_eval/completion-evaluation-tasks.csv index effd41aceb..93b6cd4736 100644 --- a/crates/ty_completion_eval/completion-evaluation-tasks.csv +++ b/crates/ty_completion_eval/completion-evaluation-tasks.csv @@ -12,12 +12,12 @@ import-deprioritizes-type_check_only,main.py,3,2 import-deprioritizes-type_check_only,main.py,4,3 import-keyword-completion,main.py,0,1 internal-typeshed-hidden,main.py,0,5 -none-completion,main.py,0,11 +none-completion,main.py,0,2 numpy-array,main.py,0, numpy-array,main.py,1,1 object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,1,1 -pass-keyword-completion,main.py,0,7 +pass-keyword-completion,main.py,0,1 raise-uses-base-exception,main.py,0,2 scope-existing-over-new-import,main.py,0,1 scope-prioritize-closer,main.py,0,2 diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index c6f76b2eef..8c90597c74 100644 --- 
a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -1004,9 +1004,18 @@ fn is_in_definition_place(db: &dyn Db, tokens: &[Token], file: File) -> bool { /// This has the effect of putting all dunder attributes after "normal" /// attributes, and all single-underscore attributes after dunder attributes. fn compare_suggestions(c1: &Completion, c2: &Completion) -> Ordering { - fn key<'a>(completion: &'a Completion) -> (bool, bool, NameKind, bool, &'a Name) { + fn key<'a>(completion: &'a Completion) -> (bool, bool, bool, NameKind, bool, &'a Name) { ( completion.module_name.is_some(), + // At time of writing (2025-11-11), keyword completions + // are classified as builtins, which makes them sort after + // everything else. But we probably want keyword completions + // to sort *before* anything else since they are so common. + // Moreover, it seems VS Code forcefully does this sorting. + // By doing it ourselves, we make our natural sorting match + // VS Code's, and thus our completion evaluation framework + // should be more representative of real world conditions. + completion.kind != Some(CompletionKind::Keyword), completion.builtin, NameKind::classify(&completion.name), completion.is_type_check_only, From 988c38c013028bc47f903516ddda563e4b05decd Mon Sep 17 00:00:00 2001 From: Shaygan Hooshyari Date: Tue, 11 Nov 2025 23:45:34 +0100 Subject: [PATCH 165/180] [ty] Skip eagerly evaluated scopes for attribute storing (#20856) ## Summary Fix https://github.com/astral-sh/ty/issues/664 This PR adds support for storing attributes in comprehension scopes (any eager scope.) For example in the following code we infer type of `z` correctly: ```py class C: def __init__(self): [None for self.z in range(1)] reveal_type(C().z) # previously [unresolved-attribute] but now shows Unknown | int ``` The fix works by adjusting the following logics: To identify if an attriute is an assignment to self or cls we need to check the scope is a method. 
To allow comprehension scopes here we skip any eager scope in the check. Also at this stage the code checks if self or the first method argument is shadowed by another binding that eager scope to prevent this: ```py class D: g: int class C: def __init__(self): [[None for self.g in range(1)] for self in [D()]] reveal_type(C().g) # [unresolved-attribute] ``` When determining scopes that attributes might be defined after collecting all the methods of the class the code also returns any decendant scope that is eager and only has eager parents until the method scope. When checking reachability of a attribute definition if the attribute is defined in an eager scope we use the reachability of the first non eager scope which must be a method. This allows attributes to be marked as reachable and be seen. There are also which I didn't add support for: ```py class C: def __init__(self): def f(): [None for self.z in range(1)] f() reveal_type(C().z) # [unresolved-attribute] ``` In the above example we will not even return the comprehension scope as an attribute scope because there is a non eager scope (`f` function) between the comprehension and the `__init__` method --------- Co-authored-by: Carl Meyer --- .../resources/mdtest/attributes.md | 90 +++++++++++++------ .../ty_python_semantic/src/semantic_index.rs | 61 +++++++++---- .../src/semantic_index/builder.rs | 62 ++++++++++--- crates/ty_python_semantic/src/types/class.rs | 53 +++++++---- 4 files changed, 191 insertions(+), 75 deletions(-) diff --git a/crates/ty_python_semantic/resources/mdtest/attributes.md b/crates/ty_python_semantic/resources/mdtest/attributes.md index b1dbd57c78..012a925e87 100644 --- a/crates/ty_python_semantic/resources/mdtest/attributes.md +++ b/crates/ty_python_semantic/resources/mdtest/attributes.md @@ -369,6 +369,11 @@ reveal_type(c_instance.y) # revealed: Unknown | int #### Attributes defined in comprehensions +```toml +[environment] +python-version = "3.12" +``` + ```py class TupleIterator: def 
__next__(self) -> tuple[int, str]: @@ -380,19 +385,9 @@ class TupleIterable: class C: def __init__(self) -> None: - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] [... for self.a in range(3)] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] - # error: [unresolved-attribute] [... for (self.b, self.c) in TupleIterable()] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] - # error: [unresolved-attribute] [... for self.d in range(3) for self.e in range(3)] - # TODO: Should not emit this diagnostic - # error: [unresolved-attribute] [[... for self.f in range(3)] for _ in range(3)] [[... for self.g in range(3)] for self in [D()]] @@ -401,35 +396,74 @@ class D: c_instance = C() -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.a) # revealed: Unknown +reveal_type(c_instance.a) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.b) # revealed: Unknown +reveal_type(c_instance.b) # revealed: Unknown | int -# TODO: no error, reveal Unknown | str -# error: [unresolved-attribute] -reveal_type(c_instance.c) # revealed: Unknown +reveal_type(c_instance.c) # revealed: Unknown | str -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.d) # revealed: Unknown +reveal_type(c_instance.d) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.e) # revealed: Unknown +reveal_type(c_instance.e) # revealed: Unknown | int -# TODO: no error, reveal Unknown | int -# error: [unresolved-attribute] -reveal_type(c_instance.f) # revealed: Unknown +reveal_type(c_instance.f) # revealed: Unknown | int # This one is correctly not resolved as an attribute: # error: [unresolved-attribute] reveal_type(c_instance.g) # revealed: Unknown ``` +It does not matter how much the comprehension is nested. 
+ +Similarly attributes defined by the comprehension in a generic method are recognized. + +```py +class C: + def f[T](self): + [... for self.a in [1]] + [[... for self.b in [1]] for _ in [1]] + +c_instance = C() + +reveal_type(c_instance.a) # revealed: Unknown | int +reveal_type(c_instance.b) # revealed: Unknown | int +``` + +If the comprehension is inside another scope like function then that attribute is not inferred. + +```py +class C: + def __init__(self): + def f(): + # error: [unresolved-attribute] + [... for self.a in [1]] + + def g(): + # error: [unresolved-attribute] + [... for self.b in [1]] + g() + +c_instance = C() + +# This attribute is in the function f and is not reachable +# error: [unresolved-attribute] +reveal_type(c_instance.a) # revealed: Unknown + +# error: [unresolved-attribute] +reveal_type(c_instance.b) # revealed: Unknown +``` + +If the comprehension is nested in any other eager scope it still can assign attributes. + +```py +class C: + def __init__(self): + class D: + [[... for self.a in [1]] for _ in [1]] + +reveal_type(C().a) # revealed: Unknown | int +``` + #### Conditionally declared / bound attributes We currently treat implicit instance attributes to be bound, even if they are only conditionally diff --git a/crates/ty_python_semantic/src/semantic_index.rs b/crates/ty_python_semantic/src/semantic_index.rs index 4d31de2cb9..f4ab765f08 100644 --- a/crates/ty_python_semantic/src/semantic_index.rs +++ b/crates/ty_python_semantic/src/semantic_index.rs @@ -1,4 +1,4 @@ -use std::iter::FusedIterator; +use std::iter::{FusedIterator, once}; use std::sync::Arc; use ruff_db::files::File; @@ -148,29 +148,56 @@ pub(crate) fn attribute_declarations<'db, 's>( /// /// Only call this when doing type inference on the same file as `class_body_scope`, otherwise it /// introduces a direct dependency on that file's AST. 
-pub(crate) fn attribute_scopes<'db, 's>( +pub(crate) fn attribute_scopes<'db>( db: &'db dyn Db, class_body_scope: ScopeId<'db>, -) -> impl Iterator + use<'s, 'db> { +) -> impl Iterator + 'db { let file = class_body_scope.file(db); let index = semantic_index(db, file); let class_scope_id = class_body_scope.file_scope_id(db); + ChildrenIter::new(&index.scopes, class_scope_id) + .filter_map(move |(child_scope_id, scope)| { + let (function_scope_id, function_scope) = + if scope.node().scope_kind() == ScopeKind::TypeParams { + // This could be a generic method with a type-params scope. + // Go one level deeper to find the function scope. The first + // descendant is the (potential) function scope. + let function_scope_id = scope.descendants().start; + (function_scope_id, index.scope(function_scope_id)) + } else { + (child_scope_id, scope) + }; + function_scope.node().as_function()?; + Some(function_scope_id) + }) + .flat_map(move |func_id| { + // Add any descendent scope that is eager and have eager scopes between the scope + // and the method scope. Since attributes can be defined in this scope. + let nested = index.descendent_scopes(func_id).filter_map(move |(id, s)| { + let is_eager = s.kind().is_eager(); + let parents_are_eager = { + let mut all_parents_eager = true; + let mut current = Some(id); - ChildrenIter::new(&index.scopes, class_scope_id).filter_map(move |(child_scope_id, scope)| { - let (function_scope_id, function_scope) = - if scope.node().scope_kind() == ScopeKind::TypeParams { - // This could be a generic method with a type-params scope. - // Go one level deeper to find the function scope. The first - // descendant is the (potential) function scope. 
- let function_scope_id = scope.descendants().start; - (function_scope_id, index.scope(function_scope_id)) - } else { - (child_scope_id, scope) - }; + while let Some(scope_id) = current { + if scope_id == func_id { + break; + } + let scope = index.scope(scope_id); + if !scope.is_eager() { + all_parents_eager = false; + break; + } + current = scope.parent(); + } - function_scope.node().as_function()?; - Some(function_scope_id) - }) + all_parents_eager + }; + + (parents_are_eager && is_eager).then_some(id) + }); + once(func_id).chain(nested) + }) } /// Returns the module global scope of `file`. diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index dc3acb1434..9352bf196c 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -186,29 +186,34 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { self.current_scope_info().file_scope_id } - /// Returns the scope ID of the surrounding class body scope if the current scope - /// is a method inside a class body. Returns `None` otherwise, e.g. if the current - /// scope is a function body outside of a class, or if the current scope is not a + /// Returns the scope ID of the current scope if the current scope + /// is a method inside a class body or an eagerly executed scope inside a method. + /// Returns `None` otherwise, e.g. if the current scope is a function body outside of a class, or if the current scope is not a /// function body. 
- fn is_method_of_class(&self) -> Option { - let mut scopes_rev = self.scope_stack.iter().rev(); + fn is_method_or_eagerly_executed_in_method(&self) -> Option { + let mut scopes_rev = self + .scope_stack + .iter() + .rev() + .skip_while(|scope| self.scopes[scope.file_scope_id].is_eager()); let current = scopes_rev.next()?; if self.scopes[current.file_scope_id].kind() != ScopeKind::Function { return None; } + let maybe_method = current.file_scope_id; let parent = scopes_rev.next()?; match self.scopes[parent.file_scope_id].kind() { - ScopeKind::Class => Some(parent.file_scope_id), + ScopeKind::Class => Some(maybe_method), ScopeKind::TypeParams => { // If the function is generic, the parent scope is an annotation scope. // In this case, we need to go up one level higher to find the class scope. let grandparent = scopes_rev.next()?; if self.scopes[grandparent.file_scope_id].kind() == ScopeKind::Class { - Some(grandparent.file_scope_id) + Some(maybe_method) } else { None } @@ -217,6 +222,32 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { } } + /// Checks if a symbol name is bound in any intermediate eager scopes + /// between the current scope and the specified method scope. + /// + fn is_symbol_bound_in_intermediate_eager_scopes( + &self, + symbol_name: &str, + method_scope_id: FileScopeId, + ) -> bool { + for scope_info in self.scope_stack.iter().rev() { + let scope_id = scope_info.file_scope_id; + + if scope_id == method_scope_id { + break; + } + + if let Some(symbol_id) = self.place_tables[scope_id].symbol_id(symbol_name) { + let symbol = self.place_tables[scope_id].symbol(symbol_id); + if symbol.is_bound() { + return true; + } + } + } + + false + } + /// Push a new loop, returning the outer loop, if any. 
fn push_loop(&mut self) -> Option { self.current_scope_info_mut() @@ -1700,7 +1731,7 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { self.visit_expr(&node.annotation); if let Some(value) = &node.value { self.visit_expr(value); - if self.is_method_of_class().is_some() { + if self.is_method_or_eagerly_executed_in_method().is_some() { // Record the right-hand side of the assignment as a standalone expression // if we're inside a method. This allows type inference to infer the type // of the value for annotated assignments like `self.CONSTANT: Final = 1`, @@ -2372,14 +2403,21 @@ impl<'ast> Visitor<'ast> for SemanticIndexBuilder<'_, 'ast> { | ast::Expr::Attribute(ast::ExprAttribute { ctx, .. }) | ast::Expr::Subscript(ast::ExprSubscript { ctx, .. }) => { if let Some(mut place_expr) = PlaceExpr::try_from_expr(expr) { - if self.is_method_of_class().is_some() { + if let Some(method_scope_id) = self.is_method_or_eagerly_executed_in_method() { if let PlaceExpr::Member(member) = &mut place_expr { if member.is_instance_attribute_candidate() { // We specifically mark attribute assignments to the first parameter of a method, // i.e. typically `self` or `cls`. - let accessed_object_refers_to_first_parameter = self - .current_first_parameter_name - .is_some_and(|first| member.symbol_name() == first); + // However, we must check that the symbol hasn't been shadowed by an intermediate + // scope (e.g., a comprehension variable: `for self in [...]`). 
+ let accessed_object_refers_to_first_parameter = + self.current_first_parameter_name.is_some_and(|first| { + member.symbol_name() == first + && !self.is_symbol_bound_in_intermediate_eager_scopes( + first, + method_scope_id, + ) + }); if accessed_object_refers_to_first_parameter { member.mark_instance_attribute(); diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index ce6fe0d19b..d939dabb01 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -3119,30 +3119,47 @@ impl<'db> ClassLiteral<'db> { union_of_inferred_types = union_of_inferred_types.add(Type::unknown()); } - for (attribute_assignments, method_scope_id) in + for (attribute_assignments, attribute_binding_scope_id) in attribute_assignments(db, class_body_scope, &name) { - let method_scope = index.scope(method_scope_id); - if !is_valid_scope(method_scope) { + let binding_scope = index.scope(attribute_binding_scope_id); + if !is_valid_scope(binding_scope) { continue; } - // The attribute assignment inherits the reachability of the method which contains it - let is_method_reachable = if let Some(method_def) = method_scope.node().as_function() { - let method = index.expect_single_definition(method_def); - let method_place = class_table - .symbol_id(&method_def.node(&module).name) - .unwrap(); - class_map - .all_reachable_symbol_bindings(method_place) - .find_map(|bind| { - (bind.binding.is_defined_and(|def| def == method)) - .then(|| class_map.binding_reachability(db, &bind)) - }) - .unwrap_or(Truthiness::AlwaysFalse) - } else { - Truthiness::AlwaysFalse + let scope_for_reachability_analysis = { + if binding_scope.node().as_function().is_some() { + binding_scope + } else if binding_scope.is_eager() { + let mut eager_scope_parent = binding_scope; + while eager_scope_parent.is_eager() + && let Some(parent) = eager_scope_parent.parent() + { + eager_scope_parent = index.scope(parent); + } + 
eager_scope_parent + } else { + binding_scope + } }; + + // The attribute assignment inherits the reachability of the method which contains it + let is_method_reachable = + if let Some(method_def) = scope_for_reachability_analysis.node().as_function() { + let method = index.expect_single_definition(method_def); + let method_place = class_table + .symbol_id(&method_def.node(&module).name) + .unwrap(); + class_map + .all_reachable_symbol_bindings(method_place) + .find_map(|bind| { + (bind.binding.is_defined_and(|def| def == method)) + .then(|| class_map.binding_reachability(db, &bind)) + }) + .unwrap_or(Truthiness::AlwaysFalse) + } else { + Truthiness::AlwaysFalse + }; if is_method_reachable.is_always_false() { continue; } From d2c3996f4e2bd4bce5280b6da479d5f176702651 Mon Sep 17 00:00:00 2001 From: Bhuminjay Soni Date: Wed, 12 Nov 2025 12:47:29 +0530 Subject: [PATCH 166/180] `UP035`: Consistently set the deprecated tag (#21396) --- .../ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs index 9b15ac0b5b..51c3147a50 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/deprecated_import.rs @@ -766,11 +766,12 @@ pub(crate) fn deprecated_import(checker: &Checker, import_from_stmt: &StmtImport } for operation in fixer.with_renames() { - checker.report_diagnostic( + let mut diagnostic = checker.report_diagnostic( DeprecatedImport { deprecation: Deprecation::WithRename(operation), }, import_from_stmt.range(), ); + diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated); } } From 725ae697739db48b0c4fb8fc1cdc9c0c1502ab84 Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Wed, 12 Nov 2025 02:29:23 -0500 Subject: [PATCH 167/180] 
[`pydoclint`] Support NumPy-style comma-separated parameters (`DOC102`) (#20972) --- .../test/fixtures/pydoclint/DOC102_numpy.py | 73 +++++++++++++++++++ .../rules/pydoclint/rules/check_docstring.rs | 36 ++++++--- ...-extraneous-parameter_DOC102_numpy.py.snap | 33 +++++++++ 3 files changed, 130 insertions(+), 12 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py index ea93e16cc1..9bdac6788f 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC102_numpy.py @@ -371,6 +371,61 @@ class Foo: """ return +# DOC102 - Test case from issue #20959: comma-separated parameters +def leq(x: object, y: object) -> bool: + """Compare two objects for loose equality. + + Parameters + ---------- + x1, x2 : object + Objects. + + Returns + ------- + bool + Whether the objects are identical or equal. + """ + return x is y or x == y + + +# OK - comma-separated parameters that match function signature +def compare_values(x1: int, x2: int) -> bool: + """Compare two integer values. + + Parameters + ---------- + x1, x2 : int + Values to compare. + + Returns + ------- + bool + True if values are equal. + """ + return x1 == x2 + + +# DOC102 - mixed comma-separated and regular parameters +def process_data(data, x1: str, x2: str) -> str: + """Process data with multiple string parameters. + + Parameters + ---------- + data : list + Input data to process. + x1, x2 : str + String parameters for processing. + extra_param : str + Extra parameter not in signature. + + Returns + ------- + str + Processed result. + """ + return f"{x1}{x2}{len(data)}" + + # OK def baz(x: int) -> int: """ @@ -389,3 +444,21 @@ def baz(x: int) -> int: int """ return x + + +# OK - comma-separated parameters without type annotations +def add_numbers(a, b): + """ + Adds two numbers and returns the result. 
+ + Parameters + ---------- + a, b + The numbers to add. + + Returns + ------- + int + The sum of the two numbers. + """ + return a + b diff --git a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs index dd88250952..97f1b7ed9b 100644 --- a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs +++ b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs @@ -661,19 +661,31 @@ fn parse_parameters_numpy(content: &str, content_start: TextSize) -> Vec DOC102_numpy.py:380:5 + | +378 | Parameters +379 | ---------- +380 | x1, x2 : object + | ^^ +381 | Objects. + | +help: Remove the extraneous parameter from the docstring + +DOC102 Documented parameter `x2` is not in the function's signature + --> DOC102_numpy.py:380:9 + | +378 | Parameters +379 | ---------- +380 | x1, x2 : object + | ^^ +381 | Objects. + | +help: Remove the extraneous parameter from the docstring + +DOC102 Documented parameter `extra_param` is not in the function's signature + --> DOC102_numpy.py:418:5 + | +416 | x1, x2 : str +417 | String parameters for processing. +418 | extra_param : str + | ^^^^^^^^^^^ +419 | Extra parameter not in signature. 
+ | +help: Remove the extraneous parameter from the docstring From 19c7994e903bf3294c58d7752b5cf23dfff9794f Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Wed, 12 Nov 2025 08:54:14 +0100 Subject: [PATCH 168/180] [ty] Fix Escape handler in playground (#21397) --- playground/ty/src/Editor/Editor.tsx | 31 ++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/playground/ty/src/Editor/Editor.tsx b/playground/ty/src/Editor/Editor.tsx index c82250b9e1..b7755d54de 100644 --- a/playground/ty/src/Editor/Editor.tsx +++ b/playground/ty/src/Editor/Editor.tsx @@ -70,13 +70,16 @@ export default function Editor({ const serverRef = useRef(null); if (serverRef.current != null) { - serverRef.current.update({ - files, - workspace, - onOpenFile, - onVendoredFileChange, - onBackToUserFile, - }); + serverRef.current.update( + { + files, + workspace, + onOpenFile, + onVendoredFileChange, + onBackToUserFile, + }, + isViewingVendoredFile, + ); } // Update the diagnostics in the editor. 
@@ -200,6 +203,7 @@ class PlaygroundServer
   private rangeSemanticTokensDisposable: IDisposable;
   private signatureHelpDisposable: IDisposable;
   private documentHighlightDisposable: IDisposable;
+  private inVendoredFileCondition: editor.IContextKey<boolean>;
 
   // Cache for vendored file handles
   private vendoredFileHandles = new Map<string, FileHandle>();
@@ -249,8 +253,16 @@ class PlaygroundServer
     this.documentHighlightDisposable =
       monaco.languages.registerDocumentHighlightProvider("python", this);
 
+    this.inVendoredFileCondition = editor.createContextKey<boolean>(
+      "inVendoredFile",
+      false,
+    );
     // Register Esc key command
-    editor.addCommand(monaco.KeyCode.Escape, this.props.onBackToUserFile);
+    editor.addCommand(
+      monaco.KeyCode.Escape,
+      () => this.props.onBackToUserFile(),
+      "inVendoredFile",
+    );
   }
 
   triggerCharacters: string[] = ["."];
@@ -452,8 +464,9 @@ class PlaygroundServer
     return undefined;
   }
 
-  update(props: PlaygroundServerProps) {
+  update(props: PlaygroundServerProps, isViewingVendoredFile: boolean) {
     this.props = props;
+    this.inVendoredFileCondition.set(isViewingVendoredFile);
   }
 
   private getOrCreateVendoredFileHandle(vendoredPath: string): FileHandle {

From d272a623d3cda57f8eaac3e1d61eb6ca7f1f2708 Mon Sep 17 00:00:00 2001
From: Micha Reiser
Date: Wed, 12 Nov 2025 08:54:25 +0100
Subject: [PATCH 169/180] [ty] Fix goto for `float` and `complex` in type annotation positions (#21388)

---
 crates/ty_ide/src/goto_definition.rs | 105 ++++++++++++++++++
 crates/ty_ide/src/hover.rs | 26 +++++
 crates/ty_ide/src/semantic_tokens.rs | 37 ++++++
 crates/ty_python_semantic/src/types.rs | 1 -
 .../src/types/ide_support.rs | 49 +++++++-
 5 files changed, 216 insertions(+), 2 deletions(-)

diff --git a/crates/ty_ide/src/goto_definition.rs b/crates/ty_ide/src/goto_definition.rs
index fc0fc28fb9..81caea650f 100644
--- a/crates/ty_ide/src/goto_definition.rs
+++ b/crates/ty_ide/src/goto_definition.rs
@@ -1592,6 +1592,111 @@ a = Test()
         ");
     }
 
+    #[test]
+    fn float_annotation() {
+        let test = CursorTest::builder()
+            .source(
+ "main.py", + " +a: float = 3.14 +", + ) + .build(); + + assert_snapshot!(test.goto_definition(), @r#" + info[goto-definition]: Definition + --> stdlib/builtins.pyi:346:7 + | + 345 | @disjoint_base + 346 | class int: + | ^^^ + 347 | """int([x]) -> integer + 348 | int(x, base=10) -> integer + | + info: Source + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:659:7 + | + 658 | @disjoint_base + 659 | class float: + | ^^^^^ + 660 | """Convert a string or number to a floating-point number, if possible.""" + | + info: Source + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^ + | + "#); + } + + #[test] + fn complex_annotation() { + let test = CursorTest::builder() + .source( + "main.py", + " +a: complex = 3.14 +", + ) + .build(); + + assert_snapshot!(test.goto_definition(), @r#" + info[goto-definition]: Definition + --> stdlib/builtins.pyi:346:7 + | + 345 | @disjoint_base + 346 | class int: + | ^^^ + 347 | """int([x]) -> integer + 348 | int(x, base=10) -> integer + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:659:7 + | + 658 | @disjoint_base + 659 | class float: + | ^^^^^ + 660 | """Convert a string or number to a floating-point number, if possible.""" + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + + info[goto-definition]: Definition + --> stdlib/builtins.pyi:820:7 + | + 819 | @disjoint_base + 820 | class complex: + | ^^^^^^^ + 821 | """Create a complex number from a string or numbers. + | + info: Source + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ + | + "#); + } + /// Regression test for . /// We must ensure we respect re-import convention for stub files for /// imports in builtins.pyi. 
diff --git a/crates/ty_ide/src/hover.rs b/crates/ty_ide/src/hover.rs index 1b348b82b9..3b9bf7eeb4 100644 --- a/crates/ty_ide/src/hover.rs +++ b/crates/ty_ide/src/hover.rs @@ -2634,6 +2634,32 @@ def ab(a: int, *, c: int): "); } + #[test] + fn hover_float_annotation() { + let test = cursor_test( + r#" + a: float = 3.14 + "#, + ); + + assert_snapshot!(test.hover(), @r" + int | float + --------------------------------------------- + ```python + int | float + ``` + --------------------------------------------- + info[hover]: Hovered content is + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^- Cursor offset + | | + | source + | + "); + } + impl CursorTest { fn hover(&self) -> String { use std::fmt::Write; diff --git a/crates/ty_ide/src/semantic_tokens.rs b/crates/ty_ide/src/semantic_tokens.rs index df1bb88b37..4fb3aa45ab 100644 --- a/crates/ty_ide/src/semantic_tokens.rs +++ b/crates/ty_ide/src/semantic_tokens.rs @@ -1178,6 +1178,43 @@ result = check(None) "#); } + #[test] + fn test_builtin_types() { + let test = SemanticTokenTest::new( + r#" + class Test: + a: int + b: bool + c: str + d: float # TODO: Should be Class + e: list[int] + f: list[float] # TODO: Should be Class + g: int | float # TODO: float should be Class + "#, + ); + + assert_snapshot!(test.to_snapshot(&test.highlight_file()), @r#" + "Test" @ 7..11: Class [definition] + "a" @ 17..18: Variable + "int" @ 20..23: Class + "b" @ 28..29: Variable + "bool" @ 31..35: Class + "c" @ 40..41: Variable + "str" @ 43..46: Class + "d" @ 51..52: Variable + "float" @ 54..59: Variable + "e" @ 89..90: Variable + "list" @ 92..96: Class + "int" @ 97..100: Class + "f" @ 106..107: Variable + "list" @ 109..113: Class + "float" @ 114..119: Variable + "g" @ 150..151: Variable + "int" @ 153..156: Class + "float" @ 159..164: Variable + "#); + } + #[test] fn test_semantic_tokens_range() { let test = SemanticTokenTest::new( diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 
7f950f7b77..e91007047d 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1171,7 +1171,6 @@ impl<'db> Type<'db> { } } - #[cfg(test)] #[track_caller] pub(crate) const fn expect_union(self) -> UnionType<'db> { self.as_union().expect("Expected a Type::Union variant") diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index 4b42458c4d..89dfe8fbe8 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -10,9 +10,9 @@ use crate::semantic_index::scope::ScopeId; use crate::semantic_index::{ attribute_scopes, global_scope, place_table, semantic_index, use_def_map, }; -use crate::types::CallDunderError; use crate::types::call::{CallArguments, MatchedArgument}; use crate::types::signatures::Signature; +use crate::types::{CallDunderError, UnionType}; use crate::types::{ ClassBase, ClassLiteral, DynamicType, KnownClass, KnownInstanceType, Type, TypeContext, TypeVarBoundOrConstraints, class::CodeGeneratorKind, @@ -619,6 +619,29 @@ pub fn definitions_for_name<'db>( let Some(builtins_scope) = builtins_module_scope(db) else { return Vec::new(); }; + + // Special cases for `float` and `complex` in type annotation positions. + // We don't know whether we're in a type annotation position, so we'll just ask `Name`'s type, + // which resolves to `int | float` or `int | float | complex` if `float` or `complex` is used in + // a type annotation position and `float` or `complex` otherwise. 
+ // + // https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex + if matches!(name_str, "float" | "complex") + && let Some(union) = name.inferred_type(&SemanticModel::new(db, file)).as_union() + && is_float_or_complex_annotation(db, union, name_str) + { + return union + .elements(db) + .iter() + .filter_map(|ty| ty.as_nominal_instance()) + .map(|instance| { + let definition = instance.class_literal(db).definition(db); + let parsed = parsed_module(db, definition.file(db)); + ResolvedDefinition::FileWithRange(definition.focus_range(db, &parsed.load(db))) + }) + .collect(); + } + find_symbol_in_scope(db, builtins_scope, name_str) .into_iter() .filter(|def| def.is_reexported(db)) @@ -636,6 +659,30 @@ pub fn definitions_for_name<'db>( } } +fn is_float_or_complex_annotation(db: &dyn Db, ty: UnionType, name: &str) -> bool { + let float_or_complex_ty = match name { + "float" => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + ], + ), + "complex" => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + KnownClass::Complex.to_instance(db), + ], + ), + _ => return false, + } + .expect_union(); + + ty == float_or_complex_ty +} + /// Returns all resolved definitions for an attribute expression `x.y`. /// This function duplicates much of the functionality in the semantic /// analyzer, but it has somewhat different behavior so we've decided From 6322f370153df85fccc4c8e2fe4c9bdf451d921b Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 12 Nov 2025 11:02:29 +0100 Subject: [PATCH 170/180] [ty] Better assertion message for benchmark diagnostics check (#21398) I don't know why, but it always takes me an eternity to find the failing project name a few lines below in the output. So I'm suggesting we just add the project name to the assertion message. 
--- crates/ruff_benchmark/benches/ty_walltime.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index 2b02230fd6..cb5fd32014 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -71,16 +71,13 @@ impl Display for Benchmark<'_> { } } -fn check_project(db: &ProjectDatabase, max_diagnostics: usize) { +fn check_project(db: &ProjectDatabase, project_name: &str, max_diagnostics: usize) { let result = db.check(); let diagnostics = result.len(); assert!( diagnostics > 1 && diagnostics <= max_diagnostics, - "Expected between {} and {} diagnostics but got {}", - 1, - max_diagnostics, - diagnostics + "Expected between 1 and {max_diagnostics} diagnostics on project '{project_name}' but got {diagnostics}", ); } @@ -234,7 +231,7 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) { bencher .with_inputs(|| benchmark.setup_iteration()) .bench_local_refs(|db| { - check_project(db, benchmark.max_diagnostics); + check_project(db, benchmark.project.name, benchmark.max_diagnostics); }); } @@ -261,7 +258,7 @@ fn multithreaded(bencher: Bencher, benchmark: &Benchmark) { .with_inputs(|| benchmark.setup_iteration()) .bench_local_values(|db| { thread_pool.install(|| { - check_project(&db, benchmark.max_diagnostics); + check_project(&db, benchmark.project.name, benchmark.max_diagnostics); db }) }); @@ -285,7 +282,7 @@ fn main() { // branch when looking up the ingredient index. 
{ let db = TANJUN.setup_iteration(); - check_project(&db, TANJUN.max_diagnostics); + check_project(&db, TANJUN.project.name, TANJUN.max_diagnostics); } divan::main(); From f5cf672ed46348d5a9fae0140f9108c17dad8621 Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 12 Nov 2025 12:41:34 +0100 Subject: [PATCH 171/180] [ty] Reorganize walltime benchmarks (#21400) --- crates/ruff_benchmark/benches/ty_walltime.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index cb5fd32014..61c67fb019 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -235,7 +235,7 @@ fn run_single_threaded(bencher: Bencher, benchmark: &Benchmark) { }); } -#[bench(args=[&ALTAIR, &FREQTRADE, &PYDANTIC, &TANJUN], sample_size=2, sample_count=3)] +#[bench(args=[&ALTAIR, &FREQTRADE, &TANJUN], sample_size=2, sample_count=3)] fn small(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } @@ -245,12 +245,12 @@ fn medium(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } -#[bench(args=[&SYMPY], sample_size=1, sample_count=2)] +#[bench(args=[&SYMPY, &PYDANTIC], sample_size=1, sample_count=2)] fn large(bencher: Bencher, benchmark: &Benchmark) { run_single_threaded(bencher, benchmark); } -#[bench(args=[&PYDANTIC], sample_size=3, sample_count=8)] +#[bench(args=[&ALTAIR], sample_size=3, sample_count=8)] fn multithreaded(bencher: Bencher, benchmark: &Benchmark) { let thread_pool = ThreadPoolBuilder::new().build().unwrap(); From e8e81808884dc5157bd1bf12e8235455879616ee Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 12 Nov 2025 12:59:14 +0100 Subject: [PATCH 172/180] [ty] Implicit type aliases: Add support for `typing.Union` (#21363) ## Summary Add support for `typing.Union` in implicit type aliases / in value position. 
## Typing conformance tests Two new tests are passing ## Ecosystem impact * The 2k new `invalid-key` diagnostics on pydantic are caused by https://github.com/astral-sh/ty/issues/1479#issuecomment-3513854645. * Everything else I've checked is either a known limitation (often related to type narrowing, because union types are often narrowed down to a subset of options), or a true positive. ## Test Plan New Markdown tests --- crates/ruff_benchmark/benches/ty_walltime.rs | 4 +- .../resources/mdtest/implicit_type_aliases.md | 102 +++++++++++++++++- .../resources/mdtest/invalid_syntax.md | 24 +++++ crates/ty_python_semantic/src/types.rs | 31 ++++-- .../src/types/infer/builder.rs | 59 ++++++++-- 5 files changed, 201 insertions(+), 19 deletions(-) diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index 61c67fb019..8f13ab7ca7 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -181,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new( max_dep_date: "2025-06-17", python_version: PythonVersion::PY39, }, - 1000, + 3000, ); static SYMPY: Benchmark = Benchmark::new( @@ -223,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new( max_dep_date: "2025-08-09", python_version: PythonVersion::PY311, }, - 800, + 900, ); #[track_caller] diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md index aae10661b4..0a45e9e3c4 100644 --- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md +++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md @@ -33,7 +33,7 @@ g(None) We also support unions in type aliases: ```py -from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional +from typing_extensions import Any, Never, Literal, LiteralString, Tuple, Annotated, Optional, Union from ty_extensions import Unknown 
IntOrStr = int | str @@ -41,6 +41,8 @@ IntOrStrOrBytes1 = int | str | bytes IntOrStrOrBytes2 = (int | str) | bytes IntOrStrOrBytes3 = int | (str | bytes) IntOrStrOrBytes4 = IntOrStr | bytes +IntOrStrOrBytes5 = int | Union[str, bytes] +IntOrStrOrBytes6 = Union[int, str] | bytes BytesOrIntOrStr = bytes | IntOrStr IntOrNone = int | None NoneOrInt = None | int @@ -70,6 +72,8 @@ reveal_type(IntOrStrOrBytes1) # revealed: types.UnionType reveal_type(IntOrStrOrBytes2) # revealed: types.UnionType reveal_type(IntOrStrOrBytes3) # revealed: types.UnionType reveal_type(IntOrStrOrBytes4) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes5) # revealed: types.UnionType +reveal_type(IntOrStrOrBytes6) # revealed: types.UnionType reveal_type(BytesOrIntOrStr) # revealed: types.UnionType reveal_type(IntOrNone) # revealed: types.UnionType reveal_type(NoneOrInt) # revealed: types.UnionType @@ -100,6 +104,8 @@ def _( int_or_str_or_bytes2: IntOrStrOrBytes2, int_or_str_or_bytes3: IntOrStrOrBytes3, int_or_str_or_bytes4: IntOrStrOrBytes4, + int_or_str_or_bytes5: IntOrStrOrBytes5, + int_or_str_or_bytes6: IntOrStrOrBytes6, bytes_or_int_or_str: BytesOrIntOrStr, int_or_none: IntOrNone, none_or_int: NoneOrInt, @@ -129,6 +135,8 @@ def _( reveal_type(int_or_str_or_bytes2) # revealed: int | str | bytes reveal_type(int_or_str_or_bytes3) # revealed: int | str | bytes reveal_type(int_or_str_or_bytes4) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes5) # revealed: int | str | bytes + reveal_type(int_or_str_or_bytes6) # revealed: int | str | bytes reveal_type(bytes_or_int_or_str) # revealed: bytes | int | str reveal_type(int_or_none) # revealed: int | None reveal_type(none_or_int) # revealed: None | int @@ -505,13 +513,90 @@ def _( ## `Tuple` +We support implicit type aliases using `typing.Tuple`: + ```py from typing import Tuple IntAndStr = Tuple[int, str] +SingleInt = Tuple[int] +Ints = Tuple[int, ...] 
+EmptyTuple = Tuple[()]
 
-def _(int_and_str: IntAndStr):
+def _(int_and_str: IntAndStr, single_int: SingleInt, ints: Ints, empty_tuple: EmptyTuple):
     reveal_type(int_and_str)  # revealed: tuple[int, str]
+    reveal_type(single_int)  # revealed: tuple[int]
+    reveal_type(ints)  # revealed: tuple[int, ...]
+    reveal_type(empty_tuple)  # revealed: tuple[()]
+```
+
+Invalid uses cause diagnostics:
+
+```py
+from typing import Tuple
+
+# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression"
+Invalid = Tuple[int, 1]
+
+def _(invalid: Invalid):
+    reveal_type(invalid)  # revealed: tuple[int, Unknown]
+```
+
+## `Union`
+
+We support implicit type aliases using `typing.Union`:
+
+```py
+from typing import Union
+
+IntOrStr = Union[int, str]
+IntOrStrOrBytes = Union[int, Union[str, bytes]]
+
+reveal_type(IntOrStr)  # revealed: types.UnionType
+reveal_type(IntOrStrOrBytes)  # revealed: types.UnionType
+
+def _(
+    int_or_str: IntOrStr,
+    int_or_str_or_bytes: IntOrStrOrBytes,
+):
+    reveal_type(int_or_str)  # revealed: int | str
+    reveal_type(int_or_str_or_bytes)  # revealed: int | str | bytes
+```
+
+If a single type is given, no `types.UnionType` instance is created:
+
+```py
+JustInt = Union[int]
+
+reveal_type(JustInt)  # revealed: <class 'int'>
+
+def _(just_int: JustInt):
+    reveal_type(just_int)  # revealed: int
+```
+
+An empty `typing.Union` leads to a `TypeError` at runtime, so we emit an error. 
We still infer +`Never` when used as a type expression, which seems reasonable for an empty union: + +```py +# error: [invalid-type-form] "`typing.Union` requires at least one type argument" +EmptyUnion = Union[()] + +reveal_type(EmptyUnion) # revealed: types.UnionType + +def _(empty: EmptyUnion): + reveal_type(empty) # revealed: Never +``` + +Other invalid uses are also caught: + +```py +# error: [invalid-type-form] "Int literals are not allowed in this context in a type expression" +Invalid = Union[str, 1] + +def _( + invalid: Invalid, +): + reveal_type(invalid) # revealed: str | Unknown ``` ## Stringified annotations? @@ -544,10 +629,19 @@ We *do* support stringified annotations if they appear in a position where a typ syntactically expected: ```py -ListOfInts = list["int"] +from typing import Union -def _(list_of_ints: ListOfInts): +ListOfInts = list["int"] +StrOrStyle = Union[str, "Style"] + +class Style: ... + +def _( + list_of_ints: ListOfInts, + str_or_style: StrOrStyle, +): reveal_type(list_of_ints) # revealed: list[int] + reveal_type(str_or_style) # revealed: str | Style ``` ## Recursive diff --git a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md index cc90879401..9594492982 100644 --- a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md @@ -104,3 +104,27 @@ from typing import Callable def _(c: Callable[]): reveal_type(c) # revealed: (...) 
-> Unknown ``` + +### `typing.Tuple` + +```py +from typing import Tuple + +# error: [invalid-syntax] "Expected index or slice expression" +InvalidEmptyTuple = Tuple[] + +def _(t: InvalidEmptyTuple): + reveal_type(t) # revealed: tuple[Unknown] +``` + +### `typing.Union` + +```py +from typing import Union + +# error: [invalid-syntax] "Expected index or slice expression" +InvalidEmptyUnion = Union[] + +def _(u: InvalidEmptyUnion): + reveal_type(u) # revealed: Unknown +``` diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index e91007047d..515e048840 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -6586,12 +6586,13 @@ impl<'db> Type<'db> { }), KnownInstanceType::UnionType(list) => { let mut builder = UnionBuilder::new(db); + let inferred_as = list.inferred_as(db); for element in list.elements(db) { - builder = builder.add(element.in_type_expression( - db, - scope_id, - typevar_binding_context, - )?); + builder = builder.add(if inferred_as.type_expression() { + *element + } else { + element.in_type_expression(db, scope_id, typevar_binding_context)? + }); } Ok(builder.build()) } @@ -9164,6 +9165,21 @@ impl<'db> TypeVarBoundOrConstraints<'db> { } } +/// Whether a given type originates from value expression inference or type expression inference. +/// For example, the symbol `int` would be inferred as `` in value expression context, +/// and as `int` (i.e. an instance of the class `int`) in type expression context. +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, get_size2::GetSize, salsa::Update)] +pub enum InferredAs { + ValueExpression, + TypeExpression, +} + +impl InferredAs { + pub const fn type_expression(self) -> bool { + matches!(self, InferredAs::TypeExpression) + } +} + /// A salsa-interned list of types. 
/// /// # Ordering @@ -9174,6 +9190,7 @@ impl<'db> TypeVarBoundOrConstraints<'db> { pub struct InternedTypes<'db> { #[returns(deref)] elements: Box<[Type<'db>]>, + inferred_as: InferredAs, } impl get_size2::GetSize for InternedTypes<'_> {} @@ -9182,8 +9199,9 @@ impl<'db> InternedTypes<'db> { pub(crate) fn from_elements( db: &'db dyn Db, elements: impl IntoIterator>, + inferred_as: InferredAs, ) -> InternedTypes<'db> { - InternedTypes::new(db, elements.into_iter().collect::>()) + InternedTypes::new(db, elements.into_iter().collect::>(), inferred_as) } pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { @@ -9193,6 +9211,7 @@ impl<'db> InternedTypes<'db> { .iter() .map(|ty| ty.normalized_impl(db, visitor)) .collect::>(), + self.inferred_as(db), ) } } diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index dea82603f4..3fe0bb003d 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -101,10 +101,10 @@ use crate::types::typed_dict::{ use crate::types::visitor::any_over_type; use crate::types::{ CallDunderError, CallableBinding, CallableType, ClassLiteral, ClassType, DataclassParams, - DynamicType, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, KnownClass, - KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, Parameter, - ParameterForm, Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, - Type, TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, + DynamicType, InferredAs, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, + KnownClass, KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, + Parameter, ParameterForm, Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, + Truthiness, Type, TypeAliasType, TypeAndQualifiers, TypeContext, 
TypeQualifiers, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType, binding_type, todo_type, @@ -9234,7 +9234,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { Some(left_ty) } else { Some(Type::KnownInstance(KnownInstanceType::UnionType( - InternedTypes::from_elements(self.db(), [left_ty, right_ty]), + InternedTypes::from_elements( + self.db(), + [left_ty, right_ty], + InferredAs::ValueExpression, + ), ))) } } @@ -9259,7 +9263,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { && instance.has_known_class(self.db(), KnownClass::NoneType) => { Some(Type::KnownInstance(KnownInstanceType::UnionType( - InternedTypes::from_elements(self.db(), [left_ty, right_ty]), + InternedTypes::from_elements( + self.db(), + [left_ty, right_ty], + InferredAs::ValueExpression, + ), ))) } @@ -10476,9 +10484,46 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } return Type::KnownInstance(KnownInstanceType::UnionType( - InternedTypes::from_elements(self.db(), [ty, Type::none(self.db())]), + InternedTypes::from_elements( + self.db(), + [ty, Type::none(self.db())], + InferredAs::ValueExpression, + ), )); } + Type::SpecialForm(SpecialFormType::Union) => { + let db = self.db(); + + match **slice { + ast::Expr::Tuple(ref tuple) => { + let mut elements = tuple + .elts + .iter() + .map(|elt| self.infer_type_expression(elt)) + .peekable(); + + let is_empty = elements.peek().is_none(); + let union_type = Type::KnownInstance(KnownInstanceType::UnionType( + InternedTypes::from_elements(db, elements, InferredAs::TypeExpression), + )); + + if is_empty { + if let Some(builder) = + self.context.report_lint(&INVALID_TYPE_FORM, subscript) + { + builder.into_diagnostic( + "`typing.Union` requires at least one type argument", + ); + } + } + + return union_type; + } + _ => { + return self.infer_expression(slice, TypeContext::default()); + } + } + } _ => {} } From 
84c3cecad62fb174e532fa5027a7a31999e18a86 Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 12 Nov 2025 15:29:26 +0100 Subject: [PATCH 173/180] [ty] Baseline for subscript assignment diagnostics (#21404) ## Summary Add (snapshot) tests for subscript assignment diagnostics. This is mainly intended to establish a baseline before I hope to improve some of these messages. --- ...t…_-_Invalid_key_type_(d3d47de65fb3bad).snap | 31 +++++ ..._Invalid_key_type_for…_(815dae276e2fd2b7).snap | 36 ++++++ ..._-_Invalid_value_type_(f87bd015df018509).snap | 31 +++++ ..._Invalid_value_type_f…_(155d53762388f9ad).snap | 48 +++++++ ..._Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap | 38 ++++++ ..._No_`__setitem__`_met…_(468f62a3bdd1d60c).snap | 35 ++++++ ..._Possibly_missing_`__…_(efd3f0c02e9b89e9).snap | 31 +++++ ..._Unknown_key_for_all_…_(1c685d9d10678263).snap | 40 ++++++ ..._Unknown_key_for_one_…_(b515711c0a451a86).snap | 40 ++++++ ..._Wrong_value_type_for…_(57372b65e30392a8).snap | 31 +++++ ..._Wrong_value_type_for…_(ffe39a3bae68cfe4).snap | 31 +++++ .../subscript/assignment_diagnostics.md | 117 ++++++++++++++++++ .../resources/mdtest/typed_dict.md | 41 +++++- 13 files changed, 549 insertions(+), 1 deletion(-) create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap create mode 100644 
crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap create mode 100644 crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap new file mode 100644 index 0000000000..6081e0f5d9 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_(d3d47de65fb3bad).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type +mdtest path: 
crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | config: dict[str, int] = {} +2 | config[0] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal[0]` and a value of type `Literal[3]` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:2:1 + | +1 | config: dict[str, int] = {} +2 | config[0] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap new file mode 100644 index 0000000000..2fbfb5323f --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_key_type_for…_(815dae276e2fd2b7).snap @@ -0,0 +1,36 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid key type for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config[0] = 3 # error: [invalid-key] +``` + +# Diagnostics + +``` +error[invalid-key]: Cannot access `Config` with a key of type `Literal[0]`. Only string literals are allowed as keys on TypedDicts. 
+ --> src/mdtest_snippet.py:7:12 + | +6 | def _(config: Config) -> None: +7 | config[0] = 3 # error: [invalid-key] + | ^ + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap new file mode 100644 index 0000000000..125bcdfac1 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_(f87bd015df018509).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | config: dict[str, int] = {} +2 | config["retries"] = "three" # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal["three"]` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:2:1 + | +1 | config: dict[str, int] = {} +2 | config["retries"] = "three" # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap new file mode 100644 index 0000000000..c2821569ce --- 
/dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Invalid_value_type_f…_(155d53762388f9ad).snap @@ -0,0 +1,48 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Invalid value type for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config["retries"] = "three" # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Invalid assignment to key "retries" with declared type `int` on TypedDict `Config` + --> src/mdtest_snippet.py:7:5 + | +6 | def _(config: Config) -> None: +7 | config["retries"] = "three" # error: [invalid-assignment] + | ------ --------- ^^^^^^^ value of type `Literal["three"]` + | | | + | | key has declared type `int` + | TypedDict `Config` + | +info: Item declaration + --> src/mdtest_snippet.py:4:5 + | +3 | class Config(TypedDict): +4 | retries: int + | ------------ Item declared here +5 | +6 | def _(config: Config) -> None: + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap new file mode 100644 index 0000000000..e6036f32e0 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Misspelled_key_for_`…_(7cf0fa634e2a2d59).snap @@ -0,0 +1,38 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: 
assignment_diagnostics.md - Subscript assignment diagnostics - Misspelled key for `TypedDict` +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | from typing import TypedDict +2 | +3 | class Config(TypedDict): +4 | retries: int +5 | +6 | def _(config: Config) -> None: +7 | config["Retries"] = 30.0 # error: [invalid-key] +``` + +# Diagnostics + +``` +error[invalid-key]: Invalid key for TypedDict `Config` + --> src/mdtest_snippet.py:7:5 + | +6 | def _(config: Config) -> None: +7 | config["Retries"] = 30.0 # error: [invalid-key] + | ------ ^^^^^^^^^ Unknown key "Retries" - did you mean "retries"? + | | + | TypedDict `Config` + | +info: rule `invalid-key` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap new file mode 100644 index 0000000000..dec0ab3417 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap @@ -0,0 +1,35 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - No `__setitem__` method +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | class ReadOnlyDict: +2 | def __getitem__(self, key: str) -> int: +3 | return 42 +4 | +5 | config = ReadOnlyDict() +6 | config["retries"] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Cannot assign to object of type `ReadOnlyDict` with no `__setitem__` method + --> 
src/mdtest_snippet.py:6:1 + | +5 | config = ReadOnlyDict() +6 | config["retries"] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap new file mode 100644 index 0000000000..ced810cf72 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Possibly missing `__setitem__` method +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | None) -> None: +2 | config["retries"] = 3 # error: [possibly-missing-implicit-call] +``` + +# Diagnostics + +``` +warning[possibly-missing-implicit-call]: Method `__setitem__` of type `dict[str, int] | None` may be missing + --> src/mdtest_snippet.py:2:5 + | +1 | def _(config: dict[str, int] | None) -> None: +2 | config["retries"] = 3 # error: [possibly-missing-implicit-call] + | ^^^^^^ + | +info: rule `possibly-missing-implicit-call` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap new file mode 100644 index 0000000000..6444c84f36 --- /dev/null +++ 
b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap @@ -0,0 +1,40 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for all elemens of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import TypedDict + 2 | + 3 | class Person(TypedDict): + 4 | name: str + 5 | + 6 | class Animal(TypedDict): + 7 | name: str + 8 | legs: int + 9 | +10 | def _(being: Person | Animal) -> None: +11 | being["surname"] = "unknown" # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `(key: Literal["name"], value: str, /) -> None` cannot be called with a key of type `Literal["surname"]` and a value of type `Literal["unknown"]` on object of type `Person | Animal` + --> src/mdtest_snippet.py:11:5 + | +10 | def _(being: Person | Animal) -> None: +11 | being["surname"] = "unknown" # error: [invalid-assignment] + | ^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap new file mode 100644 index 0000000000..2b840a6783 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap @@ -0,0 +1,40 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Unknown key for one element of a 
union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import TypedDict + 2 | + 3 | class Person(TypedDict): + 4 | name: str + 5 | + 6 | class Animal(TypedDict): + 7 | name: str + 8 | legs: int + 9 | +10 | def _(being: Person | Animal) -> None: +11 | being["legs"] = 4 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `(key: Literal["name"], value: str, /) -> None` cannot be called with a key of type `Literal["legs"]` and a value of type `Literal[4]` on object of type `Person | Animal` + --> src/mdtest_snippet.py:11:5 + | +10 | def _(being: Person | Animal) -> None: +11 | being["legs"] = 4 # error: [invalid-assignment] + | ^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap new file mode 100644 index 0000000000..37ea1c111a --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for one element of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `(bound method 
dict[str, int].__setitem__(key: str, value: int, /) -> None) | (bound method dict[str, str].__setitem__(key: str, value: str, /) -> None)` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal[3]` on object of type `dict[str, int] | dict[str, str]` + --> src/mdtest_snippet.py:2:5 + | +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | config["retries"] = 3 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap new file mode 100644 index 0000000000..dfd0136536 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap @@ -0,0 +1,31 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: assignment_diagnostics.md - Subscript assignment diagnostics - Wrong value type for all elements of a union +mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +--- + +# Python source files + +## mdtest_snippet.py + +``` +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 | config["retries"] = 3.0 # error: [invalid-assignment] +``` + +# Diagnostics + +``` +error[invalid-assignment]: Method `__setitem__` of type `(bound method dict[str, int].__setitem__(key: str, value: int, /) -> None) | (bound method dict[str, str].__setitem__(key: str, value: str, /) -> None)` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, int] | dict[str, str]` + --> src/mdtest_snippet.py:2:5 + | +1 | def _(config: dict[str, int] | dict[str, str]) -> None: +2 
| config["retries"] = 3.0 # error: [invalid-assignment] + | ^^^^^^ + | +info: rule `invalid-assignment` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md new file mode 100644 index 0000000000..ed23208eb1 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md @@ -0,0 +1,117 @@ +# Subscript assignment diagnostics + + + +## Invalid value type + +```py +config: dict[str, int] = {} +config["retries"] = "three" # error: [invalid-assignment] +``` + +## Invalid key type + +```py +config: dict[str, int] = {} +config[0] = 3 # error: [invalid-assignment] +``` + +## Invalid value type for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config["retries"] = "three" # error: [invalid-assignment] +``` + +## Invalid key type for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config[0] = 3 # error: [invalid-key] +``` + +## Misspelled key for `TypedDict` + +```py +from typing import TypedDict + +class Config(TypedDict): + retries: int + +def _(config: Config) -> None: + config["Retries"] = 30.0 # error: [invalid-key] +``` + +## No `__setitem__` method + +```py +class ReadOnlyDict: + def __getitem__(self, key: str) -> int: + return 42 + +config = ReadOnlyDict() +config["retries"] = 3 # error: [invalid-assignment] +``` + +## Possibly missing `__setitem__` method + +```py +def _(config: dict[str, int] | None) -> None: + config["retries"] = 3 # error: [possibly-missing-implicit-call] +``` + +## Unknown key for one element of a union + +```py +from typing import TypedDict + +class Person(TypedDict): + name: str + +class Animal(TypedDict): + name: str + legs: int + +def _(being: Person | Animal) -> None: + being["legs"] = 4 # error: 
[invalid-assignment] +``` + +## Unknown key for all elemens of a union + +```py +from typing import TypedDict + +class Person(TypedDict): + name: str + +class Animal(TypedDict): + name: str + legs: int + +def _(being: Person | Animal) -> None: + being["surname"] = "unknown" # error: [invalid-assignment] +``` + +## Wrong value type for one element of a union + +```py +def _(config: dict[str, int] | dict[str, str]) -> None: + config["retries"] = 3 # error: [invalid-assignment] +``` + +## Wrong value type for all elements of a union + +```py +def _(config: dict[str, int] | dict[str, str]) -> None: + config["retries"] = 3.0 # error: [invalid-assignment] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index b4203ce2b6..8b8fcfffa3 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -526,10 +526,20 @@ class Person(TypedDict): name: str age: int | None +class Animal(TypedDict): + name: str + NAME_FINAL: Final = "name" AGE_FINAL: Final[Literal["age"]] = "age" -def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", "name"], str_key: str, unknown_key: Any) -> None: +def _( + person: Person, + being: Person | Animal, + literal_key: Literal["age"], + union_of_keys: Literal["age", "name"], + str_key: str, + unknown_key: Any, +) -> None: reveal_type(person["name"]) # revealed: str reveal_type(person["age"]) # revealed: int | None @@ -548,18 +558,30 @@ def _(person: Person, literal_key: Literal["age"], union_of_keys: Literal["age", # No error here: reveal_type(person[unknown_key]) # revealed: Unknown + + reveal_type(being["name"]) # revealed: str + + # TODO: A type of `int | None | Unknown` might be better here. The `str` is mixed in + # because `Animal.__getitem__` can only return `str`. 
+ # error: [invalid-key] "Invalid key for TypedDict `Animal`" + reveal_type(being["age"]) # revealed: int | None | str ``` ### Writing ```py from typing_extensions import TypedDict, Final, Literal, LiteralString, Any +from ty_extensions import Intersection class Person(TypedDict): name: str surname: str age: int | None +class Animal(TypedDict): + name: str + legs: int + NAME_FINAL: Final = "name" AGE_FINAL: Final[Literal["age"]] = "age" @@ -583,6 +605,23 @@ def _(person: Person, union_of_keys: Literal["name", "surname"]): # error: [invalid-assignment] "Cannot assign value of type `Literal[1]` to key of type `Literal["name", "surname"]` on TypedDict `Person`" person[union_of_keys] = 1 +def _(being: Person | Animal): + being["name"] = "Being" + + # error: [invalid-assignment] "Method `__setitem__` of type `(Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["surname"], value: str, /) -> None, (key: Literal["age"], value: int | None, /) -> None]) | (Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["legs"], value: int, /) -> None])` cannot be called with a key of type `Literal["name"]` and a value of type `Literal[1]` on object of type `Person | Animal`" + being["name"] = 1 + + # error: [invalid-assignment] "Method `__setitem__` of type `(Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["surname"], value: str, /) -> None, (key: Literal["age"], value: int | None, /) -> None]) | (Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["legs"], value: int, /) -> None])` cannot be called with a key of type `Literal["surname"]` and a value of type `Literal["unknown"]` on object of type `Person | Animal`" + being["surname"] = "unknown" + +def _(centaur: Intersection[Person, Animal]): + centaur["name"] = "Chiron" + centaur["age"] = 100 + centaur["legs"] = 4 + + # TODO: This should be an `invalid-key` error + centaur["unknown"] = "value" + def _(person: Person, union_of_keys: Literal["name", "age"], 
unknown_value: Any): person[union_of_keys] = unknown_value From 43427abb6105773f1f532e42d4262ccf8d7beb58 Mon Sep 17 00:00:00 2001 From: Micha Reiser Date: Wed, 12 Nov 2025 17:34:26 +0100 Subject: [PATCH 174/180] [ty] Improve semantic token classification for names (#21399) --- crates/ty_ide/src/hover.rs | 8 + crates/ty_ide/src/semantic_tokens.rs | 545 ++++++++++++------ crates/ty_python_semantic/src/ast_node_ref.rs | 1 + .../src/semantic_index/definition.rs | 2 +- .../src/semantic_index/use_def.rs | 1 + crates/ty_python_semantic/src/types.rs | 9 +- .../src/types/ide_support.rs | 44 +- 7 files changed, 389 insertions(+), 221 deletions(-) diff --git a/crates/ty_ide/src/hover.rs b/crates/ty_ide/src/hover.rs index 3b9bf7eeb4..c1278637a1 100644 --- a/crates/ty_ide/src/hover.rs +++ b/crates/ty_ide/src/hover.rs @@ -2644,9 +2644,17 @@ def ab(a: int, *, c: int): assert_snapshot!(test.hover(), @r" int | float + --------------------------------------------- + Convert a string or number to a floating-point number, if possible. + --------------------------------------------- ```python int | float + ``` + --- + ```text + Convert a string or number to a floating-point number, if possible. + ``` --------------------------------------------- info[hover]: Hovered content is diff --git a/crates/ty_ide/src/semantic_tokens.rs b/crates/ty_ide/src/semantic_tokens.rs index 4fb3aa45ab..a4df094f21 100644 --- a/crates/ty_ide/src/semantic_tokens.rs +++ b/crates/ty_ide/src/semantic_tokens.rs @@ -1,3 +1,37 @@ +//! This module walks the AST and collects a set of "semantic tokens" for a file +//! or a range within a file. Each semantic token provides a "token type" and zero +//! or more "modifiers". This information can be used by an editor to provide +//! color coding based on semantic meaning. +//! +//! Visual Studio has a very useful debugger that allows you to inspect the +//! semantic tokens for any given position in the code. Not only is this useful +//! 
to debug our semantic highlighting, it also allows easy comparison with +//! how Pylance (or other LSPs) highlight a certain token. You can open the scope inspector, +//! with the Command Palette (Command/Ctrl+Shift+P), then select the +//! `Developer: Inspect Editor Tokens and Scopes` command. +//! +//! Current limitations and areas for future improvement: +//! +//! TODO: Need to handle semantic tokens within quoted annotations. +//! +//! TODO: Need to properly handle Annotated expressions. All type arguments other +//! than the first should be treated as value expressions, not as type expressions. +//! +//! TODO: An identifier that resolves to a parameter when used within a function +//! should be classified as a parameter, selfParameter, or clsParameter token. +//! +//! TODO: Properties (or perhaps more generally, descriptor objects?) should be +//! classified as property tokens rather than just variables. +//! +//! TODO: Special forms like `Protocol` and `TypedDict` should probably be classified +//! as class tokens, but they are currently classified as variables. +//! +//! TODO: Type aliases (including those defined with the Python 3.12 "type" statement) +//! do not currently have a dedicated semantic token type, but they maybe should. +//! +//! TODO: Additional token modifiers might be added (e.g. for static methods, +//! abstract methods and classes). + use crate::Db; use bitflags::bitflags; use itertools::Itertools; @@ -13,43 +47,13 @@ use ruff_python_ast::{ }; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use std::ops::Deref; +use ty_python_semantic::semantic_index::definition::Definition; +use ty_python_semantic::types::TypeVarKind; use ty_python_semantic::{ HasType, SemanticModel, semantic_index::definition::DefinitionKind, types::Type, - types::ide_support::definition_kind_for_name, + types::ide_support::definition_for_name, }; -// This module walks the AST and collects a set of "semantic tokens" for a file -// or a range within a file. 
Each semantic token provides a "token type" and zero -// or more "modifiers". This information can be used by an editor to provide -// color coding based on semantic meaning. - -// Current limitations and areas for future improvement: - -// TODO: Need to provide better classification for name tokens that are imported -// from other modules. Currently, these are classified based on their types, -// which often means they're classified as variables when they should be classes -// in many cases. - -// TODO: Need to handle semantic tokens within quoted annotations. - -// TODO: Need to properly handle Annotated expressions. All type arguments other -// than the first should be treated as value expressions, not as type expressions. - -// TODO: An identifier that resolves to a parameter when used within a function -// should be classified as a parameter, selfParameter, or clsParameter token. - -// TODO: Properties (or perhaps more generally, descriptor objects?) should be -// classified as property tokens rather than just variables. - -// TODO: Special forms like Protocol and TypedDict should probably be classified -// as class tokens, but they are currently classified as variables. - -// TODO: Type aliases (including those defined with the Python 3.12 "type" statement) -// do not currently have a dedicated semantic token type, but they maybe should. - -// TODO: Additional token modifiers might be added (e.g. for static methods, -// abstract methods and classes). - /// Semantic token types supported by the language server. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum SemanticTokenType { @@ -198,6 +202,7 @@ struct SemanticTokenVisitor<'db> { tokens: Vec, in_class_scope: bool, in_type_annotation: bool, + in_target_creating_definition: bool, range_filter: Option, } @@ -212,6 +217,7 @@ impl<'db> SemanticTokenVisitor<'db> { file, tokens: Vec::new(), in_class_scope: false, + in_target_creating_definition: false, in_type_annotation: false, range_filter, } @@ -259,13 +265,11 @@ impl<'db> SemanticTokenVisitor<'db> { fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) { // First try to classify the token based on its definition kind. - let definition_kind = definition_kind_for_name(self.semantic_model.db(), self.file, name); + let definition = definition_for_name(self.semantic_model.db(), self.file, name); - if let Some(definition_kind) = definition_kind { + if let Some(definition) = definition { let name_str = name.id.as_str(); - if let Some(classification) = - self.classify_from_definition_kind(&definition_kind, name_str) - { + if let Some(classification) = self.classify_from_definition(definition, name_str) { return classification; } } @@ -276,14 +280,16 @@ impl<'db> SemanticTokenVisitor<'db> { self.classify_from_type_and_name_str(ty, name_str) } - fn classify_from_definition_kind( + fn classify_from_definition( &self, - definition_kind: &DefinitionKind<'_>, + definition: Definition, name_str: &str, ) -> Option<(SemanticTokenType, SemanticTokenModifier)> { let mut modifiers = SemanticTokenModifier::empty(); + let db = self.semantic_model.db(); + let model = SemanticModel::new(db, definition.file(db)); - match definition_kind { + match definition.kind(db) { DefinitionKind::Function(_) => { // Check if this is a method based on current scope if self.in_class_scope { @@ -294,7 +300,24 @@ impl<'db> SemanticTokenVisitor<'db> { } DefinitionKind::Class(_) => Some((SemanticTokenType::Class, modifiers)), DefinitionKind::TypeVar(_) => 
Some((SemanticTokenType::TypeParameter, modifiers)), - DefinitionKind::Parameter(_) => Some((SemanticTokenType::Parameter, modifiers)), + DefinitionKind::Parameter(parameter) => { + let parsed = parsed_module(db, definition.file(db)); + let ty = parameter.node(&parsed.load(db)).inferred_type(&model); + + if let Type::TypeVar(type_var) = ty { + match type_var.typevar(db).kind(db) { + TypeVarKind::TypingSelf => { + return Some((SemanticTokenType::SelfParameter, modifiers)); + } + TypeVarKind::Legacy + | TypeVarKind::ParamSpec + | TypeVarKind::Pep695ParamSpec + | TypeVarKind::Pep695 => {} + } + } + + Some((SemanticTokenType::Parameter, modifiers)) + } DefinitionKind::VariadicPositionalParameter(_) => { Some((SemanticTokenType::Parameter, modifiers)) } @@ -315,6 +338,25 @@ impl<'db> SemanticTokenVisitor<'db> { if Self::is_constant_name(name_str) { modifiers |= SemanticTokenModifier::READONLY; } + + let parsed = parsed_module(db, definition.file(db)); + let parsed = parsed.load(db); + let value = match definition.kind(db) { + DefinitionKind::Assignment(assignment) => Some(assignment.value(&parsed)), + _ => None, + }; + + if let Some(value) = value { + let value_ty = value.inferred_type(&model); + + if value_ty.is_class_literal() + || value_ty.is_subclass_of() + || value_ty.is_generic_alias() + { + return Some((SemanticTokenType::Class, modifiers)); + } + } + Some((SemanticTokenType::Variable, modifiers)) } } @@ -589,6 +631,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { // Clear the in_class_scope flag so inner functions // are not treated as methods let prev_in_class = self.in_class_scope; + self.in_class_scope = false; self.visit_body(&func.body); self.in_class_scope = prev_in_class; @@ -684,6 +727,27 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { ); } } + ast::Stmt::Assign(assignment) => { + self.in_target_creating_definition = true; + for element in &assignment.targets { + self.visit_expr(element); + } + 
self.in_target_creating_definition = false; + + self.visit_expr(&assignment.value); + } + ast::Stmt::AnnAssign(assignment) => { + self.in_target_creating_definition = true; + self.visit_expr(&assignment.target); + self.in_target_creating_definition = false; + + self.visit_expr(&assignment.annotation); + + if let Some(value) = &assignment.value { + self.visit_expr(value); + } + } + _ => { // For all other statement types, let the default visitor handle them walk_stmt(self, stmt); @@ -701,7 +765,10 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { fn visit_expr(&mut self, expr: &Expr) { match expr { ast::Expr::Name(name) => { - let (token_type, modifiers) = self.classify_name(name); + let (token_type, mut modifiers) = self.classify_name(name); + if self.in_target_creating_definition && name.ctx.is_store() { + modifiers |= SemanticTokenModifier::DEFINITION; + } self.add_token(name, token_type, modifiers); walk_expr(self, expr); } @@ -745,6 +812,15 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> { // Visit the lambda body self.visit_expr(&lambda.body); } + + ast::Expr::Named(named) => { + let prev_in_target = self.in_target_creating_definition; + self.in_target_creating_definition = true; + self.visit_expr(&named.target); + self.in_target_creating_definition = prev_in_target; + + self.visit_expr(&named.value); + } _ => { // For all other expression types, let the default visitor handle them walk_expr(self, expr); @@ -971,12 +1047,31 @@ y = 'hello' let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "42" @ 5..7: Number - "y" @ 8..9: Variable + "y" @ 8..9: Variable [definition] "'hello'" @ 12..19: String - "###); + "#); + } + + #[test] + fn test_semantic_tokens_walrus() { + let test = SemanticTokenTest::new( + " +if x := 42: + y = 'hello' +", + ); + + let tokens = test.highlight_file(); + + 
assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 4..5: Variable [definition] + "42" @ 9..11: Number + "y" @ 17..18: Variable [definition] + "'hello'" @ 21..28: String + "#); } #[test] @@ -984,18 +1079,30 @@ y = 'hello' let test = SemanticTokenTest::new( " class MyClass: - def method(self, x): pass + def method(self, x): + self.x = 10 + + def method_unidiomatic_self(self2): + print(self2.x)) ", ); let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] "method" @ 24..30: Method [definition] "self" @ 31..35: SelfParameter "x" @ 37..38: Parameter - "###); + "self" @ 49..53: SelfParameter + "x" @ 54..55: Variable + "10" @ 58..60: Number + "method_unidiomatic_self" @ 70..93: Method [definition] + "self2" @ 94..99: SelfParameter + "print" @ 110..115: Function + "self2" @ 116..121: SelfParameter + "x" @ 122..123: Variable + "#); } #[test] @@ -1085,13 +1192,13 @@ class MyClass: let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "CONSTANT" @ 20..28: Variable [readonly] + "CONSTANT" @ 20..28: Variable [definition, readonly] "42" @ 31..33: Number "method" @ 48..54: Method [definition, async] "self" @ 55..59: SelfParameter - "###); + "#); } #[test] @@ -1118,11 +1225,11 @@ z = sys.version "MyClass" @ 18..25: Class [definition] "my_function" @ 41..52: Function [definition] "42" @ 67..69: Number - "x" @ 71..72: Variable + "x" @ 71..72: Variable [definition] "MyClass" @ 75..82: Class - "y" @ 85..86: Variable + "y" @ 85..86: Variable [definition] "my_function" @ 89..100: Function - "z" @ 103..104: Variable + "z" @ 103..104: Variable [definition] "sys" @ 107..110: Namespace "version" @ 111..118: Variable "#); @@ -1140,14 +1247,14 @@ z = None let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" 
- "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "True" @ 5..9: BuiltinConstant - "y" @ 10..11: Variable + "y" @ 10..11: Variable [definition] "False" @ 14..19: BuiltinConstant - "z" @ 20..21: Variable + "z" @ 20..21: Variable [definition] "None" @ 24..28: BuiltinConstant - "###); + "#); } #[test] @@ -1168,11 +1275,11 @@ result = check(None) assert_snapshot!(test.to_snapshot(&tokens), @r#" "check" @ 5..10: Function [definition] "value" @ 11..16: Parameter - "value" @ 26..31: Variable + "value" @ 26..31: Parameter "None" @ 35..39: BuiltinConstant "False" @ 56..61: BuiltinConstant "True" @ 73..77: BuiltinConstant - "result" @ 79..85: Variable + "result" @ 79..85: Variable [definition] "check" @ 88..93: Function "None" @ 94..98: BuiltinConstant "#); @@ -1182,36 +1289,44 @@ result = check(None) fn test_builtin_types() { let test = SemanticTokenTest::new( r#" + type U = str | int + class Test: a: int b: bool c: str - d: float # TODO: Should be Class + d: float e: list[int] - f: list[float] # TODO: Should be Class - g: int | float # TODO: float should be Class + f: list[float] + g: int | float + h: U "#, ); assert_snapshot!(test.to_snapshot(&test.highlight_file()), @r#" - "Test" @ 7..11: Class [definition] - "a" @ 17..18: Variable - "int" @ 20..23: Class - "b" @ 28..29: Variable - "bool" @ 31..35: Class - "c" @ 40..41: Variable - "str" @ 43..46: Class - "d" @ 51..52: Variable - "float" @ 54..59: Variable - "e" @ 89..90: Variable - "list" @ 92..96: Class - "int" @ 97..100: Class - "f" @ 106..107: Variable - "list" @ 109..113: Class - "float" @ 114..119: Variable - "g" @ 150..151: Variable - "int" @ 153..156: Class - "float" @ 159..164: Variable + "U" @ 6..7: TypeParameter + "str" @ 10..13: Class + "int" @ 16..19: Class + "Test" @ 27..31: Class [definition] + "a" @ 37..38: Variable [definition] + "int" @ 40..43: Class + "b" @ 48..49: Variable [definition] + "bool" @ 51..55: Class + "c" @ 60..61: Variable 
[definition] + "str" @ 63..66: Class + "d" @ 71..72: Variable [definition] + "float" @ 74..79: Class + "e" @ 84..85: Variable [definition] + "list" @ 87..91: Class + "int" @ 92..95: Class + "f" @ 101..102: Variable [definition] + "list" @ 104..108: Class + "float" @ 109..114: Class + "g" @ 120..121: Variable [definition] + "int" @ 123..126: Class + "float" @ 129..134: Class + "h" @ 139..140: Variable [definition] + "U" @ 142..143: TypeParameter "#); } @@ -1243,29 +1358,29 @@ def function2(): assert!(range_tokens.len() < full_tokens.len()); // Test both full tokens and range tokens with snapshots - assert_snapshot!(test.to_snapshot(&full_tokens), @r###" + assert_snapshot!(test.to_snapshot(&full_tokens), @r#" "function1" @ 5..14: Function [definition] - "x" @ 22..23: Variable + "x" @ 22..23: Variable [definition] "42" @ 26..28: Number "x" @ 40..41: Variable "function2" @ 47..56: Function [definition] - "y" @ 64..65: Variable + "y" @ 64..65: Variable [definition] "\"hello\"" @ 68..75: String - "z" @ 80..81: Variable + "z" @ 80..81: Variable [definition] "True" @ 84..88: BuiltinConstant "y" @ 100..101: Variable "z" @ 104..105: Variable - "###); + "#); - assert_snapshot!(test.to_snapshot(&range_tokens), @r###" + assert_snapshot!(test.to_snapshot(&range_tokens), @r#" "function2" @ 47..56: Function [definition] - "y" @ 64..65: Variable + "y" @ 64..65: Variable [definition] "\"hello\"" @ 68..75: String - "z" @ 80..81: Variable + "z" @ 80..81: Variable [definition] "True" @ 84..88: BuiltinConstant "y" @ 100..101: Variable "z" @ 104..105: Variable - "###); + "#); // Verify that no tokens from range_tokens have ranges outside the requested range for token in range_tokens.iter() { @@ -1298,7 +1413,7 @@ z = 3 let range_tokens = test.highlight_range(range); assert_snapshot!(test.to_snapshot(&range_tokens), @r#" - "y" @ 7..8: Variable + "y" @ 7..8: Variable [definition] "2" @ 11..12: Number "#); } @@ -1351,9 +1466,9 @@ y = sys "sys" @ 18..21: Namespace "collections" @ 27..38: 
Namespace "defaultdict" @ 46..57: Class - "x" @ 119..120: Namespace + "x" @ 119..120: Variable [definition] "os" @ 123..125: Namespace - "y" @ 126..127: Namespace + "y" @ 126..127: Variable [definition] "sys" @ 130..133: Namespace "#); } @@ -1422,7 +1537,7 @@ u = List.__name__ # __name__ should be variable let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "os" @ 8..10: Namespace "sys" @ 18..21: Namespace "collections" @ 27..38: Namespace @@ -1430,7 +1545,7 @@ u = List.__name__ # __name__ should be variable "typing" @ 63..69: Namespace "List" @ 77..81: Variable "MyClass" @ 89..96: Class [definition] - "CONSTANT" @ 102..110: Variable [readonly] + "CONSTANT" @ 102..110: Variable [definition, readonly] "42" @ 113..115: Number "method" @ 125..131: Method [definition] "self" @ 132..136: SelfParameter @@ -1438,29 +1553,29 @@ u = List.__name__ # __name__ should be variable "property" @ 168..176: Decorator "prop" @ 185..189: Method [definition] "self" @ 190..194: SelfParameter - "self" @ 212..216: TypeParameter + "self" @ 212..216: SelfParameter "CONSTANT" @ 217..225: Variable [readonly] - "obj" @ 227..230: Variable + "obj" @ 227..230: Variable [definition] "MyClass" @ 233..240: Class - "x" @ 278..279: Namespace + "x" @ 278..279: Variable [definition] "os" @ 282..284: Namespace "path" @ 285..289: Namespace - "y" @ 339..340: Method + "y" @ 339..340: Variable [definition] "obj" @ 343..346: Variable "method" @ 347..353: Method - "z" @ 405..406: Variable + "z" @ 405..406: Variable [definition] "obj" @ 409..412: Variable "CONSTANT" @ 413..421: Variable [readonly] - "w" @ 483..484: Variable + "w" @ 483..484: Variable [definition] "obj" @ 487..490: Variable "prop" @ 491..495: Variable - "v" @ 534..535: Function + "v" @ 534..535: Variable [definition] "MyClass" @ 538..545: Class "method" @ 546..552: Method - "u" @ 596..597: Variable + "u" @ 596..597: Variable [definition] "List" @ 600..604: 
Variable "__name__" @ 605..613: Variable - "###); + "#); } #[test] @@ -1479,19 +1594,19 @@ y = obj.unknown_attr # Should fall back to variable let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "some_attr" @ 20..29: Variable + "some_attr" @ 20..29: Variable [definition] "\"value\"" @ 32..39: String - "obj" @ 41..44: Variable + "obj" @ 41..44: Variable [definition] "MyClass" @ 47..54: Class - "x" @ 117..118: Variable + "x" @ 117..118: Variable [definition] "obj" @ 121..124: Variable "some_attr" @ 125..134: Variable - "y" @ 187..188: Variable + "y" @ 187..188: Variable [definition] "obj" @ 191..194: Variable "unknown_attr" @ 195..207: Variable - "###); + "#); } #[test] @@ -1514,31 +1629,31 @@ w = obj.A # Should not have readonly modifier (length == 1) let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "UPPER_CASE" @ 20..30: Variable [readonly] + "UPPER_CASE" @ 20..30: Variable [definition, readonly] "42" @ 33..35: Number - "lower_case" @ 40..50: Variable + "lower_case" @ 40..50: Variable [definition] "24" @ 53..55: Number - "MixedCase" @ 60..69: Variable + "MixedCase" @ 60..69: Variable [definition] "12" @ 72..74: Number - "A" @ 79..80: Variable + "A" @ 79..80: Variable [definition] "1" @ 83..84: Number - "obj" @ 86..89: Variable + "obj" @ 86..89: Variable [definition] "MyClass" @ 92..99: Class - "x" @ 102..103: Variable + "x" @ 102..103: Variable [definition] "obj" @ 106..109: Variable "UPPER_CASE" @ 110..120: Variable [readonly] - "y" @ 156..157: Variable + "y" @ 156..157: Variable [definition] "obj" @ 160..163: Variable "lower_case" @ 164..174: Variable - "z" @ 214..215: Variable + "z" @ 214..215: Variable [definition] "obj" @ 218..221: Variable "MixedCase" @ 222..231: Variable - "w" @ 272..273: Variable + "w" 
@ 272..273: Variable [definition] "obj" @ 276..279: Variable "A" @ 280..281: Variable - "###); + "#); } #[test] @@ -1569,10 +1684,10 @@ y: Optional[str] = None "Optional" @ 95..103: Variable "List" @ 104..108: Variable "str" @ 109..112: Class - "x" @ 126..127: Variable + "x" @ 126..127: Variable [definition] "int" @ 129..132: Class "42" @ 135..137: Number - "y" @ 138..139: Variable + "y" @ 138..139: Variable [definition] "Optional" @ 141..149: Variable "str" @ 150..153: Class "None" @ 157..161: BuiltinConstant @@ -1589,11 +1704,11 @@ x: int = 42 let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "int" @ 4..7: Class "42" @ 10..12: Number - "###); + "#); } #[test] @@ -1611,7 +1726,7 @@ x: MyClass = MyClass() assert_snapshot!(test.to_snapshot(&tokens), @r#" "MyClass" @ 7..14: Class [definition] - "x" @ 26..27: Variable + "x" @ 26..27: Variable [definition] "MyClass" @ 29..36: Class "MyClass" @ 39..46: Class "#); @@ -1640,7 +1755,7 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]: let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "List" @ 20..24: Variable "Optional" @ 26..34: Variable @@ -1653,18 +1768,18 @@ def test_function(param: int, other: MyClass) -> Optional[List[str]]: "Optional" @ 110..118: Variable "List" @ 119..123: Variable "str" @ 124..127: Class - "x" @ 190..191: Variable + "x" @ 190..191: Variable [definition] "int" @ 193..196: Class "42" @ 199..201: Number - "y" @ 206..207: Variable + "y" @ 206..207: Variable [definition] "MyClass" @ 209..216: Class "MyClass" @ 219..226: Class - "z" @ 233..234: Variable + "z" @ 233..234: Variable [definition] "List" @ 236..240: Variable "str" @ 241..244: Class "\"hello\"" @ 249..256: String "None" @ 357..361: BuiltinConstant - "###); + 
"#); } #[test] @@ -1718,7 +1833,7 @@ def test_function(param: MyProtocol) -> MyProtocol: let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "typing" @ 6..12: Namespace "Protocol" @ 20..28: Variable "MyProtocol" @ 36..46: Class [definition] @@ -1726,14 +1841,62 @@ def test_function(param: MyProtocol) -> MyProtocol: "method" @ 66..72: Method [definition] "self" @ 73..77: SelfParameter "int" @ 82..85: Class - "my_protocol_var" @ 166..181: Class + "my_protocol_var" @ 166..181: Class [definition] "MyProtocol" @ 184..194: Class "test_function" @ 244..257: Function [definition] "param" @ 258..263: Parameter "MyProtocol" @ 265..275: Class "MyProtocol" @ 280..290: Class "param" @ 303..308: Parameter - "###); + "#); + } + + #[test] + fn type_alias_type_of() { + let test = SemanticTokenTest::new( + " +class Test[T]: ... + +my_type_alias = Test[str] # TODO: `my_type_alias` should be classified as a Class + +def test_function(param: my_type_alias): ... +", + ); + + let tokens = test.highlight_file(); + + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "Test" @ 7..11: Class [definition] + "T" @ 12..13: TypeParameter [definition] + "my_type_alias" @ 21..34: Class [definition] + "Test" @ 37..41: Class + "str" @ 42..45: Class + "test_function" @ 109..122: Function [definition] + "param" @ 123..128: Parameter + "my_type_alias" @ 130..143: Class + "#); + } + + #[test] + fn type_alias_to_generic_alias() { + let test = SemanticTokenTest::new( + " +my_type_alias = type[str] + +def test_function(param: my_type_alias): ... 
+", + ); + + let tokens = test.highlight_file(); + + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "my_type_alias" @ 1..14: Variable [definition] + "type" @ 17..21: Class + "str" @ 22..25: Class + "test_function" @ 32..45: Function [definition] + "param" @ 46..51: Parameter + "my_type_alias" @ 53..66: Variable + "#); } #[test] @@ -1777,7 +1940,7 @@ class BoundedContainer[T: int, U = str]: let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "func" @ 87..91: Function [definition] "T" @ 92..93: TypeParameter [definition] "x" @ 95..96: Parameter @@ -1810,7 +1973,7 @@ class BoundedContainer[T: int, U = str]: "kwargs" @ 374..380: Variable "str" @ 385..388: Class "str" @ 405..408: Class - "func" @ 409..413: Variable + "func" @ 409..413: Parameter "args" @ 415..419: Parameter "kwargs" @ 423..429: Parameter "wrapper" @ 443..450: Function @@ -1823,23 +1986,23 @@ class BoundedContainer[T: int, U = str]: "T" @ 552..553: TypeParameter "value2" @ 555..561: Parameter "U" @ 563..564: TypeParameter - "self" @ 575..579: TypeParameter + "self" @ 575..579: SelfParameter "value1" @ 580..586: Variable "T" @ 588..589: TypeParameter "value1" @ 592..598: Parameter - "self" @ 607..611: TypeParameter + "self" @ 607..611: SelfParameter "value2" @ 612..618: Variable "U" @ 620..621: TypeParameter "value2" @ 624..630: Parameter "get_first" @ 640..649: Method [definition] "self" @ 650..654: SelfParameter "T" @ 659..660: TypeParameter - "self" @ 677..681: TypeParameter + "self" @ 677..681: SelfParameter "value1" @ 682..688: Variable "get_second" @ 698..708: Method [definition] "self" @ 709..713: SelfParameter "U" @ 718..719: TypeParameter - "self" @ 736..740: TypeParameter + "self" @ 736..740: SelfParameter "value2" @ 741..747: Variable "BoundedContainer" @ 796..812: Class [definition] "T" @ 813..814: TypeParameter [definition] @@ -1857,7 +2020,7 @@ class BoundedContainer[T: int, U = str]: "U" @ 877..878: 
TypeParameter "x" @ 897..898: Parameter "y" @ 900..901: Parameter - "###); + "#); } #[test] @@ -1880,10 +2043,10 @@ def generic_function[T](value: T) -> T: "value" @ 25..30: Parameter "T" @ 32..33: TypeParameter "T" @ 38..39: TypeParameter - "result" @ 98..104: Variable + "result" @ 98..104: Variable [definition] "T" @ 106..107: TypeParameter "value" @ 110..115: Parameter - "temp" @ 120..124: TypeParameter + "temp" @ 120..124: Variable [definition] "result" @ 127..133: Variable "result" @ 184..190: Variable "#); @@ -1931,19 +2094,19 @@ z = 'single' "mixed" 'quotes'"#, let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "x" @ 0..1: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 0..1: Variable [definition] "\"hello\"" @ 4..11: String "\"world\"" @ 12..19: String - "y" @ 20..21: Variable + "y" @ 20..21: Variable [definition] "\"multi\"" @ 25..32: String "\"line\"" @ 38..44: String "\"string\"" @ 50..58: String - "z" @ 60..61: Variable + "z" @ 60..61: Variable [definition] "'single'" @ 64..72: String "\"mixed\"" @ 73..80: String "'quotes'" @ 81..89: String - "###); + "#); } #[test] @@ -1958,19 +2121,19 @@ z = b'single' b"mixed" b'quotes'"#, let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "x" @ 0..1: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 0..1: Variable [definition] "b\"hello\"" @ 4..12: String "b\"world\"" @ 13..21: String - "y" @ 22..23: Variable + "y" @ 22..23: Variable [definition] "b\"multi\"" @ 27..35: String "b\"line\"" @ 41..48: String "b\"bytes\"" @ 54..62: String - "z" @ 64..65: Variable + "z" @ 64..65: Variable [definition] "b'single'" @ 68..77: String "b\"mixed\"" @ 78..86: String "b'quotes'" @ 87..96: String - "###); + "#); } #[test] @@ -1987,26 +2150,26 @@ regular_bytes = b"just bytes""#, let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "string_concat" @ 39..52: Variable + 
assert_snapshot!(test.to_snapshot(&tokens), @r#" + "string_concat" @ 39..52: Variable [definition] "\"hello\"" @ 55..62: String "\"world\"" @ 63..70: String - "bytes_concat" @ 71..83: Variable + "bytes_concat" @ 71..83: Variable [definition] "b\"hello\"" @ 86..94: String "b\"world\"" @ 95..103: String - "mixed_quotes_str" @ 104..120: Variable + "mixed_quotes_str" @ 104..120: Variable [definition] "'single'" @ 123..131: String "\"double\"" @ 132..140: String "'single'" @ 141..149: String - "mixed_quotes_bytes" @ 150..168: Variable + "mixed_quotes_bytes" @ 150..168: Variable [definition] "b'single'" @ 171..180: String "b\"double\"" @ 181..190: String "b'single'" @ 191..200: String - "regular_string" @ 201..215: Variable + "regular_string" @ 201..215: Variable [definition] "\"just a string\"" @ 218..233: String - "regular_bytes" @ 234..247: Variable + "regular_bytes" @ 234..247: Variable [definition] "b\"just bytes\"" @ 250..263: String - "###); + "#); } #[test] @@ -2031,24 +2194,24 @@ complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}" let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "name" @ 45..49: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "name" @ 45..49: Variable [definition] "\"Alice\"" @ 52..59: String - "data" @ 60..64: Variable + "data" @ 60..64: Variable [definition] "b\"hello\"" @ 67..75: String - "value" @ 76..81: Variable + "value" @ 76..81: Variable [definition] "42" @ 84..86: Number - "result" @ 153..159: Variable + "result" @ 153..159: Variable [definition] "Hello " @ 164..170: String "name" @ 171..175: Variable "! 
Value: " @ 176..185: String "value" @ 186..191: Variable ", Data: " @ 192..200: String "data" @ 201..205: Variable - "mixed" @ 266..271: Variable + "mixed" @ 266..271: Variable [definition] "prefix" @ 276..282: String "b\"suffix\"" @ 286..295: String - "complex_fstring" @ 340..355: Variable + "complex_fstring" @ 340..355: Variable [definition] "User: " @ 360..366: String "name" @ 367..371: Variable "upper" @ 372..377: Method @@ -2058,7 +2221,7 @@ complex_fstring = f"User: {name.upper()}, Count: {len(data)}, Hex: {value:x}" ", Hex: " @ 400..407: String "value" @ 408..413: Variable "x" @ 414..415: String - "###); + "#); } #[test] @@ -2092,25 +2255,25 @@ def outer(): let tokens = test.highlight_file(); - assert_snapshot!(test.to_snapshot(&tokens), @r###" - "x" @ 1..2: Variable + assert_snapshot!(test.to_snapshot(&tokens), @r#" + "x" @ 1..2: Variable [definition] "\"global_value\"" @ 5..19: String - "y" @ 20..21: Variable + "y" @ 20..21: Variable [definition] "\"another_global\"" @ 24..40: String "outer" @ 46..51: Function [definition] - "x" @ 59..60: Variable + "x" @ 59..60: Variable [definition] "\"outer_value\"" @ 63..76: String - "z" @ 81..82: Variable + "z" @ 81..82: Variable [definition] "\"outer_local\"" @ 85..98: String "inner" @ 108..113: Function [definition] "x" @ 134..135: Variable "z" @ 137..138: Variable "y" @ 189..190: Variable - "x" @ 239..240: Variable + "x" @ 239..240: Variable [definition] "\"modified\"" @ 243..253: String - "y" @ 262..263: Variable + "y" @ 262..263: Variable [definition] "\"modified_global\"" @ 266..283: String - "z" @ 292..293: Variable + "z" @ 292..293: Variable [definition] "\"modified_local\"" @ 296..312: String "deeper" @ 326..332: Function [definition] "x" @ 357..358: Variable @@ -2120,7 +2283,7 @@ def outer(): "y" @ 461..462: Variable "deeper" @ 479..485: Function "inner" @ 498..503: Function - "###); + "#); } #[test] @@ -2183,10 +2346,10 @@ def process_data(data): let tokens = test.highlight_file(); - 
assert_snapshot!(test.to_snapshot(&tokens), @r###" + assert_snapshot!(test.to_snapshot(&tokens), @r#" "process_data" @ 5..17: Function [definition] "data" @ 18..22: Parameter - "data" @ 35..39: Variable + "data" @ 35..39: Parameter "\"name\"" @ 55..61: String "name" @ 63..67: Variable "\"age\"" @ 69..74: String @@ -2215,7 +2378,7 @@ def process_data(data): "Fallback: " @ 375..385: String "fallback" @ 386..394: Variable "fallback" @ 417..425: Variable - "###); + "#); } #[test] @@ -2238,7 +2401,7 @@ finally: let tokens = test.highlight_file(); assert_snapshot!(test.to_snapshot(&tokens), @r#" - "x" @ 10..11: Variable + "x" @ 10..11: Variable [definition] "1" @ 14..15: Number "0" @ 18..19: Number "ValueError" @ 27..37: Class @@ -2281,18 +2444,18 @@ class C: "C" @ 33..34: Class [definition] "__init__" @ 44..52: Method [definition] "self" @ 53..57: SelfParameter - "Self" @ 59..63: TypeParameter - "self" @ 74..78: Parameter + "Self" @ 59..63: Variable + "self" @ 74..78: SelfParameter "annotated" @ 79..88: Variable "int" @ 90..93: Class "1" @ 96..97: Number - "self" @ 106..110: Parameter + "self" @ 106..110: SelfParameter "non_annotated" @ 111..124: Variable "1" @ 127..128: Number - "self" @ 137..141: Parameter + "self" @ 137..141: SelfParameter "x" @ 142..143: Variable "test" @ 144..148: Variable - "self" @ 159..163: Parameter + "self" @ 159..163: SelfParameter "x" @ 164..165: Variable "#); } diff --git a/crates/ty_python_semantic/src/ast_node_ref.rs b/crates/ty_python_semantic/src/ast_node_ref.rs index ed28bc396b..14916ec807 100644 --- a/crates/ty_python_semantic/src/ast_node_ref.rs +++ b/crates/ty_python_semantic/src/ast_node_ref.rs @@ -85,6 +85,7 @@ where /// /// This method may panic or produce unspecified results if the provided module is from a /// different file or Salsa revision than the module to which the node belongs. 
+ #[track_caller] pub fn node<'ast>(&self, module_ref: &'ast ParsedModuleRef) -> &'ast T { #[cfg(debug_assertions)] assert_eq!(module_ref.module().addr(), self.module_addr); diff --git a/crates/ty_python_semantic/src/semantic_index/definition.rs b/crates/ty_python_semantic/src/semantic_index/definition.rs index 85a7ff6aed..70a6039fd1 100644 --- a/crates/ty_python_semantic/src/semantic_index/definition.rs +++ b/crates/ty_python_semantic/src/semantic_index/definition.rs @@ -1034,7 +1034,7 @@ impl<'db> AssignmentDefinitionKind<'db> { self.target_kind } - pub(crate) fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr { + pub fn value<'ast>(&self, module: &'ast ParsedModuleRef) -> &'ast ast::Expr { self.value.node(module) } diff --git a/crates/ty_python_semantic/src/semantic_index/use_def.rs b/crates/ty_python_semantic/src/semantic_index/use_def.rs index dcca102b87..4126153487 100644 --- a/crates/ty_python_semantic/src/semantic_index/use_def.rs +++ b/crates/ty_python_semantic/src/semantic_index/use_def.rs @@ -761,6 +761,7 @@ pub(crate) struct DeclarationsIterator<'map, 'db> { inner: LiveDeclarationsIterator<'map>, } +#[derive(Debug)] pub(crate) struct DeclarationWithConstraint<'db> { pub(crate) declaration: DefinitionState<'db>, pub(crate) reachability_constraint: ScopedReachabilityConstraintId, diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index 515e048840..b49b8e2d23 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -869,7 +869,7 @@ impl<'db> Type<'db> { matches!(self, Type::Dynamic(DynamicType::Todo(_))) } - pub(crate) const fn is_generic_alias(&self) -> bool { + pub const fn is_generic_alias(&self) -> bool { matches!(self, Type::GenericAlias(_)) } @@ -1080,12 +1080,11 @@ impl<'db> Type<'db> { .expect("Expected a Type::ClassLiteral variant") } - pub(crate) const fn is_subclass_of(&self) -> bool { + pub const fn is_subclass_of(&self) -> bool { 
matches!(self, Type::SubclassOf(..)) } - #[cfg(test)] - pub(crate) const fn is_class_literal(&self) -> bool { + pub const fn is_class_literal(&self) -> bool { matches!(self, Type::ClassLiteral(..)) } @@ -8585,7 +8584,7 @@ impl<'db> TypeVarInstance<'db> { self.identity(db).definition(db) } - pub(crate) fn kind(self, db: &'db dyn Db) -> TypeVarKind { + pub fn kind(self, db: &'db dyn Db) -> TypeVarKind { self.identity(db).kind(db) } diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index 89dfe8fbe8..475c3017c7 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -477,32 +477,17 @@ pub fn all_members<'db>(db: &'db dyn Db, ty: Type<'db>) -> FxHashSet /// Get the primary definition kind for a name expression within a specific file. /// Returns the first definition kind that is reachable for this name in its scope. /// This is useful for IDE features like semantic tokens. 
-pub fn definition_kind_for_name<'db>( +pub fn definition_for_name<'db>( db: &'db dyn Db, file: File, name: &ast::ExprName, -) -> Option> { - let index = semantic_index(db, file); - let name_str = name.id.as_str(); - - // Get the scope for this name expression - let file_scope = index.expression_scope_id(&ast::ExprRef::from(name)); - - // Get the place table for this scope - let place_table = index.place_table(file_scope); - - // Look up the place by name - let symbol_id = place_table.symbol_id(name_str)?; - - // Get the use-def map and look up definitions for this place - let declarations = index - .use_def_map(file_scope) - .all_reachable_symbol_declarations(symbol_id); +) -> Option> { + let definitions = definitions_for_name(db, file, name); // Find the first valid definition and return its kind - for declaration in declarations { - if let Some(def) = declaration.declaration.definition() { - return Some(def.kind(db).clone()); + for declaration in definitions { + if let Some(def) = declaration.definition() { + return Some(def); } } @@ -617,7 +602,7 @@ pub fn definitions_for_name<'db>( // If we didn't find any definitions in scopes, fallback to builtins if resolved_definitions.is_empty() { let Some(builtins_scope) = builtins_module_scope(db) else { - return Vec::new(); + return resolved_definitions; }; // Special cases for `float` and `complex` in type annotation positions. @@ -633,11 +618,14 @@ pub fn definitions_for_name<'db>( return union .elements(db) .iter() + // Use `rev` so that `complex` and `float` come first. + // This is required for hover to pick up the docstring of `complex` and `float` + // instead of `int` (hover only shows the docstring of the first definition). 
+ .rev() .filter_map(|ty| ty.as_nominal_instance()) .map(|instance| { let definition = instance.class_literal(db).definition(db); - let parsed = parsed_module(db, definition.file(db)); - ResolvedDefinition::FileWithRange(definition.focus_range(db, &parsed.load(db))) + ResolvedDefinition::Definition(definition) }) .collect(); } @@ -1243,6 +1231,14 @@ mod resolve_definition { } impl<'db> ResolvedDefinition<'db> { + pub(crate) fn definition(&self) -> Option> { + match self { + ResolvedDefinition::Definition(definition) => Some(*definition), + ResolvedDefinition::Module(_) => None, + ResolvedDefinition::FileWithRange(_) => None, + } + } + fn file(&self, db: &'db dyn Db) -> File { match self { ResolvedDefinition::Definition(definition) => definition.file(db), From 8a85a2961e26839e6e3edfc6ecb9164fa1317011 Mon Sep 17 00:00:00 2001 From: Nikolas Hearp <154913660+njhearp@users.noreply.github.com> Date: Wed, 12 Nov 2025 12:54:39 -0500 Subject: [PATCH 175/180] [`flake8-simplify`] Apply `SIM113` when index variable is of type `int` (#21395) ## Summary Fixes #21393 Now the rule checks if the index variable is initialized as an `int` type rather than only flagging if the index variable is initialized to `0`. I used `ResolvedPythonType` to check if the index variable is an `int` type. ## Test Plan Updated snapshot test for `SIM113`. 
--------- Co-authored-by: Brent Westbrook <36778786+ntBre@users.noreply.github.com> --- .../test/fixtures/flake8_simplify/SIM113.py | 3 +- crates/ruff_linter/src/preview.rs | 5 ++ .../src/rules/flake8_simplify/mod.rs | 1 + .../rules/enumerate_for_loop.rs | 17 +++++- ...ify__tests__preview__SIM113_SIM113.py.snap | 60 +++++++++++++++++++ 5 files changed, 82 insertions(+), 4 deletions(-) create mode 100644 crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py index e0ac4190ed..00771b5f70 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_simplify/SIM113.py @@ -46,7 +46,8 @@ def func(): def func(): - # OK (index doesn't start at 0 + # SIM113 + # https://github.com/astral-sh/ruff/pull/21395 idx = 10 for x in range(5): g(x, idx) diff --git a/crates/ruff_linter/src/preview.rs b/crates/ruff_linter/src/preview.rs index 836ba4feea..239466f599 100644 --- a/crates/ruff_linter/src/preview.rs +++ b/crates/ruff_linter/src/preview.rs @@ -269,3 +269,8 @@ pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSett pub(crate) const fn is_extended_i18n_function_matching_enabled(settings: &LinterSettings) -> bool { settings.preview.is_enabled() } + +// https://github.com/astral-sh/ruff/pull/21395 +pub(crate) const fn is_enumerate_for_loop_int_index_enabled(settings: &LinterSettings) -> bool { + settings.preview.is_enabled() +} diff --git a/crates/ruff_linter/src/rules/flake8_simplify/mod.rs b/crates/ruff_linter/src/rules/flake8_simplify/mod.rs index 45233277e5..4546dd143a 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/mod.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/mod.rs @@ -61,6 +61,7 @@ mod tests { 
#[test_case(Rule::SplitStaticString, Path::new("SIM905.py"))] #[test_case(Rule::DictGetWithNoneDefault, Path::new("SIM910.py"))] + #[test_case(Rule::EnumerateForLoop, Path::new("SIM113.py"))] fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> { let snapshot = format!( "preview__{}_{}", diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs index 6739fa3868..a35513de85 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/enumerate_for_loop.rs @@ -1,6 +1,8 @@ +use crate::preview::is_enumerate_for_loop_int_index_enabled; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::statement_visitor::{StatementVisitor, walk_stmt}; use ruff_python_ast::{self as ast, Expr, Int, Number, Operator, Stmt}; +use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType}; use ruff_python_semantic::analyze::typing; use ruff_text_size::Ranged; @@ -11,6 +13,9 @@ use crate::checkers::ast::Checker; /// Checks for `for` loops with explicit loop-index variables that can be replaced /// with `enumerate()`. /// +/// In [preview], this rule checks for index variables initialized with any integer rather than only +/// a literal zero. +/// /// ## Why is this bad? /// When iterating over a sequence, it's often desirable to keep track of the /// index of each element alongside the element itself. 
Prefer the `enumerate` @@ -35,6 +40,8 @@ use crate::checkers::ast::Checker; /// /// ## References /// - [Python documentation: `enumerate`](https://docs.python.org/3/library/functions.html#enumerate) +/// +/// [preview]: https://docs.astral.sh/ruff/preview/ #[derive(ViolationMetadata)] #[violation_metadata(stable_since = "v0.2.0")] pub(crate) struct EnumerateForLoop { @@ -82,17 +89,21 @@ pub(crate) fn enumerate_for_loop(checker: &Checker, for_stmt: &ast::StmtFor) { continue; } - // Ensure that the index variable was initialized to 0. + // Ensure that the index variable was initialized to 0 (or instance of `int` if preview is enabled). let Some(value) = typing::find_binding_value(binding, checker.semantic()) else { continue; }; - if !matches!( + if !(matches!( value, Expr::NumberLiteral(ast::ExprNumberLiteral { value: Number::Int(Int::ZERO), .. }) - ) { + ) || matches!( + ResolvedPythonType::from(value), + ResolvedPythonType::Atom(PythonType::Number(NumberLike::Integer)) + ) && is_enumerate_for_loop_int_index_enabled(checker.settings())) + { continue; } diff --git a/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap new file mode 100644 index 0000000000..065ed20bb9 --- /dev/null +++ b/crates/ruff_linter/src/rules/flake8_simplify/snapshots/ruff_linter__rules__flake8_simplify__tests__preview__SIM113_SIM113.py.snap @@ -0,0 +1,60 @@ +--- +source: crates/ruff_linter/src/rules/flake8_simplify/mod.rs +--- +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:6:9 + | +4 | for x in range(5): +5 | g(x, idx) +6 | idx += 1 + | ^^^^^^^^ +7 | h(x) + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:17:9 + | +15 | if g(x): +16 | break +17 | idx += 1 + | ^^^^^^^^ +18 | sum += h(x, idx) + | + +SIM113 Use 
`enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:27:9 + | +25 | g(x) +26 | h(x, y) +27 | idx += 1 + | ^^^^^^^^ + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:36:9 + | +34 | for x in range(5): +35 | sum += h(x, idx) +36 | idx += 1 + | ^^^^^^^^ + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:44:9 + | +42 | for x in range(5): +43 | g(x, idx) +44 | idx += 1 + | ^^^^^^^^ +45 | h(x) + | + +SIM113 Use `enumerate()` for index variable `idx` in `for` loop + --> SIM113.py:54:9 + | +52 | for x in range(5): +53 | g(x, idx) +54 | idx += 1 + | ^^^^^^^^ +55 | h(x) + | From a1d9cb5830eca9b63e7fb529504fc536e99bca23 Mon Sep 17 00:00:00 2001 From: pyscripter Date: Wed, 12 Nov 2025 19:10:08 +0100 Subject: [PATCH 176/180] Added the PyScripter IDE to the list of "Who is using Ruff?" (#21402) ## Summary Added the PyScripter IDE to the list of "Who is using Ruff?". PyScripter is a popular python IDE that is using ruff for code diagnostics, fixes and code formatting. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index fd8cc2450e..dd86c69390 100644 --- a/README.md +++ b/README.md @@ -491,6 +491,7 @@ Ruff is used by a number of major open-source projects and companies, including: - [PyTorch](https://github.com/pytorch/pytorch) - [Pydantic](https://github.com/pydantic/pydantic) - [Pylint](https://github.com/PyCQA/pylint) +- [PyScripter](https://github.com/pyscripter/pyscripter) - [PyVista](https://github.com/pyvista/pyvista) - [Reflex](https://github.com/reflex-dev/reflex) - [River](https://github.com/online-ml/river) From 9dd666d67758c9d25a361c7c23d7f8b883b0b05a Mon Sep 17 00:00:00 2001 From: Shunsuke Shibayama <45118249+mtshiba@users.noreply.github.com> Date: Thu, 13 Nov 2025 03:15:51 +0900 Subject: [PATCH 177/180] [ty] fix global symbol lookup from eager scopes (#21317) ## Summary cf. 
https://github.com/astral-sh/ruff/pull/20962 In the following code, `foo` in the comprehension was not reported as unresolved: ```python # error: [unresolved-reference] "Name `foo` used when not defined" foo foo = [ # no error! # revealed: Divergent reveal_type(x) for _ in () for x in [foo] ] baz = [ # error: [unresolved-reference] "Name `baz` used when not defined" # revealed: Unknown reveal_type(x) for _ in () for x in [baz] ] ``` In fact, this is a more serious bug than it looks: for `foo`, [`explicit_global_symbol` is called](https://github.com/astral-sh/ruff/blob/6cc3393ccd9059439d9c1325e0e041db1d7481af/crates/ty_python_semantic/src/types/infer/builder.rs#L8052), causing a symbol that should actually be `Undefined` to be reported as being of type `Divergent`. This PR fixes this bug. As a result, the code in `mdtest/regression/pr_20962_comprehension_panics.md` no longer panics. ## Test Plan `corpus\cyclic_symbol_in_comprehension.py` is added. New tests are added in `mdtest/comprehensions/basic.md`. 
--------- Co-authored-by: Micha Reiser Co-authored-by: Carl Meyer --- .../cyclic_symbol_in_comprehension.py} | 13 ++----- .../resources/mdtest/annotations/deferred.md | 28 ++++++++++++++ .../resources/mdtest/comprehensions/basic.md | 18 +++++++++ .../resources/mdtest/generics/scoping.md | 37 +++++++++++++++++++ .../resources/mdtest/narrow/complex_target.md | 9 +++++ .../src/semantic_index/builder.rs | 6 +++ .../src/semantic_index/use_def.rs | 12 ++++-- .../src/types/infer/builder.rs | 37 ++++++++++++++----- 8 files changed, 136 insertions(+), 24 deletions(-) rename crates/ty_python_semantic/resources/{mdtest/regression/pr_20962_comprehension_panics.md => corpus/cyclic_symbol_in_comprehension.py} (56%) diff --git a/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md b/crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py similarity index 56% rename from crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md rename to crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py index 97bbf21049..b7ba910e1e 100644 --- a/crates/ty_python_semantic/resources/mdtest/regression/pr_20962_comprehension_panics.md +++ b/crates/ty_python_semantic/resources/corpus/cyclic_symbol_in_comprehension.py @@ -1,13 +1,7 @@ -# Documentation of two fuzzer panics involving comprehensions +# Regression test for https://github.com/astral-sh/ruff/pull/20962 +# error message: +# `place_by_id: execute: too many cycle iterations` -Type inference for comprehensions was added in . It -added two new fuzzer panics that are documented here for regression testing. 
- -## Too many cycle iterations in `place_by_id` - - - -```py name_5(name_3) [0 for unique_name_0 in unique_name_1 for unique_name_2 in name_3] @@ -34,4 +28,3 @@ else: @name_3 async def name_5(): pass -``` diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md b/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md index 8db8d90409..89b9324ea4 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/deferred.md @@ -87,9 +87,23 @@ class Foo: class Baz[T: Foo]: pass + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Qux(Foo, Bar, Baz): + pass + + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Quux[_T](Foo, Bar, Baz): + pass + # error: [unresolved-reference] type S = a type T = b + type U = Foo + # error: [unresolved-reference] + type V = Bar + type W = Baz def h[T: Bar](): # error: [unresolved-reference] @@ -141,9 +155,23 @@ class Foo: class Baz[T: Foo]: pass + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Qux(Foo, Bar, Baz): + pass + + # error: [unresolved-reference] "Name `Foo` used when not defined" + # error: [unresolved-reference] "Name `Bar` used when not defined" + class Quux[_T](Foo, Bar, Baz): + pass + # error: [unresolved-reference] type S = a type T = b + type U = Foo + # error: [unresolved-reference] + type V = Bar + type W = Baz def h[T: Bar](): # error: [unresolved-reference] diff --git a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md index 5fac394404..0f2d1e4b0c 100644 --- a/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md +++ 
b/crates/ty_python_semantic/resources/mdtest/comprehensions/basic.md @@ -58,6 +58,24 @@ Iterating over an unbound iterable yields `Unknown`: # error: [not-iterable] "Object of type `int` is not iterable" # revealed: tuple[int, Unknown] [reveal_type((x, z)) for x in range(3) for z in x] + +# error: [unresolved-reference] "Name `foo` used when not defined" +foo +foo = [ + # revealed: tuple[int, Unknown] + reveal_type((x, z)) + for x in range(3) + # error: [unresolved-reference] "Name `foo` used when not defined" + for z in [foo] +] + +baz = [ + # revealed: tuple[int, Unknown] + reveal_type((x, z)) + for x in range(3) + # error: [unresolved-reference] "Name `baz` used when not defined" + for z in [baz] +] ``` ## Starred expressions diff --git a/crates/ty_python_semantic/resources/mdtest/generics/scoping.md b/crates/ty_python_semantic/resources/mdtest/generics/scoping.md index 308092f4d1..79944b263a 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/scoping.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/scoping.md @@ -288,6 +288,43 @@ class C[T]: class Bad2(Iterable[T]): ... ``` +## Class bases are evaluated within the type parameter scope + +```py +class C[_T]( + # error: [unresolved-reference] "Name `C` used when not defined" + C +): ... + +# `D` in `list[D]` is resolved to be a type variable of class `D`. +class D[D](list[D]): ... + +# error: [unresolved-reference] "Name `E` used when not defined" +if E: + class E[_T]( + # error: [unresolved-reference] "Name `E` used when not defined" + E + ): ... + +# error: [unresolved-reference] "Name `F` used when not defined" +F + +# error: [unresolved-reference] "Name `F` used when not defined" +class F[_T](F): ... + +def foo(): + class G[_T]( + # error: [unresolved-reference] "Name `G` used when not defined" + G + ): ... + # error: [unresolved-reference] "Name `H` used when not defined" + if H: + class H[_T]( + # error: [unresolved-reference] "Name `H` used when not defined" + H + ): ... 
+``` + ## Class scopes do not cover inner scopes Just like regular symbols, the typevars of a generic class are only available in that class's scope, diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md b/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md index 479238d617..96b12d845d 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/complex_target.md @@ -58,6 +58,15 @@ d.x = 1 reveal_type(d.x) # revealed: Literal[1] d.x = unknown() reveal_type(d.x) # revealed: Unknown + +class E: + x: int | None = None + +e = E() + +if e.x is not None: + class _: + reveal_type(e.x) # revealed: int ``` Narrowing can be "reset" by assigning to the attribute: diff --git a/crates/ty_python_semantic/src/semantic_index/builder.rs b/crates/ty_python_semantic/src/semantic_index/builder.rs index 9352bf196c..cc1b1649fa 100644 --- a/crates/ty_python_semantic/src/semantic_index/builder.rs +++ b/crates/ty_python_semantic/src/semantic_index/builder.rs @@ -314,6 +314,9 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { // Records snapshots of the place states visible from the current eager scope. fn record_eager_snapshots(&mut self, popped_scope_id: FileScopeId) { + let popped_scope = &self.scopes[popped_scope_id]; + let popped_scope_is_annotation_scope = popped_scope.kind().is_annotation(); + // If the scope that we just popped off is an eager scope, we need to "lock" our view of // which bindings reach each of the uses in the scope. Loop through each enclosing scope, // looking for any that bind each place. 
@@ -328,6 +331,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { // ``` for enclosing_scope_info in self.scope_stack.iter().rev() { let enclosing_scope_id = enclosing_scope_info.file_scope_id; + let is_immediately_enclosing_scope = popped_scope.parent() == Some(enclosing_scope_id); let enclosing_scope_kind = self.scopes[enclosing_scope_id].kind(); let enclosing_place_table = &self.place_tables[enclosing_scope_id]; @@ -355,6 +359,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { enclosing_place_id, enclosing_scope_kind, enclosing_place, + popped_scope_is_annotation_scope && is_immediately_enclosing_scope, ); self.enclosing_snapshots.insert(key, eager_snapshot); } @@ -429,6 +434,7 @@ impl<'db, 'ast> SemanticIndexBuilder<'db, 'ast> { enclosed_symbol_id.into(), enclosing_scope_kind, enclosing_place.into(), + false, ); self.enclosing_snapshots.insert(key, lazy_snapshot); } diff --git a/crates/ty_python_semantic/src/semantic_index/use_def.rs b/crates/ty_python_semantic/src/semantic_index/use_def.rs index 4126153487..05fa369521 100644 --- a/crates/ty_python_semantic/src/semantic_index/use_def.rs +++ b/crates/ty_python_semantic/src/semantic_index/use_def.rs @@ -1187,17 +1187,21 @@ impl<'db> UseDefMapBuilder<'db> { pub(super) fn snapshot_enclosing_state( &mut self, enclosing_place: ScopedPlaceId, - scope: ScopeKind, + enclosing_scope: ScopeKind, enclosing_place_expr: PlaceExprRef, + is_parent_of_annotation_scope: bool, ) -> ScopedEnclosingSnapshotId { let bindings = match enclosing_place { ScopedPlaceId::Symbol(symbol) => self.symbol_states[symbol].bindings(), ScopedPlaceId::Member(member) => self.member_states[member].bindings(), }; - // Names bound in class scopes are never visible to nested scopes (but attributes/subscripts are visible), - // so we never need to save eager scope bindings in a class scope. 
- if (scope.is_class() && enclosing_place.is_symbol()) || !enclosing_place_expr.is_bound() { + let is_class_symbol = enclosing_scope.is_class() && enclosing_place.is_symbol(); + // Names bound in class scopes are never visible to nested scopes (but + // attributes/subscripts are visible), so we never need to save eager scope bindings in a + // class scope. There is one exception to this rule: annotation scopes can see names + // defined in an immediately-enclosing class scope. + if (is_class_symbol && !is_parent_of_annotation_scope) || !enclosing_place_expr.is_bound() { self.enclosing_snapshots.push(EnclosingSnapshot::Constraint( bindings.unbound_narrowing_constraint(), )) diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 3fe0bb003d..53c1c1dc96 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -8319,6 +8319,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let mut nonlocal_union_builder = UnionBuilder::new(db); let mut found_some_definition = false; for (enclosing_scope_file_id, _) in self.index.ancestor_scopes(file_scope_id).skip(1) { + // If the current enclosing scope is global, no place lookup is performed here, + // instead falling back to the module's explicit global lookup below. + if enclosing_scope_file_id.is_global() { + break; + } + // Class scopes are not visible to nested scopes, and we need to handle global // scope differently (because an unbound name there falls back to builtins), so // check only function-like scopes. @@ -8349,6 +8355,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // registering eager bindings for nested scopes that are actually eager, and for // enclosing scopes that actually contain bindings that we should use when // resolving the reference.) 
+ let mut eagerly_resolved_place = None; if !self.is_deferred() { match self.index.enclosing_snapshot( enclosing_scope_file_id, @@ -8360,6 +8367,11 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { enclosing_scope_file_id, ConstraintKey::NarrowingConstraint(constraint), )); + // If the current scope is eager, it is certain that the place is undefined in the current scope. + // Do not call the `place` query below as a fallback. + if scope.scope(db).is_eager() { + eagerly_resolved_place = Some(Place::Undefined.into()); + } } EnclosingSnapshotResult::FoundBindings(bindings) => { let place = place_from_bindings(db, bindings).map_type(|ty| { @@ -8421,18 +8433,20 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // `nonlocal` variable, but we don't enforce that here. See the // `ast::Stmt::AnnAssign` handling in `SemanticIndexBuilder::visit_stmt`.) if enclosing_place.is_bound() || enclosing_place.is_declared() { - let local_place_and_qualifiers = place( - db, - enclosing_scope_id, - place_expr, - ConsideredDefinitions::AllReachable, - ) - .map_type(|ty| { - self.narrow_place_with_applicable_constraints( + let local_place_and_qualifiers = eagerly_resolved_place.unwrap_or_else(|| { + place( + db, + enclosing_scope_id, place_expr, - ty, - &constraint_keys, + ConsideredDefinitions::AllReachable, ) + .map_type(|ty| { + self.narrow_place_with_applicable_constraints( + place_expr, + ty, + &constraint_keys, + ) + }) }); // We could have `Place::Undefined` here, despite the checks above, for example if // this scope contains a `del` statement but no binding or declaration. @@ -8475,6 +8489,9 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { FileScopeId::global(), ConstraintKey::NarrowingConstraint(constraint), )); + // Reaching here means that no bindings are found in any scope. + // Since `explicit_global_symbol` may return a cycle initial value, we return `Place::Undefined` here. 
+ return Place::Undefined.into(); } EnclosingSnapshotResult::FoundBindings(bindings) => { let place = place_from_bindings(db, bindings).map_type(|ty| { From 2f6f3e10425515f6c13208efb377fd398341d63e Mon Sep 17 00:00:00 2001 From: David Peter Date: Wed, 12 Nov 2025 20:16:38 +0100 Subject: [PATCH 178/180] [ty] Faster subscript assignment checks for (unions of) `TypedDict`s (#21378) ## Summary We synthesize a (potentially large) set of `__setitem__` overloads for every item in a `TypedDict`. Previously, validation of subscript assignments on `TypedDict`s relied on actually calling `__setitem__` with the provided key and value types, which implied that we needed to do the full overload call evaluation for this large set of overloads. This PR improves the performance of subscript assignment checks on `TypedDict`s by validating the assignment directly instead of calling `__setitem__`. This PR also adds better handling for assignments to subscripts on union and intersection types (but does not attempt to make it perfect). It achieves this by distributing the check over unions and intersections, instead of calling `__setitem__` on the union/intersection directly. We already do something similar when validating *attribute* assignments. ## Ecosystem impact * A lot of diagnostics change their rule type, and/or split into multiple diagnostics. The new version is more verbose, but easier to understand, in my opinion * Almost all of the invalid-key diagnostics come from pydantic, and they should all go away (including many more) when we implement https://github.com/astral-sh/ty/issues/1479 * Everything else looks correct to me. There may be some new diagnostics due to the fact that we now check intersections. ## Test Plan New Markdown tests. 
--- crates/ruff_benchmark/benches/ty_walltime.rs | 2 +- ..._No_`__setitem__`_met…_(468f62a3bdd1d60c).snap | 2 +- ..._Possibly_missing_`__…_(efd3f0c02e9b89e9).snap | 9 +- ..._Unknown_key_for_all_…_(1c685d9d10678263).snap | 34 +- ..._Unknown_key_for_one_…_(b515711c0a451a86).snap | 12 +- ..._Wrong_value_type_for…_(57372b65e30392a8).snap | 3 +- ..._Wrong_value_type_for…_(ffe39a3bae68cfe4).snap | 28 +- ...ict`_-_Diagnostics_(e5289abf5c570c29).snap | 2 +- .../subscript/assignment_diagnostics.md | 12 +- .../resources/mdtest/subscript/instance.md | 2 +- .../resources/mdtest/typed_dict.md | 16 +- crates/ty_python_semantic/src/types.rs | 4 + .../src/types/diagnostic.rs | 40 +- .../src/types/infer/builder.rs | 379 +++++++++++++----- .../src/types/typed_dict.rs | 75 ++-- 15 files changed, 440 insertions(+), 180 deletions(-) diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index 8f13ab7ca7..697a0c989d 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -181,7 +181,7 @@ static PYDANTIC: Benchmark = Benchmark::new( max_dep_date: "2025-06-17", python_version: PythonVersion::PY39, }, - 3000, + 5000, ); static SYMPY: Benchmark = Benchmark::new( diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap index dec0ab3417..f9c43e5882 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_No_`__setitem__`_met…_(468f62a3bdd1d60c).snap @@ -23,7 +23,7 @@ mdtest path: 
crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia # Diagnostics ``` -error[invalid-assignment]: Cannot assign to object of type `ReadOnlyDict` with no `__setitem__` method +error[invalid-assignment]: Cannot assign to a subscript on an object of type `ReadOnlyDict` with no `__setitem__` method --> src/mdtest_snippet.py:6:1 | 5 | config = ReadOnlyDict() diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap index ced810cf72..a12bb7c666 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Possibly_missing_`__…_(efd3f0c02e9b89e9).snap @@ -13,19 +13,20 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia ``` 1 | def _(config: dict[str, int] | None) -> None: -2 | config["retries"] = 3 # error: [possibly-missing-implicit-call] +2 | config["retries"] = 3 # error: [invalid-assignment] ``` # Diagnostics ``` -warning[possibly-missing-implicit-call]: Method `__setitem__` of type `dict[str, int] | None` may be missing +error[invalid-assignment]: Cannot assign to a subscript on an object of type `None` with no `__setitem__` method --> src/mdtest_snippet.py:2:5 | 1 | def _(config: dict[str, int] | None) -> None: -2 | config["retries"] = 3 # error: [possibly-missing-implicit-call] +2 | config["retries"] = 3 # error: [invalid-assignment] | ^^^^^^ | -info: rule `possibly-missing-implicit-call` is enabled by default +info: The full type of the subscripted object is `dict[str, int] | None` +info: rule `invalid-assignment` is enabled by default ``` diff --git 
a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap index 6444c84f36..2e7bbcfe4d 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_all_…_(1c685d9d10678263).snap @@ -22,19 +22,39 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia 8 | legs: int 9 | 10 | def _(being: Person | Animal) -> None: -11 | being["surname"] = "unknown" # error: [invalid-assignment] +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" ``` # Diagnostics ``` -error[invalid-assignment]: Method `__setitem__` of type `(key: Literal["name"], value: str, /) -> None` cannot be called with a key of type `Literal["surname"]` and a value of type `Literal["unknown"]` on object of type `Person | Animal` - --> src/mdtest_snippet.py:11:5 +error[invalid-key]: Invalid key for TypedDict `Person` + --> src/mdtest_snippet.py:13:5 | -10 | def _(being: Person | Animal) -> None: -11 | being["surname"] = "unknown" # error: [invalid-assignment] - | ^^^^^ +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" + | ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"? 
+ | | + | TypedDict `Person` in union type `Person | Animal` | -info: rule `invalid-assignment` is enabled by default +info: rule `invalid-key` is enabled by default + +``` + +``` +error[invalid-key]: Invalid key for TypedDict `Animal` + --> src/mdtest_snippet.py:13:5 + | +11 | # error: [invalid-key] +12 | # error: [invalid-key] +13 | being["surname"] = "unknown" + | ----- ^^^^^^^^^ Unknown key "surname" - did you mean "name"? + | | + | TypedDict `Animal` in union type `Person | Animal` + | +info: rule `invalid-key` is enabled by default ``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap index 2b840a6783..6c919e6937 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Unknown_key_for_one_…_(b515711c0a451a86).snap @@ -22,19 +22,21 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia 8 | legs: int 9 | 10 | def _(being: Person | Animal) -> None: -11 | being["legs"] = 4 # error: [invalid-assignment] +11 | being["legs"] = 4 # error: [invalid-key] ``` # Diagnostics ``` -error[invalid-assignment]: Method `__setitem__` of type `(key: Literal["name"], value: str, /) -> None` cannot be called with a key of type `Literal["legs"]` and a value of type `Literal[4]` on object of type `Person | Animal` +error[invalid-key]: Invalid key for TypedDict `Person` --> src/mdtest_snippet.py:11:5 | 10 | def _(being: Person | Animal) -> None: -11 | being["legs"] = 4 # error: [invalid-assignment] - | ^^^^^ +11 | being["legs"] = 4 # error: [invalid-key] + | ----- ^^^^^^ Unknown key "legs" + | | 
+ | TypedDict `Person` in union type `Person | Animal` | -info: rule `invalid-assignment` is enabled by default +info: rule `invalid-key` is enabled by default ``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap index 37ea1c111a..0f603931aa 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(57372b65e30392a8).snap @@ -19,13 +19,14 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia # Diagnostics ``` -error[invalid-assignment]: Method `__setitem__` of type `(bound method dict[str, int].__setitem__(key: str, value: int, /) -> None) | (bound method dict[str, str].__setitem__(key: str, value: str, /) -> None)` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal[3]` on object of type `dict[str, int] | dict[str, str]` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `Literal[3]` on object of type `dict[str, str]` --> src/mdtest_snippet.py:2:5 | 1 | def _(config: dict[str, int] | dict[str, str]) -> None: 2 | config["retries"] = 3 # error: [invalid-assignment] | ^^^^^^ | +info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` info: rule `invalid-assignment` is enabled by default ``` diff --git 
a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap index dfd0136536..635a402c9b 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/assignment_diagnosti…_-_Subscript_assignment…_-_Wrong_value_type_for…_(ffe39a3bae68cfe4).snap @@ -13,19 +13,37 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/subscript/assignment_dia ``` 1 | def _(config: dict[str, int] | dict[str, str]) -> None: -2 | config["retries"] = 3.0 # error: [invalid-assignment] +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 ``` # Diagnostics ``` -error[invalid-assignment]: Method `__setitem__` of type `(bound method dict[str, int].__setitem__(key: str, value: int, /) -> None) | (bound method dict[str, str].__setitem__(key: str, value: str, /) -> None)` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, int] | dict[str, str]` - --> src/mdtest_snippet.py:2:5 +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, int].__setitem__(key: str, value: int, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, int]` + --> src/mdtest_snippet.py:4:5 | -1 | def _(config: dict[str, int] | dict[str, str]) -> None: -2 | config["retries"] = 3.0 # error: [invalid-assignment] +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 | ^^^^^^ | +info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` +info: rule `invalid-assignment` 
is enabled by default + +``` + +``` +error[invalid-assignment]: Method `__setitem__` of type `bound method dict[str, str].__setitem__(key: str, value: str, /) -> None` cannot be called with a key of type `Literal["retries"]` and a value of type `float` on object of type `dict[str, str]` + --> src/mdtest_snippet.py:4:5 + | +2 | # error: [invalid-assignment] +3 | # error: [invalid-assignment] +4 | config["retries"] = 3.0 + | ^^^^^^ + | +info: The full type of the subscripted object is `dict[str, int] | dict[str, str]` info: rule `invalid-assignment` is enabled by default ``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap index a5b9456acd..51b0f0ce69 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Diagnostics_(e5289abf5c570c29).snap @@ -89,7 +89,7 @@ info: rule `invalid-key` is enabled by default ``` ``` -error[invalid-key]: Invalid key for TypedDict `Person` of type `str` +error[invalid-key]: Invalid key of type `str` for TypedDict `Person` --> src/mdtest_snippet.py:16:12 | 15 | def access_with_str_key(person: Person, str_key: str): diff --git a/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md index ed23208eb1..e4959e3627 100644 --- a/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md +++ b/crates/ty_python_semantic/resources/mdtest/subscript/assignment_diagnostics.md @@ -67,7 +67,7 @@ config["retries"] = 3 # error: [invalid-assignment] ```py def _(config: dict[str, int] | None) -> None: - config["retries"] = 3 # error: [possibly-missing-implicit-call] + 
config["retries"] = 3 # error: [invalid-assignment] ``` ## Unknown key for one element of a union @@ -83,7 +83,7 @@ class Animal(TypedDict): legs: int def _(being: Person | Animal) -> None: - being["legs"] = 4 # error: [invalid-assignment] + being["legs"] = 4 # error: [invalid-key] ``` ## Unknown key for all elemens of a union @@ -99,7 +99,9 @@ class Animal(TypedDict): legs: int def _(being: Person | Animal) -> None: - being["surname"] = "unknown" # error: [invalid-assignment] + # error: [invalid-key] + # error: [invalid-key] + being["surname"] = "unknown" ``` ## Wrong value type for one element of a union @@ -113,5 +115,7 @@ def _(config: dict[str, int] | dict[str, str]) -> None: ```py def _(config: dict[str, int] | dict[str, str]) -> None: - config["retries"] = 3.0 # error: [invalid-assignment] + # error: [invalid-assignment] + # error: [invalid-assignment] + config["retries"] = 3.0 ``` diff --git a/crates/ty_python_semantic/resources/mdtest/subscript/instance.md b/crates/ty_python_semantic/resources/mdtest/subscript/instance.md index 7d1ad7f183..b15ec4abc1 100644 --- a/crates/ty_python_semantic/resources/mdtest/subscript/instance.md +++ b/crates/ty_python_semantic/resources/mdtest/subscript/instance.md @@ -76,7 +76,7 @@ a[0] = 0 class NoSetitem: ... 
a = NoSetitem() -a[0] = 0 # error: "Cannot assign to object of type `NoSetitem` with no `__setitem__` method" +a[0] = 0 # error: "Cannot assign to a subscript on an object of type `NoSetitem` with no `__setitem__` method" ``` ## `__setitem__` not callable diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index 8b8fcfffa3..422711b4c1 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -69,7 +69,7 @@ def name_or_age() -> Literal["name", "age"]: carol: Person = {NAME: "Carol", AGE: 20} reveal_type(carol[NAME]) # revealed: str -# error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`" +# error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`" reveal_type(carol[non_literal()]) # revealed: Unknown reveal_type(carol[name_or_age()]) # revealed: str | int | None @@ -553,7 +553,7 @@ def _( # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "non_existing"" reveal_type(person["non_existing"]) # revealed: Unknown - # error: [invalid-key] "Invalid key for TypedDict `Person` of type `str`" + # error: [invalid-key] "Invalid key of type `str` for TypedDict `Person`" reveal_type(person[str_key]) # revealed: Unknown # No error here: @@ -602,16 +602,18 @@ def _(person: Person, literal_key: Literal["age"]): def _(person: Person, union_of_keys: Literal["name", "surname"]): person[union_of_keys] = "unknown" - # error: [invalid-assignment] "Cannot assign value of type `Literal[1]` to key of type `Literal["name", "surname"]` on TypedDict `Person`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" + # error: [invalid-assignment] "Invalid assignment to key "surname" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" person[union_of_keys] = 1 def _(being: Person | 
Animal): being["name"] = "Being" - # error: [invalid-assignment] "Method `__setitem__` of type `(Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["surname"], value: str, /) -> None, (key: Literal["age"], value: int | None, /) -> None]) | (Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["legs"], value: int, /) -> None])` cannot be called with a key of type `Literal["name"]` and a value of type `Literal[1]` on object of type `Person | Animal`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `Literal[1]`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Animal`: value of type `Literal[1]`" being["name"] = 1 - # error: [invalid-assignment] "Method `__setitem__` of type `(Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["surname"], value: str, /) -> None, (key: Literal["age"], value: int | None, /) -> None]) | (Overload[(key: Literal["name"], value: str, /) -> None, (key: Literal["legs"], value: int, /) -> None])` cannot be called with a key of type `Literal["surname"]` and a value of type `Literal["unknown"]` on object of type `Person | Animal`" + # error: [invalid-key] "Invalid key for TypedDict `Animal`: Unknown key "surname" - did you mean "name"?" 
being["surname"] = "unknown" def _(centaur: Intersection[Person, Animal]): @@ -619,13 +621,13 @@ def _(centaur: Intersection[Person, Animal]): centaur["age"] = 100 centaur["legs"] = 4 - # TODO: This should be an `invalid-key` error + # error: [invalid-key] "Invalid key for TypedDict `Person`: Unknown key "unknown"" centaur["unknown"] = "value" def _(person: Person, union_of_keys: Literal["name", "age"], unknown_value: Any): person[union_of_keys] = unknown_value - # error: [invalid-assignment] "Cannot assign value of type `None` to key of type `Literal["name", "age"]` on TypedDict `Person`" + # error: [invalid-assignment] "Invalid assignment to key "name" with declared type `str` on TypedDict `Person`: value of type `None`" person[union_of_keys] = None def _(person: Person, str_key: str, literalstr_key: LiteralString): diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index b49b8e2d23..4284b15278 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -1163,6 +1163,10 @@ impl<'db> Type<'db> { } } + pub(crate) const fn is_union(&self) -> bool { + matches!(self, Type::Union(_)) + } + pub(crate) const fn as_union(self) -> Option> { match self { Type::Union(union_type) => Some(union_type), diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs index ccb0c82472..25c1efa27d 100644 --- a/crates/ty_python_semantic/src/types/diagnostic.rs +++ b/crates/ty_python_semantic/src/types/diagnostic.rs @@ -3063,6 +3063,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( typed_dict_node: AnyNodeRef, key_node: AnyNodeRef, typed_dict_ty: Type<'db>, + full_object_ty: Option>, key_ty: Type<'db>, items: &FxOrderMap>, ) { @@ -3077,11 +3078,21 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( "Invalid key for TypedDict `{typed_dict_name}`", )); - diagnostic.annotate( + diagnostic.annotate(if let Some(full_object_ty) = full_object_ty { 
+ context.secondary(typed_dict_node).message(format_args!( + "TypedDict `{typed_dict_name}` in {kind} type `{full_object_ty}`", + kind = if full_object_ty.is_union() { + "union" + } else { + "intersection" + }, + full_object_ty = full_object_ty.display(db) + )) + } else { context .secondary(typed_dict_node) - .message(format_args!("TypedDict `{typed_dict_name}`")), - ); + .message(format_args!("TypedDict `{typed_dict_name}`")) + }); let existing_keys = items.iter().map(|(name, _)| name.as_str()); @@ -3093,15 +3104,22 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( String::new() } )); - - diagnostic } - _ => builder.into_diagnostic(format_args!( - "Invalid key for TypedDict `{}` of type `{}`", - typed_dict_ty.display(db), - key_ty.display(db), - )), - }; + _ => { + let mut diagnostic = builder.into_diagnostic(format_args!( + "Invalid key of type `{}` for TypedDict `{}`", + key_ty.display(db), + typed_dict_ty.display(db), + )); + + if let Some(full_object_ty) = full_object_ty { + diagnostic.info(format_args!( + "The full type of the subscripted object is `{}`", + full_object_ty.display(db) + )); + } + } + } } } diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index 53c1c1dc96..2c445f92ea 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -102,10 +102,10 @@ use crate::types::visitor::any_over_type; use crate::types::{ CallDunderError, CallableBinding, CallableType, ClassLiteral, ClassType, DataclassParams, DynamicType, InferredAs, InternedType, InternedTypes, IntersectionBuilder, IntersectionType, - KnownClass, KnownInstanceType, MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, - Parameter, ParameterForm, Parameters, SpecialFormType, SubclassOfType, TrackedConstraintSet, - Truthiness, Type, TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, - TypeVarBoundOrConstraintsEvaluation, 
TypeVarDefaultEvaluation, TypeVarIdentity, + KnownClass, KnownInstanceType, LintDiagnosticGuard, MemberLookupPolicy, MetaclassCandidate, + PEP695TypeAliasType, Parameter, ParameterForm, Parameters, SpecialFormType, SubclassOfType, + TrackedConstraintSet, Truthiness, Type, TypeAliasType, TypeAndQualifiers, TypeContext, + TypeQualifiers, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType, binding_type, todo_type, }; @@ -3538,142 +3538,305 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { } } - /// Make sure that the subscript assignment `obj[slice] = value` is valid. + /// Validate a subscript assignment of the form `object[key] = rhs_value`. fn validate_subscript_assignment( &mut self, target: &ast::ExprSubscript, - rhs: &ast::Expr, - assigned_ty: Type<'db>, + rhs_value: &ast::Expr, + rhs_value_ty: Type<'db>, ) -> bool { let ast::ExprSubscript { range: _, node_index: _, - value, + value: object, slice, ctx: _, } = target; - let value_ty = self.infer_expression(value, TypeContext::default()); + let object_ty = self.infer_expression(object, TypeContext::default()); let slice_ty = self.infer_expression(slice, TypeContext::default()); + self.validate_subscript_assignment_impl( + object.as_ref(), + None, + object_ty, + slice.as_ref(), + slice_ty, + rhs_value, + rhs_value_ty, + true, + ) + } + + #[expect(clippy::too_many_arguments)] + fn validate_subscript_assignment_impl( + &self, + object_node: &'ast ast::Expr, + full_object_ty: Option>, + object_ty: Type<'db>, + slice_node: &'ast ast::Expr, + slice_ty: Type<'db>, + rhs_value_node: &'ast ast::Expr, + rhs_value_ty: Type<'db>, + emit_diagnostic: bool, + ) -> bool { + /// Given a string literal or a union of string literals, return an iterator over the contained + /// strings, or `None`, if the type is neither. 
+ fn key_literals<'db>( + db: &'db dyn Db, + slice_ty: Type<'db>, + ) -> Option + 'db> { + if let Some(literal) = slice_ty.as_string_literal() { + Some(Either::Left(std::iter::once(literal.value(db)))) + } else { + slice_ty.as_union().map(|union| { + Either::Right( + union + .elements(db) + .iter() + .filter_map(|ty| ty.as_string_literal().map(|lit| lit.value(db))), + ) + }) + } + } + let db = self.db(); - let context = &self.context; - match value_ty.try_call_dunder( - db, - "__setitem__", - CallArguments::positional([slice_ty, assigned_ty]), - TypeContext::default(), - ) { - Ok(_) => true, - Err(err) => match err { - CallDunderError::PossiblyUnbound { .. } => { - if let Some(builder) = - context.report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` may be missing", - value_ty.display(db), - )); - } - false + let attach_original_type_info = |mut diagnostic: LintDiagnosticGuard| { + if let Some(full_object_ty) = full_object_ty { + diagnostic.info(format_args!( + "The full type of the subscripted object is `{}`", + full_object_ty.display(db) + )); + } + }; + + match object_ty { + Type::Union(union) => { + // Note that we use a loop here instead of .all(…) to avoid short-circuiting. + // We need to keep iterating to emit all diagnostics. 
+ let mut valid = true; + for element_ty in union.elements(db) { + valid &= self.validate_subscript_assignment_impl( + object_node, + full_object_ty.or(Some(object_ty)), + *element_ty, + slice_node, + slice_ty, + rhs_value_node, + rhs_value_ty, + emit_diagnostic, + ); } - CallDunderError::CallError(call_error_kind, bindings) => { - match call_error_kind { - CallErrorKind::NotCallable => { - if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` is not callable \ - on object of type `{}`", - bindings.callable_type().display(db), - value_ty.display(db), - )); - } - } - CallErrorKind::BindingError => { - let assigned_d = assigned_ty.display(db); - let value_d = value_ty.display(db); + valid + } - if let Some(typed_dict) = value_ty.as_typed_dict() { - if let Some(key) = slice_ty.as_string_literal() { - let key = key.value(self.db()); - validate_typed_dict_key_assignment( - &self.context, - typed_dict, - key, - assigned_ty, - value.as_ref(), - slice.as_ref(), - rhs, - TypedDictAssignmentKind::Subscript, - ); - } else { - // Check if the key has a valid type. We only allow string literals, a union of string literals, - // or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`, - // but we need to exclude `LiteralString` itself. This check would technically allow weird key - // types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just - // fail to provide the "Only string literals are allowed" hint in that case. 
- if slice_ty.is_assignable_to(db, Type::LiteralString) - && !slice_ty.is_equivalent_to(db, Type::LiteralString) + Type::Intersection(intersection) => { + let check_positive_elements = |emit_diagnostic_and_short_circuit| { + let mut valid = false; + for element_ty in intersection.positive(db) { + valid |= self.validate_subscript_assignment_impl( + object_node, + full_object_ty.or(Some(object_ty)), + *element_ty, + slice_node, + slice_ty, + rhs_value_node, + rhs_value_ty, + emit_diagnostic_and_short_circuit, + ); + + if !valid && emit_diagnostic_and_short_circuit { + break; + } + } + + valid + }; + + // Perform an initial check of all elements. If the assignment is valid + // for at least one element, we do not emit any diagnostics. Otherwise, + // we re-run the check and emit a diagnostic on the first failing element. + let valid = check_positive_elements(false); + + if !valid { + check_positive_elements(true); + } + + valid + } + + Type::TypedDict(typed_dict) => { + // As an optimization, prevent calling `__setitem__` on (unions of) large `TypedDict`s, and + // validate the assignment ourselves. This also allows us to emit better diagnostics. + + let mut valid = true; + let Some(keys) = key_literals(db, slice_ty) else { + // Check if the key has a valid type. We only allow string literals, a union of string literals, + // or a dynamic type like `Any`. We can do this by checking assignability to `LiteralString`, + // but we need to exclude `LiteralString` itself. This check would technically allow weird key + // types like `LiteralString & Any` to pass, but it does not need to be perfect. We would just + // fail to provide the "Only string literals are allowed" hint in that case. 
+ + if slice_ty.is_dynamic() { + return true; + } + + let assigned_d = rhs_value_ty.display(db); + let value_d = object_ty.display(db); + + if slice_ty.is_assignable_to(db, Type::LiteralString) + && !slice_ty.is_equivalent_to(db, Type::LiteralString) + { + if let Some(builder) = + self.context.report_lint(&INVALID_ASSIGNMENT, slice_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`", + slice_ty.display(db) + )); + attach_original_type_info(diagnostic); + } + } else { + if let Some(builder) = self.context.report_lint(&INVALID_KEY, slice_node) { + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot access `{value_d}` with a key of type `{}`. Only string literals are allowed as keys on TypedDicts.", + slice_ty.display(db) + )); + attach_original_type_info(diagnostic); + } + } + + return false; + }; + + for key in keys { + valid &= validate_typed_dict_key_assignment( + &self.context, + typed_dict, + full_object_ty, + key, + rhs_value_ty, + object_node, + slice_node, + rhs_value_node, + TypedDictAssignmentKind::Subscript, + emit_diagnostic, + ); + } + + valid + } + + _ => { + match object_ty.try_call_dunder( + db, + "__setitem__", + CallArguments::positional([slice_ty, rhs_value_ty]), + TypeContext::default(), + ) { + Ok(_) => true, + Err(err) => match err { + CallDunderError::PossiblyUnbound { .. 
} => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&POSSIBLY_MISSING_IMPLICIT_CALL, rhs_value_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` may be missing", + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } + false + } + CallDunderError::CallError(call_error_kind, bindings) => { + match call_error_kind { + CallErrorKind::NotCallable => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&CALL_NON_CALLABLE, object_node) { - if let Some(builder) = - context.report_lint(&INVALID_ASSIGNMENT, &**slice) - { - builder.into_diagnostic(format_args!( - "Cannot assign value of type `{assigned_d}` to key of type `{}` on TypedDict `{value_d}`", - slice_ty.display(db) - )); + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` is not callable \ + on object of type `{}`", + bindings.callable_type().display(db), + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } + } + CallErrorKind::BindingError => { + if let Some(typed_dict) = object_ty.as_typed_dict() { + if let Some(key) = slice_ty.as_string_literal() { + let key = key.value(db); + validate_typed_dict_key_assignment( + &self.context, + typed_dict, + full_object_ty, + key, + rhs_value_ty, + object_node, + slice_node, + rhs_value_node, + TypedDictAssignmentKind::Subscript, + true, + ); } } else { - if let Some(builder) = - context.report_lint(&INVALID_KEY, &**slice) + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&INVALID_ASSIGNMENT, object_node) { - builder.into_diagnostic(format_args!( - "Cannot access `{value_d}` with a key of type `{}`. 
Only string literals are allowed as keys on TypedDicts.", - slice_ty.display(db) + let assigned_d = rhs_value_ty.display(db); + let value_d = object_ty.display(db); + + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` cannot be called with \ + a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`", + bindings.callable_type().display(db), + slice_ty.display(db), )); + attach_original_type_info(diagnostic); } } } - } else { - if let Some(builder) = - context.report_lint(&INVALID_ASSIGNMENT, &**value) - { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` cannot be called with \ - a key of type `{}` and a value of type `{assigned_d}` on object of type `{value_d}`", - bindings.callable_type().display(db), - slice_ty.display(db), - )); + CallErrorKind::PossiblyNotCallable => { + if emit_diagnostic + && let Some(builder) = self + .context + .report_lint(&CALL_NON_CALLABLE, object_node) + { + let diagnostic = builder.into_diagnostic(format_args!( + "Method `__setitem__` of type `{}` may not be callable on object of type `{}`", + bindings.callable_type().display(db), + object_ty.display(db), + )); + attach_original_type_info(diagnostic); + } } } + false } - CallErrorKind::PossiblyNotCallable => { - if let Some(builder) = context.report_lint(&CALL_NON_CALLABLE, &**value) + CallDunderError::MethodNotAvailable => { + if emit_diagnostic + && let Some(builder) = + self.context.report_lint(&INVALID_ASSIGNMENT, object_node) { - builder.into_diagnostic(format_args!( - "Method `__setitem__` of type `{}` may not be \ - callable on object of type `{}`", - bindings.callable_type().display(db), - value_ty.display(db), + let diagnostic = builder.into_diagnostic(format_args!( + "Cannot assign to a subscript on an object of type `{}` with no `__setitem__` method", + object_ty.display(db), )); + attach_original_type_info(diagnostic); } + false } - } - false + }, } - 
CallDunderError::MethodNotAvailable => { - if let Some(builder) = context.report_lint(&INVALID_ASSIGNMENT, &**value) { - builder.into_diagnostic(format_args!( - "Cannot assign to object of type `{}` with no `__setitem__` method", - value_ty.display(db), - )); - } - - false - } - }, + } } } @@ -7682,6 +7845,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { first_arg.into(), first_arg.into(), Type::TypedDict(typed_dict_ty), + None, key_ty, &items, ); @@ -10908,6 +11072,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { value_node.into(), slice_node.into(), value_ty, + None, slice_ty, &typed_dict.items(db), ); diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs index 83b4ae946e..e07dbe6e60 100644 --- a/crates/ty_python_semantic/src/types/typed_dict.rs +++ b/crates/ty_python_semantic/src/types/typed_dict.rs @@ -143,30 +143,57 @@ impl TypedDictAssignmentKind { pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( context: &InferContext<'db, 'ast>, typed_dict: TypedDictType<'db>, + full_object_ty: Option>, key: &str, value_ty: Type<'db>, - typed_dict_node: impl Into>, + typed_dict_node: impl Into> + Copy, key_node: impl Into>, value_node: impl Into>, assignment_kind: TypedDictAssignmentKind, + emit_diagnostic: bool, ) -> bool { let db = context.db(); let items = typed_dict.items(db); // Check if key exists in `TypedDict` let Some((_, item)) = items.iter().find(|(name, _)| *name == key) else { - report_invalid_key_on_typed_dict( - context, - typed_dict_node.into(), - key_node.into(), - Type::TypedDict(typed_dict), - Type::string_literal(db, key), - &items, - ); + if emit_diagnostic { + report_invalid_key_on_typed_dict( + context, + typed_dict_node.into(), + key_node.into(), + Type::TypedDict(typed_dict), + full_object_ty, + Type::string_literal(db, key), + &items, + ); + } return false; }; + let add_object_type_annotation = + |diagnostic: &mut Diagnostic| { + if let Some(full_object_ty) = 
full_object_ty { + diagnostic.annotate(context.secondary(typed_dict_node.into()).message( + format_args!( + "TypedDict `{}` in {kind} type `{}`", + Type::TypedDict(typed_dict).display(db), + full_object_ty.display(db), + kind = if full_object_ty.is_union() { + "union" + } else { + "intersection" + }, + ), + )); + } else { + diagnostic.annotate(context.secondary(typed_dict_node.into()).message( + format_args!("TypedDict `{}`", Type::TypedDict(typed_dict).display(db)), + )); + } + }; + let add_item_definition_subdiagnostic = |diagnostic: &mut Diagnostic, message| { if let Some(declaration) = item.single_declaration { let file = declaration.file(db); @@ -184,8 +211,9 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( }; if assignment_kind.is_subscript() && item.is_read_only() { - if let Some(builder) = - context.report_lint(assignment_kind.diagnostic_type(), key_node.into()) + if emit_diagnostic + && let Some(builder) = + context.report_lint(assignment_kind.diagnostic_type(), key_node.into()) { let typed_dict_ty = Type::TypedDict(typed_dict); let typed_dict_d = typed_dict_ty.display(db); @@ -195,13 +223,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( )); diagnostic.set_primary_message(format_args!("key is marked read-only")); - - diagnostic.annotate( - context - .secondary(typed_dict_node.into()) - .message(format_args!("TypedDict `{typed_dict_d}`")), - ); - + add_object_type_annotation(&mut diagnostic); add_item_definition_subdiagnostic(&mut diagnostic, "Read-only item declared here"); } @@ -219,7 +241,9 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( } // Invalid assignment - emit diagnostic - if let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node) { + if emit_diagnostic + && let Some(builder) = context.report_lint(assignment_kind.diagnostic_type(), value_node) + { let typed_dict_ty = Type::TypedDict(typed_dict); let typed_dict_d = typed_dict_ty.display(db); let value_d = 
value_ty.display(db); @@ -232,12 +256,6 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( diagnostic.set_primary_message(format_args!("value of type `{value_d}`")); - diagnostic.annotate( - context - .secondary(typed_dict_node.into()) - .message(format_args!("TypedDict `{typed_dict_d}`")), - ); - diagnostic.annotate( context .secondary(key_node.into()) @@ -245,6 +263,7 @@ pub(super) fn validate_typed_dict_key_assignment<'db, 'ast>( ); add_item_definition_subdiagnostic(&mut diagnostic, "Item declared here"); + add_object_type_annotation(&mut diagnostic); } false @@ -343,12 +362,14 @@ fn validate_from_dict_literal<'db, 'ast>( validate_typed_dict_key_assignment( context, typed_dict, + None, key_str, value_type, error_node, key_expr, &dict_item.value, TypedDictAssignmentKind::Constructor, + true, ); } } @@ -380,12 +401,14 @@ fn validate_from_keywords<'db, 'ast>( validate_typed_dict_key_assignment( context, typed_dict, + None, arg_name.as_str(), arg_type, error_node, keyword, &keyword.value, TypedDictAssignmentKind::Constructor, + true, ); } } @@ -418,12 +441,14 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( valid &= validate_typed_dict_key_assignment( context, typed_dict, + None, key_str, value_type, error_node, key_expr, &item.value, TypedDictAssignmentKind::Constructor, + true, ); } } From 3d4b0559f1e7c2bb52276178f28e9e8df7f9d663 Mon Sep 17 00:00:00 2001 From: Aria Desires Date: Wed, 12 Nov 2025 15:47:33 -0500 Subject: [PATCH 179/180] [ty] remove erroneous canonicalize (#21405) Alternative implementation to https://github.com/astral-sh/ruff/pull/21052 --- crates/ty/tests/cli/python_environment.rs | 225 ++++++++++++++++++ .../src/module_resolver/resolver.rs | 9 +- 2 files changed, 228 insertions(+), 6 deletions(-) diff --git a/crates/ty/tests/cli/python_environment.rs b/crates/ty/tests/cli/python_environment.rs index de6d99aa9a..638fc6c4ca 100644 --- a/crates/ty/tests/cli/python_environment.rs +++ b/crates/ty/tests/cli/python_environment.rs @@ 
-323,6 +323,231 @@ fn python_version_inferred_from_system_installation() -> anyhow::Result<()> { Ok(()) } +/// This attempts to simulate the tangled web of symlinks that a homebrew install has +/// which can easily confuse us if we're ever told to use it. +/// +/// The main thing this is regression-testing is a panic in one *extremely* specific case +/// that you have to try really hard to hit (but vscode, hilariously, did hit). +#[cfg(unix)] +#[test] +fn python_argument_trapped_in_a_symlink_factory() -> anyhow::Result<()> { + let case = CliTest::with_files([ + // This is the real python binary. + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13", + "", + ), + // There's a real site-packages here (although it's basically empty). + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages/foo.py", + "", + ), + // There's also a real site-packages here (although it's basically empty). + ("opt/homebrew/lib/python3.13/site-packages/bar.py", ""), + // This has the real stdlib, but the site-packages in this dir is a symlink. + ( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/abc.py", + "", + ), + // It's important that our faux-homebrew not be in the same dir as our working directory + // to reproduce the crash, don't ask me why. 
+ ( + "project/test.py", + "\ +import foo +import bar +import colorama +", + ), + ])?; + + // many python symlinks pointing to a single real python (the longest path) + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13", + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3", + )?; + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3", + "opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3", + )?; + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3", + "opt/homebrew/bin/python3", + )?; + // the "real" python's site-packages is a symlink to a different dir + case.write_symlink( + "opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages", + "opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages", + )?; + + // Try all 4 pythons with absolute paths to our fauxbrew install + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `foo` + --> test.py:1:8 + | + 1 | import foo + | ^^^ + 2 | import bar + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. 
vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + assert_cmd_snapshot!(case.command() + .current_dir(case.root().join("project")) + .arg("--python").arg(case.root().join("opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/bin/python3.13")), @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-import]: Cannot resolve imported module `bar` + --> test.py:2:8 + | + 1 | import foo + 2 | import bar + | ^^^ + 3 | import colorama + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. /opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + error[unresolved-import]: Cannot resolve imported module `colorama` + --> test.py:3:8 + | + 1 | import foo + 2 | import bar + 3 | import colorama + | ^^^^^^^^ + | + info: Searched in the following paths during module resolution: + info: 1. /project (first-party code) + info: 2. vendored://stdlib (stdlib typeshed stubs vendored by ty) + info: 3. 
/opt/homebrew/Cellar/python@3.13/3.13.5/Frameworks/Python.framework/Versions/3.13/lib/python3.13/site-packages (site-packages) + info: make sure your Python environment is properly configured: https://docs.astral.sh/ty/modules/#python-environment + info: rule `unresolved-import` is enabled by default + + Found 2 diagnostics + + ----- stderr ----- + "); + + Ok(()) +} + /// On Unix systems, it's common for a Python installation at `.venv/bin/python` to only be a symlink /// to a system Python installation. We must be careful not to resolve the symlink too soon! /// If we do, we will incorrectly add the system installation's `site-packages` as a search path, diff --git a/crates/ty_python_semantic/src/module_resolver/resolver.rs b/crates/ty_python_semantic/src/module_resolver/resolver.rs index 0787859049..349d685862 100644 --- a/crates/ty_python_semantic/src/module_resolver/resolver.rs +++ b/crates/ty_python_semantic/src/module_resolver/resolver.rs @@ -452,15 +452,12 @@ pub(crate) fn dynamic_resolution_paths<'db>( let site_packages_dir = site_packages_search_path .as_system_path() .expect("Expected site package path to be a system path"); - let site_packages_dir = system - .canonicalize_path(site_packages_dir) - .unwrap_or_else(|_| site_packages_dir.to_path_buf()); - if !existing_paths.insert(Cow::Owned(site_packages_dir.clone())) { + if !existing_paths.insert(Cow::Borrowed(site_packages_dir)) { continue; } - let site_packages_root = files.expect_root(db, &site_packages_dir); + let site_packages_root = files.expect_root(db, site_packages_dir); // This query needs to be re-executed each time a `.pth` file // is added, modified or removed from the `site-packages` directory. @@ -477,7 +474,7 @@ pub(crate) fn dynamic_resolution_paths<'db>( // containing a (relative or absolute) path. // Each of these paths may point to an editable install of a package, // so should be considered an additional search path. 
- let pth_file_iterator = match PthFileIterator::new(db, &site_packages_dir) { + let pth_file_iterator = match PthFileIterator::new(db, site_packages_dir) { Ok(iterator) => iterator, Err(error) => { tracing::warn!( From a6abd65c2c2700fe3151127f965789f2d7875426 Mon Sep 17 00:00:00 2001 From: Dan Parizher <105245560+danparizher@users.noreply.github.com> Date: Wed, 12 Nov 2025 16:37:55 -0500 Subject: [PATCH 180/180] [`pydoclint`] Fix false positive when Sphinx directives follow Raises section (`DOC502`) (#20535) ## Summary Fixes #18959 --------- Co-authored-by: Brent Westbrook --- .../test/fixtures/pydoclint/DOC502_google.py | 31 ++++++++++++++++ .../test/fixtures/pydoclint/DOC502_numpy.py | 30 ++++++++++++++++ .../rules/pydoclint/rules/check_docstring.rs | 36 +++++++++++++++---- ...-extraneous-exception_DOC502_numpy.py.snap | 20 +++++++++++ 4 files changed, 111 insertions(+), 6 deletions(-) diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py index 9709d9ff53..916b11c634 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_google.py @@ -83,6 +83,37 @@ def calculate_speed(distance: float, time: float) -> float: raise +# DOC502 regression for Sphinx directive after Raises (issue #18959) +def foo(): + """First line. + + Raises: + ValueError: + some text + + .. versionadded:: 0.7.0 + The ``init_kwargs`` argument. + """ + raise ValueError + + +# DOC502 regression for following section with colons +def example_with_following_section(): + """Summary. + + Returns: + str: The resulting expression. + + Raises: + ValueError: If the unit is not valid. + + Relation to `time_range_lookup`: + - Handles the "start of" modifier. + - Example: "start of month" → `DATETRUNC()`. 
+ """ + raise ValueError + + # This should NOT trigger DOC502 because OSError is explicitly re-raised def f(): """Do nothing. diff --git a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py index 5e8bf5f36e..c32814597d 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py +++ b/crates/ruff_linter/resources/test/fixtures/pydoclint/DOC502_numpy.py @@ -117,3 +117,33 @@ def calculate_speed(distance: float, time: float) -> float: except TypeError: print("Not a number? Shame on you!") raise + + +# DOC502 regression for Sphinx directive after Raises (issue #18959) +def foo(): + """First line. + + Raises + ------ + ValueError + some text + + .. versionadded:: 0.7.0 + The ``init_kwargs`` argument. + """ + raise ValueError + +# Make sure we don't bail out on a Sphinx directive in the description of one +# of the exceptions +def foo(): + """First line. + + Raises + ------ + ValueError + some text + .. 
math:: e^{xception} + ZeroDivisionError + Will not be raised, DOC502 + """ + raise ValueError diff --git a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs index 97f1b7ed9b..2f84e8501d 100644 --- a/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs +++ b/crates/ruff_linter/src/rules/pydoclint/rules/check_docstring.rs @@ -722,12 +722,30 @@ fn parse_raises(content: &str, style: Option) -> Vec Vec> { let mut entries: Vec = Vec::new(); - for potential in content.lines() { - let Some(colon_idx) = potential.find(':') else { - continue; - }; - let entry = potential[..colon_idx].trim(); - entries.push(QualifiedName::user_defined(entry)); + let mut lines = content.lines().peekable(); + let Some(first) = lines.peek() else { + return entries; + }; + let indentation = &first[..first.len() - first.trim_start().len()]; + for potential in lines { + if let Some(entry) = potential.strip_prefix(indentation) { + if let Some(first_char) = entry.chars().next() { + if !first_char.is_whitespace() { + if let Some(colon_idx) = entry.find(':') { + let entry = entry[..colon_idx].trim(); + if !entry.is_empty() { + entries.push(QualifiedName::user_defined(entry)); + } + } + } + } + } else { + // If we can't strip the expected indentation, check if this is a dedented line + // (not blank) - if so, break early as we've reached the end of this section + if !potential.trim().is_empty() { + break; + } + } } entries } @@ -751,6 +769,12 @@ fn parse_raises_numpy(content: &str) -> Vec> { let indentation = &dashes[..dashes.len() - dashes.trim_start().len()]; for potential in lines { if let Some(entry) = potential.strip_prefix(indentation) { + // Check for Sphinx directives (lines starting with ..) - these indicate the end of the + // section. In numpy-style, exceptions are dedented to the same level as sphinx + // directives. 
+ if entry.starts_with("..") { + break; + } if let Some(first_char) = entry.chars().next() { if !first_char.is_whitespace() { entries.push(QualifiedName::user_defined(entry.trim_end())); diff --git a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap index fd28bded5d..2bb6d04b0b 100644 --- a/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap +++ b/crates/ruff_linter/src/rules/pydoclint/snapshots/ruff_linter__rules__pydoclint__tests__docstring-extraneous-exception_DOC502_numpy.py.snap @@ -95,3 +95,23 @@ DOC502 Raised exception is not explicitly raised: `DivisionByZero` 82 | return distance / time | help: Remove `DivisionByZero` from the docstring + +DOC502 Raised exception is not explicitly raised: `ZeroDivisionError` + --> DOC502_numpy.py:139:5 + | +137 | # of the exceptions +138 | def foo(): +139 | / """First line. +140 | | +141 | | Raises +142 | | ------ +143 | | ValueError +144 | | some text +145 | | .. math:: e^{xception} +146 | | ZeroDivisionError +147 | | Will not be raised, DOC502 +148 | | """ + | |_______^ +149 | raise ValueError + | +help: Remove `ZeroDivisionError` from the docstring