diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 83b6b83bc3..b218a2e99b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -24,6 +24,8 @@ env: PACKAGE_NAME: ruff PYTHON_VERSION: "3.14" NEXTEST_PROFILE: ci + # Enable mdtests that require external dependencies + MDTEST_EXTERNAL: "1" jobs: determine_changes: @@ -296,7 +298,7 @@ jobs: # sync, not just public items. Eventually we should do this for all # crates; for now add crates here as they are warning-clean to prevent # regression. - - run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db --document-private-items + - run: cargo doc --no-deps -p ty_python_semantic -p ty -p ty_test -p ruff_db -p ruff_python_formatter --document-private-items env: # Setting RUSTDOCFLAGS because `cargo doc --check` isn't yet implemented (https://github.com/rust-lang/cargo/issues/10025). RUSTDOCFLAGS: "-D warnings" diff --git a/.github/workflows/mypy_primer.yaml b/.github/workflows/mypy_primer.yaml index b2f7f5e275..22af7025da 100644 --- a/.github/workflows/mypy_primer.yaml +++ b/.github/workflows/mypy_primer.yaml @@ -47,6 +47,7 @@ jobs: - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 with: + shared-key: "mypy-primer" workspaces: "ruff" - name: Install Rust toolchain @@ -86,6 +87,7 @@ jobs: - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 with: workspaces: "ruff" + shared-key: "mypy-primer" - name: Install Rust toolchain run: rustup show @@ -105,3 +107,54 @@ jobs: with: name: mypy_primer_memory_diff path: mypy_primer_memory.diff + + # Runs mypy twice against the same ty version to catch any non-deterministic behavior (ideally). + # The job is disabled for now because there are some non-deterministic diagnostics. + mypy_primer_same_revision: + name: Run mypy_primer on same revision + runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }} + timeout-minutes: 20 + # TODO: Enable once we fixed the non-deterministic diagnostics + if: false + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + path: ruff + fetch-depth: 0 + persist-credentials: false + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # v7.1.4 + + - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 + with: + workspaces: "ruff" + shared-key: "mypy-primer" + + - name: Install Rust toolchain + run: rustup show + + - name: Run determinism check + env: + BASE_REVISION: ${{ github.event.pull_request.head.sha }} + PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt + CLICOLOR_FORCE: "1" + DIFF_FILE: mypy_primer_determinism.diff + run: | + cd ruff + scripts/mypy_primer.sh + + - name: Check for non-determinism + run: | + # Remove ANSI color codes for checking + sed -e 's/\x1b\[[0-9;]*m//g' mypy_primer_determinism.diff > mypy_primer_determinism_clean.diff + + # Check if there are any differences (non-determinism) + if [ -s mypy_primer_determinism_clean.diff ]; then + echo "ERROR: Non-deterministic output detected!" + echo "The following differences were found when running ty twice on the same commit:" + cat mypy_primer_determinism_clean.diff + exit 1 + else + echo "✓ Output is deterministic" + fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 2508b4a54f..3707d70b1d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,47 @@ # Changelog +## 0.14.9 + +Released on 2025-12-11. 
+ +### Preview features + +- \[`ruff`\] New `RUF100` diagnostics for unused range suppressions ([#21783](https://github.com/astral-sh/ruff/pull/21783)) +- \[`pylint`\] Detect subclasses of builtin exceptions (`PLW0133`) ([#21382](https://github.com/astral-sh/ruff/pull/21382)) + +### Bug fixes + +- Fix comment placement in lambda parameters ([#21868](https://github.com/astral-sh/ruff/pull/21868)) +- Skip over trivia tokens after re-lexing ([#21895](https://github.com/astral-sh/ruff/pull/21895)) +- \[`flake8-bandit`\] Fix false positive when using non-standard `CSafeLoader` path (S506). ([#21830](https://github.com/astral-sh/ruff/pull/21830)) +- \[`flake8-bugbear`\] Accept immutable slice default arguments (`B008`) ([#21823](https://github.com/astral-sh/ruff/pull/21823)) + +### Rule changes + +- \[`pydocstyle`\] Suppress `D417` for parameters with `Unpack` annotations ([#21816](https://github.com/astral-sh/ruff/pull/21816)) + +### Performance + +- Use `memchr` for computing line indexes ([#21838](https://github.com/astral-sh/ruff/pull/21838)) + +### Documentation + +- Document `*.pyw` is included by default in preview ([#21885](https://github.com/astral-sh/ruff/pull/21885)) +- Document range suppressions, reorganize suppression docs ([#21884](https://github.com/astral-sh/ruff/pull/21884)) +- Update mkdocs-material to 9.7.0 (Insiders now free) ([#21797](https://github.com/astral-sh/ruff/pull/21797)) + +### Contributors + +- [@Avasam](https://github.com/Avasam) +- [@MichaReiser](https://github.com/MichaReiser) +- [@charliermarsh](https://github.com/charliermarsh) +- [@amyreese](https://github.com/amyreese) +- [@phongddo](https://github.com/phongddo) +- [@prakhar1144](https://github.com/prakhar1144) +- [@mahiro72](https://github.com/mahiro72) +- [@ntBre](https://github.com/ntBre) +- [@LoicRiegel](https://github.com/LoicRiegel) + ## 0.14.8 Released on 2025-12-04. 
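The `RUF100` changelog entry above covers the new range-suppression comments, which are gated behind `--preview` (see `is_range_suppressions_enabled` later in this diff). A minimal sketch of the syntax, mirroring the `suppressions.py` fixture added below — the rule codes `E741`/`F841` are simply the ones that fixture happens to exercise:

```python
def f():
    # Both E741 (ambiguous variable name) and F841 (unused local variable)
    # are silenced between the disable/enable markers when running with --preview.
    # ruff: disable[E741, F841]
    I = 1
    # ruff: enable[E741, F841]
```

Per the fixture, a `disable` comment with no matching `enable` still suppresses as an implicit range but triggers an unmatched-suppression warning, and unused or unmatched range suppressions are reported through the new `RUF100` diagnostics.
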
diff --git a/Cargo.lock b/Cargo.lock index 6bc8bf881c..e75eedb6c2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1016,7 +1016,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -1108,7 +1108,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -1238,9 +1238,9 @@ dependencies = [ [[package]] name = "get-size-derive2" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff47daa61505c85af126e9dd64af6a342a33dc0cccfe1be74ceadc7d352e6efd" +checksum = "ab21d7bd2c625f2064f04ce54bcb88bc57c45724cde45cba326d784e22d3f71a" dependencies = [ "attribute-derive", "quote", @@ -1249,14 +1249,15 @@ dependencies = [ [[package]] name = "get-size2" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac7bb8710e1f09672102be7ddf39f764d8440ae74a9f4e30aaa4820dcdffa4af" +checksum = "879272b0de109e2b67b39fcfe3d25fdbba96ac07e44a254f5a0b4d7ff55340cb" dependencies = [ "compact_str", "get-size-derive2", "hashbrown 0.16.1", "indexmap", + "ordermap", "smallvec", ] @@ -1763,7 +1764,7 @@ dependencies = [ "portable-atomic", "portable-atomic-util", "serde_core", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -2233,9 +2234,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordermap" -version = "0.5.12" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b100f7dd605611822d30e182214d3c02fdefce2d801d23993f6b6ba6ca1392af" +checksum = "ed637741ced8fb240855d22a2b4f208dab7a06bcce73380162e5253000c16758" dependencies = [ "indexmap", "serde", @@ -2859,7 +2860,7 @@ dependencies = [ [[package]] name = "ruff" -version = "0.14.8" +version = "0.14.9" dependencies = [ "anyhow", "argfile", @@ -3117,7 +3118,7 @@ dependencies = [ [[package]] name = "ruff_linter" -version = "0.14.8" +version = "0.14.9" dependencies = [ "aho-corasick", "anyhow", @@ -3348,6 +3349,7 @@ dependencies = [ "compact_str", "get-size2", "insta", + "itertools 0.14.0", "memchr", "ruff_annotate_snippets", "ruff_python_ast", @@ -3473,7 +3475,7 @@ dependencies = [ [[package]] name = "ruff_wasm" -version = "0.14.8" +version = "0.14.9" dependencies = [ "console_error_panic_hook", "console_log", @@ -3571,7 +3573,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -3589,7 +3591,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salsa" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0" +source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1" dependencies = [ "boxcar", "compact_str", @@ -3600,6 +3602,7 @@ dependencies = [ "indexmap", "intrusive-collections", "inventory", + "ordermap", "parking_lot", "portable-atomic", "rustc-hash", @@ -3613,12 +3616,12 @@ dependencies = [ [[package]] name = "salsa-macro-rules" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0" +source = 
"git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1" [[package]] name = "salsa-macros" version = "0.24.0" -source = "git+https://github.com/salsa-rs/salsa.git?rev=59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0#59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0" +source = "git+https://github.com/salsa-rs/salsa.git?rev=55e5e7d32fa3fc189276f35bb04c9438f9aedbd1#55e5e7d32fa3fc189276f35bb04c9438f9aedbd1" dependencies = [ "proc-macro2", "quote", @@ -3972,7 +3975,7 @@ dependencies = [ "getrandom 0.3.4", "once_cell", "rustix", - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -4557,6 +4560,7 @@ dependencies = [ "anyhow", "camino", "colored 3.0.0", + "dunce", "insta", "memchr", "path-slash", @@ -5025,7 +5029,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 554ad219d2..0badc79e3c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -88,7 +88,7 @@ etcetera = { version = "0.11.0" } fern = { version = "0.7.0" } filetime = { version = "0.2.23" } getrandom = { version = "0.3.1" } -get-size2 = { version = "0.7.0", features = [ +get-size2 = { version = "0.7.3", features = [ "derive", "smallvec", "hashbrown", @@ -129,7 +129,7 @@ memchr = { version = "2.7.1" } mimalloc = { version = "0.1.39" } natord = { version = "1.0.9" } notify = { version = "8.0.0" } -ordermap = { version = "0.5.0" } +ordermap = { version = "1.0.0" } path-absolutize = { version = "3.1.1" } path-slash = { version = "0.2.1" } pathdiff = { version = "0.2.1" } @@ -146,7 +146,7 @@ regex-automata = { version = "0.4.9" } rustc-hash = { version = "2.0.0" } rustc-stable-hash = { version = "0.1.2" } # When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml` -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0", default-features = false, features = [ +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "55e5e7d32fa3fc189276f35bb04c9438f9aedbd1", default-features = false, features = [ "compact_str", "macros", "salsa_unstable", diff --git a/README.md b/README.md index 7e96c92479..091e0d3cfe 100644 --- a/README.md +++ b/README.md @@ -147,8 +147,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh powershell -c "irm https://astral.sh/ruff/install.ps1 | iex" # For a specific version. -curl -LsSf https://astral.sh/ruff/0.14.8/install.sh | sh -powershell -c "irm https://astral.sh/ruff/0.14.8/install.ps1 | iex" +curl -LsSf https://astral.sh/ruff/0.14.9/install.sh | sh +powershell -c "irm https://astral.sh/ruff/0.14.9/install.ps1 | iex" ``` You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff), @@ -181,7 +181,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.8 + rev: v0.14.9 hooks: # Run the linter. 
- id: ruff-check diff --git a/crates/ruff/Cargo.toml b/crates/ruff/Cargo.toml index ff8516ebf2..76dfe3dfe5 100644 --- a/crates/ruff/Cargo.toml +++ b/crates/ruff/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff" -version = "0.14.8" +version = "0.14.9" publish = true authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff/tests/cli/lint.rs b/crates/ruff/tests/cli/lint.rs index 25500ed346..a86d8e81be 100644 --- a/crates/ruff/tests/cli/lint.rs +++ b/crates/ruff/tests/cli/lint.rs @@ -1440,6 +1440,78 @@ def function(): Ok(()) } +#[test] +fn ignore_noqa() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file( + "ruff.toml", + r#" +[lint] +select = ["F401"] +"#, + )?; + + fixture.write_file( + "noqa.py", + r#" +import os # noqa: F401 + +# ruff: disable[F401] +import sys +"#, + )?; + + // without --ignore-noqa + assert_cmd_snapshot!(fixture + .check_command() + .args(["--config", "ruff.toml"]) + .arg("noqa.py"), + @r" + success: false + exit_code: 1 + ----- stdout ----- + noqa.py:5:8: F401 [*] `sys` imported but unused + Found 1 error. + [*] 1 fixable with the `--fix` option. + + ----- stderr ----- + "); + + assert_cmd_snapshot!(fixture + .check_command() + .args(["--config", "ruff.toml"]) + .arg("noqa.py") + .args(["--preview"]), + @r" + success: true + exit_code: 0 + ----- stdout ----- + All checks passed! + + ----- stderr ----- + "); + + // with --ignore-noqa --preview + assert_cmd_snapshot!(fixture + .check_command() + .args(["--config", "ruff.toml"]) + .arg("noqa.py") + .args(["--ignore-noqa", "--preview"]), + @r" + success: false + exit_code: 1 + ----- stdout ----- + noqa.py:2:8: F401 [*] `os` imported but unused + noqa.py:5:8: F401 [*] `sys` imported but unused + Found 2 errors. + [*] 2 fixable with the `--fix` option. 
+ + ----- stderr ----- + "); + + Ok(()) +} + #[test] fn add_noqa() -> Result<()> { let fixture = CliTest::new()?; @@ -1632,6 +1704,100 @@ def unused(x): # noqa: ANN001, ARG001, D103 Ok(()) } +#[test] +fn add_noqa_existing_file_level_noqa() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file( + "ruff.toml", + r#" +[lint] +select = ["F401"] +"#, + )?; + + fixture.write_file( + "noqa.py", + r#" +# ruff: noqa F401 +import os +"#, + )?; + + assert_cmd_snapshot!(fixture + .check_command() + .args(["--config", "ruff.toml"]) + .arg("noqa.py") + .arg("--preview") + .args(["--add-noqa"]) + .arg("-") + .pass_stdin(r#" + +"#), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + + let test_code = + fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file"); + + insta::assert_snapshot!(test_code, @r" + # ruff: noqa F401 + import os + "); + + Ok(()) +} + +#[test] +fn add_noqa_existing_range_suppression() -> Result<()> { + let fixture = CliTest::new()?; + fixture.write_file( + "ruff.toml", + r#" +[lint] +select = ["F401"] +"#, + )?; + + fixture.write_file( + "noqa.py", + r#" +# ruff: disable[F401] +import os +"#, + )?; + + assert_cmd_snapshot!(fixture + .check_command() + .args(["--config", "ruff.toml"]) + .arg("noqa.py") + .arg("--preview") + .args(["--add-noqa"]) + .arg("-") + .pass_stdin(r#" + +"#), @r" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + "); + + let test_code = + fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file"); + + insta::assert_snapshot!(test_code, @r" + # ruff: disable[F401] + import os + "); + + Ok(()) +} + #[test] fn add_noqa_multiline_comment() -> Result<()> { let fixture = CliTest::new()?; diff --git a/crates/ruff_benchmark/benches/ty_walltime.rs b/crates/ruff_benchmark/benches/ty_walltime.rs index 9f29b193a5..7a0f9a5848 100644 --- a/crates/ruff_benchmark/benches/ty_walltime.rs +++ b/crates/ruff_benchmark/benches/ty_walltime.rs @@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new( max_dep_date: "2025-06-17", python_version: PythonVersion::PY312, }, - 13000, + 13030, ); static TANJUN: Benchmark = Benchmark::new( diff --git a/crates/ruff_db/src/diagnostic/mod.rs b/crates/ruff_db/src/diagnostic/mod.rs index 33348ddf2e..e966cdd208 100644 --- a/crates/ruff_db/src/diagnostic/mod.rs +++ b/crates/ruff_db/src/diagnostic/mod.rs @@ -166,28 +166,8 @@ impl Diagnostic { /// Returns the primary message for this diagnostic. /// /// A diagnostic always has a message, but it may be empty. - /// - /// NOTE: At present, this routine will return the first primary - /// annotation's message as the primary message when the main diagnostic - /// message is empty. This is meant to facilitate an incremental migration - /// in ty over to the new diagnostic data model. (The old data model - /// didn't distinguish between messages on the entire diagnostic and - /// messages attached to a particular span.) pub fn primary_message(&self) -> &str { - if !self.inner.message.as_str().is_empty() { - return self.inner.message.as_str(); - } - // FIXME: As a special case, while we're migrating ty - // to the new diagnostic data model, we'll look for a primary - // message from the primary annotation. This is because most - // ty diagnostics are created with an empty diagnostic - // message and instead attach the message to the annotation. - // Fixing this will require touching basically every diagnostic - // in ty, so we do it this way for now to match the old - // semantics. 
---AG - self.primary_annotation() - .and_then(|ann| ann.get_message()) - .unwrap_or_default() + self.inner.message.as_str() } /// Introspects this diagnostic and returns what kind of "primary" message @@ -199,18 +179,6 @@ impl Diagnostic { /// contains *essential* information or context for understanding the /// diagnostic. /// - /// The reason why we don't just always return both the main diagnostic - /// message and the primary annotation message is because this was written - /// in the midst of an incremental migration of ty over to the new - /// diagnostic data model. At time of writing, diagnostics were still - /// constructed in the old model where the main diagnostic message and the - /// primary annotation message were not distinguished from each other. So - /// for now, we carefully return what kind of messages this diagnostic - /// contains. In effect, if this diagnostic has a non-empty main message - /// *and* a non-empty primary annotation message, then the diagnostic is - /// 100% using the new diagnostic data model and we can format things - /// appropriately. - /// /// The type returned implements the `std::fmt::Display` trait. In most /// cases, just converting it to a string (or printing it) will do what /// you want. @@ -224,11 +192,10 @@ impl Diagnostic { .primary_annotation() .and_then(|ann| ann.get_message()) .unwrap_or_default(); - match (main.is_empty(), annotation.is_empty()) { - (false, true) => ConciseMessage::MainDiagnostic(main), - (true, false) => ConciseMessage::PrimaryAnnotation(annotation), - (false, false) => ConciseMessage::Both { main, annotation }, - (true, true) => ConciseMessage::Empty, + if annotation.is_empty() { + ConciseMessage::MainDiagnostic(main) + } else { + ConciseMessage::Both { main, annotation } } } @@ -693,18 +660,6 @@ impl SubDiagnostic { /// contains *essential* information or context for understanding the /// diagnostic. /// - /// The reason why we don't just always return both the main diagnostic - /// message and the primary annotation message is because this was written - /// in the midst of an incremental migration of ty over to the new - /// diagnostic data model. At time of writing, diagnostics were still - /// constructed in the old model where the main diagnostic message and the - /// primary annotation message were not distinguished from each other. So - /// for now, we carefully return what kind of messages this diagnostic - /// contains. In effect, if this diagnostic has a non-empty main message - /// *and* a non-empty primary annotation message, then the diagnostic is - /// 100% using the new diagnostic data model and we can format things - /// appropriately. - /// /// The type returned implements the `std::fmt::Display` trait. In most /// cases, just converting it to a string (or printing it) will do what /// you want. 
@@ -714,11 +669,10 @@ impl SubDiagnostic { .primary_annotation() .and_then(|ann| ann.get_message()) .unwrap_or_default(); - match (main.is_empty(), annotation.is_empty()) { - (false, true) => ConciseMessage::MainDiagnostic(main), - (true, false) => ConciseMessage::PrimaryAnnotation(annotation), - (false, false) => ConciseMessage::Both { main, annotation }, - (true, true) => ConciseMessage::Empty, + if annotation.is_empty() { + ConciseMessage::MainDiagnostic(main) + } else { + ConciseMessage::Both { main, annotation } } } } @@ -888,6 +842,10 @@ impl Annotation { pub fn hide_snippet(&mut self, yes: bool) { self.hide_snippet = yes; } + + pub fn is_primary(&self) -> bool { + self.is_primary + } } /// Tags that can be associated with an annotation. @@ -1508,28 +1466,10 @@ pub enum DiagnosticFormat { pub enum ConciseMessage<'a> { /// A diagnostic contains a non-empty main message and an empty /// primary annotation message. - /// - /// This strongly suggests that the diagnostic is using the - /// "new" data model. MainDiagnostic(&'a str), - /// A diagnostic contains an empty main message and a non-empty - /// primary annotation message. - /// - /// This strongly suggests that the diagnostic is using the - /// "old" data model. - PrimaryAnnotation(&'a str), /// A diagnostic contains a non-empty main message and a non-empty /// primary annotation message. - /// - /// This strongly suggests that the diagnostic is using the - /// "new" data model. Both { main: &'a str, annotation: &'a str }, - /// A diagnostic contains an empty main message and an empty - /// primary annotation message. - /// - /// This indicates that the diagnostic is probably using the old - /// model. - Empty, /// A custom concise message has been provided. Custom(&'a str), } @@ -1540,13 +1480,9 @@ impl std::fmt::Display for ConciseMessage<'_> { ConciseMessage::MainDiagnostic(main) => { write!(f, "{main}") } - ConciseMessage::PrimaryAnnotation(annotation) => { - write!(f, "{annotation}") - } ConciseMessage::Both { main, annotation } => { write!(f, "{main}: {annotation}") } - ConciseMessage::Empty => Ok(()), ConciseMessage::Custom(message) => { write!(f, "{message}") } diff --git a/crates/ruff_linter/Cargo.toml b/crates/ruff_linter/Cargo.toml index 9d11a41e50..28f4b0e413 100644 --- a/crates/ruff_linter/Cargo.toml +++ b/crates/ruff_linter/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_linter" -version = "0.14.8" +version = "0.14.9" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B006_B008.py b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B006_B008.py index 77ab80b7ee..8e55d5b340 100644 --- a/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B006_B008.py +++ b/crates/ruff_linter/resources/test/fixtures/flake8_bugbear/B006_B008.py @@ -199,6 +199,9 @@ def bytes_okay(value=bytes(1)): def int_okay(value=int("12")): pass +# Allow immutable slice() +def slice_okay(value=slice(1,2)): + pass # Allow immutable complex() value def complex_okay(value=complex(1,2)): diff --git a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D417.py b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D417.py index 7c5a1f538c..510bf77bf8 100644 --- a/crates/ruff_linter/resources/test/fixtures/pydocstyle/D417.py +++ b/crates/ruff_linter/resources/test/fixtures/pydocstyle/D417.py @@ -218,3 +218,26 @@ def should_not_fail(payload, Args): Args: The other arguments. 
""" + + +# Test cases for Unpack[TypedDict] kwargs +from typing import TypedDict +from typing_extensions import Unpack + +class User(TypedDict): + id: int + name: str + +def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]): + """Function with Unpack kwargs. + + Args: + query: some arg + """ + +def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]): + """Function with Unpack kwargs but missing query arg documentation. + + Args: + **kwargs: keyword arguments + """ diff --git a/crates/ruff_linter/resources/test/fixtures/pylint/useless_exception_statement.py b/crates/ruff_linter/resources/test/fixtures/pylint/useless_exception_statement.py index eaff5cd895..a382445e9c 100644 --- a/crates/ruff_linter/resources/test/fixtures/pylint/useless_exception_statement.py +++ b/crates/ruff_linter/resources/test/fixtures/pylint/useless_exception_statement.py @@ -2,15 +2,40 @@ from abc import ABC, abstractmethod from contextlib import suppress +class MyError(Exception): + ... + + +class MySubError(MyError): + ... + + +class MyValueError(ValueError): + ... + + +class MyUserWarning(UserWarning): + ... + + +# Violation test cases with builtin errors: PLW0133 + + # Test case 1: Useless exception statement def func(): AssertionError("This is an assertion error") # PLW0133 + MyError("This is a custom error") # PLW0133 + MySubError("This is a custom error") # PLW0133 + MyValueError("This is a custom value error") # PLW0133 # Test case 2: Useless exception statement in try-except block def func(): try: Exception("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 except Exception as err: pass @@ -19,6 +44,9 @@ def func(): def func(): if True: RuntimeError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 # Test case 4: Useless exception statement in class @@ -26,12 +54,18 @@ def func(): class Class: def __init__(self): TypeError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 # Test case 5: Useless exception statement in function def func(): def inner(): IndexError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 inner() @@ -40,6 +74,9 @@ def func(): def func(): while True: KeyError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 # Test case 7: Useless exception statement in abstract class @@ -48,27 +85,58 @@ def func(): @abstractmethod def method(self): NotImplementedError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 # Test case 8: Useless exception statement inside context manager def func(): - with suppress(AttributeError): + with suppress(Exception): AttributeError("This is an exception") # PLW0133 + MyError("This is an exception") # PLW0133 + MySubError("This is an exception") # PLW0133 + MyValueError("This is an exception") # PLW0133 # Test case 9: Useless exception statement in parentheses def func(): 
(RuntimeError("This is an exception")) # PLW0133 + (MyError("This is an exception")) # PLW0133 + (MySubError("This is an exception")) # PLW0133 + (MyValueError("This is an exception")) # PLW0133 # Test case 10: Useless exception statement in continuation def func(): x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 + x = 1; (MyError("This is an exception")); y = 2 # PLW0133 + x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 + x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 # Test case 11: Useless warning statement def func(): - UserWarning("This is an assertion error") # PLW0133 + UserWarning("This is a user warning") # PLW0133 + MyUserWarning("This is a custom user warning") # PLW0133 + + +# Test case 12: Useless exception statement at module level +import builtins + +builtins.TypeError("still an exception even though it's an Attribute") # PLW0133 + +PythonFinalizationError("Added in Python 3.13") # PLW0133 + +MyError("This is an exception") # PLW0133 + +MySubError("This is an exception") # PLW0133 + +MyValueError("This is an exception") # PLW0133 + +UserWarning("This is a user warning") # PLW0133 + +MyUserWarning("This is a custom user warning") # PLW0133 # Non-violation test cases: PLW0133 @@ -119,10 +187,3 @@ def func(): def func(): with suppress(AttributeError): raise AttributeError("This is an exception") # OK - - -import builtins - -builtins.TypeError("still an exception even though it's an Attribute") - -PythonFinalizationError("Added in Python 3.13") diff --git a/crates/ruff_linter/resources/test/fixtures/ruff/suppressions.py b/crates/ruff_linter/resources/test/fixtures/ruff/suppressions.py new file mode 100644 index 0000000000..f8a3c882aa --- /dev/null +++ b/crates/ruff_linter/resources/test/fixtures/ruff/suppressions.py @@ -0,0 +1,88 @@ +def f(): + # These should both be ignored by the range suppression. + # ruff: disable[E741, F841] + I = 1 + # ruff: enable[E741, F841] + + +def f(): + # These should both be ignored by the implicit range suppression. + # Should also generate an "unmatched suppression" warning. + # ruff:disable[E741,F841] + I = 1 + + +def f(): + # Neither warning is ignored, and an "unmatched suppression" + # should be generated. + I = 1 + # ruff: enable[E741, F841] + + +def f(): + # One should be ignored by the range suppression, and + # the other logged to the user. + # ruff: disable[E741] + I = 1 + # ruff: enable[E741] + + +def f(): + # Test interleaved range suppressions. The first and last + # lines should each log a different warning, while the + # middle line should be completely silenced. + # ruff: disable[E741] + l = 0 + # ruff: disable[F841] + O = 1 + # ruff: enable[E741] + I = 2 + # ruff: enable[F841] + + +def f(): + # Neither of these are ignored and warnings are + # logged to user + # ruff: disable[E501] + I = 1 + # ruff: enable[E501] + + +def f(): + # These should both be ignored by the range suppression, + # and an unusued noqa diagnostic should be logged. 
+ # ruff:disable[E741,F841] + I = 1 # noqa: E741,F841 + # ruff:enable[E741,F841] + + +def f(): + # TODO: Duplicate codes should be counted as duplicate, not unused + # ruff: disable[F841, F841] + foo = 0 + + +def f(): + # Overlapping range suppressions, one should be marked as used, + # and the other should trigger an unused suppression diagnostic + # ruff: disable[F841] + # ruff: disable[F841] + foo = 0 + + +def f(): + # Multiple codes but only one is used + # ruff: disable[E741, F401, F841] + foo = 0 + + +def f(): + # Multiple codes but only two are used + # ruff: disable[E741, F401, F841] + I = 0 + + +def f(): + # Multiple codes but none are used + # ruff: disable[E741, F401, F841] + print("hello") diff --git a/crates/ruff_linter/src/checkers/ast/mod.rs b/crates/ruff_linter/src/checkers/ast/mod.rs index 4d4d7e9293..37422cbf18 100644 --- a/crates/ruff_linter/src/checkers/ast/mod.rs +++ b/crates/ruff_linter/src/checkers/ast/mod.rs @@ -437,6 +437,15 @@ impl<'a> Checker<'a> { } } + /// Returns the [`Tokens`] for the parsed source file. + /// + /// + /// Unlike [`Self::tokens`], this method always returns + /// the tokens for the current file, even when within a parsed type annotation. + pub(crate) fn source_tokens(&self) -> &'a Tokens { + self.parsed.tokens() + } + /// The [`Locator`] for the current file, which enables extraction of source code from byte /// offsets. pub(crate) const fn locator(&self) -> &'a Locator<'a> { diff --git a/crates/ruff_linter/src/checkers/noqa.rs b/crates/ruff_linter/src/checkers/noqa.rs index 7cf58a5def..f984ef3576 100644 --- a/crates/ruff_linter/src/checkers/noqa.rs +++ b/crates/ruff_linter/src/checkers/noqa.rs @@ -12,17 +12,20 @@ use crate::fix::edits::delete_comment; use crate::noqa::{ Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping, }; +use crate::preview::is_range_suppressions_enabled; use crate::registry::Rule; use crate::rule_redirects::get_redirect_target; use crate::rules::pygrep_hooks; use crate::rules::ruff; use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA}; use crate::settings::LinterSettings; +use crate::suppression::Suppressions; use crate::{Edit, Fix, Locator}; use super::ast::LintContext; /// RUF100 +#[expect(clippy::too_many_arguments)] pub(crate) fn check_noqa( context: &mut LintContext, path: &Path, @@ -31,6 +34,7 @@ pub(crate) fn check_noqa( noqa_line_for: &NoqaMapping, analyze_directives: bool, settings: &LinterSettings, + suppressions: &Suppressions, ) -> Vec { // Identify any codes that are globally exempted (within the current file). 
let file_noqa_directives = @@ -40,7 +44,7 @@ pub(crate) fn check_noqa( let mut noqa_directives = NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator); - if file_noqa_directives.is_empty() && noqa_directives.is_empty() { + if file_noqa_directives.is_empty() && noqa_directives.is_empty() && suppressions.is_empty() { return Vec::new(); } @@ -60,11 +64,19 @@ pub(crate) fn check_noqa( continue; } + // Apply file-level suppressions first if exemption.contains_secondary_code(code) { ignored_diagnostics.push(index); continue; } + // Apply ranged suppressions next + if is_range_suppressions_enabled(settings) && suppressions.check_diagnostic(diagnostic) { + ignored_diagnostics.push(index); + continue; + } + + // Apply end-of-line noqa suppressions last let noqa_offsets = diagnostic .parent() .into_iter() @@ -107,6 +119,9 @@ pub(crate) fn check_noqa( } } + // Diagnostics for unused/invalid range suppressions + suppressions.check_suppressions(context, locator); + // Enforce that the noqa directive was actually used (RUF100), unless RUF100 was itself // suppressed. if context.is_rule_enabled(Rule::UnusedNOQA) @@ -128,8 +143,13 @@ pub(crate) fn check_noqa( Directive::All(directive) => { if matches.is_empty() { let edit = delete_comment(directive.range(), locator); - let mut diagnostic = context - .report_diagnostic(UnusedNOQA { codes: None }, directive.range()); + let mut diagnostic = context.report_diagnostic( + UnusedNOQA { + codes: None, + kind: ruff::rules::UnusedNOQAKind::Noqa, + }, + directive.range(), + ); diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary); diagnostic.set_fix(Fix::safe_edit(edit)); } @@ -224,6 +244,7 @@ pub(crate) fn check_noqa( .map(|code| (*code).to_string()) .collect(), }), + kind: ruff::rules::UnusedNOQAKind::Noqa, }, directive.range(), ); diff --git a/crates/ruff_linter/src/fix/edits.rs b/crates/ruff_linter/src/fix/edits.rs index 05e90519eb..20f50d6e10 100644 --- a/crates/ruff_linter/src/fix/edits.rs +++ b/crates/ruff_linter/src/fix/edits.rs @@ -3,14 +3,13 @@ use anyhow::{Context, Result}; use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::{self, Tokens, parenthesized_range}; use ruff_python_ast::{self as ast, Arguments, ExceptHandler, Expr, ExprList, Parameters, Stmt}; use ruff_python_codegen::Stylist; use ruff_python_index::Indexer; use ruff_python_trivia::textwrap::dedent_to; use ruff_python_trivia::{ - CommentRanges, PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content, - is_python_whitespace, + PythonWhitespace, SimpleTokenKind, SimpleTokenizer, has_leading_content, is_python_whitespace, }; use ruff_source_file::{LineRanges, NewlineWithTrailingNewline, UniversalNewlines}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; @@ -209,7 +208,7 @@ pub(crate) fn remove_argument( arguments: &Arguments, parentheses: Parentheses, source: &str, - comment_ranges: &CommentRanges, + tokens: &Tokens, ) -> Result { // Partition into arguments before and after the argument to remove. 
let (before, after): (Vec<_>, Vec<_>) = arguments @@ -224,7 +223,7 @@ pub(crate) fn remove_argument( .context("Unable to find argument")?; let parenthesized_range = - parenthesized_range(arg.value().into(), arguments.into(), comment_ranges, source) + token::parenthesized_range(arg.value().into(), arguments.into(), tokens) .unwrap_or(arg.range()); if !after.is_empty() { @@ -270,25 +269,14 @@ pub(crate) fn remove_argument( /// /// The new argument will be inserted before the first existing keyword argument in `arguments`, if /// there are any present. Otherwise, the new argument is added to the end of the argument list. -pub(crate) fn add_argument( - argument: &str, - arguments: &Arguments, - comment_ranges: &CommentRanges, - source: &str, -) -> Edit { +pub(crate) fn add_argument(argument: &str, arguments: &Arguments, tokens: &Tokens) -> Edit { if let Some(ast::Keyword { range, value, .. }) = arguments.keywords.first() { - let keyword = parenthesized_range(value.into(), arguments.into(), comment_ranges, source) - .unwrap_or(*range); + let keyword = parenthesized_range(value.into(), arguments.into(), tokens).unwrap_or(*range); Edit::insertion(format!("{argument}, "), keyword.start()) } else if let Some(last) = arguments.arguments_source_order().last() { // Case 1: existing arguments, so append after the last argument. - let last = parenthesized_range( - last.value().into(), - arguments.into(), - comment_ranges, - source, - ) - .unwrap_or(last.range()); + let last = parenthesized_range(last.value().into(), arguments.into(), tokens) + .unwrap_or(last.range()); Edit::insertion(format!(", {argument}"), last.end()) } else { // Case 2: no arguments. Add argument, without any trailing comma. diff --git a/crates/ruff_linter/src/linter.rs b/crates/ruff_linter/src/linter.rs index 719d5ac9c5..08c0417020 100644 --- a/crates/ruff_linter/src/linter.rs +++ b/crates/ruff_linter/src/linter.rs @@ -32,6 +32,7 @@ use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule}; use crate::settings::types::UnsafeFixes; use crate::settings::{LinterSettings, TargetVersion, flags}; use crate::source_kind::SourceKind; +use crate::suppression::Suppressions; use crate::{Locator, directives, fs}; pub(crate) mod float; @@ -128,6 +129,7 @@ pub fn check_path( source_type: PySourceType, parsed: &Parsed, target_version: TargetVersion, + suppressions: &Suppressions, ) -> Vec { // Aggregate all diagnostics. let mut context = LintContext::new(path, locator.contents(), settings); @@ -339,6 +341,7 @@ pub fn check_path( &directives.noqa_line_for, parsed.has_valid_syntax(), settings, + suppressions, ); if noqa.is_enabled() { for index in ignored.iter().rev() { @@ -400,6 +403,9 @@ pub fn add_noqa_to_path( &indexer, ); + // Parse range suppression comments + let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); + // Generate diagnostics, ignoring any existing `noqa` directives. let diagnostics = check_path( path, @@ -414,6 +420,7 @@ pub fn add_noqa_to_path( source_type, &parsed, target_version, + &suppressions, ); // Add any missing `# noqa` pragmas. @@ -427,6 +434,7 @@ pub fn add_noqa_to_path( &directives.noqa_line_for, stylist.line_ending(), reason, + &suppressions, ) } @@ -461,6 +469,9 @@ pub fn lint_only( &indexer, ); + // Parse range suppression comments + let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); + // Generate diagnostics. 
let diagnostics = check_path( path, @@ -475,6 +486,7 @@ pub fn lint_only( source_type, &parsed, target_version, + &suppressions, ); LinterResult { @@ -566,6 +578,9 @@ pub fn lint_fix<'a>( &indexer, ); + // Parse range suppression comments + let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); + // Generate diagnostics. let diagnostics = check_path( path, @@ -580,6 +595,7 @@ pub fn lint_fix<'a>( source_type, &parsed, target_version, + &suppressions, ); if iterations == 0 { @@ -769,6 +785,7 @@ mod tests { use crate::registry::Rule; use crate::settings::LinterSettings; use crate::source_kind::SourceKind; + use crate::suppression::Suppressions; use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet}; use crate::{Locator, assert_diagnostics, directives, settings}; @@ -944,6 +961,7 @@ mod tests { &locator, &indexer, ); + let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); let mut diagnostics = check_path( path, None, @@ -957,6 +975,7 @@ mod tests { source_type, &parsed, target_version, + &suppressions, ); diagnostics.sort_by(Diagnostic::ruff_start_ordering); diagnostics diff --git a/crates/ruff_linter/src/noqa.rs b/crates/ruff_linter/src/noqa.rs index da9535817e..a3b5b6133d 100644 --- a/crates/ruff_linter/src/noqa.rs +++ b/crates/ruff_linter/src/noqa.rs @@ -20,12 +20,14 @@ use crate::Locator; use crate::fs::relativize_path; use crate::registry::Rule; use crate::rule_redirects::get_redirect_target; +use crate::suppression::Suppressions; /// Generates an array of edits that matches the length of `messages`. /// Each potential edit in the array is paired, in order, with the associated diagnostic. /// Each edit will add a `noqa` comment to the appropriate line in the source to hide /// the diagnostic. These edits may conflict with each other and should not be applied /// simultaneously. 
+#[expect(clippy::too_many_arguments)] pub fn generate_noqa_edits( path: &Path, diagnostics: &[Diagnostic], @@ -34,11 +36,19 @@ pub fn generate_noqa_edits( external: &[String], noqa_line_for: &NoqaMapping, line_ending: LineEnding, + suppressions: &Suppressions, ) -> Vec> { let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path); let exemption = FileExemption::from(&file_directives); let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); - let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); + let comments = find_noqa_comments( + diagnostics, + locator, + &exemption, + &directives, + noqa_line_for, + suppressions, + ); build_noqa_edits_by_diagnostic(comments, locator, line_ending, None) } @@ -725,6 +735,7 @@ pub(crate) fn add_noqa( noqa_line_for: &NoqaMapping, line_ending: LineEnding, reason: Option<&str>, + suppressions: &Suppressions, ) -> Result { let (count, output) = add_noqa_inner( path, @@ -735,6 +746,7 @@ pub(crate) fn add_noqa( noqa_line_for, line_ending, reason, + suppressions, ); fs::write(path, output)?; @@ -751,6 +763,7 @@ fn add_noqa_inner( noqa_line_for: &NoqaMapping, line_ending: LineEnding, reason: Option<&str>, + suppressions: &Suppressions, ) -> (usize, String) { let mut count = 0; @@ -760,7 +773,14 @@ fn add_noqa_inner( let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); - let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); + let comments = find_noqa_comments( + diagnostics, + locator, + &exemption, + &directives, + noqa_line_for, + suppressions, + ); let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason); @@ -859,6 +879,7 @@ fn find_noqa_comments<'a>( exemption: &'a FileExemption, directives: &'a NoqaDirectives, noqa_line_for: &NoqaMapping, + suppressions: &'a Suppressions, ) -> Vec>> { // List of noqa comments, ordered to match up with `messages` let mut comments_by_line: Vec>> = vec![]; @@ -875,6 +896,12 @@ fn find_noqa_comments<'a>( continue; } + // Apply ranged suppressions next + if suppressions.check_diagnostic(message) { + comments_by_line.push(None); + continue; + } + // Is the violation ignored by a `noqa` directive on the parent line? 
if let Some(parent) = message.parent() { if let Some(directive_line) = @@ -1253,6 +1280,7 @@ mod tests { use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon}; use crate::rules::pyflakes::rules::UnusedVariable; use crate::rules::pyupgrade::rules::PrintfStringFormatting; + use crate::suppression::Suppressions; use crate::{Edit, Violation}; use crate::{Locator, generate_noqa_edits}; @@ -2848,6 +2876,7 @@ mod tests { &noqa_line_for, LineEnding::Lf, None, + &Suppressions::default(), ); assert_eq!(count, 0); assert_eq!(output, format!("{contents}")); @@ -2872,6 +2901,7 @@ mod tests { &noqa_line_for, LineEnding::Lf, None, + &Suppressions::default(), ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: F841\n"); @@ -2903,6 +2933,7 @@ mod tests { &noqa_line_for, LineEnding::Lf, None, + &Suppressions::default(), ); assert_eq!(count, 1); assert_eq!(output, "x = 1 # noqa: E741, F841\n"); @@ -2934,6 +2965,7 @@ mod tests { &noqa_line_for, LineEnding::Lf, None, + &Suppressions::default(), ); assert_eq!(count, 0); assert_eq!(output, "x = 1 # noqa"); @@ -2956,6 +2988,7 @@ print( let messages = [PrintfStringFormatting .into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)]; let comment_ranges = CommentRanges::default(); + let suppressions = Suppressions::default(); let edits = generate_noqa_edits( path, &messages, @@ -2964,6 +2997,7 @@ print( &[], &noqa_line_for, LineEnding::Lf, + &suppressions, ); assert_eq!( edits, @@ -2987,6 +3021,7 @@ bar = [UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)]; let noqa_line_for = NoqaMapping::default(); let comment_ranges = CommentRanges::default(); + let suppressions = Suppressions::default(); let edits = generate_noqa_edits( path, &messages, @@ -2995,6 +3030,7 @@ bar = &[], &noqa_line_for, LineEnding::Lf, + &suppressions, ); assert_eq!( edits, diff --git a/crates/ruff_linter/src/preview.rs b/crates/ruff_linter/src/preview.rs index 52be730545..5c43172053 100644 --- a/crates/ruff_linter/src/preview.rs +++ b/crates/ruff_linter/src/preview.rs @@ -9,6 +9,11 @@ use crate::settings::LinterSettings; // Rule-specific behavior +// https://github.com/astral-sh/ruff/pull/21382 +pub(crate) const fn is_custom_exception_checking_enabled(settings: &LinterSettings) -> bool { + settings.preview.is_enabled() +} + // https://github.com/astral-sh/ruff/pull/15541 pub(crate) const fn is_suspicious_function_reference_enabled(settings: &LinterSettings) -> bool { settings.preview.is_enabled() @@ -286,3 +291,8 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled( ) -> bool { settings.preview.is_enabled() } + +// https://github.com/astral-sh/ruff/pull/21623 +pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool { + settings.preview.is_enabled() +} diff --git a/crates/ruff_linter/src/rules/fastapi/rules/fastapi_redundant_response_model.rs b/crates/ruff_linter/src/rules/fastapi/rules/fastapi_redundant_response_model.rs index 440be901a7..d31ffbf61e 100644 --- a/crates/ruff_linter/src/rules/fastapi/rules/fastapi_redundant_response_model.rs +++ b/crates/ruff_linter/src/rules/fastapi/rules/fastapi_redundant_response_model.rs @@ -91,8 +91,8 @@ pub(crate) fn fastapi_redundant_response_model(checker: &Checker, function_def: response_model_arg, &call.arguments, Parentheses::Preserve, - checker.locator().contents(), - checker.comment_ranges(), + checker.source(), + checker.tokens(), ) .map(Fix::unsafe_edit) }); diff --git 
a/crates/ruff_linter/src/rules/flake8_bugbear/rules/map_without_explicit_strict.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/map_without_explicit_strict.rs index cd268f610b..04e9640e09 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/map_without_explicit_strict.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/map_without_explicit_strict.rs @@ -74,12 +74,7 @@ pub(crate) fn map_without_explicit_strict(checker: &Checker, call: &ast::ExprCal checker .report_diagnostic(MapWithoutExplicitStrict, call.range()) .set_fix(Fix::applicable_edit( - add_argument( - "strict=False", - &call.arguments, - checker.comment_ranges(), - checker.locator().contents(), - ), + add_argument("strict=False", &call.arguments, checker.tokens()), Applicability::Unsafe, )); } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/mutable_argument_default.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/mutable_argument_default.rs index 94098831b8..c0b1b5e840 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/mutable_argument_default.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/mutable_argument_default.rs @@ -3,7 +3,7 @@ use std::fmt::Write; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::is_docstring_stmt; use ruff_python_ast::name::QualifiedName; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr, ParameterWithDefault}; use ruff_python_semantic::SemanticModel; use ruff_python_semantic::analyze::function_type::is_stub; @@ -166,12 +166,7 @@ fn move_initialization( return None; } - let range = match parenthesized_range( - default.into(), - parameter.into(), - checker.comment_ranges(), - checker.source(), - ) { + let range = match parenthesized_range(default.into(), parameter.into(), checker.tokens()) { Some(range) => range, None => default.range(), }; @@ -194,13 +189,8 @@ fn move_initialization( "{} = {}", parameter.parameter.name(), locator.slice( - parenthesized_range( - default.into(), - parameter.into(), - checker.comment_ranges(), - checker.source() - ) - .unwrap_or(default.range()) + parenthesized_range(default.into(), parameter.into(), checker.tokens()) + .unwrap_or(default.range()) ) ); } else { diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs index f737e781ef..a21cf9a30e 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/no_explicit_stacklevel.rs @@ -92,12 +92,7 @@ pub(crate) fn no_explicit_stacklevel(checker: &Checker, call: &ast::ExprCall) { } let mut diagnostic = checker.report_diagnostic(NoExplicitStacklevel, call.func.range()); - let edit = add_argument( - "stacklevel=2", - &call.arguments, - checker.comment_ranges(), - checker.locator().contents(), - ); + let edit = add_argument("stacklevel=2", &call.arguments, checker.tokens()); diagnostic.set_fix(Fix::unsafe_edit(edit)); } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs b/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs index 136715b981..db71c7b2fb 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs +++ b/crates/ruff_linter/src/rules/flake8_bugbear/rules/zip_without_explicit_strict.rs @@ -70,12 +70,7 @@ pub(crate) fn 
zip_without_explicit_strict(checker: &Checker, call: &ast::ExprCal checker .report_diagnostic(ZipWithoutExplicitStrict, call.range()) .set_fix(Fix::applicable_edit( - add_argument( - "strict=False", - &call.arguments, - checker.comment_ranges(), - checker.locator().contents(), - ), + add_argument("strict=False", &call.arguments, checker.tokens()), Applicability::Unsafe, )); } diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B006_B006_B008.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B006_B006_B008.py.snap index e3f42e2da7..f4113617b5 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B006_B006_B008.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B006_B006_B008.py.snap @@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:239:20 + --> B006_B008.py:242:20 | -237 | # B006 and B008 -238 | # We should handle arbitrary nesting of these B008. -239 | def nested_combo(a=[float(3), dt.datetime.now()]): +240 | # B006 and B008 +241 | # We should handle arbitrary nesting of these B008. +242 | def nested_combo(a=[float(3), dt.datetime.now()]): | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -240 | pass +243 | pass | help: Replace with `None`; initialize within function -236 | -237 | # B006 and B008 -238 | # We should handle arbitrary nesting of these B008. +239 | +240 | # B006 and B008 +241 | # We should handle arbitrary nesting of these B008. - def nested_combo(a=[float(3), dt.datetime.now()]): -239 + def nested_combo(a=None): -240 | pass -241 | -242 | +242 + def nested_combo(a=None): +243 | pass +244 | +245 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:276:27 + --> B006_B008.py:279:27 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], +278 | def mutable_annotations( +279 | a: list[int] | None = [], | ^^ -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | help: Replace with `None`; initialize within function -273 | -274 | -275 | def mutable_annotations( +276 | +277 | +278 | def mutable_annotations( - a: list[int] | None = [], -276 + a: list[int] | None = None, -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 + a: list[int] | None = None, +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:277:35 + --> B006_B008.py:280:35 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, +278 | def mutable_annotations( +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, | ^^ -278 | c: Annotated[Union[Set[str], abc.Sized], 
"annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | help: Replace with `None`; initialize within function -274 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], +277 | +278 | def mutable_annotations( +279 | a: list[int] | None = [], - b: Optional[Dict[int, int]] = {}, -277 + b: Optional[Dict[int, int]] = None, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): +280 + b: Optional[Dict[int, int]] = None, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:278:62 + --> B006_B008.py:281:62 | -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | ^^^^^ -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): | help: Replace with `None`; initialize within function -275 | def mutable_annotations( -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, +278 | def mutable_annotations( +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, - c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): -281 | pass +281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): +284 | pass note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:279:80 + --> B006_B008.py:282:80 | -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | ^^^^^ -280 | ): -281 | pass +283 | ): +284 | pass | help: Replace with `None`; initialize within function -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None, -280 | ): -281 | pass -282 | +282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], 
"annotation"] = None, +283 | ): +284 | pass +285 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:284:52 + --> B006_B008.py:287:52 | -284 | def single_line_func_wrong(value: dict[str, str] = {}): +287 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -285 | """Docstring""" +288 | """Docstring""" | help: Replace with `None`; initialize within function -281 | pass -282 | -283 | - - def single_line_func_wrong(value: dict[str, str] = {}): -284 + def single_line_func_wrong(value: dict[str, str] = None): -285 | """Docstring""" +284 | pass +285 | 286 | -287 | + - def single_line_func_wrong(value: dict[str, str] = {}): +287 + def single_line_func_wrong(value: dict[str, str] = None): +288 | """Docstring""" +289 | +290 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:288:52 + --> B006_B008.py:291:52 | -288 | def single_line_func_wrong(value: dict[str, str] = {}): +291 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -289 | """Docstring""" -290 | ... +292 | """Docstring""" +293 | ... | help: Replace with `None`; initialize within function -285 | """Docstring""" -286 | -287 | +288 | """Docstring""" +289 | +290 | - def single_line_func_wrong(value: dict[str, str] = {}): -288 + def single_line_func_wrong(value: dict[str, str] = None): -289 | """Docstring""" -290 | ... -291 | +291 + def single_line_func_wrong(value: dict[str, str] = None): +292 | """Docstring""" +293 | ... +294 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:293:52 + --> B006_B008.py:296:52 | -293 | def single_line_func_wrong(value: dict[str, str] = {}): +296 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -294 | """Docstring"""; ... +297 | """Docstring"""; ... | help: Replace with `None`; initialize within function -290 | ... -291 | -292 | - - def single_line_func_wrong(value: dict[str, str] = {}): -293 + def single_line_func_wrong(value: dict[str, str] = None): -294 | """Docstring"""; ... +293 | ... +294 | 295 | -296 | + - def single_line_func_wrong(value: dict[str, str] = {}): +296 + def single_line_func_wrong(value: dict[str, str] = None): +297 | """Docstring"""; ... +298 | +299 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:297:52 + --> B006_B008.py:300:52 | -297 | def single_line_func_wrong(value: dict[str, str] = {}): +300 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -298 | """Docstring"""; \ -299 | ... +301 | """Docstring"""; \ +302 | ... | help: Replace with `None`; initialize within function -294 | """Docstring"""; ... -295 | -296 | +297 | """Docstring"""; ... +298 | +299 | - def single_line_func_wrong(value: dict[str, str] = {}): -297 + def single_line_func_wrong(value: dict[str, str] = None): -298 | """Docstring"""; \ -299 | ... -300 | +300 + def single_line_func_wrong(value: dict[str, str] = None): +301 | """Docstring"""; \ +302 | ... 
+303 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:302:52 + --> B006_B008.py:305:52 | -302 | def single_line_func_wrong(value: dict[str, str] = { +305 | def single_line_func_wrong(value: dict[str, str] = { | ____________________________________________________^ -303 | | # This is a comment -304 | | }): +306 | | # This is a comment +307 | | }): | |_^ -305 | """Docstring""" +308 | """Docstring""" | help: Replace with `None`; initialize within function -299 | ... -300 | -301 | +302 | ... +303 | +304 | - def single_line_func_wrong(value: dict[str, str] = { - # This is a comment - }): -302 + def single_line_func_wrong(value: dict[str, str] = None): -303 | """Docstring""" -304 | -305 | +305 + def single_line_func_wrong(value: dict[str, str] = None): +306 | """Docstring""" +307 | +308 | note: This is an unsafe fix and may change runtime behavior B006 Do not use mutable data structures for argument defaults - --> B006_B008.py:308:52 + --> B006_B008.py:311:52 | -308 | def single_line_func_wrong(value: dict[str, str] = {}) \ +311 | def single_line_func_wrong(value: dict[str, str] = {}) \ | ^^ -309 | : \ -310 | """Docstring""" +312 | : \ +313 | """Docstring""" | help: Replace with `None`; initialize within function B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:313:52 + --> B006_B008.py:316:52 | -313 | def single_line_func_wrong(value: dict[str, str] = {}): +316 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -314 | """Docstring without newline""" +317 | """Docstring without newline""" | help: Replace with `None`; initialize within function -310 | """Docstring""" -311 | -312 | +313 | """Docstring""" +314 | +315 | - def single_line_func_wrong(value: dict[str, str] = {}): -313 + def single_line_func_wrong(value: dict[str, str] = None): -314 | """Docstring without newline""" +316 + def single_line_func_wrong(value: dict[str, str] = None): +317 | """Docstring without newline""" note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B008_B006_B008.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B008_B006_B008.py.snap index 49da306103..edaaadb944 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B008_B006_B008.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__B008_B006_B008.py.snap @@ -53,39 +53,39 @@ B008 Do not perform function call in argument defaults; instead, perform the cal | B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable - --> B006_B008.py:239:31 + --> B006_B008.py:242:31 | -237 | # B006 and B008 -238 | # We should handle arbitrary nesting of these B008. -239 | def nested_combo(a=[float(3), dt.datetime.now()]): +240 | # B006 and B008 +241 | # We should handle arbitrary nesting of these B008. 
+242 | def nested_combo(a=[float(3), dt.datetime.now()]): | ^^^^^^^^^^^^^^^^^ -240 | pass +243 | pass | B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable - --> B006_B008.py:245:22 + --> B006_B008.py:248:22 | -243 | # Don't flag nested B006 since we can't guarantee that -244 | # it isn't made mutable by the outer operation. -245 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])): +246 | # Don't flag nested B006 since we can't guarantee that +247 | # it isn't made mutable by the outer operation. +248 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])): | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -246 | pass +249 | pass | B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable - --> B006_B008.py:250:19 + --> B006_B008.py:253:19 | -249 | # B008-ception. -250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): +252 | # B008-ception. +253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -251 | pass +254 | pass | B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable - --> B006_B008.py:250:37 + --> B006_B008.py:253:37 | -249 | # B008-ception. -250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): +252 | # B008-ception. +253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): | ^^^^^^^^^^^^^^^^^ -251 | pass +254 | pass | diff --git a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__preview__B006_B006_B008.py.snap b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__preview__B006_B006_B008.py.snap index e3f42e2da7..f4113617b5 100644 --- a/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__preview__B006_B006_B008.py.snap +++ b/crates/ruff_linter/src/rules/flake8_bugbear/snapshots/ruff_linter__rules__flake8_bugbear__tests__preview__B006_B006_B008.py.snap @@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:239:20 + --> B006_B008.py:242:20 | -237 | # B006 and B008 -238 | # We should handle arbitrary nesting of these B008. -239 | def nested_combo(a=[float(3), dt.datetime.now()]): +240 | # B006 and B008 +241 | # We should handle arbitrary nesting of these B008. +242 | def nested_combo(a=[float(3), dt.datetime.now()]): | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -240 | pass +243 | pass | help: Replace with `None`; initialize within function -236 | -237 | # B006 and B008 -238 | # We should handle arbitrary nesting of these B008. +239 | +240 | # B006 and B008 +241 | # We should handle arbitrary nesting of these B008. 
- def nested_combo(a=[float(3), dt.datetime.now()]): -239 + def nested_combo(a=None): -240 | pass -241 | -242 | +242 + def nested_combo(a=None): +243 | pass +244 | +245 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:276:27 + --> B006_B008.py:279:27 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], +278 | def mutable_annotations( +279 | a: list[int] | None = [], | ^^ -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | help: Replace with `None`; initialize within function -273 | -274 | -275 | def mutable_annotations( +276 | +277 | +278 | def mutable_annotations( - a: list[int] | None = [], -276 + a: list[int] | None = None, -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 + a: list[int] | None = None, +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:277:35 + --> B006_B008.py:280:35 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, +278 | def mutable_annotations( +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, | ^^ -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | help: Replace with `None`; initialize within function -274 | -275 | def mutable_annotations( -276 | a: list[int] | None = [], +277 | +278 | def mutable_annotations( +279 | a: list[int] | None = [], - b: Optional[Dict[int, int]] = {}, -277 + b: Optional[Dict[int, int]] = None, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): +280 + b: Optional[Dict[int, int]] = None, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:278:62 + --> B006_B008.py:281:62 | -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | ^^^^^ -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): | help: Replace with `None`; initialize within function -275 | def mutable_annotations( -276 | a: list[int] | None = [], -277 | b: 
Optional[Dict[int, int]] = {}, +278 | def mutable_annotations( +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, - c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -280 | ): -281 | pass +281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +283 | ): +284 | pass note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:279:80 + --> B006_B008.py:282:80 | -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), | ^^^^^ -280 | ): -281 | pass +283 | ): +284 | pass | help: Replace with `None`; initialize within function -276 | a: list[int] | None = [], -277 | b: Optional[Dict[int, int]] = {}, -278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), +279 | a: list[int] | None = [], +280 | b: Optional[Dict[int, int]] = {}, +281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), -279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None, -280 | ): -281 | pass -282 | +282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None, +283 | ): +284 | pass +285 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:284:52 + --> B006_B008.py:287:52 | -284 | def single_line_func_wrong(value: dict[str, str] = {}): +287 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -285 | """Docstring""" +288 | """Docstring""" | help: Replace with `None`; initialize within function -281 | pass -282 | -283 | - - def single_line_func_wrong(value: dict[str, str] = {}): -284 + def single_line_func_wrong(value: dict[str, str] = None): -285 | """Docstring""" +284 | pass +285 | 286 | -287 | + - def single_line_func_wrong(value: dict[str, str] = {}): +287 + def single_line_func_wrong(value: dict[str, str] = None): +288 | """Docstring""" +289 | +290 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:288:52 + --> B006_B008.py:291:52 | -288 | def single_line_func_wrong(value: dict[str, str] = {}): +291 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -289 | """Docstring""" -290 | ... +292 | """Docstring""" +293 | ... | help: Replace with `None`; initialize within function -285 | """Docstring""" -286 | -287 | +288 | """Docstring""" +289 | +290 | - def single_line_func_wrong(value: dict[str, str] = {}): -288 + def single_line_func_wrong(value: dict[str, str] = None): -289 | """Docstring""" -290 | ... -291 | +291 + def single_line_func_wrong(value: dict[str, str] = None): +292 | """Docstring""" +293 | ... 
+294 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:293:52 + --> B006_B008.py:296:52 | -293 | def single_line_func_wrong(value: dict[str, str] = {}): +296 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -294 | """Docstring"""; ... +297 | """Docstring"""; ... | help: Replace with `None`; initialize within function -290 | ... -291 | -292 | - - def single_line_func_wrong(value: dict[str, str] = {}): -293 + def single_line_func_wrong(value: dict[str, str] = None): -294 | """Docstring"""; ... +293 | ... +294 | 295 | -296 | + - def single_line_func_wrong(value: dict[str, str] = {}): +296 + def single_line_func_wrong(value: dict[str, str] = None): +297 | """Docstring"""; ... +298 | +299 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:297:52 + --> B006_B008.py:300:52 | -297 | def single_line_func_wrong(value: dict[str, str] = {}): +300 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -298 | """Docstring"""; \ -299 | ... +301 | """Docstring"""; \ +302 | ... | help: Replace with `None`; initialize within function -294 | """Docstring"""; ... -295 | -296 | +297 | """Docstring"""; ... +298 | +299 | - def single_line_func_wrong(value: dict[str, str] = {}): -297 + def single_line_func_wrong(value: dict[str, str] = None): -298 | """Docstring"""; \ -299 | ... -300 | +300 + def single_line_func_wrong(value: dict[str, str] = None): +301 | """Docstring"""; \ +302 | ... +303 | note: This is an unsafe fix and may change runtime behavior B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:302:52 + --> B006_B008.py:305:52 | -302 | def single_line_func_wrong(value: dict[str, str] = { +305 | def single_line_func_wrong(value: dict[str, str] = { | ____________________________________________________^ -303 | | # This is a comment -304 | | }): +306 | | # This is a comment +307 | | }): | |_^ -305 | """Docstring""" +308 | """Docstring""" | help: Replace with `None`; initialize within function -299 | ... -300 | -301 | +302 | ... 
+303 | +304 | - def single_line_func_wrong(value: dict[str, str] = { - # This is a comment - }): -302 + def single_line_func_wrong(value: dict[str, str] = None): -303 | """Docstring""" -304 | -305 | +305 + def single_line_func_wrong(value: dict[str, str] = None): +306 | """Docstring""" +307 | +308 | note: This is an unsafe fix and may change runtime behavior B006 Do not use mutable data structures for argument defaults - --> B006_B008.py:308:52 + --> B006_B008.py:311:52 | -308 | def single_line_func_wrong(value: dict[str, str] = {}) \ +311 | def single_line_func_wrong(value: dict[str, str] = {}) \ | ^^ -309 | : \ -310 | """Docstring""" +312 | : \ +313 | """Docstring""" | help: Replace with `None`; initialize within function B006 [*] Do not use mutable data structures for argument defaults - --> B006_B008.py:313:52 + --> B006_B008.py:316:52 | -313 | def single_line_func_wrong(value: dict[str, str] = {}): +316 | def single_line_func_wrong(value: dict[str, str] = {}): | ^^ -314 | """Docstring without newline""" +317 | """Docstring without newline""" | help: Replace with `None`; initialize within function -310 | """Docstring""" -311 | -312 | +313 | """Docstring""" +314 | +315 | - def single_line_func_wrong(value: dict[str, str] = {}): -313 + def single_line_func_wrong(value: dict[str, str] = None): -314 | """Docstring without newline""" +316 + def single_line_func_wrong(value: dict[str, str] = None): +317 | """Docstring without newline""" note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_list.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_list.rs index 5fdc1a37a3..d271a13792 100644 --- a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_list.rs +++ b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_list.rs @@ -2,8 +2,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; use ruff_python_ast::ExprGenerator; use ruff_python_ast::comparable::ComparableExpr; -use ruff_python_ast::parenthesize::parenthesized_range; use ruff_python_ast::token::TokenKind; +use ruff_python_ast::token::parenthesized_range; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::ast::Checker; @@ -142,13 +142,9 @@ pub(crate) fn unnecessary_generator_list(checker: &Checker, call: &ast::ExprCall if *parenthesized { // The generator's range will include the innermost parentheses, but it could be // surrounded by additional parentheses. - let range = parenthesized_range( - argument.into(), - (&call.arguments).into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(argument.range()); + let range = + parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens()) + .unwrap_or(argument.range()); // The generator always parenthesizes the expression; trim the parentheses. 
let generator = checker.generator().expr(argument); diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_set.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_set.rs index 0560935bae..05a1c523cf 100644 --- a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_set.rs +++ b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_generator_set.rs @@ -2,8 +2,8 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; use ruff_python_ast::ExprGenerator; use ruff_python_ast::comparable::ComparableExpr; -use ruff_python_ast::parenthesize::parenthesized_range; use ruff_python_ast::token::TokenKind; +use ruff_python_ast::token::parenthesized_range; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::ast::Checker; @@ -147,13 +147,9 @@ pub(crate) fn unnecessary_generator_set(checker: &Checker, call: &ast::ExprCall) if *parenthesized { // The generator's range will include the innermost parentheses, but it could be // surrounded by additional parentheses. - let range = parenthesized_range( - argument.into(), - (&call.arguments).into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(argument.range()); + let range = + parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens()) + .unwrap_or(argument.range()); // The generator always parenthesizes the expression; trim the parentheses. let generator = checker.generator().expr(argument); diff --git a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_list_comprehension_set.rs b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_list_comprehension_set.rs index b4fda738e2..f6699500af 100644 --- a/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_list_comprehension_set.rs +++ b/crates/ruff_linter/src/rules/flake8_comprehensions/rules/unnecessary_list_comprehension_set.rs @@ -1,7 +1,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; -use ruff_python_ast::parenthesize::parenthesized_range; use ruff_python_ast::token::TokenKind; +use ruff_python_ast::token::parenthesized_range; use ruff_text_size::{Ranged, TextRange, TextSize}; use crate::checkers::ast::Checker; @@ -89,13 +89,9 @@ pub(crate) fn unnecessary_list_comprehension_set(checker: &Checker, call: &ast:: // If the list comprehension is parenthesized, remove the parentheses in addition to // removing the brackets. 
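These comprehension rules (C400, C401, C403) only change how surrounding parentheses are located; as a reminder of the inputs those trimmed ranges have to cover, a small invented snippet (not taken from the test fixtures):

```python
nums = [1, 2, 3]

# C401: a generator passed to set(), wrapped in an extra pair of parentheses;
# the fix trims the call's parentheses as well as the redundant outer ones.
unique = set((n % 2 for n in nums))

# C403: a list comprehension passed to set(); when it is parenthesized, the fix
# removes the parentheses in addition to the brackets.
unique_squares = set(([n * n for n in nums]))

# The comprehension forms the fixes converge on:
unique = {n % 2 for n in nums}
unique_squares = {n * n for n in nums}
```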
- let replacement_range = parenthesized_range( - argument.into(), - (&call.arguments).into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or_else(|| argument.range()); + let replacement_range = + parenthesized_range(argument.into(), (&call.arguments).into(), checker.tokens()) + .unwrap_or_else(|| argument.range()); let span = argument.range().add_start(one).sub_end(one); let replacement = diff --git a/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/explicit.rs b/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/explicit.rs index 8edb7b46ad..e742b8922d 100644 --- a/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/explicit.rs +++ b/crates/ruff_linter/src/rules/flake8_implicit_str_concat/rules/explicit.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr, Operator}; use ruff_python_trivia::is_python_whitespace; use ruff_source_file::LineRanges; @@ -88,13 +88,7 @@ pub(crate) fn explicit(checker: &Checker, expr: &Expr) { checker.report_diagnostic(ExplicitStringConcatenation, expr.range()); let is_parenthesized = |expr: &Expr| { - parenthesized_range( - expr.into(), - bin_op.into(), - checker.comment_ranges(), - checker.source(), - ) - .is_some() + parenthesized_range(expr.into(), bin_op.into(), checker.tokens()).is_some() }; // If either `left` or `right` is parenthesized, generating // a fix would be too involved. Just report the diagnostic. diff --git a/crates/ruff_linter/src/rules/flake8_logging/rules/exc_info_outside_except_handler.rs b/crates/ruff_linter/src/rules/flake8_logging/rules/exc_info_outside_except_handler.rs index 1172e40893..7d18897708 100644 --- a/crates/ruff_linter/src/rules/flake8_logging/rules/exc_info_outside_except_handler.rs +++ b/crates/ruff_linter/src/rules/flake8_logging/rules/exc_info_outside_except_handler.rs @@ -111,7 +111,6 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall } let arguments = &call.arguments; - let source = checker.source(); let mut diagnostic = checker.report_diagnostic(ExcInfoOutsideExceptHandler, exc_info.range); @@ -120,8 +119,8 @@ pub(crate) fn exc_info_outside_except_handler(checker: &Checker, call: &ExprCall exc_info, arguments, Parentheses::Preserve, - source, - checker.comment_ranges(), + checker.source(), + checker.tokens(), )?; Ok(Fix::unsafe_edit(edit)) }); diff --git a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs index cc436d258a..1b3faba504 100644 --- a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs +++ b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_dict_kwargs.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use rustc_hash::{FxBuildHasher, FxHashSet}; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr}; use ruff_python_stdlib::identifiers::is_identifier; use ruff_text_size::Ranged; @@ -129,8 +129,8 @@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) { keyword, &call.arguments, Parentheses::Preserve, - checker.locator().contents(), - checker.comment_ranges(), + checker.source(), + checker.tokens(), ) .map(Fix::safe_edit) }); @@ -158,8 +158,7 
@@ pub(crate) fn unnecessary_dict_kwargs(checker: &Checker, call: &ast::ExprCall) { parenthesized_range( value.into(), dict.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens() ) .unwrap_or(value.range()) ) diff --git a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_range_start.rs b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_range_start.rs index 6e6b10b206..e12af6e069 100644 --- a/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_range_start.rs +++ b/crates/ruff_linter/src/rules/flake8_pie/rules/unnecessary_range_start.rs @@ -73,11 +73,11 @@ pub(crate) fn unnecessary_range_start(checker: &Checker, call: &ast::ExprCall) { let mut diagnostic = checker.report_diagnostic(UnnecessaryRangeStart, start.range()); diagnostic.try_set_fix(|| { remove_argument( - &start, + start, &call.arguments, Parentheses::Preserve, - checker.locator().contents(), - checker.comment_ranges(), + checker.source(), + checker.tokens(), ) .map(Fix::safe_edit) }); diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/generic_not_last_base_class.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/generic_not_last_base_class.rs index 4cd5035693..90b8476809 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/generic_not_last_base_class.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/generic_not_last_base_class.rs @@ -160,20 +160,16 @@ fn generate_fix( ) -> anyhow::Result { let locator = checker.locator(); let source = locator.contents(); + let tokens = checker.tokens(); let deletion = remove_argument( generic_base, arguments, Parentheses::Preserve, source, - checker.comment_ranges(), + tokens, )?; - let insertion = add_argument( - locator.slice(generic_base), - arguments, - checker.comment_ranges(), - source, - ); + let insertion = add_argument(locator.slice(generic_base), arguments, tokens); Ok(Fix::unsafe_edits(deletion, [insertion])) } diff --git a/crates/ruff_linter/src/rules/flake8_pyi/rules/redundant_none_literal.rs b/crates/ruff_linter/src/rules/flake8_pyi/rules/redundant_none_literal.rs index 14145229fc..b3e35c21c2 100644 --- a/crates/ruff_linter/src/rules/flake8_pyi/rules/redundant_none_literal.rs +++ b/crates/ruff_linter/src/rules/flake8_pyi/rules/redundant_none_literal.rs @@ -5,7 +5,7 @@ use ruff_python_ast::{ helpers::{pep_604_union, typing_optional}, name::Name, operator_precedence::OperatorPrecedence, - parenthesize::parenthesized_range, + token::{Tokens, parenthesized_range}, }; use ruff_python_semantic::analyze::typing::{traverse_literal, traverse_union}; use ruff_text_size::{Ranged, TextRange}; @@ -243,16 +243,12 @@ fn create_fix( let union_expr = pep_604_union(&[new_literal_expr, none_expr]); // Check if we need parentheses to preserve operator precedence - let content = if needs_parentheses_for_precedence( - semantic, - literal_expr, - checker.comment_ranges(), - checker.source(), - ) { - format!("({})", checker.generator().expr(&union_expr)) - } else { - checker.generator().expr(&union_expr) - }; + let content = + if needs_parentheses_for_precedence(semantic, literal_expr, checker.tokens()) { + format!("({})", checker.generator().expr(&union_expr)) + } else { + checker.generator().expr(&union_expr) + }; let union_edit = Edit::range_replacement(content, literal_expr.range()); Fix::applicable_edit(union_edit, applicability) @@ -278,8 +274,7 @@ enum UnionKind { fn needs_parentheses_for_precedence( semantic: &ruff_python_semantic::SemanticModel, literal_expr: &Expr, - comment_ranges: &ruff_python_trivia::CommentRanges, - 
source: &str, + tokens: &Tokens, ) -> bool { // Get the parent expression to check if we're in a context that needs parentheses let Some(parent_expr) = semantic.current_expression_parent() else { @@ -287,14 +282,7 @@ fn needs_parentheses_for_precedence( }; // Check if the literal expression is already parenthesized - if parenthesized_range( - literal_expr.into(), - parent_expr.into(), - comment_ranges, - source, - ) - .is_some() - { + if parenthesized_range(literal_expr.into(), parent_expr.into(), tokens).is_some() { return false; // Already parenthesized, don't add more } diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs index 545372dd6c..c97efd8b05 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/assertion.rs @@ -10,7 +10,7 @@ use libcst_native::{ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::Truthiness; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::visitor::Visitor; use ruff_python_ast::{ self as ast, AnyNodeRef, Arguments, BoolOp, ExceptHandler, Expr, Keyword, Stmt, UnaryOp, @@ -303,8 +303,7 @@ pub(crate) fn unittest_assertion( parenthesized_range( expr.into(), checker.semantic().current_statement().into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .unwrap_or(expr.range()), ))); diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fixture.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fixture.rs index 49be564b48..c939d44346 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fixture.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/fixture.rs @@ -768,8 +768,8 @@ fn check_fixture_decorator(checker: &Checker, func_name: &str, decorator: &Decor keyword, arguments, edits::Parentheses::Preserve, - checker.locator().contents(), - checker.comment_ranges(), + checker.source(), + checker.tokens(), ) .map(Fix::unsafe_edit) }); diff --git a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/parametrize.rs b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/parametrize.rs index 20b14399f9..904ca1c494 100644 --- a/crates/ruff_linter/src/rules/flake8_pytest_style/rules/parametrize.rs +++ b/crates/ruff_linter/src/rules/flake8_pytest_style/rules/parametrize.rs @@ -2,10 +2,9 @@ use rustc_hash::{FxBuildHasher, FxHashMap}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::comparable::ComparableExpr; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::{Tokens, parenthesized_range}; use ruff_python_ast::{self as ast, Expr, ExprCall, ExprContext, StringLiteralFlags}; use ruff_python_codegen::Generator; -use ruff_python_trivia::CommentRanges; use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer}; use ruff_text_size::{Ranged, TextRange, TextSize}; @@ -322,18 +321,8 @@ fn elts_to_csv(elts: &[Expr], generator: Generator, flags: StringLiteralFlags) - /// ``` /// /// This method assumes that the first argument is a string. 
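Before the name-range helper below, a minimal invented illustration of the PT006 cases that `check_names` distinguishes, assuming the default `parametrize-names-type = tuple` setting:

```python
import pytest


# PT006: several parameter names packed into one comma-separated string.
@pytest.mark.parametrize("x,expected", [(1, 2), (2, 4)])
def test_double_csv(x, expected):
    assert 2 * x == expected


# With the default tuple names-type, the suggested replacement is:
@pytest.mark.parametrize(("x", "expected"), [(1, 2), (2, 4)])
def test_double_tuple(x, expected):
    assert 2 * x == expected
```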
-fn get_parametrize_name_range( - call: &ExprCall, - expr: &Expr, - comment_ranges: &CommentRanges, - source: &str, -) -> Option { - parenthesized_range( - expr.into(), - (&call.arguments).into(), - comment_ranges, - source, - ) +fn get_parametrize_name_range(call: &ExprCall, expr: &Expr, tokens: &Tokens) -> Option { + parenthesized_range(expr.into(), (&call.arguments).into(), tokens) } /// PT006 @@ -349,13 +338,8 @@ fn check_names(checker: &Checker, call: &ExprCall, expr: &Expr, argvalues: &Expr if names.len() > 1 { match names_type { types::ParametrizeNameType::Tuple => { - let name_range = get_parametrize_name_range( - call, - expr, - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(expr.range()); + let name_range = get_parametrize_name_range(call, expr, checker.tokens()) + .unwrap_or(expr.range()); let mut diagnostic = checker.report_diagnostic( PytestParametrizeNamesWrongType { single_argument: false, @@ -386,13 +370,8 @@ fn check_names(checker: &Checker, call: &ExprCall, expr: &Expr, argvalues: &Expr ))); } types::ParametrizeNameType::List => { - let name_range = get_parametrize_name_range( - call, - expr, - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(expr.range()); + let name_range = get_parametrize_name_range(call, expr, checker.tokens()) + .unwrap_or(expr.range()); let mut diagnostic = checker.report_diagnostic( PytestParametrizeNamesWrongType { single_argument: false, diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_bool_op.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_bool_op.rs index 0b53a271f4..5de313a600 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_bool_op.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_bool_op.rs @@ -10,7 +10,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::comparable::ComparableExpr; use ruff_python_ast::helpers::{Truthiness, contains_effect}; use ruff_python_ast::name::Name; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_codegen::Generator; use ruff_python_semantic::SemanticModel; @@ -800,14 +800,9 @@ fn is_short_circuit( edit = Some(get_short_circuit_edit( value, TextRange::new( - parenthesized_range( - furthest.into(), - expr.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(furthest.range()) - .start(), + parenthesized_range(furthest.into(), expr.into(), checker.tokens()) + .unwrap_or(furthest.range()) + .start(), expr.end(), ), short_circuit_truthiness, @@ -828,14 +823,9 @@ fn is_short_circuit( edit = Some(get_short_circuit_edit( next_value, TextRange::new( - parenthesized_range( - furthest.into(), - expr.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(furthest.range()) - .start(), + parenthesized_range(furthest.into(), expr.into(), checker.tokens()) + .unwrap_or(furthest.range()) + .start(), expr.end(), ), short_circuit_truthiness, diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_ifexp.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_ifexp.rs index 15b2f54bf8..2292f4e581 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_ifexp.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/ast_ifexp.rs @@ -4,7 +4,7 @@ use ruff_text_size::{Ranged, TextRange}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::{is_const_false, is_const_true}; use 
ruff_python_ast::name::Name; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use crate::checkers::ast::Checker; use crate::{AlwaysFixableViolation, Edit, Fix, FixAvailability, Violation}; @@ -171,13 +171,8 @@ pub(crate) fn if_expr_with_true_false( checker .locator() .slice( - parenthesized_range( - test.into(), - expr.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(test.range()), + parenthesized_range(test.into(), expr.into(), checker.tokens()) + .unwrap_or(test.range()), ) .to_string(), expr.range(), diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/if_with_same_arms.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/if_with_same_arms.rs index 92182b383a..17b7c5c612 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/if_with_same_arms.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/if_with_same_arms.rs @@ -4,10 +4,10 @@ use anyhow::Result; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::comparable::ComparableStmt; -use ruff_python_ast::parenthesize::parenthesized_range; use ruff_python_ast::stmt_if::{IfElifBranch, if_elif_branches}; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr}; -use ruff_python_trivia::{CommentRanges, SimpleTokenKind, SimpleTokenizer}; +use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer}; use ruff_source_file::LineRanges; use ruff_text_size::{Ranged, TextRange}; @@ -99,7 +99,7 @@ pub(crate) fn if_with_same_arms(checker: &Checker, stmt_if: &ast::StmtIf) { ¤t_branch, following_branch, checker.locator(), - checker.comment_ranges(), + checker.tokens(), ) }); } @@ -111,7 +111,7 @@ fn merge_branches( current_branch: &IfElifBranch, following_branch: &IfElifBranch, locator: &Locator, - comment_ranges: &CommentRanges, + tokens: &ruff_python_ast::token::Tokens, ) -> Result { // Identify the colon (`:`) at the end of the current branch's test. let Some(current_branch_colon) = @@ -127,12 +127,9 @@ fn merge_branches( ); // If the following test isn't parenthesized, consider parenthesizing it. - let following_branch_test = if let Some(range) = parenthesized_range( - following_branch.test.into(), - stmt_if.into(), - comment_ranges, - locator.contents(), - ) { + let following_branch_test = if let Some(range) = + parenthesized_range(following_branch.test.into(), stmt_if.into(), tokens) + { Cow::Borrowed(locator.slice(range)) } else if matches!( following_branch.test, @@ -153,24 +150,19 @@ fn merge_branches( // // For example, if the current test is `x if x else y`, we should parenthesize it to // `(x if x else y) or ...`. 
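The comment above is what motivates the `parenthesize_edit`; an invented example of the SIM114 merge it protects:

```python
def describe(x, fallback):
    # SIM114: both branches execute the same body, so the fix joins the tests
    # with `or`. The first test is a conditional expression, so it gets
    # parenthesized to keep its precedence intact.
    if x if x > 0 else fallback:
        return "non-empty"
    elif isinstance(x, str):
        return "non-empty"
    return "empty"


# Roughly the merged form the fix produces:
#     if (x if x > 0 else fallback) or isinstance(x, str):
#         return "non-empty"
```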
- let parenthesize_edit = if matches!( - current_branch.test, - Expr::Lambda(_) | Expr::Named(_) | Expr::If(_) - ) && parenthesized_range( - current_branch.test.into(), - stmt_if.into(), - comment_ranges, - locator.contents(), - ) - .is_none() - { - Some(Edit::range_replacement( - format!("({})", locator.slice(current_branch.test)), - current_branch.test.range(), - )) - } else { - None - }; + let parenthesize_edit = + if matches!( + current_branch.test, + Expr::Lambda(_) | Expr::Named(_) | Expr::If(_) + ) && parenthesized_range(current_branch.test.into(), stmt_if.into(), tokens).is_none() + { + Some(Edit::range_replacement( + format!("({})", locator.slice(current_branch.test)), + current_branch.test.range(), + )) + } else { + None + }; Ok(Fix::safe_edits( deletion_edit, diff --git a/crates/ruff_linter/src/rules/flake8_simplify/rules/key_in_dict.rs b/crates/ruff_linter/src/rules/flake8_simplify/rules/key_in_dict.rs index 5ced08a673..645a79d9e1 100644 --- a/crates/ruff_linter/src/rules/flake8_simplify/rules/key_in_dict.rs +++ b/crates/ruff_linter/src/rules/flake8_simplify/rules/key_in_dict.rs @@ -1,6 +1,6 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::AnyNodeRef; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Arguments, CmpOp, Comprehension, Expr}; use ruff_python_semantic::analyze::typing; use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer}; @@ -90,20 +90,10 @@ fn key_in_dict(checker: &Checker, left: &Expr, right: &Expr, operator: CmpOp, pa } // Extract the exact range of the left and right expressions. - let left_range = parenthesized_range( - left.into(), - parent, - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(left.range()); - let right_range = parenthesized_range( - right.into(), - parent, - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(right.range()); + let left_range = + parenthesized_range(left.into(), parent, checker.tokens()).unwrap_or(left.range()); + let right_range = + parenthesized_range(right.into(), parent, checker.tokens()).unwrap_or(right.range()); let mut diagnostic = checker.report_diagnostic( InDictKeys { diff --git a/crates/ruff_linter/src/rules/flake8_type_checking/rules/type_alias_quotes.rs b/crates/ruff_linter/src/rules/flake8_type_checking/rules/type_alias_quotes.rs index 776ce1486e..c07de4b813 100644 --- a/crates/ruff_linter/src/rules/flake8_type_checking/rules/type_alias_quotes.rs +++ b/crates/ruff_linter/src/rules/flake8_type_checking/rules/type_alias_quotes.rs @@ -11,7 +11,7 @@ use crate::registry::Rule; use crate::rules::flake8_type_checking::helpers::quote_type_expression; use crate::{AlwaysFixableViolation, Edit, Fix, FixAvailability, Violation}; use ruff_python_ast::PythonVersion; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; /// ## What it does /// Checks if [PEP 613] explicit type aliases contain references to @@ -295,21 +295,20 @@ pub(crate) fn quoted_type_alias( let range = annotation_expr.range(); let mut diagnostic = checker.report_diagnostic(QuotedTypeAlias, range); let fix_string = annotation_expr.value.to_string(); + let fix_string = if (fix_string.contains('\n') || fix_string.contains('\r')) && parenthesized_range( - // Check for parenthesis outside string ("""...""") + // Check for parentheses outside the string ("""...""") annotation_expr.into(), 
checker.semantic().current_statement().into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.source_tokens(), ) .is_none() && parenthesized_range( - // Check for parenthesis inside string """(...)""" + // Check for parentheses inside the string """(...)""" expr.into(), annotation_expr.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .is_none() { diff --git a/crates/ruff_linter/src/rules/flake8_use_pathlib/rules/path_constructor_current_directory.rs b/crates/ruff_linter/src/rules/flake8_use_pathlib/rules/path_constructor_current_directory.rs index bf5a4ed8b7..840befca76 100644 --- a/crates/ruff_linter/src/rules/flake8_use_pathlib/rules/path_constructor_current_directory.rs +++ b/crates/ruff_linter/src/rules/flake8_use_pathlib/rules/path_constructor_current_directory.rs @@ -1,10 +1,9 @@ use std::ops::Range; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{Expr, ExprBinOp, ExprCall, Operator}; use ruff_python_semantic::SemanticModel; -use ruff_python_trivia::CommentRanges; use ruff_text_size::{Ranged, TextRange}; use crate::checkers::ast::Checker; @@ -89,11 +88,7 @@ pub(crate) fn path_constructor_current_directory( let mut diagnostic = checker.report_diagnostic(PathConstructorCurrentDirectory, arg.range()); - match parent_and_next_path_fragment_range( - checker.semantic(), - checker.comment_ranges(), - checker.source(), - ) { + match parent_and_next_path_fragment_range(checker.semantic(), checker.tokens()) { Some((parent_range, next_fragment_range)) => { let next_fragment_expr = checker.locator().slice(next_fragment_range); let call_expr = checker.locator().slice(call.range()); @@ -116,7 +111,7 @@ pub(crate) fn path_constructor_current_directory( arguments, Parentheses::Preserve, checker.source(), - checker.comment_ranges(), + checker.tokens(), )?; Ok(Fix::applicable_edit(edit, applicability(call.range()))) }), @@ -125,8 +120,7 @@ pub(crate) fn path_constructor_current_directory( fn parent_and_next_path_fragment_range( semantic: &SemanticModel, - comment_ranges: &CommentRanges, - source: &str, + tokens: &ruff_python_ast::token::Tokens, ) -> Option<(TextRange, TextRange)> { let parent = semantic.current_expression_parent()?; @@ -142,6 +136,6 @@ fn parent_and_next_path_fragment_range( Some(( parent.range(), - parenthesized_range(right.into(), parent.into(), comment_ranges, source).unwrap_or(range), + parenthesized_range(right.into(), parent.into(), tokens).unwrap_or(range), )) } diff --git a/crates/ruff_linter/src/rules/pandas_vet/rules/inplace_argument.rs b/crates/ruff_linter/src/rules/pandas_vet/rules/inplace_argument.rs index 0fa448f7c5..71e0d82db9 100644 --- a/crates/ruff_linter/src/rules/pandas_vet/rules/inplace_argument.rs +++ b/crates/ruff_linter/src/rules/pandas_vet/rules/inplace_argument.rs @@ -1,8 +1,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::is_const_true; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::{Tokens, parenthesized_range}; use ruff_python_ast::{self as ast, Keyword, Stmt}; -use ruff_python_trivia::CommentRanges; use ruff_text_size::Ranged; use crate::Locator; @@ -91,7 +90,7 @@ pub(crate) fn inplace_argument(checker: &Checker, call: &ast::ExprCall) { call, keyword, statement, - checker.comment_ranges(), + checker.tokens(), checker.locator(), ) { diagnostic.set_fix(fix); @@ -111,21 
+110,16 @@ fn convert_inplace_argument_to_assignment( call: &ast::ExprCall, keyword: &Keyword, statement: &Stmt, - comment_ranges: &CommentRanges, + tokens: &Tokens, locator: &Locator, ) -> Option { // Add the assignment. let attr = call.func.as_attribute_expr()?; let insert_assignment = Edit::insertion( format!("{name} = ", name = locator.slice(attr.value.range())), - parenthesized_range( - call.into(), - statement.into(), - comment_ranges, - locator.contents(), - ) - .unwrap_or(call.range()) - .start(), + parenthesized_range(call.into(), statement.into(), tokens) + .unwrap_or(call.range()) + .start(), ); // Remove the `inplace` argument. @@ -134,7 +128,7 @@ fn convert_inplace_argument_to_assignment( &call.arguments, Parentheses::Preserve, locator.contents(), - comment_ranges, + tokens, ) .ok()?; diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/lambda_assignment.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/lambda_assignment.rs index a42473386b..9b437aa279 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/lambda_assignment.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/lambda_assignment.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{ self as ast, Expr, ExprEllipsisLiteral, ExprLambda, Identifier, Parameter, ParameterWithDefault, Parameters, Stmt, @@ -265,29 +265,19 @@ fn replace_trailing_ellipsis_with_original_expr( stmt: &Stmt, checker: &Checker, ) -> String { - let original_expr_range = parenthesized_range( - (&lambda.body).into(), - lambda.into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(lambda.body.range()); + let original_expr_range = + parenthesized_range((&lambda.body).into(), lambda.into(), checker.tokens()) + .unwrap_or(lambda.body.range()); // This prevents the autofix of introducing a syntax error if the lambda's body is an // expression spanned across multiple lines. To avoid the syntax error we preserve // the parenthesis around the body. 
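The preserved-parentheses case that comment describes, as an invented E731 fixture (the rewritten form is approximate):

```python
# E731: a lambda assigned to a name, with its body parenthesized across
# several lines; the fix keeps those parentheses so the generated `return`
# stays syntactically valid.
to_fahrenheit = lambda c: (
    c * 9 / 5
    + 32
)


# Approximately the `def` the fix produces:
def to_fahrenheit(c):
    return (
        c * 9 / 5
        + 32
    )
```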
- let original_expr_in_source = if parenthesized_range( - lambda.into(), - stmt.into(), - checker.comment_ranges(), - checker.source(), - ) - .is_some() - { - format!("({})", checker.locator().slice(original_expr_range)) - } else { - checker.locator().slice(original_expr_range).to_string() - }; + let original_expr_in_source = + if parenthesized_range(lambda.into(), stmt.into(), checker.tokens()).is_some() { + format!("({})", checker.locator().slice(original_expr_range)) + } else { + checker.locator().slice(original_expr_range).to_string() + }; let placeholder_ellipsis_start = generated.rfind("...").unwrap(); let placeholder_ellipsis_end = placeholder_ellipsis_start + "...".len(); diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs index a68e492846..5ae6fe9028 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/literal_comparisons.rs @@ -1,4 +1,4 @@ -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::{Tokens, parenthesized_range}; use rustc_hash::FxHashMap; use ruff_macros::{ViolationMetadata, derive_message_formats}; @@ -179,15 +179,14 @@ fn is_redundant_boolean_comparison(op: CmpOp, comparator: &Expr) -> Option fn generate_redundant_comparison( compare: &ast::ExprCompare, - comment_ranges: &ruff_python_trivia::CommentRanges, + tokens: &Tokens, source: &str, comparator: &Expr, kind: bool, needs_wrap: bool, ) -> String { - let comparator_range = - parenthesized_range(comparator.into(), compare.into(), comment_ranges, source) - .unwrap_or(comparator.range()); + let comparator_range = parenthesized_range(comparator.into(), compare.into(), tokens) + .unwrap_or(comparator.range()); let comparator_str = &source[comparator_range]; @@ -379,7 +378,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) .copied() .collect::>(); - let comment_ranges = checker.comment_ranges(); + let tokens = checker.tokens(); let source = checker.source(); let content = match (&*compare.ops, &*compare.comparators) { @@ -387,18 +386,13 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) if let Some(kind) = is_redundant_boolean_comparison(*op, &compare.left) { let needs_wrap = compare.left.range().start() != compare.range().start(); generate_redundant_comparison( - compare, - comment_ranges, - source, - comparator, - kind, - needs_wrap, + compare, tokens, source, comparator, kind, needs_wrap, ) } else if let Some(kind) = is_redundant_boolean_comparison(*op, comparator) { let needs_wrap = comparator.range().end() != compare.range().end(); generate_redundant_comparison( compare, - comment_ranges, + tokens, source, &compare.left, kind, @@ -410,7 +404,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) &ops, &compare.comparators, compare.into(), - comment_ranges, + tokens, source, ) } @@ -420,7 +414,7 @@ pub(crate) fn literal_comparisons(checker: &Checker, compare: &ast::ExprCompare) &ops, &compare.comparators, compare.into(), - comment_ranges, + tokens, source, ), }; diff --git a/crates/ruff_linter/src/rules/pycodestyle/rules/not_tests.rs b/crates/ruff_linter/src/rules/pycodestyle/rules/not_tests.rs index a4234093bc..0c759f9e0e 100644 --- a/crates/ruff_linter/src/rules/pycodestyle/rules/not_tests.rs +++ b/crates/ruff_linter/src/rules/pycodestyle/rules/not_tests.rs @@ -107,7 +107,7 @@ pub(crate) fn 
not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) { &[CmpOp::NotIn], comparators, unary_op.into(), - checker.comment_ranges(), + checker.tokens(), checker.source(), ), unary_op.range(), @@ -127,7 +127,7 @@ pub(crate) fn not_tests(checker: &Checker, unary_op: &ast::ExprUnaryOp) { &[CmpOp::IsNot], comparators, unary_op.into(), - checker.comment_ranges(), + checker.tokens(), checker.source(), ), unary_op.range(), diff --git a/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs b/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs index 7b9fc80ba8..bf84ea85fc 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs +++ b/crates/ruff_linter/src/rules/pydocstyle/rules/sections.rs @@ -4,7 +4,9 @@ use rustc_hash::FxHashSet; use std::sync::LazyLock; use ruff_macros::{ViolationMetadata, derive_message_formats}; +use ruff_python_ast::Parameter; use ruff_python_ast::docstrings::{clean_space, leading_space}; +use ruff_python_ast::helpers::map_subscript; use ruff_python_ast::identifier::Identifier; use ruff_python_semantic::analyze::visibility::is_staticmethod; use ruff_python_trivia::textwrap::dedent; @@ -1184,6 +1186,9 @@ impl AlwaysFixableViolation for MissingSectionNameColon { /// This rule is enabled when using the `google` convention, and disabled when /// using the `pep257` and `numpy` conventions. /// +/// Parameters annotated with `typing.Unpack` are exempt from this rule. +/// This follows the Python typing specification for unpacking keyword arguments. +/// /// ## Example /// ```python /// def calculate_speed(distance: float, time: float) -> float: @@ -1233,6 +1238,7 @@ impl AlwaysFixableViolation for MissingSectionNameColon { /// - [PEP 257 – Docstring Conventions](https://peps.python.org/pep-0257/) /// - [PEP 287 – reStructuredText Docstring Format](https://peps.python.org/pep-0287/) /// - [Google Python Style Guide - Docstrings](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) +/// - [Python - Unpack for keyword arguments](https://typing.python.org/en/latest/spec/callables.html#unpack-kwargs) #[derive(ViolationMetadata)] #[violation_metadata(stable_since = "v0.0.73")] pub(crate) struct UndocumentedParam { @@ -1808,7 +1814,9 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa missing_arg_names.insert(starred_arg_name); } } - if let Some(arg) = function.parameters.kwarg.as_ref() { + if let Some(arg) = function.parameters.kwarg.as_ref() + && !has_unpack_annotation(checker, arg) + { let arg_name = arg.name.as_str(); let starred_arg_name = format!("**{arg_name}"); if !arg_name.starts_with('_') @@ -1834,6 +1842,15 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa } } +/// Returns `true` if the parameter is annotated with `typing.Unpack` +fn has_unpack_annotation(checker: &Checker, parameter: &Parameter) -> bool { + parameter.annotation.as_ref().is_some_and(|annotation| { + checker + .semantic() + .match_typing_expr(map_subscript(annotation), "Unpack") + }) +} + // See: `GOOGLE_ARGS_REGEX` in `pydocstyle/checker.py`. 
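To make the new `Unpack` exemption concrete, an invented fixture in the spirit of the snapshot entries below (`User` and `update_user` are illustrative names): the `Unpack`-annotated `**kwargs` no longer needs its own `Args:` entry, while an ordinary parameter still does:

```python
from typing import TypedDict

from typing_extensions import Unpack  # `typing.Unpack` on Python 3.11+


class User(TypedDict):
    name: str
    age: int


def update_user(query: str, **kwargs: Unpack[User]) -> None:
    """Update the matched user.

    Args:
        query: Lookup query selecting the user to update.
    """
    # D417 does not ask for a `**kwargs` entry here because of the
    # `Unpack[...]` annotation; leaving `query` undocumented would still
    # be flagged, as the new snapshot entries show.
```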
static GOOGLE_ARGS_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap()); diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google.snap index 63b975feb9..44b20a8c6b 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google.snap @@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args` 200 | """ 201 | Send a message. | + +D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query` + --> D417.py:238:5 + | +236 | """ +237 | +238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]): + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +239 | """Function with Unpack kwargs but missing query arg documentation. + | diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google_ignore_var_parameters.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google_ignore_var_parameters.snap index f645ff960e..7ff0f72bf0 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google_ignore_var_parameters.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_google_ignore_var_parameters.snap @@ -83,3 +83,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args` 200 | """ 201 | Send a message. | + +D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query` + --> D417.py:238:5 + | +236 | """ +237 | +238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]): + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +239 | """Function with Unpack kwargs but missing query arg documentation. + | diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified.snap index 63b975feb9..44b20a8c6b 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified.snap @@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args` 200 | """ 201 | Send a message. | + +D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query` + --> D417.py:238:5 + | +236 | """ +237 | +238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]): + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +239 | """Function with Unpack kwargs but missing query arg documentation. 
+ | diff --git a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified_ignore_var_parameters.snap b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified_ignore_var_parameters.snap index 63b975feb9..44b20a8c6b 100644 --- a/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified_ignore_var_parameters.snap +++ b/crates/ruff_linter/src/rules/pydocstyle/snapshots/ruff_linter__rules__pydocstyle__tests__d417_unspecified_ignore_var_parameters.snap @@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args` 200 | """ 201 | Send a message. | + +D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query` + --> D417.py:238:5 + | +236 | """ +237 | +238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]): + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +239 | """Function with Unpack kwargs but missing query arg documentation. + | diff --git a/crates/ruff_linter/src/rules/pyflakes/mod.rs b/crates/ruff_linter/src/rules/pyflakes/mod.rs index d290ed38c5..02cd5158a8 100644 --- a/crates/ruff_linter/src/rules/pyflakes/mod.rs +++ b/crates/ruff_linter/src/rules/pyflakes/mod.rs @@ -28,6 +28,7 @@ mod tests { use crate::settings::types::PreviewMode; use crate::settings::{LinterSettings, flags}; use crate::source_kind::SourceKind; + use crate::suppression::Suppressions; use crate::test::{test_contents, test_path, test_snippet}; use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives}; @@ -955,6 +956,8 @@ mod tests { &locator, &indexer, ); + let suppressions = + Suppressions::from_tokens(&settings, locator.contents(), parsed.tokens()); let mut messages = check_path( Path::new(""), None, @@ -968,6 +971,7 @@ mod tests { source_type, &parsed, target_version, + &suppressions, ); messages.sort_by(Diagnostic::ruff_start_ordering); let actual = messages diff --git a/crates/ruff_linter/src/rules/pyflakes/rules/repeated_keys.rs b/crates/ruff_linter/src/rules/pyflakes/rules/repeated_keys.rs index 1acdc90138..de02e4c85a 100644 --- a/crates/ruff_linter/src/rules/pyflakes/rules/repeated_keys.rs +++ b/crates/ruff_linter/src/rules/pyflakes/rules/repeated_keys.rs @@ -3,7 +3,7 @@ use std::collections::hash_map::Entry; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::comparable::{ComparableExpr, HashableExpr}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr}; use ruff_text_size::Ranged; @@ -193,16 +193,14 @@ pub(crate) fn repeated_keys(checker: &Checker, dict: &ast::ExprDict) { parenthesized_range( dict.value(i - 1).into(), dict.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .unwrap_or_else(|| dict.value(i - 1).range()) .end(), parenthesized_range( dict.value(i).into(), dict.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .unwrap_or_else(|| dict.value(i).range()) .end(), @@ -224,16 +222,14 @@ pub(crate) fn repeated_keys(checker: &Checker, dict: &ast::ExprDict) { parenthesized_range( dict.value(i - 1).into(), dict.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .unwrap_or_else(|| dict.value(i - 1).range()) .end(), parenthesized_range( dict.value(i).into(), 
dict.into(), - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), ) .unwrap_or_else(|| dict.value(i).range()) .end(), diff --git a/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs b/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs index 810c5742b9..59dcbf5c22 100644 --- a/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs +++ b/crates/ruff_linter/src/rules/pyflakes/rules/unused_variable.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::contains_effect; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::token::{TokenKind, Tokens}; use ruff_python_ast::{self as ast, Stmt}; use ruff_python_semantic::Binding; @@ -172,14 +172,10 @@ fn remove_unused_variable(binding: &Binding, checker: &Checker) -> Option { { // If the expression is complex (`x = foo()`), remove the assignment, // but preserve the right-hand side. - let start = parenthesized_range( - target.into(), - statement.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(target.range()) - .start(); + let start = + parenthesized_range(target.into(), statement.into(), checker.tokens()) + .unwrap_or(target.range()) + .start(); let end = match_token_after(checker.tokens(), target.end(), |token| { token == TokenKind::Equal })? diff --git a/crates/ruff_linter/src/rules/pylint/mod.rs b/crates/ruff_linter/src/rules/pylint/mod.rs index a0ab9a908e..de341b1146 100644 --- a/crates/ruff_linter/src/rules/pylint/mod.rs +++ b/crates/ruff_linter/src/rules/pylint/mod.rs @@ -16,10 +16,10 @@ mod tests { use crate::registry::Rule; use crate::rules::{flake8_tidy_imports, pylint}; - use crate::assert_diagnostics; use crate::settings::LinterSettings; use crate::settings::types::PreviewMode; use crate::test::test_path; + use crate::{assert_diagnostics, assert_diagnostics_diff}; #[test_case(Rule::SingledispatchMethod, Path::new("singledispatch_method.py"))] #[test_case( @@ -253,6 +253,32 @@ mod tests { Ok(()) } + #[test_case( + Rule::UselessExceptionStatement, + Path::new("useless_exception_statement.py") + )] + fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> { + let snapshot = format!( + "preview__{}_{}", + rule_code.noqa_code(), + path.to_string_lossy() + ); + + assert_diagnostics_diff!( + snapshot, + Path::new("pylint").join(path).as_path(), + &LinterSettings { + preview: PreviewMode::Disabled, + ..LinterSettings::for_rule(rule_code) + }, + &LinterSettings { + preview: PreviewMode::Enabled, + ..LinterSettings::for_rule(rule_code) + } + ); + Ok(()) + } + #[test] fn continue_in_finally() -> Result<()> { let diagnostics = test_path( diff --git a/crates/ruff_linter/src/rules/pylint/rules/boolean_chained_comparison.rs b/crates/ruff_linter/src/rules/pylint/rules/boolean_chained_comparison.rs index 27d6d49ad5..9673524a93 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/boolean_chained_comparison.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/boolean_chained_comparison.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::{ BoolOp, CmpOp, Expr, ExprBoolOp, ExprCompare, - parenthesize::{parentheses_iterator, parenthesized_range}, + token::{parentheses_iterator, parenthesized_range}, }; use ruff_text_size::{Ranged, TextRange}; @@ -62,7 +62,7 @@ pub(crate) fn boolean_chained_comparison(checker: &Checker, expr_bool_op: 
&ExprB } let locator = checker.locator(); - let comment_ranges = checker.comment_ranges(); + let tokens = checker.tokens(); // retrieve all compare expressions from boolean expression let compare_expressions = expr_bool_op @@ -89,40 +89,22 @@ pub(crate) fn boolean_chained_comparison(checker: &Checker, expr_bool_op: &ExprB continue; } - let left_paren_count = parentheses_iterator( - left_compare.into(), - Some(expr_bool_op.into()), - comment_ranges, - locator.contents(), - ) - .count(); + let left_paren_count = + parentheses_iterator(left_compare.into(), Some(expr_bool_op.into()), tokens).count(); - let right_paren_count = parentheses_iterator( - right_compare.into(), - Some(expr_bool_op.into()), - comment_ranges, - locator.contents(), - ) - .count(); + let right_paren_count = + parentheses_iterator(right_compare.into(), Some(expr_bool_op.into()), tokens).count(); // Create the edit that removes the comparison operator // In `a<(b) and ((b)) "rsplit", }; - let maxsplit_argument_edit = fix::edits::add_argument( - "maxsplit=1", - arguments, - checker.comment_ranges(), - checker.locator().contents(), - ); + let maxsplit_argument_edit = + fix::edits::add_argument("maxsplit=1", arguments, checker.tokens()); // Only change `actual_split_type` if it doesn't match `suggested_split_type` let split_type_edit: Option = if actual_split_type == suggested_split_type { diff --git a/crates/ruff_linter/src/rules/pylint/rules/non_augmented_assignment.rs b/crates/ruff_linter/src/rules/pylint/rules/non_augmented_assignment.rs index 7423c2dc76..cbe51cdd8d 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/non_augmented_assignment.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/non_augmented_assignment.rs @@ -2,7 +2,7 @@ use ast::Expr; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; use ruff_python_ast::comparable::ComparableExpr; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{ExprBinOp, ExprRef, Operator}; use ruff_text_size::{Ranged, TextRange}; @@ -150,12 +150,10 @@ fn augmented_assignment( let right_operand_ref = ExprRef::from(right_operand); let parent = original_expr.into(); - let comment_ranges = checker.comment_ranges(); - let source = checker.source(); + let tokens = checker.tokens(); let right_operand_range = - parenthesized_range(right_operand_ref, parent, comment_ranges, source) - .unwrap_or(right_operand.range()); + parenthesized_range(right_operand_ref, parent, tokens).unwrap_or(right_operand.range()); let right_operand_expr = locator.slice(right_operand_range); let target_expr = locator.slice(target); diff --git a/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs b/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs index 0ed569eef8..21a4643f39 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/subprocess_run_without_check.rs @@ -75,12 +75,7 @@ pub(crate) fn subprocess_run_without_check(checker: &Checker, call: &ast::ExprCa let mut diagnostic = checker.report_diagnostic(SubprocessRunWithoutCheck, call.func.range()); diagnostic.set_fix(Fix::applicable_edit( - add_argument( - "check=False", - &call.arguments, - checker.comment_ranges(), - checker.locator().contents(), - ), + add_argument("check=False", &call.arguments, checker.tokens()), // If the function call contains `**kwargs`, mark the fix as unsafe. 
if call .arguments diff --git a/crates/ruff_linter/src/rules/pylint/rules/unspecified_encoding.rs b/crates/ruff_linter/src/rules/pylint/rules/unspecified_encoding.rs index 7d9cd12506..316d2e7f54 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/unspecified_encoding.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/unspecified_encoding.rs @@ -1,8 +1,7 @@ use std::fmt::{Display, Formatter}; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::name::QualifiedName; -use ruff_python_ast::{self as ast, Expr}; +use ruff_python_ast::{self as ast, Expr, name::QualifiedName}; use ruff_python_semantic::SemanticModel; use ruff_python_semantic::analyze::typing; use ruff_text_size::{Ranged, TextRange}; @@ -193,8 +192,7 @@ fn generate_keyword_fix(checker: &Checker, call: &ast::ExprCall) -> Fix { })) ), &call.arguments, - checker.comment_ranges(), - checker.locator().contents(), + checker.tokens(), )) } diff --git a/crates/ruff_linter/src/rules/pylint/rules/useless_exception_statement.rs b/crates/ruff_linter/src/rules/pylint/rules/useless_exception_statement.rs index 4608297683..6eba84adc3 100644 --- a/crates/ruff_linter/src/rules/pylint/rules/useless_exception_statement.rs +++ b/crates/ruff_linter/src/rules/pylint/rules/useless_exception_statement.rs @@ -1,10 +1,11 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::{self as ast, Expr}; -use ruff_python_semantic::SemanticModel; +use ruff_python_semantic::{SemanticModel, analyze}; use ruff_python_stdlib::builtins; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; +use crate::preview::is_custom_exception_checking_enabled; use crate::{Edit, Fix, FixAvailability, Violation}; use ruff_python_ast::PythonVersion; @@ -20,6 +21,9 @@ use ruff_python_ast::PythonVersion; /// This rule only detects built-in exceptions, like `ValueError`, and does /// not catch user-defined exceptions. /// +/// In [preview], this rule will also detect user-defined exceptions, but only +/// the ones defined in the file being checked. +/// /// ## Example /// ```python /// ValueError("...") @@ -32,7 +36,8 @@ use ruff_python_ast::PythonVersion; /// /// ## Fix safety /// This rule's fix is marked as unsafe, as converting a useless exception -/// statement to a `raise` statement will change the program's behavior. +/// +/// [preview]: https://docs.astral.sh/ruff/preview/ #[derive(ViolationMetadata)] #[violation_metadata(stable_since = "0.5.0")] pub(crate) struct UselessExceptionStatement; @@ -56,7 +61,10 @@ pub(crate) fn useless_exception_statement(checker: &Checker, expr: &ast::StmtExp return; }; - if is_builtin_exception(func, checker.semantic(), checker.target_version()) { + if is_builtin_exception(func, checker.semantic(), checker.target_version()) + || (is_custom_exception_checking_enabled(checker.settings()) + && is_custom_exception(func, checker.semantic(), checker.target_version())) + { let mut diagnostic = checker.report_diagnostic(UselessExceptionStatement, expr.range()); diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion( "raise ".to_string(), @@ -78,3 +86,34 @@ fn is_builtin_exception( if builtins::is_exception(name, target_version.minor)) }) } + +/// Returns `true` if the given expression is a custom exception. 
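# NOTE (editorial, not part of the diff): a minimal Python sketch of the
# PLW0133 preview behavior implemented by `is_custom_exception` just below.
# Under preview, an exception class defined in the same file whose bases
# resolve to a builtin exception is also reported when instantiated as a bare
# statement. The class name mirrors the updated
# useless_exception_statement.py fixture; the messages are illustrative.
class MyError(Exception):
    """A user-defined exception in the file being checked."""


def func() -> None:
    ValueError("always reported by PLW0133")  # stable behavior
    MyError("reported only in preview")  # new: MyError derives from Exception
    # The suggested (unsafe) fix prefixes each statement with `raise`.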
+fn is_custom_exception( + expr: &Expr, + semantic: &SemanticModel, + target_version: PythonVersion, +) -> bool { + let Some(qualified_name) = semantic.resolve_qualified_name(expr) else { + return false; + }; + let Some(symbol) = qualified_name.segments().last() else { + return false; + }; + let Some(binding_id) = semantic.lookup_symbol(symbol) else { + return false; + }; + let binding = semantic.binding(binding_id); + let Some(source) = binding.source else { + return false; + }; + let statement = semantic.statement(source); + if let ast::Stmt::ClassDef(class_def) = statement { + return analyze::class::any_qualified_base_class(class_def, semantic, &|qualified_name| { + if let ["" | "builtins", name] = qualified_name.segments() { + return builtins::is_exception(name, target_version.minor); + } + false + }); + } + false +} diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0133_useless_exception_statement.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0133_useless_exception_statement.py.snap index 6199c06156..9cbb8b9dfe 100644 --- a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0133_useless_exception_statement.py.snap +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__PLW0133_useless_exception_statement.py.snap @@ -2,250 +2,294 @@ source: crates/ruff_linter/src/rules/pylint/mod.rs --- PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:7:5 - | -5 | # Test case 1: Useless exception statement -6 | def func(): -7 | AssertionError("This is an assertion error") # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword -4 | -5 | # Test case 1: Useless exception statement -6 | def func(): - - AssertionError("This is an assertion error") # PLW0133 -7 + raise AssertionError("This is an assertion error") # PLW0133 -8 | -9 | -10 | # Test case 2: Useless exception statement in try-except block -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:13:9 + --> useless_exception_statement.py:26:5 | -11 | def func(): -12 | try: -13 | Exception("This is an exception") # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -14 | except Exception as err: -15 | pass - | -help: Add `raise` keyword -10 | # Test case 2: Useless exception statement in try-except block -11 | def func(): -12 | try: - - Exception("This is an exception") # PLW0133 -13 + raise Exception("This is an exception") # PLW0133 -14 | except Exception as err: -15 | pass -16 | -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:21:9 - | -19 | def func(): -20 | if True: -21 | RuntimeError("This is an exception") # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword -18 | # Test case 3: Useless exception statement in if statement -19 | def func(): -20 | if True: - - RuntimeError("This is an exception") # PLW0133 -21 + raise RuntimeError("This is an exception") # PLW0133 -22 | -23 | -24 | # Test case 4: Useless exception statement in class -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:28:13 - | -26 | class Class: -27 | def __init__(self): -28 | TypeError("This is an exception") # PLW0133 - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword +24 | # Test case 1: Useless exception statement 25 | def func(): -26 | class Class: -27 | def __init__(self): +26 | AssertionError("This is an assertion error") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +27 | MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 + | +help: Add `raise` keyword +23 | +24 | # Test case 1: Useless exception statement +25 | def func(): + - AssertionError("This is an assertion error") # PLW0133 +26 + raise AssertionError("This is an assertion error") # PLW0133 +27 | MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 +29 | MyValueError("This is a custom value error") # PLW0133 +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:35:9 + | +33 | def func(): +34 | try: +35 | Exception("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +36 | MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +32 | # Test case 2: Useless exception statement in try-except block +33 | def func(): +34 | try: + - Exception("This is an exception") # PLW0133 +35 + raise Exception("This is an exception") # PLW0133 +36 | MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 +38 | MyValueError("This is an exception") # PLW0133 +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:46:9 + | +44 | def func(): +45 | if True: +46 | RuntimeError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +47 | MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +43 | # Test case 3: Useless exception statement in if statement +44 | def func(): +45 | if True: + - RuntimeError("This is an exception") # PLW0133 +46 + raise RuntimeError("This is an exception") # PLW0133 +47 | MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 +49 | MyValueError("This is an exception") # PLW0133 +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:56:13 + | +54 | class Class: +55 | def __init__(self): +56 | TypeError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +57 | MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +53 | def func(): +54 | class Class: +55 | def __init__(self): - TypeError("This is an exception") # PLW0133 -28 + raise TypeError("This is an exception") # PLW0133 -29 | -30 | -31 | # Test case 5: Useless exception statement in function +56 + raise TypeError("This is an exception") # PLW0133 +57 | MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 +59 | MyValueError("This is an exception") # PLW0133 note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:34:9 + --> useless_exception_statement.py:65:9 | -32 | def func(): -33 | def inner(): -34 | IndexError("This is an exception") # PLW0133 +63 | def func(): +64 | def inner(): +65 | IndexError("This is an 
exception") # PLW0133 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -35 | -36 | inner() +66 | MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 | help: Add `raise` keyword -31 | # Test case 5: Useless exception statement in function -32 | def func(): -33 | def inner(): +62 | # Test case 5: Useless exception statement in function +63 | def func(): +64 | def inner(): - IndexError("This is an exception") # PLW0133 -34 + raise IndexError("This is an exception") # PLW0133 -35 | -36 | inner() -37 | +65 + raise IndexError("This is an exception") # PLW0133 +66 | MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 +68 | MyValueError("This is an exception") # PLW0133 note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:42:9 + --> useless_exception_statement.py:76:9 | -40 | def func(): -41 | while True: -42 | KeyError("This is an exception") # PLW0133 +74 | def func(): +75 | while True: +76 | KeyError("This is an exception") # PLW0133 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +77 | MyError("This is an exception") # PLW0133 +78 | MySubError("This is an exception") # PLW0133 | help: Add `raise` keyword -39 | # Test case 6: Useless exception statement in while loop -40 | def func(): -41 | while True: +73 | # Test case 6: Useless exception statement in while loop +74 | def func(): +75 | while True: - KeyError("This is an exception") # PLW0133 -42 + raise KeyError("This is an exception") # PLW0133 -43 | -44 | -45 | # Test case 7: Useless exception statement in abstract class +76 + raise KeyError("This is an exception") # PLW0133 +77 | MyError("This is an exception") # PLW0133 +78 | MySubError("This is an exception") # PLW0133 +79 | MyValueError("This is an exception") # PLW0133 note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:50:13 + --> useless_exception_statement.py:87:13 | -48 | @abstractmethod -49 | def method(self): -50 | NotImplementedError("This is an exception") # PLW0133 +85 | @abstractmethod +86 | def method(self): +87 | NotImplementedError("This is an exception") # PLW0133 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +88 | MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 | help: Add `raise` keyword -47 | class Class(ABC): -48 | @abstractmethod -49 | def method(self): +84 | class Class(ABC): +85 | @abstractmethod +86 | def method(self): - NotImplementedError("This is an exception") # PLW0133 -50 + raise NotImplementedError("This is an exception") # PLW0133 -51 | -52 | -53 | # Test case 8: Useless exception statement inside context manager +87 + raise NotImplementedError("This is an exception") # PLW0133 +88 | MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 +90 | MyValueError("This is an exception") # PLW0133 note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:56:9 + --> useless_exception_statement.py:96:9 | -54 | def func(): -55 | with suppress(AttributeError): -56 | AttributeError("This is an exception") # PLW0133 +94 | def func(): +95 | with suppress(Exception): +96 | AttributeError("This is an exception") # PLW0133 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +97 | MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 
| help: Add `raise` keyword -53 | # Test case 8: Useless exception statement inside context manager -54 | def func(): -55 | with suppress(AttributeError): +93 | # Test case 8: Useless exception statement inside context manager +94 | def func(): +95 | with suppress(Exception): - AttributeError("This is an exception") # PLW0133 -56 + raise AttributeError("This is an exception") # PLW0133 -57 | -58 | -59 | # Test case 9: Useless exception statement in parentheses +96 + raise AttributeError("This is an exception") # PLW0133 +97 | MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 +99 | MyValueError("This is an exception") # PLW0133 note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:61:5 - | -59 | # Test case 9: Useless exception statement in parentheses -60 | def func(): -61 | (RuntimeError("This is an exception")) # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword -58 | -59 | # Test case 9: Useless exception statement in parentheses -60 | def func(): - - (RuntimeError("This is an exception")) # PLW0133 -61 + raise (RuntimeError("This is an exception")) # PLW0133 -62 | -63 | -64 | # Test case 10: Useless exception statement in continuation -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:66:12 - | -64 | # Test case 10: Useless exception statement in continuation -65 | def func(): -66 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword -63 | -64 | # Test case 10: Useless exception statement in continuation -65 | def func(): - - x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 -66 + x = 1; raise (RuntimeError("This is an exception")); y = 2 # PLW0133 -67 | -68 | -69 | # Test case 11: Useless warning statement -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:71:5 - | -69 | # Test case 11: Useless warning statement -70 | def func(): -71 | UserWarning("This is an assertion error") # PLW0133 - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - | -help: Add `raise` keyword -68 | -69 | # Test case 11: Useless warning statement -70 | def func(): - - UserWarning("This is an assertion error") # PLW0133 -71 + raise UserWarning("This is an assertion error") # PLW0133 -72 | -73 | -74 | # Non-violation test cases: PLW0133 -note: This is an unsafe fix and may change runtime behavior - -PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:126:1 + --> useless_exception_statement.py:104:5 | -124 | import builtins -125 | -126 | builtins.TypeError("still an exception even though it's an Attribute") - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -127 | -128 | PythonFinalizationError("Added in Python 3.13") +102 | # Test case 9: Useless exception statement in parentheses +103 | def func(): +104 | (RuntimeError("This is an exception")) # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +105 | (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 | help: Add `raise` keyword +101 | +102 | # Test case 9: Useless exception statement in parentheses +103 | def func(): + - (RuntimeError("This is an exception")) # PLW0133 +104 + raise 
(RuntimeError("This is an exception")) # PLW0133 +105 | (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 +107 | (MyValueError("This is an exception")) # PLW0133 +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:112:12 + | +110 | # Test case 10: Useless exception statement in continuation +111 | def func(): +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 + | +help: Add `raise` keyword +109 | +110 | # Test case 10: Useless exception statement in continuation +111 | def func(): + - x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 +112 + x = 1; raise (RuntimeError("This is an exception")); y = 2 # PLW0133 +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:120:5 + | +118 | # Test case 11: Useless warning statement +119 | def func(): +120 | UserWarning("This is a user warning") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +121 | MyUserWarning("This is a custom user warning") # PLW0133 + | +help: Add `raise` keyword +117 | +118 | # Test case 11: Useless warning statement +119 | def func(): + - UserWarning("This is a user warning") # PLW0133 +120 + raise UserWarning("This is a user warning") # PLW0133 +121 | MyUserWarning("This is a custom user warning") # PLW0133 +122 | 123 | -124 | import builtins -125 | - - builtins.TypeError("still an exception even though it's an Attribute") -126 + raise builtins.TypeError("still an exception even though it's an Attribute") -127 | -128 | PythonFinalizationError("Added in Python 3.13") note: This is an unsafe fix and may change runtime behavior PLW0133 [*] Missing `raise` statement on exception - --> useless_exception_statement.py:128:1 + --> useless_exception_statement.py:127:1 | -126 | builtins.TypeError("still an exception even though it's an Attribute") -127 | -128 | PythonFinalizationError("Added in Python 3.13") - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +125 | import builtins +126 | +127 | builtins.TypeError("still an exception even though it's an Attribute") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +128 | +129 | PythonFinalizationError("Added in Python 3.13") # PLW0133 | help: Add `raise` keyword -125 | -126 | builtins.TypeError("still an exception even though it's an Attribute") -127 | - - PythonFinalizationError("Added in Python 3.13") -128 + raise PythonFinalizationError("Added in Python 3.13") +124 | # Test case 12: Useless exception statement at module level +125 | import builtins +126 | + - builtins.TypeError("still an exception even though it's an Attribute") # PLW0133 +127 + raise builtins.TypeError("still an exception even though it's an Attribute") # PLW0133 +128 | +129 | PythonFinalizationError("Added in Python 3.13") # PLW0133 +130 | +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:129:1 + | +127 | builtins.TypeError("still an 
exception even though it's an Attribute") # PLW0133 +128 | +129 | PythonFinalizationError("Added in Python 3.13") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +130 | +131 | MyError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +126 | +127 | builtins.TypeError("still an exception even though it's an Attribute") # PLW0133 +128 | + - PythonFinalizationError("Added in Python 3.13") # PLW0133 +129 + raise PythonFinalizationError("Added in Python 3.13") # PLW0133 +130 | +131 | MyError("This is an exception") # PLW0133 +132 | +note: This is an unsafe fix and may change runtime behavior + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:137:1 + | +135 | MyValueError("This is an exception") # PLW0133 +136 | +137 | UserWarning("This is a user warning") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +138 | +139 | MyUserWarning("This is a custom user warning") # PLW0133 + | +help: Add `raise` keyword +134 | +135 | MyValueError("This is an exception") # PLW0133 +136 | + - UserWarning("This is a user warning") # PLW0133 +137 + raise UserWarning("This is a user warning") # PLW0133 +138 | +139 | MyUserWarning("This is a custom user warning") # PLW0133 +140 | note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__preview__PLW0133_useless_exception_statement.py.snap b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__preview__PLW0133_useless_exception_statement.py.snap new file mode 100644 index 0000000000..c6a1f16ba8 --- /dev/null +++ b/crates/ruff_linter/src/rules/pylint/snapshots/ruff_linter__rules__pylint__tests__preview__PLW0133_useless_exception_statement.py.snap @@ -0,0 +1,751 @@ +--- +source: crates/ruff_linter/src/rules/pylint/mod.rs +--- +--- Linter settings --- +-linter.preview = disabled ++linter.preview = enabled + +--- Summary --- +Removed: 0 +Added: 35 + +--- Added --- +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:27:5 + | +25 | def func(): +26 | AssertionError("This is an assertion error") # PLW0133 +27 | MyError("This is a custom error") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +28 | MySubError("This is a custom error") # PLW0133 +29 | MyValueError("This is a custom value error") # PLW0133 + | +help: Add `raise` keyword +24 | # Test case 1: Useless exception statement +25 | def func(): +26 | AssertionError("This is an assertion error") # PLW0133 + - MyError("This is a custom error") # PLW0133 +27 + raise MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 +29 | MyValueError("This is a custom value error") # PLW0133 +30 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:28:5 + | +26 | AssertionError("This is an assertion error") # PLW0133 +27 | MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +29 | MyValueError("This is a custom value error") # PLW0133 + | +help: Add `raise` keyword +25 | def func(): +26 | AssertionError("This is an assertion error") # PLW0133 +27 | MyError("This is a custom error") # PLW0133 + - MySubError("This is a custom error") # PLW0133 +28 + raise MySubError("This is a custom error") # PLW0133 +29 | MyValueError("This is a custom value error") # PLW0133 +30 | +31 | +note: This 
is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:29:5 + | +27 | MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 +29 | MyValueError("This is a custom value error") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +26 | AssertionError("This is an assertion error") # PLW0133 +27 | MyError("This is a custom error") # PLW0133 +28 | MySubError("This is a custom error") # PLW0133 + - MyValueError("This is a custom value error") # PLW0133 +29 + raise MyValueError("This is a custom value error") # PLW0133 +30 | +31 | +32 | # Test case 2: Useless exception statement in try-except block +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:36:9 + | +34 | try: +35 | Exception("This is an exception") # PLW0133 +36 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +37 | MySubError("This is an exception") # PLW0133 +38 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +33 | def func(): +34 | try: +35 | Exception("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +36 + raise MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 +38 | MyValueError("This is an exception") # PLW0133 +39 | except Exception as err: +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:37:9 + | +35 | Exception("This is an exception") # PLW0133 +36 | MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +38 | MyValueError("This is an exception") # PLW0133 +39 | except Exception as err: + | +help: Add `raise` keyword +34 | try: +35 | Exception("This is an exception") # PLW0133 +36 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +37 + raise MySubError("This is an exception") # PLW0133 +38 | MyValueError("This is an exception") # PLW0133 +39 | except Exception as err: +40 | pass +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:38:9 + | +36 | MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 +38 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +39 | except Exception as err: +40 | pass + | +help: Add `raise` keyword +35 | Exception("This is an exception") # PLW0133 +36 | MyError("This is an exception") # PLW0133 +37 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +38 + raise MyValueError("This is an exception") # PLW0133 +39 | except Exception as err: +40 | pass +41 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:47:9 + | +45 | if True: +46 | RuntimeError("This is an exception") # PLW0133 +47 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +48 | MySubError("This is an exception") # PLW0133 +49 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +44 | def func(): +45 | if True: +46 | RuntimeError("This is an 
exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +47 + raise MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 +49 | MyValueError("This is an exception") # PLW0133 +50 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:48:9 + | +46 | RuntimeError("This is an exception") # PLW0133 +47 | MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +49 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +45 | if True: +46 | RuntimeError("This is an exception") # PLW0133 +47 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +48 + raise MySubError("This is an exception") # PLW0133 +49 | MyValueError("This is an exception") # PLW0133 +50 | +51 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:49:9 + | +47 | MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 +49 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +46 | RuntimeError("This is an exception") # PLW0133 +47 | MyError("This is an exception") # PLW0133 +48 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +49 + raise MyValueError("This is an exception") # PLW0133 +50 | +51 | +52 | # Test case 4: Useless exception statement in class +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:57:13 + | +55 | def __init__(self): +56 | TypeError("This is an exception") # PLW0133 +57 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +58 | MySubError("This is an exception") # PLW0133 +59 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +54 | class Class: +55 | def __init__(self): +56 | TypeError("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +57 + raise MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 +59 | MyValueError("This is an exception") # PLW0133 +60 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:58:13 + | +56 | TypeError("This is an exception") # PLW0133 +57 | MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +59 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +55 | def __init__(self): +56 | TypeError("This is an exception") # PLW0133 +57 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +58 + raise MySubError("This is an exception") # PLW0133 +59 | MyValueError("This is an exception") # PLW0133 +60 | +61 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:59:13 + | +57 | MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 +59 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` 
keyword +56 | TypeError("This is an exception") # PLW0133 +57 | MyError("This is an exception") # PLW0133 +58 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +59 + raise MyValueError("This is an exception") # PLW0133 +60 | +61 | +62 | # Test case 5: Useless exception statement in function +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:66:9 + | +64 | def inner(): +65 | IndexError("This is an exception") # PLW0133 +66 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +67 | MySubError("This is an exception") # PLW0133 +68 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +63 | def func(): +64 | def inner(): +65 | IndexError("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +66 + raise MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 +68 | MyValueError("This is an exception") # PLW0133 +69 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:67:9 + | +65 | IndexError("This is an exception") # PLW0133 +66 | MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +68 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +64 | def inner(): +65 | IndexError("This is an exception") # PLW0133 +66 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +67 + raise MySubError("This is an exception") # PLW0133 +68 | MyValueError("This is an exception") # PLW0133 +69 | +70 | inner() +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:68:9 + | +66 | MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 +68 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +69 | +70 | inner() + | +help: Add `raise` keyword +65 | IndexError("This is an exception") # PLW0133 +66 | MyError("This is an exception") # PLW0133 +67 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +68 + raise MyValueError("This is an exception") # PLW0133 +69 | +70 | inner() +71 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:77:9 + | +75 | while True: +76 | KeyError("This is an exception") # PLW0133 +77 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +78 | MySubError("This is an exception") # PLW0133 +79 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +74 | def func(): +75 | while True: +76 | KeyError("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +77 + raise MyError("This is an exception") # PLW0133 +78 | MySubError("This is an exception") # PLW0133 +79 | MyValueError("This is an exception") # PLW0133 +80 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:78:9 + | +76 | KeyError("This is an exception") # PLW0133 +77 | MyError("This is an exception") # PLW0133 +78 | MySubError("This is an 
exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +79 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +75 | while True: +76 | KeyError("This is an exception") # PLW0133 +77 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +78 + raise MySubError("This is an exception") # PLW0133 +79 | MyValueError("This is an exception") # PLW0133 +80 | +81 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:79:9 + | +77 | MyError("This is an exception") # PLW0133 +78 | MySubError("This is an exception") # PLW0133 +79 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +76 | KeyError("This is an exception") # PLW0133 +77 | MyError("This is an exception") # PLW0133 +78 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +79 + raise MyValueError("This is an exception") # PLW0133 +80 | +81 | +82 | # Test case 7: Useless exception statement in abstract class +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:88:13 + | +86 | def method(self): +87 | NotImplementedError("This is an exception") # PLW0133 +88 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +89 | MySubError("This is an exception") # PLW0133 +90 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +85 | @abstractmethod +86 | def method(self): +87 | NotImplementedError("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +88 + raise MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 +90 | MyValueError("This is an exception") # PLW0133 +91 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:89:13 + | +87 | NotImplementedError("This is an exception") # PLW0133 +88 | MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +90 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +86 | def method(self): +87 | NotImplementedError("This is an exception") # PLW0133 +88 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +89 + raise MySubError("This is an exception") # PLW0133 +90 | MyValueError("This is an exception") # PLW0133 +91 | +92 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:90:13 + | +88 | MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 +90 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +87 | NotImplementedError("This is an exception") # PLW0133 +88 | MyError("This is an exception") # PLW0133 +89 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +90 + raise MyValueError("This is an exception") # PLW0133 +91 | +92 | +93 | # Test case 8: Useless exception statement inside context manager +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> 
useless_exception_statement.py:97:9 + | +95 | with suppress(Exception): +96 | AttributeError("This is an exception") # PLW0133 +97 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +98 | MySubError("This is an exception") # PLW0133 +99 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +94 | def func(): +95 | with suppress(Exception): +96 | AttributeError("This is an exception") # PLW0133 + - MyError("This is an exception") # PLW0133 +97 + raise MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 +99 | MyValueError("This is an exception") # PLW0133 +100 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:98:9 + | +96 | AttributeError("This is an exception") # PLW0133 +97 | MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +99 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +95 | with suppress(Exception): +96 | AttributeError("This is an exception") # PLW0133 +97 | MyError("This is an exception") # PLW0133 + - MySubError("This is an exception") # PLW0133 +98 + raise MySubError("This is an exception") # PLW0133 +99 | MyValueError("This is an exception") # PLW0133 +100 | +101 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:99:9 + | +97 | MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 +99 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +96 | AttributeError("This is an exception") # PLW0133 +97 | MyError("This is an exception") # PLW0133 +98 | MySubError("This is an exception") # PLW0133 + - MyValueError("This is an exception") # PLW0133 +99 + raise MyValueError("This is an exception") # PLW0133 +100 | +101 | +102 | # Test case 9: Useless exception statement in parentheses +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:105:5 + | +103 | def func(): +104 | (RuntimeError("This is an exception")) # PLW0133 +105 | (MyError("This is an exception")) # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +106 | (MySubError("This is an exception")) # PLW0133 +107 | (MyValueError("This is an exception")) # PLW0133 + | +help: Add `raise` keyword +102 | # Test case 9: Useless exception statement in parentheses +103 | def func(): +104 | (RuntimeError("This is an exception")) # PLW0133 + - (MyError("This is an exception")) # PLW0133 +105 + raise (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 +107 | (MyValueError("This is an exception")) # PLW0133 +108 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:106:5 + | +104 | (RuntimeError("This is an exception")) # PLW0133 +105 | (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +107 | (MyValueError("This is an exception")) # PLW0133 + | +help: Add `raise` keyword +103 | def func(): +104 | (RuntimeError("This is an exception")) # PLW0133 +105 | (MyError("This is an exception")) # PLW0133 + - 
(MySubError("This is an exception")) # PLW0133 +106 + raise (MySubError("This is an exception")) # PLW0133 +107 | (MyValueError("This is an exception")) # PLW0133 +108 | +109 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:107:5 + | +105 | (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 +107 | (MyValueError("This is an exception")) # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +104 | (RuntimeError("This is an exception")) # PLW0133 +105 | (MyError("This is an exception")) # PLW0133 +106 | (MySubError("This is an exception")) # PLW0133 + - (MyValueError("This is an exception")) # PLW0133 +107 + raise (MyValueError("This is an exception")) # PLW0133 +108 | +109 | +110 | # Test case 10: Useless exception statement in continuation +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:113:12 + | +111 | def func(): +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 + | +help: Add `raise` keyword +110 | # Test case 10: Useless exception statement in continuation +111 | def func(): +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 + - x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +113 + x = 1; raise (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 +116 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:114:12 + | +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 + | +help: Add `raise` keyword +111 | def func(): +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 + - x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 +114 + x = 1; raise (MySubError("This is an exception")); y = 2 # PLW0133 +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 +116 | +117 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:115:12 + | +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 +115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 +113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133 +114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133 + - x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133 +115 + x = 1; raise (MyValueError("This is an exception")); y 
= 2 # PLW0133 +116 | +117 | +118 | # Test case 11: Useless warning statement +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:121:5 + | +119 | def func(): +120 | UserWarning("This is a user warning") # PLW0133 +121 | MyUserWarning("This is a custom user warning") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +118 | # Test case 11: Useless warning statement +119 | def func(): +120 | UserWarning("This is a user warning") # PLW0133 + - MyUserWarning("This is a custom user warning") # PLW0133 +121 + raise MyUserWarning("This is a custom user warning") # PLW0133 +122 | +123 | +124 | # Test case 12: Useless exception statement at module level +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:131:1 + | +129 | PythonFinalizationError("Added in Python 3.13") # PLW0133 +130 | +131 | MyError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +132 | +133 | MySubError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +128 | +129 | PythonFinalizationError("Added in Python 3.13") # PLW0133 +130 | + - MyError("This is an exception") # PLW0133 +131 + raise MyError("This is an exception") # PLW0133 +132 | +133 | MySubError("This is an exception") # PLW0133 +134 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:133:1 + | +131 | MyError("This is an exception") # PLW0133 +132 | +133 | MySubError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +134 | +135 | MyValueError("This is an exception") # PLW0133 + | +help: Add `raise` keyword +130 | +131 | MyError("This is an exception") # PLW0133 +132 | + - MySubError("This is an exception") # PLW0133 +133 + raise MySubError("This is an exception") # PLW0133 +134 | +135 | MyValueError("This is an exception") # PLW0133 +136 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:135:1 + | +133 | MySubError("This is an exception") # PLW0133 +134 | +135 | MyValueError("This is an exception") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +136 | +137 | UserWarning("This is a user warning") # PLW0133 + | +help: Add `raise` keyword +132 | +133 | MySubError("This is an exception") # PLW0133 +134 | + - MyValueError("This is an exception") # PLW0133 +135 + raise MyValueError("This is an exception") # PLW0133 +136 | +137 | UserWarning("This is a user warning") # PLW0133 +138 | +note: This is an unsafe fix and may change runtime behavior + + +PLW0133 [*] Missing `raise` statement on exception + --> useless_exception_statement.py:139:1 + | +137 | UserWarning("This is a user warning") # PLW0133 +138 | +139 | MyUserWarning("This is a custom user warning") # PLW0133 + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: Add `raise` keyword +136 | +137 | UserWarning("This is a user warning") # PLW0133 +138 | + - MyUserWarning("This is a custom user warning") # PLW0133 +139 + raise MyUserWarning("This is a custom user warning") # PLW0133 +140 | +141 | +142 | # Non-violation test cases: PLW0133 +note: This is an unsafe fix and may change runtime behavior diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_generic_class.rs 
b/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_generic_class.rs index b0d273b7c4..02f8cdd1e8 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_generic_class.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_generic_class.rs @@ -204,7 +204,7 @@ pub(crate) fn non_pep695_generic_class(checker: &Checker, class_def: &StmtClassD arguments, Parentheses::Remove, checker.source(), - checker.comment_ranges(), + checker.tokens(), )?; Ok(Fix::unsafe_edits( Edit::insertion(type_params.to_string(), name.end()), diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_type_alias.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_type_alias.rs index 43e3ec8536..6b10c3bc07 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_type_alias.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/pep695/non_pep695_type_alias.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::name::Name; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::visitor::Visitor; use ruff_python_ast::{Expr, ExprCall, ExprName, Keyword, StmtAnnAssign, StmtAssign, StmtRef}; use ruff_text_size::{Ranged, TextRange}; @@ -261,11 +261,11 @@ fn create_diagnostic( type_alias_kind: TypeAliasKind, ) { let source = checker.source(); + let tokens = checker.tokens(); let comment_ranges = checker.comment_ranges(); let range_with_parentheses = - parenthesized_range(value.into(), stmt.into(), comment_ranges, source) - .unwrap_or(value.range()); + parenthesized_range(value.into(), stmt.into(), tokens).unwrap_or(value.range()); let content = format!( "type {name}{type_params} = {value}", diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/replace_stdout_stderr.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/replace_stdout_stderr.rs index 7c5dd2f027..6ce403647d 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/replace_stdout_stderr.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/replace_stdout_stderr.rs @@ -1,9 +1,8 @@ use anyhow::Result; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::{self as ast, Keyword}; +use ruff_python_ast::{self as ast, Keyword, token::Tokens}; use ruff_python_semantic::Modules; -use ruff_python_trivia::CommentRanges; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -104,7 +103,7 @@ pub(crate) fn replace_stdout_stderr(checker: &Checker, call: &ast::ExprCall) { stderr, call, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) }); } @@ -117,7 +116,7 @@ fn generate_fix( stderr: &Keyword, call: &ast::ExprCall, source: &str, - comment_ranges: &CommentRanges, + tokens: &Tokens, ) -> Result { let (first, second) = if stdout.start() < stderr.start() { (stdout, stderr) @@ -132,7 +131,7 @@ fn generate_fix( &call.arguments, Parentheses::Preserve, source, - comment_ranges, + tokens, )?], )) } diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/replace_universal_newlines.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/replace_universal_newlines.rs index 97000fa75f..b7b13c7b66 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/replace_universal_newlines.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/replace_universal_newlines.rs @@ -78,7 +78,7 @@ pub(crate) fn replace_universal_newlines(checker: &Checker, call: &ast::ExprCall &call.arguments, 
Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(Fix::safe_edit) }); diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs index 01cfe4fb03..792365042f 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/unnecessary_encode_utf8.rs @@ -188,7 +188,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) { &call.arguments, Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(Fix::safe_edit) }); @@ -206,7 +206,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) { &call.arguments, Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(Fix::safe_edit) }); @@ -231,7 +231,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) { &call.arguments, Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(Fix::safe_edit) }); @@ -249,7 +249,7 @@ pub(crate) fn unnecessary_encode_utf8(checker: &Checker, call: &ast::ExprCall) { &call.arguments, Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(Fix::safe_edit) }); diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/useless_class_metaclass_type.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/useless_class_metaclass_type.rs index 75bbe20eca..20d3f64461 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/useless_class_metaclass_type.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/useless_class_metaclass_type.rs @@ -70,7 +70,7 @@ pub(crate) fn useless_class_metaclass_type(checker: &Checker, class_def: &StmtCl arguments, Parentheses::Remove, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), )?; let range = edit.range(); diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/useless_object_inheritance.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/useless_object_inheritance.rs index 4a5789c78f..a1b0d900f8 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/useless_object_inheritance.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/useless_object_inheritance.rs @@ -73,7 +73,7 @@ pub(crate) fn useless_object_inheritance(checker: &Checker, class_def: &ast::Stm arguments, Parentheses::Remove, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), )?; let range = edit.range(); diff --git a/crates/ruff_linter/src/rules/pyupgrade/rules/yield_in_for_loop.rs b/crates/ruff_linter/src/rules/pyupgrade/rules/yield_in_for_loop.rs index 7fec2b7d79..55f2afc89a 100644 --- a/crates/ruff_linter/src/rules/pyupgrade/rules/yield_in_for_loop.rs +++ b/crates/ruff_linter/src/rules/pyupgrade/rules/yield_in_for_loop.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr, Stmt}; use ruff_text_size::Ranged; @@ -139,13 +139,8 @@ pub(crate) fn yield_in_for_loop(checker: &Checker, stmt_for: &ast::StmtFor) { let mut diagnostic = checker.report_diagnostic(YieldInForLoop, stmt_for.range()); let contents = checker.locator().slice( - parenthesized_range( - iter.as_ref().into(), - stmt_for.into(), - 
checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(iter.range()), + parenthesized_range(iter.as_ref().into(), stmt_for.into(), checker.tokens()) + .unwrap_or(iter.range()), ); let contents = if iter.as_tuple_expr().is_some_and(|it| !it.parenthesized) { format!("yield from ({contents})") diff --git a/crates/ruff_linter/src/rules/refurb/helpers.rs b/crates/ruff_linter/src/rules/refurb/helpers.rs index 0a09d70aba..a6871f0497 100644 --- a/crates/ruff_linter/src/rules/refurb/helpers.rs +++ b/crates/ruff_linter/src/rules/refurb/helpers.rs @@ -1,7 +1,7 @@ use std::borrow::Cow; use ruff_python_ast::PythonVersion; -use ruff_python_ast::{self as ast, Expr, name::Name, parenthesize::parenthesized_range}; +use ruff_python_ast::{self as ast, Expr, name::Name, token::parenthesized_range}; use ruff_python_codegen::Generator; use ruff_python_semantic::{BindingId, ResolvedReference, SemanticModel}; use ruff_text_size::{Ranged, TextRange}; @@ -330,12 +330,8 @@ pub(super) fn parenthesize_loop_iter_if_necessary<'a>( let locator = checker.locator(); let iter = for_stmt.iter.as_ref(); - let original_parenthesized_range = parenthesized_range( - iter.into(), - for_stmt.into(), - checker.comment_ranges(), - checker.source(), - ); + let original_parenthesized_range = + parenthesized_range(iter.into(), for_stmt.into(), checker.tokens()); if let Some(range) = original_parenthesized_range { return Cow::Borrowed(locator.slice(range)); diff --git a/crates/ruff_linter/src/rules/refurb/rules/fromisoformat_replace_z.rs b/crates/ruff_linter/src/rules/refurb/rules/fromisoformat_replace_z.rs index 3622148fbf..b2b3193d9e 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/fromisoformat_replace_z.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/fromisoformat_replace_z.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{ Expr, ExprAttribute, ExprBinOp, ExprCall, ExprStringLiteral, ExprSubscript, ExprUnaryOp, Number, Operator, PythonVersion, UnaryOp, @@ -112,8 +112,7 @@ pub(crate) fn fromisoformat_replace_z(checker: &Checker, call: &ExprCall) { let value_full_range = parenthesized_range( replace_time_zone.date.into(), replace_time_zone.parent.into(), - checker.comment_ranges(), - checker.source(), + checker.tokens(), ) .unwrap_or(replace_time_zone.date.range()); diff --git a/crates/ruff_linter/src/rules/refurb/rules/if_exp_instead_of_or_operator.rs b/crates/ruff_linter/src/rules/refurb/rules/if_exp_instead_of_or_operator.rs index fa660587ef..f4f4d1f7b7 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/if_exp_instead_of_or_operator.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/if_exp_instead_of_or_operator.rs @@ -5,8 +5,7 @@ use ruff_python_ast as ast; use ruff_python_ast::Expr; use ruff_python_ast::comparable::ComparableExpr; use ruff_python_ast::helpers::contains_effect; -use ruff_python_ast::parenthesize::parenthesized_range; -use ruff_python_trivia::CommentRanges; +use ruff_python_ast::token::{Tokens, parenthesized_range}; use ruff_text_size::Ranged; use crate::Locator; @@ -76,8 +75,8 @@ pub(crate) fn if_exp_instead_of_or_operator(checker: &Checker, if_expr: &ast::Ex Edit::range_replacement( format!( "{} or {}", - parenthesize_test(test, if_expr, checker.comment_ranges(), checker.locator()), - parenthesize_test(orelse, if_expr, checker.comment_ranges(), checker.locator()), + parenthesize_test(test, if_expr, checker.tokens(), 
checker.locator()), + parenthesize_test(orelse, if_expr, checker.tokens(), checker.locator()), ), if_expr.range(), ), @@ -99,15 +98,10 @@ pub(crate) fn if_exp_instead_of_or_operator(checker: &Checker, if_expr: &ast::Ex fn parenthesize_test<'a>( expr: &Expr, if_expr: &ast::ExprIf, - comment_ranges: &CommentRanges, + tokens: &Tokens, locator: &Locator<'a>, ) -> Cow<'a, str> { - if let Some(range) = parenthesized_range( - expr.into(), - if_expr.into(), - comment_ranges, - locator.contents(), - ) { + if let Some(range) = parenthesized_range(expr.into(), if_expr.into(), tokens) { Cow::Borrowed(locator.slice(range)) } else if matches!(expr, Expr::If(_) | Expr::Lambda(_) | Expr::Named(_)) { Cow::Owned(format!("({})", locator.slice(expr.range()))) diff --git a/crates/ruff_linter/src/rules/refurb/rules/readlines_in_for.rs b/crates/ruff_linter/src/rules/refurb/rules/readlines_in_for.rs index a6ea1eb570..943a013cbb 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/readlines_in_for.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/readlines_in_for.rs @@ -1,6 +1,6 @@ use ruff_diagnostics::Applicability; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{Comprehension, Expr, StmtFor}; use ruff_python_semantic::analyze::typing; use ruff_python_semantic::analyze::typing::is_io_base_expr; @@ -104,8 +104,7 @@ fn readlines_in_iter(checker: &Checker, iter_expr: &Expr) { let deletion_range = if let Some(parenthesized_range) = parenthesized_range( expr_attr.value.as_ref().into(), expr_attr.into(), - checker.comment_ranges(), - checker.source(), + checker.tokens(), ) { expr_call.range().add_start(parenthesized_range.len()) } else { diff --git a/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs b/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs index a54bd261d1..35774cde28 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/redundant_log_base.rs @@ -1,7 +1,7 @@ use anyhow::Result; use ruff_diagnostics::Applicability; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr, Number}; use ruff_text_size::Ranged; @@ -152,13 +152,8 @@ fn generate_fix(checker: &Checker, call: &ast::ExprCall, base: Base, arg: &Expr) checker.semantic(), )?; - let arg_range = parenthesized_range( - arg.into(), - call.into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(arg.range()); + let arg_range = + parenthesized_range(arg.into(), call.into(), checker.tokens()).unwrap_or(arg.range()); let arg_str = checker.locator().slice(arg_range); Ok(Fix::applicable_edits( diff --git a/crates/ruff_linter/src/rules/refurb/rules/single_item_membership_test.rs b/crates/ruff_linter/src/rules/refurb/rules/single_item_membership_test.rs index 1df2fdde78..04b00f07f7 100644 --- a/crates/ruff_linter/src/rules/refurb/rules/single_item_membership_test.rs +++ b/crates/ruff_linter/src/rules/refurb/rules/single_item_membership_test.rs @@ -95,7 +95,7 @@ pub(crate) fn single_item_membership_test( &[membership_test.replacement_op()], std::slice::from_ref(item), expr.into(), - checker.comment_ranges(), + checker.tokens(), checker.source(), ), expr.range(), diff --git a/crates/ruff_linter/src/rules/ruff/mod.rs b/crates/ruff_linter/src/rules/ruff/mod.rs 
index 5f06ffdb9f..c2d03fb1ae 100644 --- a/crates/ruff_linter/src/rules/ruff/mod.rs +++ b/crates/ruff_linter/src/rules/ruff/mod.rs @@ -305,6 +305,25 @@ mod tests { Ok(()) } + #[test] + fn range_suppressions() -> Result<()> { + assert_diagnostics_diff!( + Path::new("ruff/suppressions.py"), + &settings::LinterSettings::for_rules(vec![ + Rule::UnusedVariable, + Rule::AmbiguousVariableName, + Rule::UnusedNOQA, + ]), + &settings::LinterSettings::for_rules(vec![ + Rule::UnusedVariable, + Rule::AmbiguousVariableName, + Rule::UnusedNOQA, + ]) + .with_preview_mode(), + ); + Ok(()) + } + #[test] fn ruf100_0() -> Result<()> { let diagnostics = test_path( diff --git a/crates/ruff_linter/src/rules/ruff/rules/class_with_mixed_type_vars.rs b/crates/ruff_linter/src/rules/ruff/rules/class_with_mixed_type_vars.rs index 4178217718..7b023de830 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/class_with_mixed_type_vars.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/class_with_mixed_type_vars.rs @@ -163,7 +163,7 @@ fn convert_type_vars( class_arguments, Parentheses::Remove, source, - checker.comment_ranges(), + checker.tokens(), )?; let replace_type_params = Edit::range_replacement(new_type_params.to_string(), type_params.range); diff --git a/crates/ruff_linter/src/rules/ruff/rules/default_factory_kwarg.rs b/crates/ruff_linter/src/rules/ruff/rules/default_factory_kwarg.rs index ea792ce97c..70ef8cb6d4 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/default_factory_kwarg.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/default_factory_kwarg.rs @@ -3,8 +3,8 @@ use anyhow::Result; use ast::Keyword; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::is_constant; +use ruff_python_ast::token::Tokens; use ruff_python_ast::{self as ast, Expr}; -use ruff_python_trivia::CommentRanges; use ruff_text_size::Ranged; use crate::Locator; @@ -108,9 +108,8 @@ pub(crate) fn default_factory_kwarg(checker: &Checker, call: &ast::ExprCall) { }, call.range(), ); - diagnostic.try_set_fix(|| { - convert_to_positional(call, keyword, checker.locator(), checker.comment_ranges()) - }); + diagnostic + .try_set_fix(|| convert_to_positional(call, keyword, checker.locator(), checker.tokens())); } /// Returns `true` if a value is definitively not callable (e.g., `1` or `[]`). @@ -136,7 +135,7 @@ fn convert_to_positional( call: &ast::ExprCall, default_factory: &Keyword, locator: &Locator, - comment_ranges: &CommentRanges, + tokens: &Tokens, ) -> Result { if call.arguments.len() == 1 { // Ex) `defaultdict(default_factory=list)` @@ -153,7 +152,7 @@ fn convert_to_positional( &call.arguments, Parentheses::Preserve, locator.contents(), - comment_ranges, + tokens, )?; // Second, insert the value as the first positional argument. 
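For reference, a minimal Python sketch (illustrative only; the rule name and fix shape are taken from the `default_factory_kwarg` hunk above, and the variable names here are hypothetical) of the pattern that `convert_to_positional` rewrites:

```python
from collections import defaultdict

# Passing `default_factory` as a keyword does not install a factory: `defaultdict`
# forwards unknown keyword arguments to `dict`, so this only seeds the mapping with
# a literal "default_factory" key, and missing keys still raise `KeyError`.
d = defaultdict(default_factory=list)

# The fix moves the value into the first positional slot instead:
d = defaultdict(list)
d["missing"].append(1)  # the factory is now used for absent keys
```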
diff --git a/crates/ruff_linter/src/rules/ruff/rules/falsy_dict_get_fallback.rs b/crates/ruff_linter/src/rules/ruff/rules/falsy_dict_get_fallback.rs index a19a9c451a..de3c072bdd 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/falsy_dict_get_fallback.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/falsy_dict_get_fallback.rs @@ -128,7 +128,7 @@ pub(crate) fn falsy_dict_get_fallback(checker: &Checker, expr: &Expr) { &call.arguments, Parentheses::Preserve, checker.locator().contents(), - checker.comment_ranges(), + checker.tokens(), ) .map(|edit| Fix::applicable_edit(edit, applicability)) }); diff --git a/crates/ruff_linter/src/rules/ruff/rules/parenthesize_chained_operators.rs b/crates/ruff_linter/src/rules/ruff/rules/parenthesize_chained_operators.rs index 7433f63f2b..6e2351aafb 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/parenthesize_chained_operators.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/parenthesize_chained_operators.rs @@ -1,6 +1,6 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -77,14 +77,7 @@ pub(crate) fn parenthesize_chained_logical_operators(checker: &Checker, expr: &a ) => { let locator = checker.locator(); let source_range = bool_op.range(); - if parenthesized_range( - bool_op.into(), - expr.into(), - checker.comment_ranges(), - locator.contents(), - ) - .is_none() - { + if parenthesized_range(bool_op.into(), expr.into(), checker.tokens()).is_none() { let new_source = format!("({})", locator.slice(source_range)); let edit = Edit::range_replacement(new_source, source_range); checker diff --git a/crates/ruff_linter/src/rules/ruff/rules/post_init_default.rs b/crates/ruff_linter/src/rules/ruff/rules/post_init_default.rs index 36941a98c9..2ec2472262 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/post_init_default.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/post_init_default.rs @@ -2,7 +2,7 @@ use anyhow::Context; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast as ast; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_semantic::{Scope, ScopeKind}; use ruff_python_trivia::{indentation_at_offset, textwrap}; use ruff_source_file::LineRanges; @@ -159,8 +159,7 @@ fn use_initvar( let default_loc = parenthesized_range( default.into(), parameter_with_default.into(), - checker.comment_ranges(), - checker.source(), + checker.tokens(), ) .unwrap_or(default.range()); diff --git a/crates/ruff_linter/src/rules/ruff/rules/quadratic_list_summation.rs b/crates/ruff_linter/src/rules/ruff/rules/quadratic_list_summation.rs index d4e062ad71..a38fd2fd6d 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/quadratic_list_summation.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/quadratic_list_summation.rs @@ -2,7 +2,7 @@ use anyhow::Result; use itertools::Itertools; use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Arguments, Expr}; use ruff_python_semantic::SemanticModel; use ruff_text_size::Ranged; @@ -116,13 +116,8 @@ fn convert_to_reduce(iterable: &Expr, call: &ast::ExprCall, checker: &Checker) - )?; let iterable = checker.locator().slice( - parenthesized_range( - iterable.into(), - 
(&call.arguments).into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(iterable.range()), + parenthesized_range(iterable.into(), (&call.arguments).into(), checker.tokens()) + .unwrap_or(iterable.range()), ); Ok(Fix::unsafe_edits( diff --git a/crates/ruff_linter/src/rules/ruff/rules/starmap_zip.rs b/crates/ruff_linter/src/rules/ruff/rules/starmap_zip.rs index e9ed8d31bb..79881b8abc 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/starmap_zip.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/starmap_zip.rs @@ -1,7 +1,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::PythonVersion; use ruff_python_ast::token::TokenKind; -use ruff_python_ast::{Expr, ExprCall, parenthesize::parenthesized_range}; +use ruff_python_ast::{Expr, ExprCall, token::parenthesized_range}; use ruff_text_size::{Ranged, TextRange}; use crate::checkers::ast::Checker; @@ -124,13 +124,8 @@ fn replace_with_map(starmap: &ExprCall, zip: &ExprCall, checker: &Checker) -> Op let mut remove_zip = vec![]; - let full_zip_range = parenthesized_range( - zip.into(), - starmap.into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(zip.range()); + let full_zip_range = + parenthesized_range(zip.into(), starmap.into(), checker.tokens()).unwrap_or(zip.range()); // Delete any parentheses around the `zip` call to prevent that the argument turns into a tuple. remove_zip.push(Edit::range_deletion(TextRange::new( @@ -138,13 +133,8 @@ fn replace_with_map(starmap: &ExprCall, zip: &ExprCall, checker: &Checker) -> Op zip.start(), ))); - let full_zip_func_range = parenthesized_range( - (&zip.func).into(), - zip.into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(zip.func.range()); + let full_zip_func_range = parenthesized_range((&zip.func).into(), zip.into(), checker.tokens()) + .unwrap_or(zip.func.range()); // Delete the `zip` callee remove_zip.push(Edit::range_deletion(full_zip_func_range)); diff --git a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_cast_to_int.rs b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_cast_to_int.rs index b3c34c29e0..453aa07801 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_cast_to_int.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_cast_to_int.rs @@ -1,5 +1,5 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::{Tokens, parenthesized_range}; use ruff_python_ast::{Arguments, Expr, ExprCall}; use ruff_python_semantic::SemanticModel; use ruff_python_semantic::analyze::type_inference::{NumberLike, PythonType, ResolvedPythonType}; @@ -86,6 +86,7 @@ pub(crate) fn unnecessary_cast_to_int(checker: &Checker, call: &ExprCall) { applicability, checker.semantic(), checker.locator(), + checker.tokens(), checker.comment_ranges(), checker.source(), ); @@ -95,27 +96,26 @@ pub(crate) fn unnecessary_cast_to_int(checker: &Checker, call: &ExprCall) { } /// Creates a fix that replaces `int(expression)` with `expression`. 
+#[allow(clippy::too_many_arguments)] fn unwrap_int_expression( call: &ExprCall, argument: &Expr, applicability: Applicability, semantic: &SemanticModel, locator: &Locator, + tokens: &Tokens, comment_ranges: &CommentRanges, source: &str, ) -> Fix { - let content = if let Some(range) = parenthesized_range( - argument.into(), - (&call.arguments).into(), - comment_ranges, - source, - ) { + let content = if let Some(range) = + parenthesized_range(argument.into(), (&call.arguments).into(), tokens) + { locator.slice(range).to_string() } else { let parenthesize = semantic.current_expression_parent().is_some() || argument.is_named_expr() || locator.count_lines(argument.range()) > 0; - if parenthesize && !has_own_parentheses(argument, comment_ranges, source) { + if parenthesize && !has_own_parentheses(argument, tokens, source) { format!("({})", locator.slice(argument.range())) } else { locator.slice(argument.range()).to_string() @@ -255,7 +255,7 @@ fn round_applicability(arguments: &Arguments, semantic: &SemanticModel) -> Optio } /// Returns `true` if the given [`Expr`] has its own parentheses (e.g., `()`, `[]`, `{}`). -fn has_own_parentheses(expr: &Expr, comment_ranges: &CommentRanges, source: &str) -> bool { +fn has_own_parentheses(expr: &Expr, tokens: &Tokens, source: &str) -> bool { match expr { Expr::ListComp(_) | Expr::SetComp(_) @@ -276,14 +276,10 @@ fn has_own_parentheses(expr: &Expr, comment_ranges: &CommentRanges, source: &str // f // (10) // ``` - let func_end = parenthesized_range( - call_expr.func.as_ref().into(), - call_expr.into(), - comment_ranges, - source, - ) - .unwrap_or(call_expr.func.range()) - .end(); + let func_end = + parenthesized_range(call_expr.func.as_ref().into(), call_expr.into(), tokens) + .unwrap_or(call_expr.func.range()) + .end(); lines_after_ignoring_trivia(func_end, source) == 0 } Expr::Subscript(subscript_expr) => { @@ -291,8 +287,7 @@ fn has_own_parentheses(expr: &Expr, comment_ranges: &CommentRanges, source: &str let subscript_end = parenthesized_range( subscript_expr.value.as_ref().into(), subscript_expr.into(), - comment_ranges, - source, + tokens, ) .unwrap_or(subscript_expr.value.range()) .end(); diff --git a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_key_check.rs b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_key_check.rs index 7c13fb3d1c..502391dcf2 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_key_check.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_key_check.rs @@ -3,7 +3,7 @@ use ruff_python_ast::{self as ast, BoolOp, CmpOp, Expr}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::contains_effect; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_text_size::Ranged; use crate::checkers::ast::Checker; @@ -108,22 +108,12 @@ pub(crate) fn unnecessary_key_check(checker: &Checker, expr: &Expr) { format!( "{}.get({})", checker.locator().slice( - parenthesized_range( - obj_right.into(), - right.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(obj_right.range()) + parenthesized_range(obj_right.into(), right.into(), checker.tokens(),) + .unwrap_or(obj_right.range()) ), checker.locator().slice( - parenthesized_range( - key_right.into(), - right.into(), - checker.comment_ranges(), - checker.locator().contents(), - ) - .unwrap_or(key_right.range()) + parenthesized_range(key_right.into(), right.into(), checker.tokens(),) + .unwrap_or(key_right.range()) ), ), expr.range(), 
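Similarly, a small Python sketch (illustrative; the rewrite shape follows the `unnecessary_key_check` hunk above, where the object and key are now sliced via the token-based `parenthesized_range`, and the variable names are hypothetical) of the pattern being fixed:

```python
d = {"k": 1}

# The membership test is redundant; the fix rewrites the expression to `d.get("k")`.
value = "k" in d and d["k"]

# If the accessed object or key is parenthesized, the parenthesized source text is
# sliced as-is into the generated `.get()` call, e.g. `(d).get(("k"))` here.
value = "k" in d and (d)[("k")]
```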
diff --git a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_literal_within_deque_call.rs b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_literal_within_deque_call.rs index fd31765715..b322d57fd6 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unnecessary_literal_within_deque_call.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unnecessary_literal_within_deque_call.rs @@ -2,7 +2,7 @@ use ruff_diagnostics::{Applicability, Edit}; use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_python_ast::helpers::is_empty_f_string; -use ruff_python_ast::parenthesize::parenthesized_range; +use ruff_python_ast::token::parenthesized_range; use ruff_python_ast::{self as ast, Expr}; use ruff_text_size::Ranged; @@ -140,31 +140,19 @@ fn fix_unnecessary_literal_in_deque( // call. otherwise, we only delete the `iterable` argument and leave the others untouched. let edit = if let Some(maxlen) = maxlen { let deque_name = checker.locator().slice( - parenthesized_range( - deque.func.as_ref().into(), - deque.into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(deque.func.range()), + parenthesized_range(deque.func.as_ref().into(), deque.into(), checker.tokens()) + .unwrap_or(deque.func.range()), ); let len_str = checker.locator().slice(maxlen); let deque_str = format!("{deque_name}(maxlen={len_str})"); Edit::range_replacement(deque_str, deque.range) } else { - let range = parenthesized_range( - iterable.value().into(), - (&deque.arguments).into(), - checker.comment_ranges(), - checker.source(), - ) - .unwrap_or(iterable.range()); remove_argument( - &range, + &iterable, &deque.arguments, Parentheses::Preserve, checker.source(), - checker.comment_ranges(), + checker.tokens(), )? }; let has_comments = checker.comment_ranges().intersects(edit.range()); diff --git a/crates/ruff_linter/src/rules/ruff/rules/unused_noqa.rs b/crates/ruff_linter/src/rules/ruff/rules/unused_noqa.rs index e4645a5541..d6e4ce94d9 100644 --- a/crates/ruff_linter/src/rules/ruff/rules/unused_noqa.rs +++ b/crates/ruff_linter/src/rules/ruff/rules/unused_noqa.rs @@ -4,7 +4,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats}; use crate::AlwaysFixableViolation; -#[derive(Debug, PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq, Default)] pub(crate) struct UnusedCodes { pub disabled: Vec, pub duplicated: Vec, @@ -12,6 +12,21 @@ pub(crate) struct UnusedCodes { pub unmatched: Vec, } +#[derive(Debug, PartialEq, Eq)] +pub(crate) enum UnusedNOQAKind { + Noqa, + Suppression, +} + +impl UnusedNOQAKind { + fn as_str(&self) -> &str { + match self { + UnusedNOQAKind::Noqa => "`noqa` directive", + UnusedNOQAKind::Suppression => "suppression", + } + } +} + /// ## What it does /// Checks for `noqa` directives that are no longer applicable. 
/// @@ -46,6 +61,7 @@ pub(crate) struct UnusedCodes { #[violation_metadata(stable_since = "v0.0.155")] pub(crate) struct UnusedNOQA { pub codes: Option, + pub kind: UnusedNOQAKind, } impl AlwaysFixableViolation for UnusedNOQA { @@ -95,16 +111,20 @@ impl AlwaysFixableViolation for UnusedNOQA { )); } if codes_by_reason.is_empty() { - "Unused `noqa` directive".to_string() + format!("Unused {}", self.kind.as_str()) } else { - format!("Unused `noqa` directive ({})", codes_by_reason.join("; ")) + format!( + "Unused {} ({})", + self.kind.as_str(), + codes_by_reason.join("; ") + ) } } - None => "Unused blanket `noqa` directive".to_string(), + None => format!("Unused blanket {}", self.kind.as_str()), } } fn fix_title(&self) -> String { - "Remove unused `noqa` directive".to_string() + format!("Remove unused {}", self.kind.as_str()) } } diff --git a/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__range_suppressions.snap b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__range_suppressions.snap new file mode 100644 index 0000000000..24a43ade5e --- /dev/null +++ b/crates/ruff_linter/src/rules/ruff/snapshots/ruff_linter__rules__ruff__tests__range_suppressions.snap @@ -0,0 +1,451 @@ +--- +source: crates/ruff_linter/src/rules/ruff/mod.rs +--- +--- Linter settings --- +-linter.preview = disabled ++linter.preview = enabled + +--- Summary --- +Removed: 14 +Added: 11 + +--- Removed --- +E741 Ambiguous variable name: `I` + --> suppressions.py:4:5 + | +2 | # These should both be ignored by the range suppression. +3 | # ruff: disable[E741, F841] +4 | I = 1 + | ^ +5 | # ruff: enable[E741, F841] + | + + +F841 [*] Local variable `I` is assigned to but never used + --> suppressions.py:4:5 + | +2 | # These should both be ignored by the range suppression. +3 | # ruff: disable[E741, F841] +4 | I = 1 + | ^ +5 | # ruff: enable[E741, F841] + | +help: Remove assignment to unused variable `I` +1 | def f(): +2 | # These should both be ignored by the range suppression. +3 | # ruff: disable[E741, F841] + - I = 1 +4 + pass +5 | # ruff: enable[E741, F841] +6 | +7 | +note: This is an unsafe fix and may change runtime behavior + + +E741 Ambiguous variable name: `I` + --> suppressions.py:12:5 + | +10 | # Should also generate an "unmatched suppression" warning. +11 | # ruff:disable[E741,F841] +12 | I = 1 + | ^ + | + + +F841 [*] Local variable `I` is assigned to but never used + --> suppressions.py:12:5 + | +10 | # Should also generate an "unmatched suppression" warning. +11 | # ruff:disable[E741,F841] +12 | I = 1 + | ^ + | +help: Remove assignment to unused variable `I` +9 | # These should both be ignored by the implicit range suppression. +10 | # Should also generate an "unmatched suppression" warning. +11 | # ruff:disable[E741,F841] + - I = 1 +12 + pass +13 | +14 | +15 | def f(): +note: This is an unsafe fix and may change runtime behavior + + +E741 Ambiguous variable name: `I` + --> suppressions.py:26:5 + | +24 | # the other logged to the user. +25 | # ruff: disable[E741] +26 | I = 1 + | ^ +27 | # ruff: enable[E741] + | + + +E741 Ambiguous variable name: `l` + --> suppressions.py:35:5 + | +33 | # middle line should be completely silenced. 
+34 | # ruff: disable[E741] +35 | l = 0 + | ^ +36 | # ruff: disable[F841] +37 | O = 1 + | + + +E741 Ambiguous variable name: `O` + --> suppressions.py:37:5 + | +35 | l = 0 +36 | # ruff: disable[F841] +37 | O = 1 + | ^ +38 | # ruff: enable[E741] +39 | I = 2 + | + + +F841 [*] Local variable `O` is assigned to but never used + --> suppressions.py:37:5 + | +35 | l = 0 +36 | # ruff: disable[F841] +37 | O = 1 + | ^ +38 | # ruff: enable[E741] +39 | I = 2 + | +help: Remove assignment to unused variable `O` +34 | # ruff: disable[E741] +35 | l = 0 +36 | # ruff: disable[F841] + - O = 1 +37 | # ruff: enable[E741] +38 | I = 2 +39 | # ruff: enable[F841] +note: This is an unsafe fix and may change runtime behavior + + +F841 [*] Local variable `I` is assigned to but never used + --> suppressions.py:39:5 + | +37 | O = 1 +38 | # ruff: enable[E741] +39 | I = 2 + | ^ +40 | # ruff: enable[F841] + | +help: Remove assignment to unused variable `I` +36 | # ruff: disable[F841] +37 | O = 1 +38 | # ruff: enable[E741] + - I = 2 +39 | # ruff: enable[F841] +40 | +41 | +note: This is an unsafe fix and may change runtime behavior + + +F841 [*] Local variable `foo` is assigned to but never used + --> suppressions.py:62:5 + | +60 | # TODO: Duplicate codes should be counted as duplicate, not unused +61 | # ruff: disable[F841, F841] +62 | foo = 0 + | ^^^ + | +help: Remove assignment to unused variable `foo` +59 | def f(): +60 | # TODO: Duplicate codes should be counted as duplicate, not unused +61 | # ruff: disable[F841, F841] + - foo = 0 +62 + pass +63 | +64 | +65 | def f(): +note: This is an unsafe fix and may change runtime behavior + + +F841 [*] Local variable `foo` is assigned to but never used + --> suppressions.py:70:5 + | +68 | # ruff: disable[F841] +69 | # ruff: disable[F841] +70 | foo = 0 + | ^^^ + | +help: Remove assignment to unused variable `foo` +67 | # and the other should trigger an unused suppression diagnostic +68 | # ruff: disable[F841] +69 | # ruff: disable[F841] + - foo = 0 +70 + pass +71 | +72 | +73 | def f(): +note: This is an unsafe fix and may change runtime behavior + + +F841 [*] Local variable `foo` is assigned to but never used + --> suppressions.py:76:5 + | +74 | # Multiple codes but only one is used +75 | # ruff: disable[E741, F401, F841] +76 | foo = 0 + | ^^^ + | +help: Remove assignment to unused variable `foo` +73 | def f(): +74 | # Multiple codes but only one is used +75 | # ruff: disable[E741, F401, F841] + - foo = 0 +76 + pass +77 | +78 | +79 | def f(): +note: This is an unsafe fix and may change runtime behavior + + +E741 Ambiguous variable name: `I` + --> suppressions.py:82:5 + | +80 | # Multiple codes but only two are used +81 | # ruff: disable[E741, F401, F841] +82 | I = 0 + | ^ + | + + +F841 [*] Local variable `I` is assigned to but never used + --> suppressions.py:82:5 + | +80 | # Multiple codes but only two are used +81 | # ruff: disable[E741, F401, F841] +82 | I = 0 + | ^ + | +help: Remove assignment to unused variable `I` +79 | def f(): +80 | # Multiple codes but only two are used +81 | # ruff: disable[E741, F401, F841] + - I = 0 +82 + pass +83 | +84 | +85 | def f(): +note: This is an unsafe fix and may change runtime behavior + + + +--- Added --- +RUF100 [*] Unused suppression (non-enabled: `E501`) + --> suppressions.py:46:5 + | +44 | # Neither of these are ignored and warnings are +45 | # logged to user +46 | # ruff: disable[E501] + | ^^^^^^^^^^^^^^^^^^^^^ +47 | I = 1 +48 | # ruff: enable[E501] + | +help: Remove unused suppression +43 | def f(): +44 | # Neither of these are 
ignored and warnings are +45 | # logged to user + - # ruff: disable[E501] +46 | I = 1 +47 | # ruff: enable[E501] +48 | + + +RUF100 [*] Unused suppression (non-enabled: `E501`) + --> suppressions.py:48:5 + | +46 | # ruff: disable[E501] +47 | I = 1 +48 | # ruff: enable[E501] + | ^^^^^^^^^^^^^^^^^^^^ + | +help: Remove unused suppression +45 | # logged to user +46 | # ruff: disable[E501] +47 | I = 1 + - # ruff: enable[E501] +48 | +49 | +50 | def f(): + + +RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`) + --> suppressions.py:55:12 + | +53 | # and an unusued noqa diagnostic should be logged. +54 | # ruff:disable[E741,F841] +55 | I = 1 # noqa: E741,F841 + | ^^^^^^^^^^^^^^^^^ +56 | # ruff:enable[E741,F841] + | +help: Remove unused `noqa` directive +52 | # These should both be ignored by the range suppression, +53 | # and an unusued noqa diagnostic should be logged. +54 | # ruff:disable[E741,F841] + - I = 1 # noqa: E741,F841 +55 + I = 1 +56 | # ruff:enable[E741,F841] +57 | +58 | + + +RUF100 [*] Unused suppression (unused: `F841`) + --> suppressions.py:61:21 + | +59 | def f(): +60 | # TODO: Duplicate codes should be counted as duplicate, not unused +61 | # ruff: disable[F841, F841] + | ^^^^ +62 | foo = 0 + | +help: Remove unused suppression +58 | +59 | def f(): +60 | # TODO: Duplicate codes should be counted as duplicate, not unused + - # ruff: disable[F841, F841] +61 + # ruff: disable[F841] +62 | foo = 0 +63 | +64 | + + +RUF100 [*] Unused suppression (unused: `F841`) + --> suppressions.py:69:5 + | +67 | # and the other should trigger an unused suppression diagnostic +68 | # ruff: disable[F841] +69 | # ruff: disable[F841] + | ^^^^^^^^^^^^^^^^^^^^^ +70 | foo = 0 + | +help: Remove unused suppression +66 | # Overlapping range suppressions, one should be marked as used, +67 | # and the other should trigger an unused suppression diagnostic +68 | # ruff: disable[F841] + - # ruff: disable[F841] +69 | foo = 0 +70 | +71 | + + +RUF100 [*] Unused suppression (unused: `E741`) + --> suppressions.py:75:21 + | +73 | def f(): +74 | # Multiple codes but only one is used +75 | # ruff: disable[E741, F401, F841] + | ^^^^ +76 | foo = 0 + | +help: Remove unused suppression +72 | +73 | def f(): +74 | # Multiple codes but only one is used + - # ruff: disable[E741, F401, F841] +75 + # ruff: disable[F401, F841] +76 | foo = 0 +77 | +78 | + + +RUF100 [*] Unused suppression (non-enabled: `F401`) + --> suppressions.py:75:27 + | +73 | def f(): +74 | # Multiple codes but only one is used +75 | # ruff: disable[E741, F401, F841] + | ^^^^ +76 | foo = 0 + | +help: Remove unused suppression +72 | +73 | def f(): +74 | # Multiple codes but only one is used + - # ruff: disable[E741, F401, F841] +75 + # ruff: disable[E741, F841] +76 | foo = 0 +77 | +78 | + + +RUF100 [*] Unused suppression (non-enabled: `F401`) + --> suppressions.py:81:27 + | +79 | def f(): +80 | # Multiple codes but only two are used +81 | # ruff: disable[E741, F401, F841] + | ^^^^ +82 | I = 0 + | +help: Remove unused suppression +78 | +79 | def f(): +80 | # Multiple codes but only two are used + - # ruff: disable[E741, F401, F841] +81 + # ruff: disable[E741, F841] +82 | I = 0 +83 | +84 | + + +RUF100 [*] Unused suppression (unused: `E741`) + --> suppressions.py:87:21 + | +85 | def f(): +86 | # Multiple codes but none are used +87 | # ruff: disable[E741, F401, F841] + | ^^^^ +88 | print("hello") + | +help: Remove unused suppression +84 | +85 | def f(): +86 | # Multiple codes but none are used + - # ruff: disable[E741, F401, F841] +87 + # ruff: disable[F401, 
F841] +88 | print("hello") + + +RUF100 [*] Unused suppression (non-enabled: `F401`) + --> suppressions.py:87:27 + | +85 | def f(): +86 | # Multiple codes but none are used +87 | # ruff: disable[E741, F401, F841] + | ^^^^ +88 | print("hello") + | +help: Remove unused suppression +84 | +85 | def f(): +86 | # Multiple codes but none are used + - # ruff: disable[E741, F401, F841] +87 + # ruff: disable[E741, F841] +88 | print("hello") + + +RUF100 [*] Unused suppression (unused: `F841`) + --> suppressions.py:87:33 + | +85 | def f(): +86 | # Multiple codes but none are used +87 | # ruff: disable[E741, F401, F841] + | ^^^^ +88 | print("hello") + | +help: Remove unused suppression +84 | +85 | def f(): +86 | # Multiple codes but none are used + - # ruff: disable[E741, F401, F841] +87 + # ruff: disable[E741, F401] +88 | print("hello") diff --git a/crates/ruff_linter/src/settings/mod.rs b/crates/ruff_linter/src/settings/mod.rs index b94e4edafb..5d5e35aa8d 100644 --- a/crates/ruff_linter/src/settings/mod.rs +++ b/crates/ruff_linter/src/settings/mod.rs @@ -465,6 +465,12 @@ impl LinterSettings { self } + #[must_use] + pub fn with_preview_mode(mut self) -> Self { + self.preview = PreviewMode::Enabled; + self + } + /// Resolve the [`TargetVersion`] to use for linting. /// /// This method respects the per-file version overrides in diff --git a/crates/ruff_linter/src/suppression.rs b/crates/ruff_linter/src/suppression.rs index 66ad98d25e..18c6ad50ce 100644 --- a/crates/ruff_linter/src/suppression.rs +++ b/crates/ruff_linter/src/suppression.rs @@ -1,7 +1,10 @@ use compact_str::CompactString; use core::fmt; +use ruff_db::diagnostic::Diagnostic; +use ruff_diagnostics::{Edit, Fix}; use ruff_python_ast::token::{TokenKind, Tokens}; use ruff_python_ast::whitespace::indentation; +use std::cell::Cell; use std::{error::Error, fmt::Formatter}; use thiserror::Error; @@ -9,7 +12,14 @@ use ruff_python_trivia::Cursor; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice}; use smallvec::{SmallVec, smallvec}; -#[allow(unused)] +use crate::Locator; +use crate::checkers::ast::LintContext; +use crate::codes::Rule; +use crate::fix::edits::delete_comment; +use crate::preview::is_range_suppressions_enabled; +use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA, UnusedNOQAKind}; +use crate::settings::LinterSettings; + #[derive(Clone, Debug, Eq, PartialEq)] enum SuppressionAction { Disable, @@ -31,7 +41,6 @@ pub(crate) struct SuppressionComment { reason: TextRange, } -#[allow(unused)] impl SuppressionComment { /// Return the suppressed codes as strings fn codes_as_str<'src>(&self, source: &'src str) -> impl Iterator { @@ -48,7 +57,6 @@ pub(crate) struct PendingSuppressionComment<'a> { comment: SuppressionComment, } -#[allow(unused)] impl PendingSuppressionComment<'_> { /// Whether the comment "matches" another comment, based on indentation and suppressed codes /// Expects a "forward search" for matches, ie, will only match if the current comment is a @@ -64,8 +72,7 @@ impl PendingSuppressionComment<'_> { } } -#[allow(unused)] -#[derive(Clone, Debug)] +#[derive(Debug)] pub(crate) struct Suppression { /// The lint code being suppressed code: CompactString, @@ -75,9 +82,11 @@ pub(crate) struct Suppression { /// Any comments associated with the suppression comments: SmallVec<[SuppressionComment; 2]>, + + /// Whether this suppression actually suppressed a diagnostic + used: Cell, } -#[allow(unused)] #[derive(Copy, Clone, Debug)] pub(crate) enum InvalidSuppressionKind { /// Trailing suppression not supported @@ -98,8 
+107,8 @@ pub(crate) struct InvalidSuppression { } #[allow(unused)] -#[derive(Debug)] -pub(crate) struct Suppressions { +#[derive(Debug, Default)] +pub struct Suppressions { /// Valid suppression ranges with associated comments valid: Vec, @@ -110,11 +119,121 @@ pub(crate) struct Suppressions { errors: Vec, } -#[allow(unused)] impl Suppressions { - pub(crate) fn from_tokens(source: &str, tokens: &Tokens) -> Suppressions { - let builder = SuppressionsBuilder::new(source); - builder.load_from_tokens(tokens) + pub fn from_tokens(settings: &LinterSettings, source: &str, tokens: &Tokens) -> Suppressions { + if is_range_suppressions_enabled(settings) { + let builder = SuppressionsBuilder::new(source); + builder.load_from_tokens(tokens) + } else { + Suppressions::default() + } + } + + pub(crate) fn is_empty(&self) -> bool { + self.valid.is_empty() + } + + /// Check if a diagnostic is suppressed by any known range suppressions + pub(crate) fn check_diagnostic(&self, diagnostic: &Diagnostic) -> bool { + if self.valid.is_empty() { + return false; + } + + let Some(code) = diagnostic.secondary_code() else { + return false; + }; + let Some(span) = diagnostic.primary_span() else { + return false; + }; + let Some(range) = span.range() else { + return false; + }; + + for suppression in &self.valid { + if *code == suppression.code.as_str() && suppression.range.contains_range(range) { + suppression.used.set(true); + return true; + } + } + false + } + + pub(crate) fn check_suppressions(&self, context: &LintContext, locator: &Locator) { + if !context.any_rule_enabled(&[Rule::UnusedNOQA, Rule::InvalidRuleCode]) { + return; + } + + let unused = self + .valid + .iter() + .filter(|suppression| !suppression.used.get()); + + for suppression in unused { + let Ok(rule) = Rule::from_code(&suppression.code) else { + continue; // TODO: invalid code + }; + for comment in &suppression.comments { + let mut range = comment.range; + let edit = if comment.codes.len() == 1 { + delete_comment(comment.range, locator) + } else { + let code_index = comment + .codes + .iter() + .position(|range| locator.slice(range) == suppression.code) + .unwrap(); + range = comment.codes[code_index]; + let code_range = if code_index < (comment.codes.len() - 1) { + TextRange::new( + comment.codes[code_index].start(), + comment.codes[code_index + 1].start(), + ) + } else { + TextRange::new( + comment.codes[code_index - 1].end(), + comment.codes[code_index].end(), + ) + }; + Edit::range_deletion(code_range) + }; + + let codes = if context.is_rule_enabled(rule) { + UnusedCodes { + unmatched: vec![suppression.code.to_string()], + ..Default::default() + } + } else { + UnusedCodes { + disabled: vec![suppression.code.to_string()], + ..Default::default() + } + }; + + let mut diagnostic = context.report_diagnostic( + UnusedNOQA { + codes: Some(codes), + kind: UnusedNOQAKind::Suppression, + }, + range, + ); + diagnostic.set_fix(Fix::safe_edit(edit)); + } + } + + for error in self + .errors + .iter() + .filter(|error| error.kind == ParseErrorKind::MissingCodes) + { + let mut diagnostic = context.report_diagnostic( + UnusedNOQA { + codes: Some(UnusedCodes::default()), + kind: UnusedNOQAKind::Suppression, + }, + error.range, + ); + diagnostic.set_fix(Fix::safe_edit(delete_comment(error.range, locator))); + } } } @@ -240,6 +359,7 @@ impl<'a> SuppressionsBuilder<'a> { code: code.into(), range: combined_range, comments: smallvec![comment.comment.clone(), other.comment.clone()], + used: false.into(), }); } @@ -256,6 +376,7 @@ impl<'a> SuppressionsBuilder<'a> { 
code: code.into(), range: implicit_range, comments: smallvec![comment.comment.clone()], + used: false.into(), }); } self.pending.remove(comment_index); @@ -369,8 +490,10 @@ impl<'src> SuppressionParser<'src> { } else if self.cursor.as_str().starts_with("enable") { self.cursor.skip_bytes("enable".len()); Ok(SuppressionAction::Enable) - } else if self.cursor.as_str().starts_with("noqa") { - // file-level "noqa" variant, ignore for now + } else if self.cursor.as_str().starts_with("noqa") + || self.cursor.as_str().starts_with("isort") + { + // alternate suppression variants, ignore for now self.error(ParseErrorKind::NotASuppression) } else { self.error(ParseErrorKind::UnknownAction) @@ -457,9 +580,12 @@ mod tests { use ruff_text_size::{TextRange, TextSize}; use similar::DiffableStr; - use crate::suppression::{ - InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment, - SuppressionParser, Suppressions, + use crate::{ + settings::LinterSettings, + suppression::{ + InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment, + SuppressionParser, Suppressions, + }, }; #[test] @@ -1376,7 +1502,11 @@ def bar(): /// Parse all suppressions and errors in a module for testing fn debug(source: &'_ str) -> DebugSuppressions<'_> { let parsed = parse(source, ParseOptions::from(Mode::Module)).unwrap(); - let suppressions = Suppressions::from_tokens(source, parsed.tokens()); + let suppressions = Suppressions::from_tokens( + &LinterSettings::default().with_preview_mode(), + source, + parsed.tokens(), + ); DebugSuppressions { source, suppressions, diff --git a/crates/ruff_linter/src/test.rs b/crates/ruff_linter/src/test.rs index 67a6728404..344c921890 100644 --- a/crates/ruff_linter/src/test.rs +++ b/crates/ruff_linter/src/test.rs @@ -32,6 +32,7 @@ use crate::packaging::detect_package_root; use crate::settings::types::UnsafeFixes; use crate::settings::{LinterSettings, flags}; use crate::source_kind::SourceKind; +use crate::suppression::Suppressions; use crate::{Applicability, FixAvailability}; use crate::{Locator, directives}; @@ -234,6 +235,7 @@ pub(crate) fn test_contents<'a>( &locator, &indexer, ); + let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); let messages = check_path( path, path.parent() @@ -249,6 +251,7 @@ pub(crate) fn test_contents<'a>( source_type, &parsed, target_version, + &suppressions, ); let source_has_errors = parsed.has_invalid_syntax(); @@ -299,6 +302,8 @@ pub(crate) fn test_contents<'a>( &indexer, ); + let suppressions = + Suppressions::from_tokens(settings, locator.contents(), parsed.tokens()); let fixed_messages = check_path( path, None, @@ -312,6 +317,7 @@ pub(crate) fn test_contents<'a>( source_type, &parsed, target_version, + &suppressions, ); if parsed.has_invalid_syntax() && !source_has_errors { diff --git a/crates/ruff_python_ast/src/helpers.rs b/crates/ruff_python_ast/src/helpers.rs index 4879e04780..9680d03ec3 100644 --- a/crates/ruff_python_ast/src/helpers.rs +++ b/crates/ruff_python_ast/src/helpers.rs @@ -3,13 +3,14 @@ use std::path::Path; use rustc_hash::FxHashMap; -use ruff_python_trivia::{CommentRanges, SimpleTokenKind, SimpleTokenizer, indentation_at_offset}; +use ruff_python_trivia::{SimpleTokenKind, SimpleTokenizer, indentation_at_offset}; use ruff_source_file::LineRanges; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; use crate::name::{Name, QualifiedName, QualifiedNameBuilder}; -use crate::parenthesize::parenthesized_range; use 
crate::statement_visitor::StatementVisitor; +use crate::token::Tokens; +use crate::token::parenthesized_range; use crate::visitor::Visitor; use crate::{ self as ast, Arguments, AtomicNodeIndex, CmpOp, DictItem, ExceptHandler, Expr, ExprNoneLiteral, @@ -1474,7 +1475,7 @@ pub fn generate_comparison( ops: &[CmpOp], comparators: &[Expr], parent: AnyNodeRef, - comment_ranges: &CommentRanges, + tokens: &Tokens, source: &str, ) -> String { let start = left.start(); @@ -1483,8 +1484,7 @@ pub fn generate_comparison( // Add the left side of the comparison. contents.push_str( - &source[parenthesized_range(left.into(), parent, comment_ranges, source) - .unwrap_or(left.range())], + &source[parenthesized_range(left.into(), parent, tokens).unwrap_or(left.range())], ); for (op, comparator) in ops.iter().zip(comparators) { @@ -1504,7 +1504,7 @@ pub fn generate_comparison( // Add the right side of the comparison. contents.push_str( - &source[parenthesized_range(comparator.into(), parent, comment_ranges, source) + &source[parenthesized_range(comparator.into(), parent, tokens) .unwrap_or(comparator.range())], ); } diff --git a/crates/ruff_python_ast/src/token/tokens.rs b/crates/ruff_python_ast/src/token/tokens.rs index edc7e27463..7ca096d9b0 100644 --- a/crates/ruff_python_ast/src/token/tokens.rs +++ b/crates/ruff_python_ast/src/token/tokens.rs @@ -154,9 +154,7 @@ impl Tokens { // the tokens which is valid as well. assert!( offset >= last.end(), - "Offset {:?} is inside a token range {:?}", - offset, - last.range() + "Offset {offset:?} is inside token `{last:?}`", ); } before @@ -181,9 +179,7 @@ impl Tokens { // the tokens which is valid as well. assert!( offset <= first.start(), - "Offset {:?} is inside a token range {:?}", - offset, - first.range() + "Offset {offset:?} is inside token `{first:?}`", ); } @@ -391,7 +387,7 @@ mod tests { } #[test] - #[should_panic(expected = "Offset 5 is inside a token range 4..7")] + #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")] fn tokens_after_offset_inside_token() { let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); tokens.after(TextSize::new(5)); @@ -453,7 +449,7 @@ mod tests { } #[test] - #[should_panic(expected = "Offset 5 is inside a token range 4..7")] + #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")] fn tokens_before_offset_inside_token() { let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); tokens.before(TextSize::new(5)); @@ -505,14 +501,14 @@ mod tests { } #[test] - #[should_panic(expected = "Offset 5 is inside a token range 4..7")] + #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")] fn tokens_in_range_start_offset_inside_token() { let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); tokens.in_range(TextRange::new(5.into(), 10.into())); } #[test] - #[should_panic(expected = "Offset 6 is inside a token range 4..7")] + #[should_panic(expected = "Offset 6 is inside token `Name 4..7`")] fn tokens_in_range_end_offset_inside_token() { let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); tokens.in_range(TextRange::new(0.into(), 6.into())); diff --git a/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py b/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py index 4a7090ff13..1b1c1ee3c2 100644 --- a/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py +++ b/crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py @@ -228,3 +228,46 @@ def a(): g = 10 ) +( + lambda + * # comment 2 + x: + x +) + +( + 
lambda # comment 1 + * # comment 2 + x: + x +) + +( + lambda # comment 1 + y, + * # comment 2 + x: + x +) + +( + lambda + # comment + *x, + **y: x +) + +( + lambda + * # comment 2 + x, + **y: + x +) + +( + lambda + ** # comment 1 + x: + x +) diff --git a/crates/ruff_python_formatter/src/comments/debug.rs b/crates/ruff_python_formatter/src/comments/debug.rs index 3e4ae7c6bd..0adbba24f3 100644 --- a/crates/ruff_python_formatter/src/comments/debug.rs +++ b/crates/ruff_python_formatter/src/comments/debug.rs @@ -36,7 +36,7 @@ impl Debug for DebugComment<'_> { } } -/// Pretty-printed debug representation of [`Comments`]. +/// Pretty-printed debug representation of [`Comments`](super::Comments). pub(crate) struct DebugComments<'a> { comments: &'a CommentsMap<'a>, source_code: SourceCode<'a>, diff --git a/crates/ruff_python_formatter/src/comments/map.rs b/crates/ruff_python_formatter/src/comments/map.rs index 3f6d621a59..8d25f4f8a7 100644 --- a/crates/ruff_python_formatter/src/comments/map.rs +++ b/crates/ruff_python_formatter/src/comments/map.rs @@ -504,7 +504,7 @@ impl InOrderEntry { #[derive(Clone, Debug)] struct OutOfOrderEntry { - /// Index into the [`MultiMap::out_of_order`] vector at which offset the leading vec is stored. + /// Index into the [`MultiMap::out_of_order_parts`] vector at which offset the leading vec is stored. leading_index: usize, _count: Count, } diff --git a/crates/ruff_python_formatter/src/comments/node_key.rs b/crates/ruff_python_formatter/src/comments/node_key.rs index ec15ced488..115a751150 100644 --- a/crates/ruff_python_formatter/src/comments/node_key.rs +++ b/crates/ruff_python_formatter/src/comments/node_key.rs @@ -2,7 +2,8 @@ use ruff_python_ast::AnyNodeRef; use std::fmt::{Debug, Formatter}; use std::hash::{Hash, Hasher}; -/// Used as key into the [`MultiMap`] storing the comments per node by [`Comments`]. +/// Used as key into the [`MultiMap`](super::MultiMap) storing the comments per node by +/// [`Comments`](super::Comments). /// /// Implements equality and hashing based on the address of the [`AnyNodeRef`] to get fast and cheap /// hashing/equality comparison. diff --git a/crates/ruff_python_formatter/src/comments/placement.rs b/crates/ruff_python_formatter/src/comments/placement.rs index f28f9b18a8..ff38d7a100 100644 --- a/crates/ruff_python_formatter/src/comments/placement.rs +++ b/crates/ruff_python_formatter/src/comments/placement.rs @@ -871,7 +871,20 @@ fn handle_parameter_comment<'a>( CommentPlacement::Default(comment) } } else if comment.start() < parameter.name.start() { - CommentPlacement::leading(parameter, comment) + // For lambdas, where the parameters cannot be parenthesized and the first parameter thus + // starts at the same position as the parent parameters, mark a comment before the first + // parameter as leading on the parameters rather than the individual parameter to prevent + // the whole parameter list from breaking. + // + // Note that this check is not needed above because lambda parameters cannot have + // annotations. 
+ if let Some(AnyNodeRef::Parameters(parameters)) = comment.enclosing_parent() + && parameters.start() == parameter.start() + { + CommentPlacement::leading(parameters, comment) + } else { + CommentPlacement::leading(parameter, comment) + } } else { CommentPlacement::Default(comment) } @@ -1816,7 +1829,7 @@ fn handle_lambda_comment<'a>( source: &str, ) -> CommentPlacement<'a> { if let Some(parameters) = lambda.parameters.as_deref() { - // Comments between the `lambda` and the parameters are dangling on the lambda: + // End-of-line comments between the `lambda` and the parameters are dangling on the lambda: // ```python // ( // lambda # comment @@ -1824,8 +1837,22 @@ fn handle_lambda_comment<'a>( // y // ) // ``` + // + // But own-line comments are leading on the first parameter, if it exists: + // ```python + // ( + // lambda + // # comment + // x: + // y + // ) + // ``` if comment.start() < parameters.start() { - return CommentPlacement::dangling(comment.enclosing_node(), comment); + return if comment.line_position().is_own_line() { + CommentPlacement::leading(parameters, comment) + } else { + CommentPlacement::dangling(comment.enclosing_node(), comment) + }; } // Comments between the parameters and the body are dangling on the lambda: @@ -1947,8 +1974,8 @@ fn handle_unary_op_comment<'a>( /// ) /// ``` /// -/// The comment will be attached to the [`Arguments`] node as a dangling comment, to ensure -/// that it remains on the same line as open parenthesis. +/// The comment will be attached to the [`Arguments`](ast::Arguments) node as a dangling comment, to +/// ensure that it remains on the same line as open parenthesis. /// /// Similarly, given: /// ```python @@ -1957,8 +1984,8 @@ fn handle_unary_op_comment<'a>( /// ] = ... /// ``` /// -/// The comment will be attached to the [`TypeParams`] node as a dangling comment, to ensure -/// that it remains on the same line as open bracket. +/// The comment will be attached to the [`TypeParams`](ast::TypeParams) node as a dangling comment, +/// to ensure that it remains on the same line as open bracket. fn handle_bracketed_end_of_line_comment<'a>( comment: DecoratedComment<'a>, source: &str, diff --git a/crates/ruff_python_formatter/src/comments/visitor.rs b/crates/ruff_python_formatter/src/comments/visitor.rs index b9670a3ee8..1aad7e273d 100644 --- a/crates/ruff_python_formatter/src/comments/visitor.rs +++ b/crates/ruff_python_formatter/src/comments/visitor.rs @@ -174,7 +174,8 @@ impl<'ast> SourceOrderVisitor<'ast> for CommentsVisitor<'ast, '_> { /// A comment decorated with additional information about its surrounding context in the source document. /// -/// Used by [`CommentStyle::place_comment`] to determine if this should become a [leading](self#leading-comments), [dangling](self#dangling-comments), or [trailing](self#trailing-comments) comment. +/// Used by [`place_comment`] to determine if this should become a [leading](self#leading-comments), +/// [dangling](self#dangling-comments), or [trailing](self#trailing-comments) comment. 
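+///
+/// As a rough illustration (an editorial example, not taken from this diff), in
+///
+/// ```python
+/// x = [  # dangling comment on the list
+///     # leading comment on `1`
+///     1,  # trailing comment on `1`
+/// ]
+/// ```
+///
+/// each comment is decorated with its enclosing node (the list) plus the preceding and
+/// following nodes before `place_comment` picks one of the three placements.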
#[derive(Debug, Clone)] pub(crate) struct DecoratedComment<'a> { enclosing: AnyNodeRef<'a>, @@ -465,7 +466,7 @@ pub(super) enum CommentPlacement<'a> { /// /// [`preceding_node`]: DecoratedComment::preceding_node /// [`following_node`]: DecoratedComment::following_node - /// [`enclosing_node`]: DecoratedComment::enclosing_node_id + /// [`enclosing_node`]: DecoratedComment::enclosing_node /// [trailing comment]: self#trailing-comments /// [leading comment]: self#leading-comments /// [dangling comment]: self#dangling-comments diff --git a/crates/ruff_python_formatter/src/context.rs b/crates/ruff_python_formatter/src/context.rs index 239edc8d5b..8eaf52ee35 100644 --- a/crates/ruff_python_formatter/src/context.rs +++ b/crates/ruff_python_formatter/src/context.rs @@ -166,7 +166,7 @@ impl InterpolatedStringState { } } - /// Returns `true` if the interpolated string state is [`NestedInterpolatedElement`]. + /// Returns `true` if the interpolated string state is [`Self::NestedInterpolatedElement`]. pub(crate) fn is_nested(self) -> bool { matches!(self, Self::NestedInterpolatedElement(..)) } diff --git a/crates/ruff_python_formatter/src/expression/binary_like.rs b/crates/ruff_python_formatter/src/expression/binary_like.rs index 5d86608452..f1da88299d 100644 --- a/crates/ruff_python_formatter/src/expression/binary_like.rs +++ b/crates/ruff_python_formatter/src/expression/binary_like.rs @@ -1095,9 +1095,9 @@ impl OperandIndex { } } - /// Returns the index of the operand's right operator. The method always returns an index - /// even if the operand has no right operator. Use [`BinaryCallChain::get_operator`] to test if - /// the operand has a right operator. + /// Returns the index of the operand's right operator. The method always returns an index even + /// if the operand has no right operator. Use [`FlatBinaryExpressionSlice::get_operator`] to + /// test if the operand has a right operator. fn right_operator(self) -> OperatorIndex { OperatorIndex::new(self.0 + 1) } diff --git a/crates/ruff_python_formatter/src/expression/expr_lambda.rs b/crates/ruff_python_formatter/src/expression/expr_lambda.rs index c5890fba24..f91666ecf7 100644 --- a/crates/ruff_python_formatter/src/expression/expr_lambda.rs +++ b/crates/ruff_python_formatter/src/expression/expr_lambda.rs @@ -32,7 +32,65 @@ impl FormatNodeRule for FormatExprLambda { .split_at(dangling.partition_point(|comment| comment.end() < parameters.start())); if dangling_before_parameters.is_empty() { - write!(f, [space()])?; + // If the parameters have a leading comment, insert a hard line break. This + // comment is associated as a leading comment on the parameters: + // + // ```py + // ( + // lambda + // * # comment + // x: + // x + // ) + // ``` + // + // so a hard line break is needed to avoid formatting it like: + // + // ```py + // ( + // lambda # comment + // *x: x + // ) + // ``` + // + // which is unstable because it's missing the second space before the comment. 
+ // + // Inserting the line break causes it to format like: + // + // ```py + // ( + // lambda + // # comment + // *x :x + // ) + // ``` + // + // which is also consistent with the formatting in the presence of an actual + // dangling comment on the lambda: + // + // ```py + // ( + // lambda # comment 1 + // * # comment 2 + // x: + // x + // ) + // ``` + // + // formats to: + // + // ```py + // ( + // lambda # comment 1 + // # comment 2 + // *x: x + // ) + // ``` + if comments.has_leading(&**parameters) { + hard_line_break().fmt(f)?; + } else { + write!(f, [space()])?; + } } else { write!(f, [dangling_comments(dangling_before_parameters)])?; } diff --git a/crates/ruff_python_formatter/src/expression/parentheses.rs b/crates/ruff_python_formatter/src/expression/parentheses.rs index a76f8a0aec..18b9b274c1 100644 --- a/crates/ruff_python_formatter/src/expression/parentheses.rs +++ b/crates/ruff_python_formatter/src/expression/parentheses.rs @@ -56,18 +56,20 @@ pub(crate) enum Parenthesize { /// Adding parentheses is desired to prevent the comments from wandering. IfRequired, - /// Same as [`Self::IfBreaks`] except that it uses [`parenthesize_if_expands`] for expressions - /// with the layout [`NeedsParentheses::BestFit`] which is used by non-splittable - /// expressions like literals, name, and strings. + /// Same as [`Self::IfBreaks`] except that it uses + /// [`parenthesize_if_expands`](crate::builders::parenthesize_if_expands) for expressions with + /// the layout [`OptionalParentheses::BestFit`] which is used by non-splittable expressions like + /// literals, name, and strings. /// /// Use this layout over `IfBreaks` when there's a sequence of `maybe_parenthesize_expression` /// in a single logical-line and you want to break from right-to-left. Use `IfBreaks` for the /// first expression and `IfBreaksParenthesized` for the rest. IfBreaksParenthesized, - /// Same as [`Self::IfBreaksParenthesized`] but uses [`parenthesize_if_expands`] for nested - /// [`maybe_parenthesized_expression`] calls unlike other layouts that always omit parentheses - /// when outer parentheses are present. + /// Same as [`Self::IfBreaksParenthesized`] but uses + /// [`parenthesize_if_expands`](crate::builders::parenthesize_if_expands) for nested + /// [`maybe_parenthesized_expression`](crate::expression::maybe_parenthesize_expression) calls + /// unlike other layouts that always omit parentheses when outer parentheses are present. IfBreaksParenthesizedNested, } diff --git a/crates/ruff_python_formatter/src/pattern/mod.rs b/crates/ruff_python_formatter/src/pattern/mod.rs index a379aeb849..ac763a41ff 100644 --- a/crates/ruff_python_formatter/src/pattern/mod.rs +++ b/crates/ruff_python_formatter/src/pattern/mod.rs @@ -214,8 +214,9 @@ impl Format> for MaybeParenthesizePattern<'_> { } } -/// This function is very similar to [`can_omit_optional_parentheses`] with the only difference that it is for patterns -/// and not expressions. +/// This function is very similar to +/// [`can_omit_optional_parentheses`](crate::expression::can_omit_optional_parentheses) +/// with the only difference that it is for patterns and not expressions. /// /// The base idea of the omit optional parentheses layout is to prefer using parentheses of sub-patterns /// when splitting the pattern over introducing new patterns. 
For example, prefer splitting the sequence pattern in diff --git a/crates/ruff_python_formatter/src/pattern/pattern_arguments.rs b/crates/ruff_python_formatter/src/pattern/pattern_arguments.rs index 94d7448226..d82a9756e0 100644 --- a/crates/ruff_python_formatter/src/pattern/pattern_arguments.rs +++ b/crates/ruff_python_formatter/src/pattern/pattern_arguments.rs @@ -72,8 +72,9 @@ impl FormatNodeRule for FormatPatternArguments { } } -/// Returns `true` if the pattern (which is the only argument to a [`PatternMatchClass`]) is -/// parenthesized. Used to avoid falsely assuming that `x` is parenthesized in cases like: +/// Returns `true` if the pattern (which is the only argument to a +/// [`PatternMatchClass`](ruff_python_ast::PatternMatchClass)) is parenthesized. +/// Used to avoid falsely assuming that `x` is parenthesized in cases like: /// ```python /// case Point2D(x): ... /// ``` diff --git a/crates/ruff_python_formatter/src/range.rs b/crates/ruff_python_formatter/src/range.rs index 436c1a12c2..ccc7a51aba 100644 --- a/crates/ruff_python_formatter/src/range.rs +++ b/crates/ruff_python_formatter/src/range.rs @@ -23,7 +23,8 @@ use crate::{FormatModuleError, PyFormatOptions, format_module_source}; /// /// The returned formatted range guarantees to cover at least `range` (excluding whitespace), but the range might be larger. /// Some cases in which the returned range is larger than `range` are: -/// * The logical lines in `range` use a indentation different from the configured [`IndentStyle`] and [`IndentWidth`]. +/// * The logical lines in `range` use a indentation different from the configured [`IndentStyle`] +/// and [`IndentWidth`](ruff_formatter::IndentWidth). /// * `range` is smaller than a logical lines and the formatter needs to format the entire logical line. /// * `range` falls on a single line body. /// @@ -129,16 +130,19 @@ pub fn format_range( /// b) formatting a sub-expression has fewer split points than formatting the entire expressions. /// /// ### Possible docstrings -/// Strings that are suspected to be docstrings are excluded from the search to format the enclosing suite instead -/// so that the formatter's docstring detection in [`FormatSuite`] correctly detects and formats the docstrings. +/// Strings that are suspected to be docstrings are excluded from the search to format the enclosing +/// suite instead so that the formatter's docstring detection in +/// [`FormatSuite`](crate::statement::suite::FormatSuite) correctly detects and formats the +/// docstrings. /// /// ### Compound statements with a simple statement body /// Don't include simple-statement bodies of compound statements `if True: pass` because the formatter -/// must run [`FormatClauseBody`] to determine if the body should be collapsed or not. +/// must run `FormatClauseBody` to determine if the body should be collapsed or not. /// /// ### Incorrectly indented code -/// Code that uses indentations that don't match the configured [`IndentStyle`] and [`IndentWidth`] are excluded from the search, -/// because formatting such nodes on their own can lead to indentation mismatch with its sibling nodes. +/// Code that uses indentations that don't match the configured [`IndentStyle`] and +/// [`IndentWidth`](ruff_formatter::IndentWidth) are excluded from the search, because formatting +/// such nodes on their own can lead to indentation mismatch with its sibling nodes. /// /// ## Suppression comments /// The search ends when `range` falls into a suppressed range because there's nothing to format. 
It also avoids that the @@ -279,13 +283,15 @@ enum EnclosingNode<'a> { /// /// ## Compound statements with simple statement bodies /// Similar to [`find_enclosing_node`], exclude the compound statement's body if it is a simple statement (not a suite) from the search to format the entire clause header -/// with the body. This ensures that the formatter runs [`FormatClauseBody`] that determines if the body should be indented.s +/// with the body. This ensures that the formatter runs `FormatClauseBody` that determines if the body should be indented. /// /// ## Non-standard indentation -/// Node's that use an indentation that doesn't match the configured [`IndentStyle`] and [`IndentWidth`] are excluded from the search. -/// This is because the formatter always uses the configured [`IndentStyle`] and [`IndentWidth`], resulting in the -/// formatted nodes using a different indentation than the unformatted sibling nodes. This would be tolerable -/// in non whitespace sensitive languages like JavaScript but results in lexical errors in Python. +/// Nodes that use an indentation that doesn't match the configured [`IndentStyle`] and +/// [`IndentWidth`](ruff_formatter::IndentWidth) are excluded from the search. This is because the +/// formatter always uses the configured [`IndentStyle`] and +/// [`IndentWidth`](ruff_formatter::IndentWidth), resulting in the formatted nodes using a different +/// indentation than the unformatted sibling nodes. This would be tolerable in non whitespace +/// sensitive languages like JavaScript but results in lexical errors in Python. /// /// ## Implementation /// It would probably be possible to merge this visitor with [`FindEnclosingNode`] but they are separate because @@ -713,9 +719,11 @@ impl Format> for FormatEnclosingNode<'_> { } } -/// Computes the level of indentation for `indentation` when using the configured [`IndentStyle`] and [`IndentWidth`]. +/// Computes the level of indentation for `indentation` when using the configured [`IndentStyle`] +/// and [`IndentWidth`](ruff_formatter::IndentWidth). /// -/// Returns `None` if the indentation doesn't conform to the configured [`IndentStyle`] and [`IndentWidth`]. +/// Returns `None` if the indentation doesn't conform to the configured [`IndentStyle`] and +/// [`IndentWidth`](ruff_formatter::IndentWidth). /// /// # Panics /// If `offset` is outside of `source`. diff --git a/crates/ruff_python_formatter/src/statement/stmt_assign.rs b/crates/ruff_python_formatter/src/statement/stmt_assign.rs index b9fbe6b7a3..5a16e5e8bf 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_assign.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_assign.rs @@ -184,7 +184,7 @@ impl Format> for FormatTargetWithEqualOperator<'_> { /// No parentheses are added for `short` because it fits into the configured line length, regardless of whether /// the comment exceeds the line width or not. /// -/// This logic isn't implemented in [`place_comment`] by associating trailing statement comments to the expression because +/// This logic isn't implemented in `place_comment` by associating trailing statement comments to the expression because /// doing so breaks the suite empty lines formatting that relies on trailing comments to be stored on the statement. 
#[derive(Debug)] pub(super) enum FormatStatementsLastExpression<'a> { @@ -202,8 +202,8 @@ pub(super) enum FormatStatementsLastExpression<'a> { /// ] = some_long_value /// ``` /// - /// This layout is preferred over [`RightToLeft`] if the left is unsplittable (single keyword like `return` or a Name) - /// because it has better performance characteristics. + /// This layout is preferred over [`Self::RightToLeft`] if the left is unsplittable (single + /// keyword like `return` or a Name) because it has better performance characteristics. LeftToRight { /// The right side of an assignment or the value returned in a return statement. value: &'a Expr, @@ -1083,11 +1083,10 @@ impl Format> for InterpolatedString<'_> { /// For legibility, we discuss only the case of f-strings below, but the /// same comments apply to t-strings. /// -/// This is just a wrapper around [`FormatFString`] while considering a special -/// case when the f-string is at an assignment statement's value position. -/// This is necessary to prevent an instability where an f-string contains a -/// multiline expression and the f-string fits on the line, but only when it's -/// surrounded by parentheses. +/// This is just a wrapper around [`FormatFString`](crate::other::f_string::FormatFString) while +/// considering a special case when the f-string is at an assignment statement's value position. +/// This is necessary to prevent an instability where an f-string contains a multiline expression +/// and the f-string fits on the line, but only when it's surrounded by parentheses. /// /// ```python /// aaaaaaaaaaaaaaaaaa = f"testeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee{ diff --git a/crates/ruff_python_formatter/src/statement/stmt_with.rs b/crates/ruff_python_formatter/src/statement/stmt_with.rs index 3ef8e52a23..4e2c6bc876 100644 --- a/crates/ruff_python_formatter/src/statement/stmt_with.rs +++ b/crates/ruff_python_formatter/src/statement/stmt_with.rs @@ -177,8 +177,10 @@ enum WithItemsLayout<'a> { /// ... /// ``` /// - /// In this case, use [`maybe_parenthesize_expression`] to format the context expression - /// to get the exact same formatting as when formatting an expression in any other clause header. + /// In this case, use + /// [`maybe_parenthesize_expression`](crate::expression::maybe_parenthesize_expression) to + /// format the context expression to get the exact same formatting as when formatting an + /// expression in any other clause header. /// /// Only used for Python 3.9+ /// diff --git a/crates/ruff_python_formatter/src/string/docstring.rs b/crates/ruff_python_formatter/src/string/docstring.rs index ad357fc65e..c4554175d5 100644 --- a/crates/ruff_python_formatter/src/string/docstring.rs +++ b/crates/ruff_python_formatter/src/string/docstring.rs @@ -783,7 +783,7 @@ enum CodeExampleKind<'src> { /// /// Documentation describing doctests and how they're recognized can be /// found as part of the Python standard library: - /// https://docs.python.org/3/library/doctest.html. + /// . /// /// (You'll likely need to read the [regex matching] used internally by the /// doctest module to determine more precisely how it works.) 
diff --git a/crates/ruff_python_formatter/src/string/normalize.rs b/crates/ruff_python_formatter/src/string/normalize.rs index fabd10a029..1500737950 100644 --- a/crates/ruff_python_formatter/src/string/normalize.rs +++ b/crates/ruff_python_formatter/src/string/normalize.rs @@ -38,8 +38,9 @@ impl<'a, 'src> StringNormalizer<'a, 'src> { /// it can't because the string contains the preferred quotes OR /// it leads to more escaping. /// - /// Note: If you add more cases here where we return `QuoteStyle::Preserve`, - /// make sure to also add them to [`FormatImplicitConcatenatedStringFlat::new`]. + /// Note: If you add more cases here where we return `QuoteStyle::Preserve`, make sure to also + /// add them to + /// [`FormatImplicitConcatenatedStringFlat::new`](crate::string::implicit::FormatImplicitConcatenatedStringFlat::new). pub(super) fn preferred_quote_style(&self, string: StringLikePart) -> QuoteStyle { let preferred_quote_style = self .preferred_quote_style diff --git a/crates/ruff_python_formatter/src/type_param/type_params.rs b/crates/ruff_python_formatter/src/type_param/type_params.rs index e243dc6006..50c2cc5770 100644 --- a/crates/ruff_python_formatter/src/type_param/type_params.rs +++ b/crates/ruff_python_formatter/src/type_param/type_params.rs @@ -9,7 +9,7 @@ use crate::prelude::*; #[derive(Default)] pub struct FormatTypeParams; -/// Formats a sequence of [`TypeParam`] nodes. +/// Formats a sequence of [`TypeParam`](ruff_python_ast::TypeParam) nodes. impl FormatNodeRule for FormatTypeParams { fn fmt_fields(&self, item: &TypeParams, f: &mut PyFormatter) -> FormatResult<()> { // A dangling comment indicates a comment on the same line as the opening bracket, e.g.: diff --git a/crates/ruff_python_formatter/src/verbatim.rs b/crates/ruff_python_formatter/src/verbatim.rs index e0bbf00ad6..3dc78a92b7 100644 --- a/crates/ruff_python_formatter/src/verbatim.rs +++ b/crates/ruff_python_formatter/src/verbatim.rs @@ -679,8 +679,9 @@ impl Indentation { /// Returns `true` for a space or tab character. /// -/// This is different than [`is_python_whitespace`] in that it returns `false` for a form feed character. -/// Form feed characters are excluded because they should be preserved in the suppressed output. +/// This is different than [`is_python_whitespace`](ruff_python_trivia::is_python_whitespace) in +/// that it returns `false` for a form feed character. Form feed characters are excluded because +/// they should be preserved in the suppressed output. 
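+///
+/// For example, given the body below, `is_indent_whitespace(' ')` and `is_indent_whitespace('\t')`
+/// return `true`, while `is_indent_whitespace('\x0C')` (form feed) and `is_indent_whitespace('\n')`
+/// return `false`.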
const fn is_indent_whitespace(c: char) -> bool { matches!(c, ' ' | '\t') } diff --git a/crates/ruff_python_formatter/tests/snapshots/format@expression__lambda.py.snap b/crates/ruff_python_formatter/tests/snapshots/format@expression__lambda.py.snap index 03332c6f92..5997ff539a 100644 --- a/crates/ruff_python_formatter/tests/snapshots/format@expression__lambda.py.snap +++ b/crates/ruff_python_formatter/tests/snapshots/format@expression__lambda.py.snap @@ -1,7 +1,6 @@ --- source: crates/ruff_python_formatter/tests/fixtures.rs input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/lambda.py -snapshot_kind: text --- ## Input ```python @@ -235,6 +234,49 @@ def a(): g = 10 ) +( + lambda + * # comment 2 + x: + x +) + +( + lambda # comment 1 + * # comment 2 + x: + x +) + +( + lambda # comment 1 + y, + * # comment 2 + x: + x +) + +( + lambda + # comment + *x, + **y: x +) + +( + lambda + * # comment 2 + x, + **y: + x +) + +( + lambda + ** # comment 1 + x: + x +) ``` ## Output @@ -473,4 +515,42 @@ def a(): g=2: d, g=10, ) + + +( + lambda + # comment 2 + *x: x +) + +( + lambda # comment 1 + # comment 2 + *x: x +) + +( + lambda # comment 1 + y, + # comment 2 + *x: x +) + +( + lambda + # comment + *x, **y: x +) + +( + lambda + # comment 2 + *x, **y: x +) + +( + lambda + # comment 1 + **x: x +) ``` diff --git a/crates/ruff_python_parser/Cargo.toml b/crates/ruff_python_parser/Cargo.toml index ae45871866..c527f96e11 100644 --- a/crates/ruff_python_parser/Cargo.toml +++ b/crates/ruff_python_parser/Cargo.toml @@ -35,6 +35,7 @@ ruff_source_file = { workspace = true } anyhow = { workspace = true } insta = { workspace = true, features = ["glob"] } +itertools = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } walkdir = { workspace = true } diff --git a/crates/ruff_python_parser/resources/invalid/re_lexing/ty_1828.py b/crates/ruff_python_parser/resources/invalid/re_lexing/ty_1828.py new file mode 100644 index 0000000000..3e953b541f --- /dev/null +++ b/crates/ruff_python_parser/resources/invalid/re_lexing/ty_1828.py @@ -0,0 +1,5 @@ +# Regression test for https://github.com/astral-sh/ty/issues/1828 +(c: int = 1,f"""{d=[ +def a( +class A: + pass diff --git a/crates/ruff_python_parser/src/token_source.rs b/crates/ruff_python_parser/src/token_source.rs index e5755806e3..e22196e00e 100644 --- a/crates/ruff_python_parser/src/token_source.rs +++ b/crates/ruff_python_parser/src/token_source.rs @@ -67,26 +67,59 @@ impl<'src> TokenSource<'src> { /// /// [`re_lex_logical_token`]: Lexer::re_lex_logical_token pub(crate) fn re_lex_logical_token(&mut self) { - let mut non_logical_newline_start = None; - for token in self.tokens.iter().rev() { + let mut non_logical_newline = None; + + #[cfg(debug_assertions)] + let last_non_trivia_end_before = { + self.tokens + .iter() + .rev() + .find(|tok| !tok.kind().is_trivia()) + .map(ruff_text_size::Ranged::end) + }; + + for (index, token) in self.tokens.iter().enumerate().rev() { match token.kind() { TokenKind::NonLogicalNewline => { - non_logical_newline_start = Some(token.start()); + non_logical_newline = Some((index, token.start())); } TokenKind::Comment => continue, _ => break, } } - if self.lexer.re_lex_logical_token(non_logical_newline_start) { - let current_start = self.current_range().start(); - while self - .tokens - .last() - .is_some_and(|last| last.start() >= current_start) - { - self.tokens.pop(); - } + if !self + .lexer + .re_lex_logical_token(non_logical_newline.map(|(_, start)| start)) + { + return; + } + + let 
non_logical_line_index = non_logical_newline + .expect( + "`re_lex_logical_token` should only return `true` if `non_logical_line` is `Some`", + ) + .0; + + // Trim the already bumped logical line token (and comments coming after it) as it might now have become a logical line token + self.tokens.truncate(non_logical_line_index); + + #[cfg(debug_assertions)] + { + let last_non_trivia_end_now = { + self.tokens + .iter() + .rev() + .find(|tok| !tok.kind().is_trivia()) + .map(ruff_text_size::Ranged::end) + }; + + assert_eq!(last_non_trivia_end_before, last_non_trivia_end_now); + } + + // Ensure `current` is positioned at a non-trivia token. + if self.current_kind().is_trivia() { + self.bump(self.current_kind()); } } diff --git a/crates/ruff_python_parser/tests/fixtures.rs b/crates/ruff_python_parser/tests/fixtures.rs index 2c89ba7aad..a9378eddfe 100644 --- a/crates/ruff_python_parser/tests/fixtures.rs +++ b/crates/ruff_python_parser/tests/fixtures.rs @@ -4,15 +4,16 @@ use std::fmt::{Formatter, Write}; use std::fs; use std::path::Path; +use itertools::Itertools; use ruff_annotate_snippets::{Level, Renderer, Snippet}; -use ruff_python_ast::token::Token; +use ruff_python_ast::token::{Token, Tokens}; use ruff_python_ast::visitor::Visitor; use ruff_python_ast::visitor::source_order::{SourceOrderVisitor, TraversalSignal, walk_module}; use ruff_python_ast::{self as ast, AnyNodeRef, Mod, PythonVersion}; use ruff_python_parser::semantic_errors::{ SemanticSyntaxChecker, SemanticSyntaxContext, SemanticSyntaxError, }; -use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, parse_unchecked}; +use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, Parsed, parse_unchecked}; use ruff_source_file::{LineIndex, OneIndexed, SourceCode}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; @@ -81,7 +82,7 @@ fn test_valid_syntax(input_path: &Path) { } validate_tokens(parsed.tokens(), source.text_len(), input_path); - validate_ast(parsed.syntax(), source.text_len(), input_path); + validate_ast(&parsed, source.text_len(), input_path); let mut output = String::new(); writeln!(&mut output, "## AST").unwrap(); @@ -139,7 +140,7 @@ fn test_invalid_syntax(input_path: &Path) { let parsed = parse_unchecked(&source, options.clone()); validate_tokens(parsed.tokens(), source.text_len(), input_path); - validate_ast(parsed.syntax(), source.text_len(), input_path); + validate_ast(&parsed, source.text_len(), input_path); let mut output = String::new(); writeln!(&mut output, "## AST").unwrap(); @@ -402,12 +403,16 @@ Tokens: {tokens:#?} /// * the range of the parent node fully encloses all its child nodes /// * the ranges are strictly increasing when traversing the nodes in pre-order. /// * all ranges are within the length of the source code. 
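+/// * a node's start and end offsets do not fall inside a token (checked by the
+///   `assert_start_boundary` and `assert_end_boundary` helpers added in this diff).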
-fn validate_ast(root: &Mod, source_len: TextSize, test_path: &Path) { - walk_module(&mut ValidateAstVisitor::new(source_len, test_path), root); +fn validate_ast(parsed: &Parsed, source_len: TextSize, test_path: &Path) { + walk_module( + &mut ValidateAstVisitor::new(parsed.tokens(), source_len, test_path), + parsed.syntax(), + ); } #[derive(Debug)] struct ValidateAstVisitor<'a> { + tokens: std::iter::Peekable>, parents: Vec>, previous: Option>, source_length: TextSize, @@ -415,8 +420,9 @@ struct ValidateAstVisitor<'a> { } impl<'a> ValidateAstVisitor<'a> { - fn new(source_length: TextSize, test_path: &'a Path) -> Self { + fn new(tokens: &'a Tokens, source_length: TextSize, test_path: &'a Path) -> Self { Self { + tokens: tokens.iter().peekable(), parents: Vec::new(), previous: None, source_length, @@ -425,6 +431,47 @@ impl<'a> ValidateAstVisitor<'a> { } } +impl ValidateAstVisitor<'_> { + /// Check that the node's start doesn't fall within a token. + /// Called in `enter_node` before visiting children. + fn assert_start_boundary(&mut self, node: AnyNodeRef<'_>) { + // Skip tokens that end at or before the node starts. + self.tokens + .peeking_take_while(|t| t.end() <= node.start()) + .last(); + + if let Some(next) = self.tokens.peek() { + // At this point, next_token.end() > node.start() + assert!( + next.start() >= node.start(), + "{path}: The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}", + path = self.test_path.display(), + root = self.parents.first() + ); + } + } + + /// Check that the node's end doesn't fall within a token. + /// Called in `leave_node` after visiting children, so all tokens + /// within the node have been consumed. + fn assert_end_boundary(&mut self, node: AnyNodeRef<'_>) { + // Skip tokens that end at or before the node ends. 
+ self.tokens + .peeking_take_while(|t| t.end() <= node.end()) + .last(); + + if let Some(next) = self.tokens.peek() { + // At this point, `next_token.end() > node.end()` + assert!( + next.start() >= node.end(), + "{path}: The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}", + path = self.test_path.display(), + root = self.parents.first() + ); + } + } +} + impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> { fn enter_node(&mut self, node: AnyNodeRef<'ast>) -> TraversalSignal { assert!( @@ -452,12 +499,16 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> { ); } + self.assert_start_boundary(node); + self.parents.push(node); TraversalSignal::Traverse } fn leave_node(&mut self, node: AnyNodeRef<'ast>) { + self.assert_end_boundary(node); + self.parents.pop().expect("Expected tree to be balanced"); self.previous = Some(node); diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap index 61f3230855..93b1439a58 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token.py.snap @@ -296,7 +296,7 @@ Module( test: Call( ExprCall { node_index: NodeIndex(None), - range: 456..472, + range: 456..471, func: Name( ExprName { node_index: NodeIndex(None), @@ -306,7 +306,7 @@ Module( }, ), arguments: Arguments { - range: 460..472, + range: 460..471, node_index: NodeIndex(None), args: [ Name( @@ -581,7 +581,7 @@ Module( test: Call( ExprCall { node_index: NodeIndex(None), - range: 890..906, + range: 890..905, func: Name( ExprName { node_index: NodeIndex(None), @@ -591,7 +591,7 @@ Module( }, ), arguments: Arguments { - range: 894..906, + range: 894..905, node_index: NodeIndex(None), args: [ FString( @@ -832,7 +832,16 @@ Module( | 28 | # The lexer is nested with multiple levels of parentheses 29 | if call(foo, [a, b - | ^ Syntax Error: Expected `]`, found NonLogicalNewline +30 | def bar(): + | ^^^ Syntax Error: Expected `]`, found `def` +31 | pass + | + + + | +28 | # The lexer is nested with multiple levels of parentheses +29 | if call(foo, [a, b + | ^ Syntax Error: Expected `)`, found newline 30 | def bar(): 31 | pass | @@ -857,11 +866,10 @@ Module( | -41 | # test is to make sure it emits a `NonLogicalNewline` token after `b`. 
42 | if call(foo, [a, 43 | b - | ^ Syntax Error: Expected `]`, found NonLogicalNewline 44 | ) + | ^ Syntax Error: Expected `]`, found `)` 45 | def bar(): 46 | pass | @@ -898,7 +906,7 @@ Module( | 49 | # F-strings uses normal list parsing, so test those as well 50 | if call(f"hello {x - | ^ Syntax Error: Expected FStringEnd, found NonLogicalNewline + | ^ Syntax Error: Expected `)`, found newline 51 | def bar(): 52 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap index 5567459c70..2eb9515848 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_mac_eol.py.snap @@ -17,7 +17,7 @@ Module( test: Call( ExprCall { node_index: NodeIndex(None), - range: 3..19, + range: 3..18, func: Name( ExprName { node_index: NodeIndex(None), @@ -27,7 +27,7 @@ Module( }, ), arguments: Arguments { - range: 7..19, + range: 7..18, node_index: NodeIndex(None), args: [ Name( @@ -113,5 +113,11 @@ Module( | 1 | if call(foo, [a, b def bar(): pass - | ^ Syntax Error: Expected `]`, found NonLogicalNewline + | ^^^ Syntax Error: Expected `]`, found `def` + | + + + | +1 | if call(foo, [a, b def bar(): pass + | ^ Syntax Error: Expected `)`, found newline | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap index ae03fee095..692755d4e8 100644 --- a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lex_logical_token_windows_eol.py.snap @@ -17,7 +17,7 @@ Module( test: Call( ExprCall { node_index: NodeIndex(None), - range: 3..20, + range: 3..18, func: Name( ExprName { node_index: NodeIndex(None), @@ -27,7 +27,7 @@ Module( }, ), arguments: Arguments { - range: 7..20, + range: 7..18, node_index: NodeIndex(None), args: [ Name( @@ -113,7 +113,15 @@ Module( | 1 | if call(foo, [a, b - | ^ Syntax Error: Expected `]`, found NonLogicalNewline +2 | def bar(): + | ^^^ Syntax Error: Expected `]`, found `def` +3 | pass + | + + + | +1 | if call(foo, [a, b + | ^ Syntax Error: Expected `)`, found newline 2 | def bar(): 3 | pass | diff --git a/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__ty_1828.py.snap b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__ty_1828.py.snap new file mode 100644 index 0000000000..49ea0c7d58 --- /dev/null +++ b/crates/ruff_python_parser/tests/snapshots/invalid_syntax@re_lexing__ty_1828.py.snap @@ -0,0 +1,227 @@ +--- +source: crates/ruff_python_parser/tests/fixtures.rs +input_file: crates/ruff_python_parser/resources/invalid/re_lexing/ty_1828.py +--- +## AST + +``` +Module( + ModModule { + node_index: NodeIndex(None), + range: 0..112, + body: [ + AnnAssign( + StmtAnnAssign { + node_index: NodeIndex(None), + range: 66..93, + target: Name( + ExprName { + node_index: NodeIndex(None), + range: 67..68, + id: Name("c"), + ctx: Store, + }, + ), + annotation: Name( + ExprName { + node_index: NodeIndex(None), + range: 70..73, + id: Name("int"), + ctx: Load, + }, + ), + value: Some( + Tuple( + ExprTuple { + node_index: NodeIndex(None), + range: 76..93, + elts: [ + NumberLiteral( + ExprNumberLiteral { + node_index: 
NodeIndex(None), + range: 76..77, + value: Int( + 1, + ), + }, + ), + Subscript( + ExprSubscript { + node_index: NodeIndex(None), + range: 78..90, + value: FString( + ExprFString { + node_index: NodeIndex(None), + range: 78..85, + value: FStringValue { + inner: Single( + FString( + FString { + range: 78..85, + node_index: NodeIndex(None), + elements: [ + Interpolation( + InterpolatedElement { + range: 82..85, + node_index: NodeIndex(None), + expression: Name( + ExprName { + node_index: NodeIndex(None), + range: 83..84, + id: Name("d"), + ctx: Load, + }, + ), + debug_text: Some( + DebugText { + leading: "", + trailing: "=", + }, + ), + conversion: None, + format_spec: None, + }, + ), + ], + flags: FStringFlags { + quote_style: Double, + prefix: Regular, + triple_quoted: true, + unclosed: true, + }, + }, + ), + ), + }, + }, + ), + slice: Slice( + ExprSlice { + node_index: NodeIndex(None), + range: 87..90, + lower: None, + upper: Some( + Name( + ExprName { + node_index: NodeIndex(None), + range: 87..90, + id: Name("def"), + ctx: Load, + }, + ), + ), + step: None, + }, + ), + ctx: Load, + }, + ), + Call( + ExprCall { + node_index: NodeIndex(None), + range: 91..93, + func: Name( + ExprName { + node_index: NodeIndex(None), + range: 91..92, + id: Name("a"), + ctx: Load, + }, + ), + arguments: Arguments { + range: 92..93, + node_index: NodeIndex(None), + args: [], + keywords: [], + }, + }, + ), + ], + ctx: Load, + parenthesized: false, + }, + ), + ), + simple: false, + }, + ), + ], + }, +) +``` +## Errors + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ + | ^ Syntax Error: Expected `)`, found `:` +3 | def a( +4 | class A: + | + + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ + | ^ Syntax Error: f-string: expecting `}` +3 | def a( +4 | class A: + | + + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ +3 | def a( + | ^^^ Syntax Error: Expected `:`, found `def` +4 | class A: +5 | pass + | + + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ +3 | def a( + | ^ Syntax Error: Expected `]`, found name +4 | class A: +5 | pass + | + + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ +3 | def a( + | _______^ +4 | | class A: +5 | | pass + | |_________^ Syntax Error: f-string: unterminated triple-quoted string + | + + + | +2 | (c: int = 1,f"""{d=[ +3 | def a( +4 | class A: + | ^^^^^ Syntax Error: Expected `)`, found `class` +5 | pass + | + + + | +1 | # Regression test for https://github.com/astral-sh/ty/issues/1828 +2 | (c: int = 1,f"""{d=[ +3 | def a( + | _______^ +4 | | class A: +5 | | pass + | |_________^ Syntax Error: Expected a statement + | + + + | +4 | class A: +5 | pass + | ^ Syntax Error: unexpected EOF while parsing + | diff --git a/crates/ruff_python_stdlib/src/typing.rs b/crates/ruff_python_stdlib/src/typing.rs index 63d7ccf32e..7f4abd367c 100644 --- a/crates/ruff_python_stdlib/src/typing.rs +++ b/crates/ruff_python_stdlib/src/typing.rs @@ -326,7 +326,15 @@ pub fn is_immutable_return_type(qualified_name: &[&str]) -> bool { | ["re", "compile"] | [ "", - "bool" | "bytes" | "complex" | "float" | "frozenset" | "int" | "str" | "tuple" + "bool" + | "bytes" + | "complex" + | "float" + | "frozenset" + | "int" + | "str" + | "tuple" + | "slice" ] ) } diff --git a/crates/ruff_server/src/lint.rs b/crates/ruff_server/src/lint.rs index 
c9d0d76bec..db3f9ce4d8 100644 --- a/crates/ruff_server/src/lint.rs +++ b/crates/ruff_server/src/lint.rs @@ -20,6 +20,7 @@ use ruff_linter::{ packaging::detect_package_root, settings::flags, source_kind::SourceKind, + suppression::Suppressions, }; use ruff_notebook::Notebook; use ruff_python_codegen::Stylist; @@ -118,6 +119,10 @@ pub(crate) fn check( // Extract the `# noqa` and `# isort: skip` directives from the source. let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer); + // Parse range suppression comments + let suppressions = + Suppressions::from_tokens(&settings.linter, locator.contents(), parsed.tokens()); + // Generate checks. let diagnostics = check_path( &document_path, @@ -132,6 +137,7 @@ pub(crate) fn check( source_type, &parsed, target_version, + &suppressions, ); let noqa_edits = generate_noqa_edits( @@ -142,6 +148,7 @@ pub(crate) fn check( &settings.linter.external, &directives.noqa_line_for, stylist.line_ending(), + &suppressions, ); let mut diagnostics_map = DiagnosticsMap::default(); diff --git a/crates/ruff_source_file/src/line_index.rs b/crates/ruff_source_file/src/line_index.rs index 4adb9d17b7..c1d0769e83 100644 --- a/crates/ruff_source_file/src/line_index.rs +++ b/crates/ruff_source_file/src/line_index.rs @@ -33,26 +33,29 @@ impl LineIndex { line_starts.push(TextSize::default()); let bytes = text.as_bytes(); - let mut utf8 = false; assert!(u32::try_from(bytes.len()).is_ok()); - for (i, byte) in bytes.iter().enumerate() { - utf8 |= !byte.is_ascii(); - - match byte { - // Only track one line break for `\r\n`. - b'\r' if bytes.get(i + 1) == Some(&b'\n') => continue, - b'\n' | b'\r' => { - // SAFETY: Assertion above guarantees `i <= u32::MAX` - #[expect(clippy::cast_possible_truncation)] - line_starts.push(TextSize::from(i as u32) + TextSize::from(1)); - } - _ => {} + for i in memchr::memchr2_iter(b'\n', b'\r', bytes) { + // Skip `\r` in `\r\n` sequences (only count the `\n`). + if bytes[i] == b'\r' && bytes.get(i + 1) == Some(&b'\n') { + continue; } + // SAFETY: Assertion above guarantees `i <= u32::MAX` + #[expect(clippy::cast_possible_truncation)] + line_starts.push(TextSize::from(i as u32) + TextSize::from(1)); } - let kind = if utf8 { + // Determine whether the source text is ASCII. + // + // Empirically, this simple loop is auto-vectorized by LLVM and benchmarks faster than both + // `str::is_ascii()` and hand-written SIMD. + let mut has_non_ascii = false; + for byte in bytes { + has_non_ascii |= !byte.is_ascii(); + } + + let kind = if has_non_ascii { IndexKind::Utf8 } else { IndexKind::Ascii diff --git a/crates/ruff_wasm/Cargo.toml b/crates/ruff_wasm/Cargo.toml index a83171ab4b..254f6d5fdc 100644 --- a/crates/ruff_wasm/Cargo.toml +++ b/crates/ruff_wasm/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "ruff_wasm" -version = "0.14.8" +version = "0.14.9" publish = false authors = { workspace = true } edition = { workspace = true } diff --git a/crates/ruff_wasm/src/lib.rs b/crates/ruff_wasm/src/lib.rs index 8c18111d16..6dd49a15bf 100644 --- a/crates/ruff_wasm/src/lib.rs +++ b/crates/ruff_wasm/src/lib.rs @@ -2,6 +2,7 @@ use std::path::Path; use js_sys::Error; use ruff_linter::settings::types::PythonVersion; +use ruff_linter::suppression::Suppressions; use serde::{Deserialize, Serialize}; use wasm_bindgen::prelude::*; @@ -212,6 +213,9 @@ impl Workspace { &indexer, ); + let suppressions = + Suppressions::from_tokens(&self.settings.linter, locator.contents(), parsed.tokens()); + // Generate checks. 
let diagnostics = check_path( Path::new(""), @@ -226,6 +230,7 @@ impl Workspace { source_type, &parsed, target_version, + &suppressions, ); let source_code = locator.to_source_code(); diff --git a/crates/ty/README.md b/crates/ty/README.md index fb98292a0f..5c766624c3 100644 --- a/crates/ty/README.md +++ b/crates/ty/README.md @@ -1,7 +1,6 @@ # ty ty is an extremely fast type checker. -Currently, it is a work-in-progress and not ready for production use. The Rust code for ty lives in this repository; see [CONTRIBUTING.md](CONTRIBUTING.md) for more information on contributing to ty. diff --git a/crates/ty/docs/rules.md b/crates/ty/docs/rules.md index 5ac36c4fb9..e32e8d3e53 100644 --- a/crates/ty/docs/rules.md +++ b/crates/ty/docs/rules.md @@ -39,7 +39,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -95,7 +95,7 @@ f(int) # error Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -126,7 +126,7 @@ a = 1 Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -158,7 +158,7 @@ class C(A, B): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -190,7 +190,7 @@ class B(A): ... Default level: error · Preview (since 1.0.0) · Related issues · -View source +View source @@ -218,7 +218,7 @@ type B = A Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -245,7 +245,7 @@ class B(A, A): ... Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -357,7 +357,7 @@ def test(): -> "Literal[5]": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -387,7 +387,7 @@ class C(A, B): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -413,7 +413,7 @@ t[3] # IndexError: tuple index out of range Default level: error · Added in 0.0.1-alpha.12 · Related issues · -View source +View source @@ -502,7 +502,7 @@ an atypical memory layout. Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -529,7 +529,7 @@ func("foo") # error: [invalid-argument-type] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -557,7 +557,7 @@ a: int = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -591,7 +591,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -627,7 +627,7 @@ asyncio.run(main()) Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -651,7 +651,7 @@ class A(42): ... 
# error: [invalid-base] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -678,7 +678,7 @@ with 1: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -707,7 +707,7 @@ a: str Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -751,7 +751,7 @@ except ZeroDivisionError: Default level: error · Added in 0.0.1-alpha.28 · Related issues · -View source +View source @@ -793,7 +793,7 @@ class D(A): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -826,7 +826,7 @@ class C[U](Generic[T]): ... Default level: error · Added in 0.0.1-alpha.17 · Related issues · -View source +View source @@ -865,7 +865,7 @@ carol = Person(name="Carol", age=25) # typo! Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -900,7 +900,7 @@ def f(t: TypeVar("U")): ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -934,7 +934,7 @@ class B(metaclass=f): ... Default level: error · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1041,7 +1041,7 @@ Correct use of `@override` is enforced by ty's `invalid-explicit-override` rule. Default level: error · Added in 0.0.1-alpha.19 · Related issues · -View source +View source @@ -1095,7 +1095,7 @@ AttributeError: Cannot overwrite NamedTuple attribute _asdict Default level: error · Preview (since 1.0.0) · Related issues · -View source +View source @@ -1125,7 +1125,7 @@ Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType` Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1175,7 +1175,7 @@ def foo(x: int) -> int: ... Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1201,7 +1201,7 @@ def f(a: int = ''): ... 
Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1232,7 +1232,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1266,7 +1266,7 @@ TypeError: Protocols can only inherit from other protocols, got Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1315,7 +1315,7 @@ def g(): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1340,7 +1340,7 @@ def func() -> int: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1398,7 +1398,7 @@ TODO #14889 Default level: error · Added in 0.0.1-alpha.6 · Related issues · -View source +View source @@ -1425,7 +1425,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus Default level: error · Added in 0.0.1-alpha.29 · Related issues · -View source +View source @@ -1472,7 +1472,7 @@ Bar[int] # error: too few arguments Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1502,7 +1502,7 @@ TYPE_CHECKING = '' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1532,7 +1532,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1566,7 +1566,7 @@ f(10) # Error Default level: error · Added in 0.0.1-alpha.11 · Related issues · -View source +View source @@ -1600,7 +1600,7 @@ class C: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1635,7 +1635,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1660,7 +1660,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x' Default level: error · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -1693,7 +1693,7 @@ alice["age"] # KeyError Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1722,7 +1722,7 @@ func("string") # error: [no-matching-overload] Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1746,7 +1746,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1772,7 +1772,7 @@ for i in 34: # TypeError: 'int' object is not iterable Default level: error · Added in 0.0.1-alpha.29 · Related issues · -View source +View source @@ -1805,7 +1805,7 @@ class B(A): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1832,7 +1832,7 @@ f(1, x=2) # Error raised here Default level: error · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -1890,7 +1890,7 @@ def test(): -> "int": Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1920,7 +1920,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -1949,7 +1949,7 @@ class B(A): ... 
# Error raised here Default level: error · Preview (since 0.0.1-alpha.30) · Related issues · -View source +View source @@ -1983,7 +1983,7 @@ class F(NamedTuple): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2010,7 +2010,7 @@ f("foo") # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2038,7 +2038,7 @@ def _(x: int): Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2084,7 +2084,7 @@ class A: Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2111,7 +2111,7 @@ f(x=1, y=2) # Error raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2139,7 +2139,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2164,7 +2164,7 @@ import foo # ModuleNotFoundError: No module named 'foo' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2189,7 +2189,7 @@ print(x) # NameError: name 'x' is not defined Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2226,7 +2226,7 @@ b1 < b2 < b1 # exception raised here Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2254,7 +2254,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A' Default level: error · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2279,7 +2279,7 @@ l[1:10:0] # ValueError: slice step cannot be zero Default level: warn · Added in 0.0.1-alpha.20 · Related issues · -View source +View source @@ -2320,7 +2320,7 @@ class SubProto(BaseProto, Protocol): Default level: warn · Added in 0.0.1-alpha.16 · Related issues · -View source +View source @@ -2408,7 +2408,7 @@ a = 20 / 0 # type: ignore Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2436,7 +2436,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c' Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2468,7 +2468,7 @@ A()[0] # TypeError: 'A' object is not subscriptable Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2500,7 +2500,7 @@ from module import a # ImportError: cannot import name 'a' from 'module' Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2527,7 +2527,7 @@ cast(int, f()) # Redundant Default level: warn · Added in 0.0.1-alpha.1 · Related issues · -View source +View source @@ -2551,7 +2551,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined Default level: warn · Added in 0.0.1-alpha.15 · Related issues · -View source +View source @@ -2609,7 +2609,7 @@ def g(): Default level: warn · Added in 0.0.1-alpha.7 · Related issues · -View source +View source @@ -2648,7 +2648,7 @@ class D(C): ... # error: [unsupported-base] Default level: warn · Added in 0.0.1-alpha.22 · Related issues · -View source +View source @@ -2711,7 +2711,7 @@ def foo(x: int | str) -> int | str: Default level: ignore · Preview (since 0.0.1-alpha.1) · Related issues · -View source +View source @@ -2735,7 +2735,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime. 
Default level: ignore · Added in 0.0.1-alpha.1 · Related issues · -View source +View source diff --git a/crates/ty/tests/cli/main.rs b/crates/ty/tests/cli/main.rs index a352ba355a..546d3ac260 100644 --- a/crates/ty/tests/cli/main.rs +++ b/crates/ty/tests/cli/main.rs @@ -580,6 +580,53 @@ fn check_non_existing_path() -> anyhow::Result<()> { Ok(()) } +#[test] +fn check_file_without_extension() -> anyhow::Result<()> { + let case = CliTest::with_file("main", "a = b")?; + + assert_cmd_snapshot!( + case.command().arg("main"), + @r" + success: false + exit_code: 1 + ----- stdout ----- + error[unresolved-reference]: Name `b` used when not defined + --> main:1:5 + | + 1 | a = b + | ^ + | + info: rule `unresolved-reference` is enabled by default + + Found 1 diagnostic + + ----- stderr ----- + " + ); + + Ok(()) +} + +#[test] +fn check_file_without_extension_in_subfolder() -> anyhow::Result<()> { + let case = CliTest::with_file("src/main", "a = b")?; + + assert_cmd_snapshot!( + case.command().arg("src"), + @r" + success: true + exit_code: 0 + ----- stdout ----- + All checks passed! + + ----- stderr ----- + WARN No python files found under the given path(s) + " + ); + + Ok(()) +} + #[test] fn concise_diagnostics() -> anyhow::Result<()> { let case = CliTest::with_file( diff --git a/crates/ty/tests/cli/python_environment.rs b/crates/ty/tests/cli/python_environment.rs index 3bc4eee1c3..1f3479d2fd 100644 --- a/crates/ty/tests/cli/python_environment.rs +++ b/crates/ty/tests/cli/python_environment.rs @@ -43,7 +43,7 @@ fn config_override_python_version() -> anyhow::Result<()> { | 2 | [tool.ty.environment] 3 | python-version = "3.11" - | ^^^^^^ Python 3.11 assumed due to this configuration setting + | ^^^^^^ Python version configuration | info: rule `unresolved-attribute` is enabled by default @@ -143,7 +143,7 @@ fn config_file_annotation_showing_where_python_version_set_typing_error() -> any ), ])?; - assert_cmd_snapshot!(case.command(), @r###" + assert_cmd_snapshot!(case.command(), @r#" success: false exit_code: 1 ----- stdout ----- @@ -159,14 +159,14 @@ fn config_file_annotation_showing_where_python_version_set_typing_error() -> any | 2 | [tool.ty.environment] 3 | python-version = "3.8" - | ^^^^^ Python 3.8 assumed due to this configuration setting + | ^^^^^ Python version configuration | info: rule `unresolved-reference` is enabled by default Found 1 diagnostic ----- stderr ----- - "###); + "#); assert_cmd_snapshot!(case.command().arg("--python-version=3.9"), @r###" success: false @@ -772,7 +772,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu ("test.py", "aiter"), ])?; - assert_cmd_snapshot!(case.command(), @r###" + assert_cmd_snapshot!(case.command(), @r" success: false exit_code: 1 ----- stdout ----- @@ -787,7 +787,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu --> venv/pyvenv.cfg:2:11 | 2 | version = 3.8 - | ^^^ Python version inferred from virtual environment metadata file + | ^^^ Virtual environment metadata 3 | home = foo/bar/bin | info: No Python version was specified on the command line or in a configuration file @@ -796,7 +796,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu Found 1 diagnostic ----- stderr ----- - "###); + "); Ok(()) } @@ -831,7 +831,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> { ("test.py", "aiter"), ])?; - assert_cmd_snapshot!(case.command(), @r###" + assert_cmd_snapshot!(case.command(), @r" success: false exit_code: 1 ----- stdout 
----- @@ -846,7 +846,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> { --> venv/pyvenv.cfg:4:23 | 4 | version = 3.8 - | ^^^ Python version inferred from virtual environment metadata file + | ^^^ Virtual environment metadata | info: No Python version was specified on the command line or in a configuration file info: rule `unresolved-reference` is enabled by default @@ -854,7 +854,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> { Found 1 diagnostic ----- stderr ----- - "###); + "); Ok(()) } @@ -898,7 +898,7 @@ fn config_file_annotation_showing_where_python_version_set_syntax_error() -> any | 2 | [project] 3 | requires-python = ">=3.8" - | ^^^^^^^ Python 3.8 assumed due to this configuration setting + | ^^^^^^^ Python version configuration | Found 1 diagnostic @@ -1206,7 +1206,7 @@ fn defaults_to_a_new_python_version() -> anyhow::Result<()> { | 2 | [environment] 3 | python-version = "3.10" - | ^^^^^^ Python 3.10 assumed due to this configuration setting + | ^^^^^^ Python version configuration 4 | python-platform = "linux" | info: rule `unresolved-attribute` is enabled by default @@ -1225,7 +1225,7 @@ fn defaults_to_a_new_python_version() -> anyhow::Result<()> { | 2 | [environment] 3 | python-version = "3.10" - | ^^^^^^ Python 3.10 assumed due to this configuration setting + | ^^^^^^ Python version configuration 4 | python-platform = "linux" | info: rule `unresolved-import` is enabled by default diff --git a/crates/ty_ide/src/code_action.rs b/crates/ty_ide/src/code_action.rs index 1a02389735..2b6703afac 100644 --- a/crates/ty_ide/src/code_action.rs +++ b/crates/ty_ide/src/code_action.rs @@ -5,7 +5,8 @@ use ruff_diagnostics::Edit; use ruff_text_size::TextRange; use ty_project::Db; use ty_python_semantic::create_suppression_fix; -use ty_python_semantic::types::UNRESOLVED_REFERENCE; +use ty_python_semantic::lint::LintId; +use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE}; /// A `QuickFix` Code Action #[derive(Debug, Clone)] @@ -28,12 +29,17 @@ pub fn code_actions( let mut actions = Vec::new(); - if lint_id.name() == UNRESOLVED_REFERENCE.name() + // Suggest imports for unresolved references (often ideal) + // TODO: suggest qualifying with an already imported symbol + let is_unresolved_reference = + lint_id == LintId::of(&UNRESOLVED_REFERENCE) || lint_id == LintId::of(&UNDEFINED_REVEAL); + if is_unresolved_reference && let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range) { actions.extend(import_quick_fix); } + // Suggest just suppressing the lint (always a valid option, but never ideal) actions.push(QuickFix { title: format!("Ignore '{}' for this line", lint_id.name()), edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(), @@ -80,7 +86,10 @@ mod tests { use ruff_diagnostics::Fix; use ruff_text_size::{TextRange, TextSize}; use ty_project::ProjectMetadata; - use ty_python_semantic::{lint::LintMetadata, types::UNRESOLVED_REFERENCE}; + use ty_python_semantic::{ + lint::LintMetadata, + types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE}, + }; #[test] fn add_ignore() { @@ -435,6 +444,40 @@ mod tests { "#); } + #[test] + fn undefined_reveal_type() { + let test = CodeActionTest::with_source( + r#" + reveal_type(1) + "#, + ); + + assert_snapshot!(test.code_actions(&UNDEFINED_REVEAL), @r" + info[code-action]: import typing.reveal_type + --> main.py:2:13 + | + 2 | reveal_type(1) + | ^^^^^^^^^^^ + | + help: This is a preferred code action + 1 + from typing import 
reveal_type + 2 | + 3 | reveal_type(1) + 4 | + + info[code-action]: Ignore 'undefined-reveal' for this line + --> main.py:2:13 + | + 2 | reveal_type(1) + | ^^^^^^^^^^^ + | + 1 | + - reveal_type(1) + 2 + reveal_type(1) # ty:ignore[undefined-reveal] + 3 | + "); + } + pub(super) struct CodeActionTest { pub(super) db: ty_project::TestDb, pub(super) file: File, diff --git a/crates/ty_ide/src/completion.rs b/crates/ty_ide/src/completion.rs index 70505ac4c8..ca2305df0c 100644 --- a/crates/ty_ide/src/completion.rs +++ b/crates/ty_ide/src/completion.rs @@ -9,6 +9,7 @@ use ruff_python_ast::token::{Token, TokenAt, TokenKind, Tokens}; use ruff_python_ast::{self as ast, AnyNodeRef}; use ruff_python_codegen::Stylist; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize}; +use rustc_hash::FxHashSet; use ty_python_semantic::types::UnionType; use ty_python_semantic::{ Completion as SemanticCompletion, KnownModule, ModuleName, NameKind, SemanticModel, @@ -20,7 +21,7 @@ use crate::find_node::covering_node; use crate::goto::Definitions; use crate::importer::{ImportRequest, Importer}; use crate::symbols::QueryPattern; -use crate::{Db, all_symbols}; +use crate::{Db, all_symbols, signature_help}; /// A collection of completions built up from various sources. #[derive(Clone)] @@ -379,11 +380,22 @@ pub enum CompletionKind { TypeParameter, } -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct CompletionSettings { pub auto_import: bool, } +// N.B. It's important for the defaults here to match the defaults +// established by `CompletionOptions::into_settings`. This is +// because `WorkspaceSettings::default()` uses this definition. +// But `WorkspaceOptions::default().into_settings()` will use the +// `CompletionOptions::into_settings` definition. +impl Default for CompletionSettings { + fn default() -> CompletionSettings { + CompletionSettings { auto_import: true } + } +} + pub fn completion<'db>( db: &'db dyn Db, settings: &CompletionSettings, @@ -436,6 +448,10 @@ pub fn completion<'db>( ); } } + + if let Some(arg_completions) = detect_function_arg_completions(db, file, &parsed, offset) { + completions.extend(arg_completions); + } } if is_raising_exception(tokens) { @@ -451,10 +467,89 @@ pub fn completion<'db>( !ty.is_notimplemented(db) }); } - completions.into_completions() } +/// Detect and construct completions for unset function arguments. +/// +/// Suggestions are only provided if the cursor is currently inside a +/// function call and the function arguments have not 1) already been +/// set and 2) been defined as positional-only. 
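+///
+/// For example (mirroring the `call_multiple_keyword_arguments_some_set`
+/// test below, so purely illustrative), given:
+///
+/// ```python
+/// def foo(bar, baz): ...
+/// foo(bar=1, b
+/// ```
+///
+/// only `baz` should be suggested (inserted as `baz=`), since `bar` is
+/// already set.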
+fn detect_function_arg_completions<'db>( + db: &'db dyn Db, + file: File, + parsed: &ParsedModuleRef, + offset: TextSize, +) -> Option>> { + let sig_help = signature_help(db, file, offset)?; + let set_function_args = detect_set_function_args(parsed, offset); + + let completions = sig_help + .signatures + .iter() + .flat_map(|sig| &sig.parameters) + .filter(|p| !p.is_positional_only && !set_function_args.contains(&p.name.as_str())) + .map(|p| { + let name = Name::new(&p.name); + let documentation = p + .documentation + .as_ref() + .map(|d| Docstring::new(d.to_owned())); + let insert = Some(format!("{name}=").into_boxed_str()); + Completion { + name, + qualified: None, + insert, + ty: p.ty, + kind: Some(CompletionKind::Variable), + module_name: None, + import: None, + builtin: false, + is_type_check_only: false, + is_definitively_raisable: false, + documentation, + } + }) + .collect(); + Some(completions) +} + +/// Returns function arguments that have already been set. +/// +/// If `offset` is inside an arguments node, this returns +/// the list of argument names that are already set. +/// +/// For example, given: +/// +/// ```python +/// def abc(foo, bar, baz): ... +/// abc(foo=1, bar=2, b) +/// ``` +/// +/// the resulting value is `["foo", "bar"]` +/// +/// This is useful to be able to exclude autocomplete suggestions +/// for arguments that have already been set to some value. +/// +/// If the parent node is not an arguments node, the return value +/// is an empty Vec. +fn detect_set_function_args(parsed: &ParsedModuleRef, offset: TextSize) -> FxHashSet<&str> { + let range = TextRange::empty(offset); + covering_node(parsed.syntax().into(), range) + .parent() + .and_then(|node| match node { + ast::AnyNodeRef::Arguments(args) => Some(args), + _ => None, + }) + .map(|args| { + args.keywords + .iter() + .filter_map(|kw| kw.arg.as_ref().map(|ident| ident.id.as_str())) + .collect() + }) + .unwrap_or_default() +} + pub(crate) struct ImportEdit { pub label: String, pub edit: Edit, @@ -1740,7 +1835,7 @@ x = foobad ); assert_snapshot!( - test.skip_builtins().build().snapshot(), + test.skip_builtins().skip_auto_import().build().snapshot(), @"foo_bar_baz", ); } @@ -1754,7 +1849,7 @@ type ); assert_snapshot!( - test.type_signatures().build().snapshot(), + test.type_signatures().skip_auto_import().build().snapshot(), @r" TypeError :: type :: @@ -1949,7 +2044,10 @@ f ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -1963,7 +2061,7 @@ g ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"", ); } @@ -1994,7 +2092,10 @@ f ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2023,7 +2124,9 @@ def foo(): ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" foo foofoo "); @@ -2073,9 +2176,11 @@ def foo(): ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - foofoo + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo 
+ foofoo "); } @@ -2089,9 +2194,11 @@ def foo(): f", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - foofoo + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + foofoo "); } @@ -2107,10 +2214,12 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - foofoo - frob + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + foofoo + frob "); } @@ -2126,9 +2235,11 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - frob + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + frob "); } @@ -2144,11 +2255,13 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - foofoo - foofoofoo - frob + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + foofoo + foofoofoo + frob "); } @@ -2295,7 +2408,10 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2306,7 +2422,10 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2317,7 +2436,10 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2328,7 +2450,10 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2339,7 +2464,10 @@ def frob(): ... ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo", + ); } #[test] @@ -2386,10 +2514,11 @@ def frob(): ... ", ); - // FIXME: Should include `foo`. assert_snapshot!( builder.skip_keywords().skip_builtins().build().snapshot(), - @"", + @r" + foo + ", ); } @@ -2401,10 +2530,11 @@ def frob(): ... ", ); - // FIXME: Should include `foo`. assert_snapshot!( builder.skip_keywords().skip_builtins().build().snapshot(), - @"", + @r" + foo + ", ); } @@ -2419,10 +2549,13 @@ class Foo: ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" bar frob - "); + ", + ); } #[test] @@ -2435,7 +2568,10 @@ class Foo: ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"bar"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"bar", + ); } #[test] @@ -3039,7 +3175,6 @@ quux. "); } - // We don't yet take function parameters into account. 
#[test] fn call_prefix1() { let builder = completion_test_builder( @@ -3052,7 +3187,157 @@ bar(o ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @"foo"); + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + okay + " + ); + } + + #[test] + fn call_keyword_only_argument() { + let builder = completion_test_builder( + "\ +def bar(*, okay): ... + +foo = 1 + +bar(o +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + okay + " + ); + } + + #[test] + fn call_multiple_keyword_arguments() { + let builder = completion_test_builder( + "\ +def foo(bar, baz, barbaz): ... + +foo(b +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + bar + barbaz + baz + " + ); + } + + #[test] + fn call_multiple_keyword_arguments_some_set() { + let builder = completion_test_builder( + "\ +def foo(bar, baz): ... + +foo(bar=1, b +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + baz + " + ); + } + + #[test] + fn call_arguments_multi_def() { + let builder = completion_test_builder( + "\ +def abc(okay, x): ... +def bar(not_okay, y): ... +def baz(foobarbaz, z): ... + +abc(o +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + okay + " + ); + } + + #[test] + fn call_arguments_cursor_middle() { + let builder = completion_test_builder( + "\ +def abc(okay, foo, bar, baz): ... + +abc(okay=1, ba baz=5 +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + bar + " + ); + } + + #[test] + fn call_positional_only_argument() { + // If the parameter is positional only we don't + // want to suggest it as specifying by name + // is not valid. + let builder = completion_test_builder( + "\ +def bar(okay, /): ... + +foo = 1 + +bar(o +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @"foo" + ); + } + + #[test] + fn call_positional_only_keyword_only_argument_mix() { + // If the parameter is positional only we don't + // want to suggest it as specifying by name + // is not valid. + let builder = completion_test_builder( + "\ +def bar(not_okay, no, /, okay, *, okay_abc, okay_okay): ... + +foo = 1 + +bar(o +", + ); + + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + okay + okay_abc + okay_okay + " + ); } #[test] @@ -3070,6 +3355,7 @@ bar( assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" bar foo + okay "); } @@ -3086,9 +3372,11 @@ class C: ", ); - assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" - foo - self + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" + foo + self "); } @@ -3120,7 +3408,9 @@ class C: // FIXME: Should NOT include `foo` here, since // that is only a method that can be called on // `self`. 
- assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r" + assert_snapshot!( + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), + @r" foo self "); @@ -3137,7 +3427,7 @@ class ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"classy_variable_name", ); } @@ -3153,7 +3443,7 @@ print(f\"{some ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"some_symbol", ); } @@ -3192,7 +3482,7 @@ if sys.platform == \"not-my-current-platform\": // currently make no effort to provide a good IDE experience within sections that // are unreachable assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"", ); } @@ -3678,7 +3968,7 @@ Fo = float ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"Fo", ); } @@ -3800,7 +4090,7 @@ except Type: ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"", ); } @@ -3816,7 +4106,7 @@ def _(): ); assert_snapshot!( - builder.skip_keywords().skip_builtins().build().snapshot(), + builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"", ); } @@ -4350,7 +4640,6 @@ from os. .source("main.py", "Abra") .source("package/__init__.py", "AbraKadabra = 1") .completion_test_builder() - .auto_import() .build() .contains("AbraKadabra"); } @@ -4361,7 +4650,6 @@ from os. .source("main.py", "Kadabra = 1\nKad") .source("package/__init__.py", "AbraKadabra = 1") .completion_test_builder() - .auto_import() .type_signatures() .module_names() .filter(|c| c.name.contains("Kadabra")) @@ -4444,7 +4732,7 @@ from os. 
); assert_snapshot!( - test.skip_keywords().skip_builtins().build().snapshot(), + test.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(), @"", ); } @@ -5543,7 +5831,6 @@ def foo(param: s) .source("foo.py", "def long_namea(): ...") .completion_test_builder() .type_signatures() - .auto_import() .module_names() .filter(|c| c.name.contains("long_name")) .build() @@ -5562,6 +5849,7 @@ def foo(param: s) let snapshot = completion_test_builder("from typing import Protocol\nclass Foo(P: ...") .filter(|c| c.name.starts_with('P')) + .skip_auto_import() .build() .snapshot(); @@ -5827,9 +6115,7 @@ from .imp #[test] fn typing_extensions_excluded_from_auto_import() { - let builder = completion_test_builder("deprecated") - .auto_import() - .module_names(); + let builder = completion_test_builder("deprecated").module_names(); assert_snapshot!(builder.build().snapshot(), @"deprecated :: warnings"); } @@ -5852,7 +6138,6 @@ from .imp .source("typing_extensions.py", "deprecated = 1") .source("foo.py", "deprecated") .completion_test_builder() - .auto_import() .module_names(); assert_snapshot!(builder.build().snapshot(), @r" deprecated :: typing_extensions @@ -5877,7 +6162,6 @@ from .imp let builder = CursorTest::builder() .source("foo.pyi", "deprecated") .completion_test_builder() - .auto_import() .module_names(); assert_snapshot!(builder.build().snapshot(), @r" deprecated :: typing_extensions @@ -5916,7 +6200,6 @@ ZQ .source("foo.py", r#"from bar import ZQZQ"#) .source("bar.py", r#"ZQZQ = 1"#) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -5958,7 +6241,6 @@ ZQ .source("foo.py", r#"from bar import ZQZQ as ZQZQ"#) .source("bar.py", r#"ZQZQ = 1"#) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -5986,7 +6268,6 @@ ZQ "#, ) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -6021,7 +6302,6 @@ bar.ZQ "#, ) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -6043,7 +6323,6 @@ Quitter "#, ) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -6075,7 +6354,6 @@ ZQ "#, ) .completion_test_builder() - .auto_import() .module_names() .build() .snapshot(); @@ -6095,7 +6373,6 @@ multiprocess "#, ) .completion_test_builder() - .auto_import() .build() .snapshot(); assert_snapshot!(snapshot, @r" @@ -6136,7 +6413,6 @@ zqzqzq ) .source("zqzqzqzqzq.py", "") .completion_test_builder() - .auto_import() .build() .snapshot(); assert_snapshot!(snapshot, @"zqzqzqzqzq"); @@ -6152,12 +6428,160 @@ collabc "#, ) .completion_test_builder() - .auto_import() .build() .snapshot(); assert_snapshot!(snapshot, @"collections.abc"); } + #[test] + fn local_function_variable_with_return() { + let builder = completion_test_builder( + "\ +variable_global = 1 +def foo(): + variable_local = 1 + variable_ + return +", + ); + assert_snapshot!( + builder.skip_auto_import().build().snapshot(), + @r" + variable_global + variable_local + ", + ); + } + + #[test] + fn nested_scopes_with_return() { + let builder = completion_test_builder( + "\ +variable_1 = 1 +def fun1(): + variable_2 = 1 + def fun2(): + variable_3 = 1 + def fun3(): + variable_4 = 1 + variable_ + return + return + return +", + ); + assert_snapshot!( + builder.skip_auto_import().build().snapshot(), + @r" + variable_1 + variable_2 + variable_3 + variable_4 + ", + ); + } + + #[test] + fn multiple_declarations_global_scope1() { + let builder = completion_test_builder( + "\ +zqzqzq: int = 1 +zqzqzq: str = 
'foo' +zqzq +", + ); + // The type for `zqzqzq` *should* be `str`, but we consider all + // reachable declarations and bindings, which means we get a + // union of `int` and `str` here even though the `int` binding + // isn't live at the cursor position. + assert_snapshot!( + builder.skip_auto_import().type_signatures().build().snapshot(), + @"zqzqzq :: int | str", + ); + } + + #[test] + fn multiple_declarations_global_scope2() { + let builder = completion_test_builder( + "\ +zqzqzq: int = 1 +zqzq +zqzqzq: str = 'foo' +", + ); + // The type for `zqzqzq` *should* be `int`, but we consider all + // reachable declarations and bindings, which means we get a + // union of `int` and `str` here even though the `str` binding + // doesn't exist at the cursor position. + assert_snapshot!( + builder.skip_auto_import().type_signatures().build().snapshot(), + @"zqzqzq :: int | str", + ); + } + + #[test] + fn multiple_declarations_function_scope1() { + let builder = completion_test_builder( + "\ +def foo(): + zqzqzq: int = 1 + zqzqzq: str = 'foo' + zqzq + return +", + ); + // The type for `zqzqzq` *should* be `str`, but we consider all + // reachable declarations and bindings, which means we get a + // union of `int` and `str` here even though the `int` binding + // isn't live at the cursor position. + assert_snapshot!( + builder.skip_auto_import().type_signatures().build().snapshot(), + @"zqzqzq :: int | str", + ); + } + + #[test] + fn multiple_declarations_function_scope2() { + let builder = completion_test_builder( + "\ +def foo(): + zqzqzq: int = 1 + zqzq + zqzqzq: str = 'foo' + return +", + ); + // The type for `zqzqzq` *should* be `int`, but we consider all + // reachable declarations and bindings, which means we get a + // union of `int` and `str` here even though the `str` binding + // doesn't exist at the cursor position. + assert_snapshot!( + builder.skip_auto_import().type_signatures().build().snapshot(), + @"zqzqzq :: int | str", + ); + } + + #[test] + fn multiple_declarations_function_parameter() { + let builder = completion_test_builder( + "\ +from pathlib import Path +def f(zqzqzq: str): + zqzqzq: Path = Path(zqzqzq) + zqzq + return +", + ); + // The type for `zqzqzq` *should* be `Path`, but we consider all + // reachable declarations and bindings, which means we get a + // union of `str` and `Path` here even though the `str` binding + // isn't live at the cursor position. + assert_snapshot!( + builder.skip_auto_import().type_signatures().build().snapshot(), + @"zqzqzq :: str | Path", + ); + } + /// A way to create a simple single-file (named `main.py`) completion test /// builder. /// @@ -6224,12 +6648,19 @@ collabc &self.cursor_test.db } - /// When enabled, symbols that aren't in scope but available - /// in the environment will be included. + /// When set, symbols that aren't in scope but available + /// in the environment will NOT be included. /// - /// Not enabled by default. - fn auto_import(mut self) -> CompletionTestBuilder { - self.settings.auto_import = true; + /// Auto-import is enabled by default. So one must opt into + /// skipping them with this method if one wants to test + /// completions without auto-import enabled. + /// + /// It's somewhat common to want to skip auto-import + /// completions because they can otherwise inflate the + /// snapshot size quite a bit and obscure what is actually + /// being tested. 
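+    ///
+    /// A typical (purely illustrative) call chain from the tests below:
+    ///
+    /// ```text
+    /// builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot()
+    /// ```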
+ fn skip_auto_import(mut self) -> CompletionTestBuilder { + self.settings.auto_import = false; self } @@ -6374,6 +6805,11 @@ collabc fn completion_test_builder(&self) -> CompletionTestBuilder { CompletionTestBuilder { cursor_test: self.build(), + // N.B. We very much want to use the default settings + // here, so that our test environment matches the + // production environment. If a default changes, the + // tests should be fixed to accomodate that change + // as well. ---AG settings: CompletionSettings::default(), skip_builtins: false, skip_keywords: false, diff --git a/crates/ty_ide/src/find_references.rs b/crates/ty_ide/src/find_references.rs index e62608e555..274279c4e5 100644 --- a/crates/ty_ide/src/find_references.rs +++ b/crates/ty_ide/src/find_references.rs @@ -35,8 +35,6 @@ mod tests { use crate::tests::{CursorTest, IntoDiagnostic, cursor_test}; use insta::assert_snapshot; use ruff_db::diagnostic::{Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span}; - use ruff_db::files::FileRange; - use ruff_text_size::Ranged; impl CursorTest { fn references(&self) -> String { @@ -52,20 +50,14 @@ mod tests { reference_results.sort_by_key(ReferenceTarget::file); - self.render_diagnostics(reference_results.into_iter().enumerate().map( - |(i, ref_item)| -> ReferenceResult { - ReferenceResult { - index: i, - file_range: FileRange::new(ref_item.file(), ref_item.range()), - } - }, - )) + self.render_diagnostics([ReferenceResult { + references: reference_results, + }]) } } struct ReferenceResult { - index: usize, - file_range: FileRange, + references: Vec, } impl IntoDiagnostic for ReferenceResult { @@ -73,11 +65,14 @@ mod tests { let mut main = Diagnostic::new( DiagnosticId::Lint(LintName::of("references")), Severity::Info, - format!("Reference {}", self.index + 1), + format!("Found {} references", self.references.len()), ); - main.annotate(Annotation::primary( - Span::from(self.file_range.file()).with_range(self.file_range.range()), - )); + + for reference in self.references { + main.annotate(Annotation::secondary( + Span::from(reference.file()).with_range(reference.range()), + )); + } main } @@ -97,55 +92,24 @@ result = calculate_sum(value=42) ", ); - assert_snapshot!(test.references(), @r###" - info[references]: Reference 1 + assert_snapshot!(test.references(), @r" + info[references]: Found 5 references --> main.py:2:19 | 2 | def calculate_sum(value: int) -> int: - | ^^^^^ + | ----- 3 | doubled = value * 2 + | ----- 4 | result = value + doubled - | - - info[references]: Reference 2 - --> main.py:3:15 - | - 2 | def calculate_sum(value: int) -> int: - 3 | doubled = value * 2 - | ^^^^^ - 4 | result = value + doubled + | ----- 5 | return value - | - - info[references]: Reference 3 - --> main.py:4:14 - | - 2 | def calculate_sum(value: int) -> int: - 3 | doubled = value * 2 - 4 | result = value + doubled - | ^^^^^ - 5 | return value - | - - info[references]: Reference 4 - --> main.py:5:12 - | - 3 | doubled = value * 2 - 4 | result = value + doubled - 5 | return value - | ^^^^^ + | ----- 6 | - 7 | # Call with keyword argument - | - - info[references]: Reference 5 - --> main.py:8:24 - | 7 | # Call with keyword argument 8 | result = calculate_sum(value=42) - | ^^^^^ + | ----- | - "###); + "); } #[test] @@ -176,95 +140,36 @@ def outer_function(): ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> main.py:3:5 - | - 2 | def outer_function(): - 3 | counter = 0 - | ^^^^^^^ - 4 | - 5 | def increment(): - | - - info[references]: Reference 2 - --> main.py:6:18 - | - 5 | 
def increment(): - 6 | nonlocal counter - | ^^^^^^^ - 7 | counter += 1 - 8 | return counter - | - - info[references]: Reference 3 - --> main.py:7:9 - | - 5 | def increment(): - 6 | nonlocal counter - 7 | counter += 1 - | ^^^^^^^ - 8 | return counter - | - - info[references]: Reference 4 - --> main.py:8:16 + info[references]: Found 9 references + --> main.py:3:5 | + 2 | def outer_function(): + 3 | counter = 0 + | ------- + 4 | + 5 | def increment(): 6 | nonlocal counter + | ------- 7 | counter += 1 + | ------- 8 | return counter - | ^^^^^^^ + | ------- 9 | - 10 | def decrement(): - | - - info[references]: Reference 5 - --> main.py:11:18 - | 10 | def decrement(): 11 | nonlocal counter - | ^^^^^^^ + | ------- 12 | counter -= 1 + | ------- 13 | return counter - | - - info[references]: Reference 6 - --> main.py:12:9 - | - 10 | def decrement(): - 11 | nonlocal counter - 12 | counter -= 1 - | ^^^^^^^ - 13 | return counter - | - - info[references]: Reference 7 - --> main.py:13:16 - | - 11 | nonlocal counter - 12 | counter -= 1 - 13 | return counter - | ^^^^^^^ + | ------- 14 | - 15 | # Use counter in outer scope - | - - info[references]: Reference 8 - --> main.py:16:15 - | 15 | # Use counter in outer scope 16 | initial = counter - | ^^^^^^^ - 17 | increment() - 18 | decrement() - | - - info[references]: Reference 9 - --> main.py:19:13 - | + | ------- 17 | increment() 18 | decrement() 19 | final = counter - | ^^^^^^^ + | ------- 20 | 21 | return increment, decrement | @@ -296,94 +201,35 @@ final_value = global_counter ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> main.py:2:1 - | - 2 | global_counter = 0 - | ^^^^^^^^^^^^^^ - 3 | - 4 | def increment_global(): - | - - info[references]: Reference 2 - --> main.py:5:12 - | - 4 | def increment_global(): - 5 | global global_counter - | ^^^^^^^^^^^^^^ - 6 | global_counter += 1 - 7 | return global_counter - | - - info[references]: Reference 3 - --> main.py:6:5 - | - 4 | def increment_global(): - 5 | global global_counter - 6 | global_counter += 1 - | ^^^^^^^^^^^^^^ - 7 | return global_counter - | - - info[references]: Reference 4 - --> main.py:7:12 - | - 5 | global global_counter - 6 | global_counter += 1 - 7 | return global_counter - | ^^^^^^^^^^^^^^ - 8 | - 9 | def decrement_global(): - | - - info[references]: Reference 5 - --> main.py:10:12 + info[references]: Found 9 references + --> main.py:2:1 | + 2 | global_counter = 0 + | -------------- + 3 | + 4 | def increment_global(): + 5 | global global_counter + | -------------- + 6 | global_counter += 1 + | -------------- + 7 | return global_counter + | -------------- + 8 | 9 | def decrement_global(): 10 | global global_counter - | ^^^^^^^^^^^^^^ + | -------------- 11 | global_counter -= 1 + | -------------- 12 | return global_counter - | - - info[references]: Reference 6 - --> main.py:11:5 - | - 9 | def decrement_global(): - 10 | global global_counter - 11 | global_counter -= 1 - | ^^^^^^^^^^^^^^ - 12 | return global_counter - | - - info[references]: Reference 7 - --> main.py:12:12 - | - 10 | global global_counter - 11 | global_counter -= 1 - 12 | return global_counter - | ^^^^^^^^^^^^^^ + | -------------- 13 | - 14 | # Use global_counter at module level - | - - info[references]: Reference 8 - --> main.py:15:17 - | 14 | # Use global_counter at module level 15 | initial_value = global_counter - | ^^^^^^^^^^^^^^ - 16 | increment_global() - 17 | decrement_global() - | - - info[references]: Reference 9 - --> main.py:18:15 - | + | -------------- 16 | increment_global() 17 | 
decrement_global() 18 | final_value = global_counter - | ^^^^^^^^^^^^^^ + | -------------- | "); } @@ -406,45 +252,23 @@ except ValueError as err: ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> main.py:4:29 - | - 2 | try: - 3 | x = 1 / 0 - 4 | except ZeroDivisionError as err: - | ^^^ - 5 | print(f'Error: {err}') - 6 | return err - | - - info[references]: Reference 2 - --> main.py:5:21 - | - 3 | x = 1 / 0 - 4 | except ZeroDivisionError as err: - 5 | print(f'Error: {err}') - | ^^^ - 6 | return err - | - - info[references]: Reference 3 - --> main.py:6:12 - | - 4 | except ZeroDivisionError as err: - 5 | print(f'Error: {err}') - 6 | return err - | ^^^ - 7 | - 8 | try: - | - - info[references]: Reference 4 - --> main.py:11:31 + info[references]: Found 4 references + --> main.py:4:29 | + 2 | try: + 3 | x = 1 / 0 + 4 | except ZeroDivisionError as err: + | --- + 5 | print(f'Error: {err}') + | --- + 6 | return err + | --- + 7 | + 8 | try: 9 | y = 2 / 0 10 | except ValueError as err: 11 | print(f'Different error: {err}') - | ^^^ + | --- | "); } @@ -462,39 +286,21 @@ match x: ", ); - assert_snapshot!(test.references(), @r###" - info[references]: Reference 1 + assert_snapshot!(test.references(), @r" + info[references]: Found 3 references --> main.py:3:20 | 2 | match x: 3 | case [a, b] as pattern: - | ^^^^^^^ + | ------- 4 | print(f'Matched: {pattern}') + | ------- 5 | return pattern - | - - info[references]: Reference 2 - --> main.py:4:27 - | - 2 | match x: - 3 | case [a, b] as pattern: - 4 | print(f'Matched: {pattern}') - | ^^^^^^^ - 5 | return pattern - 6 | case _: - | - - info[references]: Reference 3 - --> main.py:5:16 - | - 3 | case [a, b] as pattern: - 4 | print(f'Matched: {pattern}') - 5 | return pattern - | ^^^^^^^ + | ------- 6 | case _: 7 | pass | - "###); + "); } #[test] @@ -509,47 +315,21 @@ match data: ", ); - assert_snapshot!(test.references(), @r###" - info[references]: Reference 1 + assert_snapshot!(test.references(), @r" + info[references]: Found 4 references --> main.py:3:29 | 2 | match data: 3 | case {'a': a, 'b': b, **rest}: - | ^^^^ + | ---- 4 | print(f'Rest data: {rest}') + | ---- 5 | process(rest) - | - - info[references]: Reference 2 - --> main.py:4:29 - | - 2 | match data: - 3 | case {'a': a, 'b': b, **rest}: - 4 | print(f'Rest data: {rest}') - | ^^^^ - 5 | process(rest) + | ---- 6 | return rest + | ---- | - - info[references]: Reference 3 - --> main.py:5:17 - | - 3 | case {'a': a, 'b': b, **rest}: - 4 | print(f'Rest data: {rest}') - 5 | process(rest) - | ^^^^ - 6 | return rest - | - - info[references]: Reference 4 - --> main.py:6:16 - | - 4 | print(f'Rest data: {rest}') - 5 | process(rest) - 6 | return rest - | ^^^^ - | - "###); + "); } #[test] @@ -573,60 +353,30 @@ value = my_function ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> main.py:2:5 - | - 2 | def my_function(): - | ^^^^^^^^^^^ - 3 | return 42 - | - - info[references]: Reference 2 - --> main.py:6:11 - | - 5 | # Call the function multiple times - 6 | result1 = my_function() - | ^^^^^^^^^^^ - 7 | result2 = my_function() - | - - info[references]: Reference 3 - --> main.py:7:11 - | - 5 | # Call the function multiple times - 6 | result1 = my_function() - 7 | result2 = my_function() - | ^^^^^^^^^^^ - 8 | - 9 | # Function passed as an argument - | - - info[references]: Reference 4 - --> main.py:10:12 + info[references]: Found 6 references + --> main.py:2:5 | + 2 | def my_function(): + | ----------- + 3 | return 42 + | + ::: main.py:6:11 + | + 5 | # Call the 
function multiple times + 6 | result1 = my_function() + | ----------- + 7 | result2 = my_function() + | ----------- + 8 | 9 | # Function passed as an argument 10 | callback = my_function - | ^^^^^^^^^^^ + | ----------- 11 | - 12 | # Function used in different contexts - | - - info[references]: Reference 5 - --> main.py:13:7 - | 12 | # Function used in different contexts 13 | print(my_function()) - | ^^^^^^^^^^^ + | ----------- 14 | value = my_function - | - - info[references]: Reference 6 - --> main.py:14:9 - | - 12 | # Function used in different contexts - 13 | print(my_function()) - 14 | value = my_function - | ^^^^^^^^^^^ + | ----------- | "); } @@ -653,59 +403,32 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> main.py:2:7 - | - 2 | class MyClass: - | ^^^^^^^ - 3 | def __init__(self): - 4 | pass - | - - info[references]: Reference 2 - --> main.py:7:8 - | - 6 | # Create instances - 7 | obj1 = MyClass() - | ^^^^^^^ - 8 | obj2 = MyClass() - | - - info[references]: Reference 3 - --> main.py:8:8 + info[references]: Found 6 references + --> main.py:2:7 + | + 2 | class MyClass: + | ------- + 3 | def __init__(self): + 4 | pass + | + ::: main.py:7:8 | 6 | # Create instances 7 | obj1 = MyClass() + | ------- 8 | obj2 = MyClass() - | ^^^^^^^ + | ------- 9 | - 10 | # Use in type annotations - | - - info[references]: Reference 4 - --> main.py:11:23 - | 10 | # Use in type annotations 11 | def process(instance: MyClass) -> MyClass: - | ^^^^^^^ + | ------- ------- 12 | return instance | - - info[references]: Reference 5 - --> main.py:11:35 - | - 10 | # Use in type annotations - 11 | def process(instance: MyClass) -> MyClass: - | ^^^^^^^ - 12 | return instance - | - - info[references]: Reference 6 - --> main.py:15:7 + ::: main.py:15:7 | 14 | # Reference the class itself 15 | cls = MyClass - | ^^^^^^^ + | ------- | "); } @@ -722,22 +445,14 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:5 | 2 | a: "MyClass" = 1 - | ^^^^^^^ + | ------- 3 | 4 | class MyClass: - | - - info[references]: Reference 2 - --> main.py:4:7 - | - 2 | a: "MyClass" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | "#); @@ -755,22 +470,14 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:12 | 2 | a: "None | MyClass" = 1 - | ^^^^^^^ + | ------- 3 | 4 | class MyClass: - | - - info[references]: Reference 2 - --> main.py:4:7 - | - 2 | a: "None | MyClass" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | "#); @@ -802,22 +509,14 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:12 | 2 | a: "None | MyClass" = 1 - | ^^^^^^^ + | ------- 3 | 4 | class MyClass: - | - - info[references]: Reference 2 - --> main.py:4:7 - | - 2 | a: "None | MyClass" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | "#); @@ -863,22 +562,14 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:5 | 2 | a: "MyClass | No" = 1 - | ^^^^^^^ + | ------- 3 | 4 | class MyClass: - | - - info[references]: Reference 2 - --> main.py:4:7 - | - 2 | a: "MyClass | No" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | "#); @@ -907,18 +598,11 @@ cls = MyClass 
); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:1 | 2 | ab: "ab" - | ^^ - | - - info[references]: Reference 2 - --> main.py:2:6 - | - 2 | ab: "ab" - | ^^ + | -- -- | "#); } @@ -946,23 +630,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:22 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -979,23 +655,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:22 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -1012,23 +680,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:23 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", *ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", *ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -1045,23 +705,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:23 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", *ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", *ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -1078,23 +730,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:37 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ("a" | "b") as ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", ("a" | "b") as ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -1111,23 +755,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:4:37 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ("a" | "b") as ab]: - | ^^ + | -- 5 | x = ab - | - - info[references]: Reference 2 - --> main.py:5:17 - | - 3 | match command.split(): - 4 | case ["get", ("a" | "b") as ab]: - 5 | x = ab - | ^^ + | -- | "#); } @@ -1150,23 +786,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:10:30 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^ + | -- 11 | x = ab - | - - info[references]: Reference 2 - --> main.py:11:17 - | - 9 | match event: - 10 | case Click(x, button=ab): - 11 | x = ab - | ^^ + | -- | "); } @@ -1189,23 +817,15 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:10:30 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^ 
+ | -- 11 | x = ab - | - - info[references]: Reference 2 - --> main.py:11:17 - | - 9 | match event: - 10 | case Click(x, button=ab): - 11 | x = ab - | ^^ + | -- | "); } @@ -1228,33 +848,23 @@ cls = MyClass ); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 - --> main.py:2:7 - | - 2 | class Click: - | ^^^^^ - 3 | __match_args__ = ("position", "button") - 4 | def __init__(self, pos, btn): - | - - info[references]: Reference 2 - --> main.py:8:20 + info[references]: Found 3 references + --> main.py:2:7 + | + 2 | class Click: + | ----- + 3 | __match_args__ = ("position", "button") + 4 | def __init__(self, pos, btn): + | + ::: main.py:8:20 | 6 | self.button: str = btn 7 | 8 | def my_func(event: Click): - | ^^^^^ + | ----- 9 | match event: 10 | case Click(x, button=ab): - | - - info[references]: Reference 3 - --> main.py:10:14 - | - 8 | def my_func(event: Click): - 9 | match event: - 10 | case Click(x, button=ab): - | ^^^^^ + | ----- 11 | x = ab | "#); @@ -1289,25 +899,11 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:2:37 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:2:46 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | -- -- -- | "); } @@ -1321,25 +917,11 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:2:37 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:2:46 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | -- -- -- | "); } @@ -1354,28 +936,12 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:3:15 | 2 | from typing import Callable 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:3:43 - | - 2 | from typing import Callable - 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:3:53 - | - 2 | from typing import Callable - 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ + | -- -- -- | "); } @@ -1390,28 +956,12 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:3:15 | 2 | from typing import Callable 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:3:43 - | - 2 | from typing import Callable - 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:3:53 - | - 2 | from typing import Callable - 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ + | -- -- -- | "); } @@ -1425,25 +975,11 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:2:14 | 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:2:38 - | - 2 | type Alias3[*AB = ()] = 
tuple[tuple[*AB], tuple[*AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:2:50 - | - 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ + | -- -- -- | "); } @@ -1457,25 +993,11 @@ cls = MyClass ); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:2:14 | 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ - | - - info[references]: Reference 2 - --> main.py:2:38 - | - 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ - | - - info[references]: Reference 3 - --> main.py:2:50 - | - 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ + | -- -- -- | "); } @@ -1515,57 +1037,35 @@ class DataProcessor: .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> utils.py:2:5 - | - 2 | def func(x): - | ^^^^ - 3 | return x * 2 - | - - info[references]: Reference 2 - --> module.py:2:19 - | - 2 | from utils import func - | ^^^^ - 3 | - 4 | def process_data(data): - | - - info[references]: Reference 3 - --> module.py:5:12 - | - 4 | def process_data(data): - 5 | return func(data) - | ^^^^ - | - - info[references]: Reference 4 + info[references]: Found 6 references --> app.py:2:19 | 2 | from utils import func - | ^^^^ + | ---- 3 | - 4 | class DataProcessor: - | - - info[references]: Reference 5 - --> app.py:6:27 - | 4 | class DataProcessor: 5 | def __init__(self): 6 | self.multiplier = func - | ^^^^ + | ---- 7 | - 8 | def process(self, value): - | - - info[references]: Reference 6 - --> app.py:9:16 - | 8 | def process(self, value): 9 | return func(value) - | ^^^^ + | ---- + | + ::: module.py:2:19 + | + 2 | from utils import func + | ---- + 3 | + 4 | def process_data(data): + 5 | return func(data) + | ---- + | + ::: utils.py:2:5 + | + 2 | def func(x): + | ---- + 3 | return x * 2 | "); } @@ -1598,52 +1098,27 @@ def process_model(): .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 - --> models.py:3:5 - | - 2 | class MyModel: - 3 | attr = 42 - | ^^^^ - 4 | - 5 | def get_attribute(self): - | - - info[references]: Reference 2 - --> models.py:6:24 - | - 5 | def get_attribute(self): - 6 | return MyModel.attr - | ^^^^ - | - - info[references]: Reference 3 + info[references]: Found 5 references --> main.py:6:19 | 4 | def process_model(): 5 | model = MyModel() 6 | value = model.attr - | ^^^^ + | ---- 7 | model.attr = 100 + | ---- 8 | return model.attr + | ---- | - - info[references]: Reference 4 - --> main.py:7:11 + ::: models.py:3:5 | - 5 | model = MyModel() - 6 | value = model.attr - 7 | model.attr = 100 - | ^^^^ - 8 | return model.attr - | - - info[references]: Reference 5 - --> main.py:8:18 - | - 6 | value = model.attr - 7 | model.attr = 100 - 8 | return model.attr - | ^^^^ + 2 | class MyModel: + 3 | attr = 42 + | ---- + 4 | + 5 | def get_attribute(self): + 6 | return MyModel.attr + | ---- | "); } @@ -1673,22 +1148,14 @@ func_alias() // When finding references to the alias, we should NOT find references // to the original function in the original module assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> importer.py:2:30 | 2 | from original import func as func_alias - | ^^^^^^^^^^ + | ---------- 3 | 4 | func_alias() - | - - info[references]: Reference 2 - --> importer.py:4:1 - | - 2 | from original import func as func_alias - 3 | - 4 | func_alias() - | ^^^^^^^^^^ + | ---------- | "); } @@ -1721,42 +1188,23 @@ func_alias() ) .build(); - 
assert_snapshot!(test.references(), @r###" - info[references]: Reference 1 - --> path.pyi:2:7 - | - 2 | class Path: - | ^^^^ - 3 | def __init__(self, path: str): ... - | - - info[references]: Reference 2 + assert_snapshot!(test.references(), @r#" + info[references]: Found 4 references --> importer.py:2:18 | 2 | from path import Path - | ^^^^ + | ---- 3 | 4 | a: Path = Path("test") + | ---- ---- | - - info[references]: Reference 3 - --> importer.py:4:4 + ::: path.pyi:2:7 | - 2 | from path import Path - 3 | - 4 | a: Path = Path("test") - | ^^^^ + 2 | class Path: + | ---- + 3 | def __init__(self, path: str): ... | - - info[references]: Reference 4 - --> importer.py:4:11 - | - 2 | from path import Path - 3 | - 4 | a: Path = Path("test") - | ^^^^ - | - "###); + "#); } #[test] @@ -1775,23 +1223,15 @@ func_alias() .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:3:20 | 2 | import warnings 3 | import warnings as abc - | ^^^ + | --- 4 | 5 | x = abc - | - - info[references]: Reference 2 - --> main.py:5:5 - | - 3 | import warnings as abc - 4 | - 5 | x = abc - | ^^^ + | --- 6 | y = warnings | "); @@ -1813,23 +1253,15 @@ func_alias() .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:3:20 | 2 | import warnings 3 | import warnings as abc - | ^^^ + | --- 4 | 5 | x = abc - | - - info[references]: Reference 2 - --> main.py:5:5 - | - 3 | import warnings as abc - 4 | - 5 | x = abc - | ^^^ + | --- 6 | y = warnings | "); @@ -1851,21 +1283,15 @@ func_alias() .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:36 | 2 | from warnings import deprecated as xyz - | ^^^ - 3 | from warnings import deprecated - | - - info[references]: Reference 2 - --> main.py:5:5 - | + | --- 3 | from warnings import deprecated 4 | 5 | y = xyz - | ^^^ + | --- 6 | z = deprecated | "); @@ -1887,21 +1313,15 @@ func_alias() .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 2 references --> main.py:2:36 | 2 | from warnings import deprecated as xyz - | ^^^ - 3 | from warnings import deprecated - | - - info[references]: Reference 2 - --> main.py:5:5 - | + | --- 3 | from warnings import deprecated 4 | 5 | y = xyz - | ^^^ + | --- 6 | z = deprecated | "); @@ -1929,13 +1349,13 @@ func_alias() // TODO(submodule-imports): this should light up both instances of `subpkg` assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 1 references --> mypackage/__init__.py:4:5 | 2 | from .subpkg.submod import val 3 | 4 | x = subpkg - | ^^^^^^ + | ------ | "); } @@ -2055,29 +1475,19 @@ func_alias() .build(); assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + info[references]: Found 3 references --> mypackage/__init__.py:2:21 | 2 | from .subpkg import subpkg - | ^^^^^^ + | ------ 3 | 4 | x = subpkg + | ------ | - - info[references]: Reference 2 - --> mypackage/__init__.py:4:5 - | - 2 | from .subpkg import subpkg - 3 | - 4 | x = subpkg - | ^^^^^^ - | - - info[references]: Reference 3 - --> mypackage/subpkg/__init__.py:2:1 + ::: mypackage/subpkg/__init__.py:2:1 | 2 | subpkg: int = 10 - | ^^^^^^ + | ------ | "); } @@ -2103,13 +1513,13 @@ func_alias() // TODO: this should also highlight the RHS subpkg in the import assert_snapshot!(test.references(), @r" - info[references]: Reference 1 + 
info[references]: Found 1 references --> mypackage/__init__.py:4:5 | 2 | from .subpkg import subpkg 3 | 4 | x = subpkg - | ^^^^^^ + | ------ | "); } @@ -2131,33 +1541,17 @@ func_alias() .build(); assert_snapshot!(test.references(), @r#" - info[references]: Reference 1 + info[references]: Found 3 references --> main.py:2:1 | 2 | a: str = "test" - | ^ + | - 3 | 4 | a: int = 10 - | - - info[references]: Reference 2 - --> main.py:4:1 - | - 2 | a: str = "test" - 3 | - 4 | a: int = 10 - | ^ + | - 5 | 6 | print(a) - | - - info[references]: Reference 3 - --> main.py:6:7 - | - 4 | a: int = 10 - 5 | - 6 | print(a) - | ^ + | - | "#); } diff --git a/crates/ty_ide/src/goto.rs b/crates/ty_ide/src/goto.rs index 217f8b420b..e0f298024d 100644 --- a/crates/ty_ide/src/goto.rs +++ b/crates/ty_ide/src/goto.rs @@ -295,7 +295,7 @@ impl<'db> Definitions<'db> { impl GotoTarget<'_> { pub(crate) fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { - let ty = match self { + match self { GotoTarget::Expression(expression) => expression.inferred_type(model), GotoTarget::FunctionDef(function) => function.inferred_type(model), GotoTarget::ClassDef(class) => class.inferred_type(model), @@ -317,7 +317,7 @@ impl GotoTarget<'_> { } => { // We don't currently support hovering the bare `.` so there is always a name let module = import_name(module_name, *component_index); - model.resolve_module_type(Some(module), *level)? + model.resolve_module_type(Some(module), *level) } GotoTarget::StringAnnotationSubexpr { string_expr, @@ -334,16 +334,16 @@ impl GotoTarget<'_> { } else { // TODO: force the typechecker to tell us its secrets // (it computes but then immediately discards these types) - return None; + None } } GotoTarget::BinOp { expression, .. } => { let (_, ty) = ty_python_semantic::definitions_for_bin_op(model, expression)?; - ty + Some(ty) } GotoTarget::UnaryOp { expression, .. } => { let (_, ty) = ty_python_semantic::definitions_for_unary_op(model, expression)?; - ty + Some(ty) } // TODO: Support identifier targets GotoTarget::PatternMatchRest(_) @@ -353,10 +353,8 @@ impl GotoTarget<'_> { | GotoTarget::TypeParamParamSpecName(_) | GotoTarget::TypeParamTypeVarTupleName(_) | GotoTarget::NonLocal { .. } - | GotoTarget::Globals { .. } => return None, - }; - - Some(ty) + | GotoTarget::Globals { .. 
} => None, + } } /// Try to get a simplified display of this callable type by resolving overloads diff --git a/crates/ty_ide/src/goto_declaration.rs b/crates/ty_ide/src/goto_declaration.rs index 8425654e19..2e390711b7 100644 --- a/crates/ty_ide/src/goto_declaration.rs +++ b/crates/ty_ide/src/goto_declaration.rs @@ -31,15 +31,9 @@ pub fn goto_declaration( #[cfg(test)] mod tests { - use crate::tests::{CursorTest, IntoDiagnostic, cursor_test}; - use crate::{NavigationTarget, goto_declaration}; + use crate::goto_declaration; + use crate::tests::{CursorTest, cursor_test}; use insta::assert_snapshot; - use ruff_db::diagnostic::{ - Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span, SubDiagnostic, - SubDiagnosticSeverity, - }; - use ruff_db::files::FileRange; - use ruff_text_size::Ranged; #[test] fn goto_declaration_function_call_to_definition() { @@ -53,20 +47,20 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:5 - | - 2 | def my_function(x, y): - | ^^^^^^^^^^^ - 3 | return x + y - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:10 | 3 | return x + y 4 | 5 | result = my_function(1, 2) - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:5 + | + 2 | def my_function(x, y): + | ----------- + 3 | return x + y | "); } @@ -81,19 +75,19 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:1 - | - 2 | x = 42 - | ^ - 3 | y = x - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:5 | 2 | x = 42 3 | y = x - | ^ + | ^ Clicking here + | + info: Found 1 declaration + --> main.py:2:1 + | + 2 | x = 42 + | - + 3 | y = x | "); } @@ -111,39 +105,23 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:6:12 + | + 4 | pass + 5 | + 6 | instance = MyClass() + | ^^^^^^^ Clicking here + | + info: Found 2 declarations --> main.py:2:7 | 2 | class MyClass: - | ^^^^^^^ + | ------- 3 | def __init__(self): + | -------- 4 | pass | - info: Source - --> main.py:6:12 - | - 4 | pass - 5 | - 6 | instance = MyClass() - | ^^^^^^^ - | - - info[goto-declaration]: Declaration - --> main.py:3:9 - | - 2 | class MyClass: - 3 | def __init__(self): - | ^^^^^^^^ - 4 | pass - | - info: Source - --> main.py:6:12 - | - 4 | pass - 5 | - 6 | instance = MyClass() - | ^^^^^^^ - | "); } @@ -157,19 +135,19 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:9 - | - 2 | def foo(param): - | ^^^^^ - 3 | return param * 2 - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:12 | 2 | def foo(param): 3 | return param * 2 - | ^^^^^ + | ^^^^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:9 + | + 2 | def foo(param): + | ----- + 3 | return param * 2 | "); } @@ -185,20 +163,20 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:18 - | - 2 | def generic_func[T](value: T) -> T: - | ^ - 3 | v: T = value - 4 | return v - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:8 | 2 | def generic_func[T](value: T) -> T: 3 | v: T = value - | ^ + | ^ Clicking here + 4 | return v + | + info: Found 1 declaration + --> main.py:2:18 + | + 2 | def generic_func[T](value: T) -> T: + | - + 3 | v: T = value 4 | return v | "); @@ -215,20 +193,20 @@ mod 
tests { ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:20 - | - 2 | class GenericClass[T]: - | ^ - 3 | def __init__(self, value: T): - 4 | self.value = value - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:31 | 2 | class GenericClass[T]: 3 | def __init__(self, value: T): - | ^ + | ^ Clicking here + 4 | self.value = value + | + info: Found 1 declaration + --> main.py:2:20 + | + 2 | class GenericClass[T]: + | - + 3 | def __init__(self, value: T): 4 | self.value = value | "); @@ -247,23 +225,23 @@ mod tests { ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:2:1 - | - 2 | x = "outer" - | ^ - 3 | def outer_func(): - 4 | def inner_func(): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:16 | 3 | def outer_func(): 4 | def inner_func(): 5 | return x # Should find outer x - | ^ + | ^ Clicking here 6 | return inner_func | + info: Found 1 declaration + --> main.py:2:1 + | + 2 | x = "outer" + | - + 3 | def outer_func(): + 4 | def inner_func(): + | "#); } @@ -307,20 +285,20 @@ variable = 42 .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mymodule.py:1:1 - | - 1 | - | ^ - 2 | def function(): - 3 | return "hello from mymodule" - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:7 | 2 | import mymodule 3 | print(mymodule.function()) - | ^^^^^^^^ + | ^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> mymodule.py:1:1 + | + 1 | + | - + 2 | def function(): + 3 | return "hello from mymodule" | "#); } @@ -348,19 +326,19 @@ def other_function(): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mymodule.py:2:5 - | - 2 | def my_function(): - | ^^^^^^^^^^^ - 3 | return "hello" - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:7 | 2 | from mymodule import my_function 3 | print(my_function()) - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> mymodule.py:2:5 + | + 2 | def my_function(): + | ----------- + 3 | return "hello" | "#); } @@ -390,22 +368,22 @@ FOO = 0 .build(); // Should find the submodule file itself - assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mymodule/submodule.py:1:1 - | - 1 | - | ^ - 2 | FOO = 0 - | - info: Source + assert_snapshot!(test.goto_declaration(), @r" + info[goto-declaration]: Go to declaration --> main.py:3:7 | 2 | import mymodule.submodule as sub 3 | print(sub.helper()) - | ^^^ + | ^^^ Clicking here | - "#); + info: Found 1 declaration + --> mymodule/submodule.py:1:1 + | + 1 | + | - + 2 | FOO = 0 + | + "); } #[test] @@ -429,19 +407,19 @@ def func(arg): // Should resolve to the actual function definition, not the import statement assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> utils.py:2:5 - | - 2 | def func(arg): - | ^^^^ - 3 | return f"Processed: {arg}" - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:7 | 2 | from utils import func as h 3 | print(h("test")) - | ^ + | ^ Clicking here + | + info: Found 1 declaration + --> utils.py:2:5 + | + 2 | def func(arg): + | ---- + 3 | return f"Processed: {arg}" | "#); } @@ -473,19 +451,19 @@ def shared_function(): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> original.py:2:5 - | - 2 | def shared_function(): - | 
^^^^^^^^^^^^^^^ - 3 | return "from original" - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:7 | 2 | from intermediate import shared_function 3 | print(shared_function()) - | ^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> original.py:2:5 + | + 2 | def shared_function(): + | --------------- + 3 | return "from original" | "#); } @@ -515,20 +493,20 @@ def multiply_numbers(a, b): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> math_utils.py:2:5 - | - 2 | def add_numbers(a, b): - | ^^^^^^^^^^^ - 3 | """Add two numbers together.""" - 4 | return a + b - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:10 | 2 | from math_utils import * 3 | result = add_numbers(5, 3) - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> math_utils.py:2:5 + | + 2 | def add_numbers(a, b): + | ----------- + 3 | """Add two numbers together.""" + 4 | return a + b | "#); } @@ -565,20 +543,20 @@ def another_helper(): // Should resolve the relative import to find the actual function definition assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> package/utils.py:2:5 - | - 2 | def helper_function(arg): - | ^^^^^^^^^^^^^^^ - 3 | """A helper function in utils module.""" - 4 | return f"Processed: {arg}" - | - info: Source + info[goto-declaration]: Go to declaration --> package/main.py:3:10 | 2 | from .utils import helper_function 3 | result = helper_function("test") - | ^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> package/utils.py:2:5 + | + 2 | def helper_function(arg): + | --------------- + 3 | """A helper function in utils module.""" + 4 | return f"Processed: {arg}" | "#); } @@ -614,20 +592,20 @@ def another_helper(): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> package/utils.py:2:5 - | - 2 | def helper_function(arg): - | ^^^^^^^^^^^^^^^ - 3 | """A helper function in utils module.""" - 4 | return f"Processed: {arg}" - | - info: Source + info[goto-declaration]: Go to declaration --> package/main.py:3:10 | 2 | from .utils import * 3 | result = helper_function("test") - | ^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> package/utils.py:2:5 + | + 2 | def helper_function(arg): + | --------------- + 3 | """A helper function in utils module.""" + 4 | return f"Processed: {arg}" | "#); } @@ -657,20 +635,20 @@ FOO = 0 .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mymodule/submodule.py:1:1 - | - 1 | - | ^ - 2 | FOO = 0 - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:30 | 2 | import mymodule.submodule as sub - | ^^^ + | ^^^ Clicking here 3 | print(sub.helper()) | + info: Found 1 declaration + --> mymodule/submodule.py:1:1 + | + 1 | + | - + 2 | FOO = 0 + | "); } @@ -699,20 +677,20 @@ FOO = 0 .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mymodule/submodule.py:1:1 - | - 1 | - | ^ - 2 | FOO = 0 - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:17 | 2 | import mymodule.submodule as sub - | ^^^^^^^^^ + | ^^^^^^^^^ Clicking here 3 | print(sub.helper()) | + info: Found 1 declaration + --> mymodule/submodule.py:1:1 + | + 1 | + | - + 2 | FOO = 0 + | "); } @@ -745,20 +723,20 @@ def another_helper(path): .build(); 
assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mypackage/utils.py:2:5 - | - 2 | def helper(a, b): - | ^^^^^^ - 3 | return a + "/" + b - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:29 | 2 | from mypackage.utils import helper as h - | ^^^^^^ + | ^^^^^^ Clicking here 3 | result = h("/a", "/b") | + info: Found 1 declaration + --> mypackage/utils.py:2:5 + | + 2 | def helper(a, b): + | ------ + 3 | return a + "/" + b + | "#); } @@ -791,20 +769,20 @@ def another_helper(path): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mypackage/utils.py:2:5 - | - 2 | def helper(a, b): - | ^^^^^^ - 3 | return a + "/" + b - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:39 | 2 | from mypackage.utils import helper as h - | ^ + | ^ Clicking here 3 | result = h("/a", "/b") | + info: Found 1 declaration + --> mypackage/utils.py:2:5 + | + 2 | def helper(a, b): + | ------ + 3 | return a + "/" + b + | "#); } @@ -837,21 +815,21 @@ def another_helper(path): .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> mypackage/utils.py:1:1 - | - 1 | - | ^ - 2 | def helper(a, b): - 3 | return a + "/" + b - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:16 | 2 | from mypackage.utils import helper as h - | ^^^^^ + | ^^^^^ Clicking here 3 | result = h("/a", "/b") | + info: Found 1 declaration + --> mypackage/utils.py:1:1 + | + 1 | + | - + 2 | def helper(a, b): + 3 | return a + "/" + b + | "#); } @@ -869,23 +847,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:7:7 + | + 6 | c = C() + 7 | y = c.x + | ^ Clicking here + | + info: Found 1 declaration --> main.py:4:9 | 2 | class C: 3 | def __init__(self): 4 | self.x: int = 1 - | ^^^^^^ + | ------ 5 | 6 | c = C() | - info: Source - --> main.py:7:7 - | - 6 | c = C() - 7 | y = c.x - | ^ - | "); } @@ -901,23 +879,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:2:5 + | + 2 | a: "MyClass" = 1 + | ^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 declaration --> main.py:4:7 | 2 | a: "MyClass" = 1 3 | 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | - info: Source - --> main.py:2:5 - | - 2 | a: "MyClass" = 1 - | ^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -933,23 +911,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:2:12 + | + 2 | a: "None | MyClass" = 1 + | ^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 declaration --> main.py:4:7 | 2 | a: "None | MyClass" = 1 3 | 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | - info: Source - --> main.py:2:12 - | - 2 | a: "None | MyClass" = 1 - | ^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -979,23 +957,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:2:12 + | + 2 | a: "None | MyClass" = 1 + | ^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 declaration --> main.py:4:7 | 2 | a: "None | MyClass" = 1 3 | 4 | class MyClass: - 
| ^^^^^^^ + | ------- 5 | """some docs""" | - info: Source - --> main.py:2:12 - | - 2 | a: "None | MyClass" = 1 - | ^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -1039,23 +1017,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:2:5 + | + 2 | a: "MyClass | No" = 1 + | ^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 declaration --> main.py:4:7 | 2 | a: "MyClass | No" = 1 3 | 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | - info: Source - --> main.py:2:5 - | - 2 | a: "MyClass | No" = 1 - | ^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -1082,17 +1060,17 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:2:1 - | - 2 | ab: "ab" - | ^^ - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:6 | 2 | ab: "ab" - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:1 + | + 2 | ab: "ab" + | -- | "#); } @@ -1126,23 +1104,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:11:9 + | + 10 | d = D() + 11 | y = d.y.x + | ^ Clicking here + | + info: Found 1 declaration --> main.py:4:9 | 2 | class C: 3 | def __init__(self): 4 | self.x: int = 1 - | ^^^^^^ + | ------ 5 | 6 | class D: | - info: Source - --> main.py:11:9 - | - 10 | d = D() - 11 | y = d.y.x - | ^ - | "); } @@ -1160,23 +1138,23 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:7:7 + | + 6 | c = C() + 7 | y = c.x + | ^ Clicking here + | + info: Found 1 declaration --> main.py:4:9 | 2 | class C: 3 | def __init__(self): 4 | self.x = 1 - | ^^^^^^ + | ------ 5 | 6 | c = C() | - info: Source - --> main.py:7:7 - | - 6 | c = C() - 7 | y = c.x - | ^ - | "); } @@ -1194,20 +1172,20 @@ def another_helper(path): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:3:9 - | - 2 | class C: - 3 | def foo(self): - | ^^^ - 4 | return 42 - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:7:9 | 6 | c = C() 7 | res = c.foo() - | ^^^ + | ^^^ Clicking here + | + info: Found 1 declaration + --> main.py:3:9 + | + 2 | class C: + 3 | def foo(self): + | --- + 4 | return 42 | "); } @@ -1244,7 +1222,7 @@ x: int = 42 "Should find the int class definition" ); assert!( - result.contains("info[goto-declaration]: Declaration"), + result.contains("info[goto-declaration]: Go to declaration"), "Should be a goto-declaration result" ); } @@ -1267,25 +1245,25 @@ def outer(): // Should find the variable declaration in the outer scope, not the nonlocal statement assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:3:5 - | - 2 | def outer(): - 3 | x = "outer_value" - | ^ - 4 | - 5 | def inner(): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:8:16 | 6 | nonlocal x 7 | x = "modified" 8 | return x # Should find the nonlocal x declaration in outer scope - | ^ + | ^ Clicking here 9 | 10 | return inner | + info: Found 1 declaration + --> main.py:3:5 + | + 2 | def outer(): + 3 | x = "outer_value" + | - + 4 | + 5 | def inner(): + | "#); } @@ -1307,24 +1285,24 @@ def outer(): // Should find the variable declaration in the 
outer scope, not the nonlocal statement assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:3:5 - | - 2 | def outer(): - 3 | xy = "outer_value" - | ^^ - 4 | - 5 | def inner(): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:6:18 | 5 | def inner(): 6 | nonlocal xy - | ^^ + | ^^ Clicking here 7 | xy = "modified" 8 | return x # Should find the nonlocal x declaration in outer scope | + info: Found 1 declaration + --> main.py:3:5 + | + 2 | def outer(): + 3 | xy = "outer_value" + | -- + 4 | + 5 | def inner(): + | "#); } @@ -1343,21 +1321,21 @@ def function(): // Should find the global variable declaration, not the global statement assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:2:1 - | - 2 | global_var = "global_value" - | ^^^^^^^^^^ - 3 | - 4 | def function(): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:7:12 | 5 | global global_var 6 | global_var = "modified" 7 | return global_var # Should find the global variable declaration - | ^^^^^^^^^^ + | ^^^^^^^^^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:1 + | + 2 | global_var = "global_value" + | ---------- + 3 | + 4 | def function(): | "#); } @@ -1377,23 +1355,23 @@ def function(): // Should find the global variable declaration, not the global statement assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:2:1 - | - 2 | global_var = "global_value" - | ^^^^^^^^^^ - 3 | - 4 | def function(): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:12 | 4 | def function(): 5 | global global_var - | ^^^^^^^^^^ + | ^^^^^^^^^^ Clicking here 6 | global_var = "modified" 7 | return global_var # Should find the global variable declaration | + info: Found 1 declaration + --> main.py:2:1 + | + 2 | global_var = "global_value" + | ---------- + 3 | + 4 | def function(): + | "#); } @@ -1413,21 +1391,21 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:3:5 - | - 2 | class A: - 3 | x = 10 - | ^ - 4 | - 5 | class B(A): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:9:7 | 8 | b = B() 9 | y = b.x - | ^ + | ^ Clicking here + | + info: Found 1 declaration + --> main.py:3:5 + | + 2 | class A: + 3 | x = 10 + | - + 4 | + 5 | class B(A): | "); } @@ -1444,22 +1422,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:4:22 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ab]: - | ^^ + | ^^ Clicking here 5 | x = ab | - info: Source + info: Found 1 declaration --> main.py:4:22 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ab]: - | ^^ + | -- 5 | x = ab | "#); @@ -1477,22 +1455,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:4:22 - | - 2 | def my_func(command: str): - 3 | match command.split(): - 4 | case ["get", ab]: - | ^^ - 5 | x = ab - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:17 | 3 | match command.split(): 4 | case ["get", ab]: 5 | x = ab - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:4:22 + | + 2 | def my_func(command: str): + 3 | match command.split(): + 4 | case ["get", ab]: + | -- + 5 | x = ab | "#); } @@ -1509,22 +1487,22 @@ def 
function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:4:23 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", *ab]: - | ^^ + | ^^ Clicking here 5 | x = ab | - info: Source + info: Found 1 declaration --> main.py:4:23 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", *ab]: - | ^^ + | -- 5 | x = ab | "#); @@ -1542,22 +1520,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:4:23 - | - 2 | def my_func(command: str): - 3 | match command.split(): - 4 | case ["get", *ab]: - | ^^ - 5 | x = ab - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:17 | 3 | match command.split(): 4 | case ["get", *ab]: 5 | x = ab - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:4:23 + | + 2 | def my_func(command: str): + 3 | match command.split(): + 4 | case ["get", *ab]: + | -- + 5 | x = ab | "#); } @@ -1574,22 +1552,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:4:37 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ("a" | "b") as ab]: - | ^^ + | ^^ Clicking here 5 | x = ab | - info: Source + info: Found 1 declaration --> main.py:4:37 | 2 | def my_func(command: str): 3 | match command.split(): 4 | case ["get", ("a" | "b") as ab]: - | ^^ + | -- 5 | x = ab | "#); @@ -1607,22 +1585,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:4:37 - | - 2 | def my_func(command: str): - 3 | match command.split(): - 4 | case ["get", ("a" | "b") as ab]: - | ^^ - 5 | x = ab - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:17 | 3 | match command.split(): 4 | case ["get", ("a" | "b") as ab]: 5 | x = ab - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:4:37 + | + 2 | def my_func(command: str): + 3 | match command.split(): + 4 | case ["get", ("a" | "b") as ab]: + | -- + 5 | x = ab | "#); } @@ -1645,22 +1623,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:10:30 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^ + | ^^ Clicking here 11 | x = ab | - info: Source + info: Found 1 declaration --> main.py:10:30 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^ + | -- 11 | x = ab | "); @@ -1684,22 +1662,22 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:10:30 - | - 8 | def my_func(event: Click): - 9 | match event: - 10 | case Click(x, button=ab): - | ^^ - 11 | x = ab - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:11:17 | 9 | match event: 10 | case Click(x, button=ab): 11 | x = ab - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:10:30 + | + 8 | def my_func(event: Click): + 9 | match event: + 10 | case Click(x, button=ab): + | -- + 11 | x = ab | "); } @@ -1722,23 +1700,23 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:2:7 - | - 2 | class Click: - | ^^^^^ - 3 | __match_args__ = ("position", "button") - 4 | def __init__(self, 
pos, btn): - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:10:14 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^^^^ + | ^^^^^ Clicking here 11 | x = ab | + info: Found 1 declaration + --> main.py:2:7 + | + 2 | class Click: + | ----- + 3 | __match_args__ = ("position", "button") + 4 | def __init__(self, pos, btn): + | "#); } @@ -1771,17 +1749,17 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | ^^ Clicking here | - info: Source + info: Found 1 declaration --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | -- | "); } @@ -1795,17 +1773,17 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:13 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:37 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:13 + | + 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] + | -- | "); } @@ -1820,19 +1798,19 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:3:15 | 2 | from typing import Callable 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ + | ^^ Clicking here | - info: Source + info: Found 1 declaration --> main.py:3:15 | 2 | from typing import Callable 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ + | -- | "); } @@ -1847,19 +1825,19 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:3:15 - | - 2 | from typing import Callable - 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:3:43 | 2 | from typing import Callable 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:3:15 + | + 2 | from typing import Callable + 3 | type Alias2[**AB = [int, str]] = Callable[AB, tuple[AB]] + | -- | "); } @@ -1873,17 +1851,17 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration --> main.py:2:14 | 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ + | ^^ Clicking here | - info: Source + info: Found 1 declaration --> main.py:2:14 | 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ + | -- | "); } @@ -1897,17 +1875,17 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:14 - | - 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:2:38 | 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] - | ^^ + | ^^ Clicking here + | + info: Found 1 declaration + --> main.py:2:14 + | + 2 | type Alias3[*AB = ()] = tuple[tuple[*AB], tuple[*AB]] + | -- | "); } @@ -1930,21 +1908,21 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:7:9 - | - 6 | @property - 7 | def value(self): - | ^^^^^ 
- 8 | return self._value - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:11:3 | 10 | c = C() 11 | c.value = 42 - | ^^^^^ + | ^^^^^ Clicking here | + info: Found 1 declaration + --> main.py:7:9 + | + 6 | @property + 7 | def value(self): + | ----- + 8 | return self._value + | "); } @@ -1982,7 +1960,7 @@ def function(): "Should find the __doc__ attribute definition" ); assert!( - result.contains("info[goto-declaration]: Declaration"), + result.contains("info[goto-declaration]: Go to declaration"), "Should be a goto-declaration result" ); } @@ -2003,23 +1981,23 @@ def function(): ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:9:9 + | + 8 | def use_drawable(obj: Drawable): + 9 | obj.name + | ^^^^ Clicking here + | + info: Found 1 declaration --> main.py:6:5 | 4 | class Drawable(Protocol): 5 | def draw(self) -> None: ... 6 | name: str - | ^^^^ + | ---- 7 | 8 | def use_drawable(obj: Drawable): | - info: Source - --> main.py:9:9 - | - 8 | def use_drawable(obj: Drawable): - 9 | obj.name - | ^^^^ - | "); } @@ -2037,24 +2015,24 @@ class MyClass: // Should find the ClassType defined in the class body, not fail to resolve assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:3:5 - | - 2 | class MyClass: - 3 | ClassType = int - | ^^^^^^^^^ - 4 | - 5 | def generic_method[T](self, value: ClassType) -> T: - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:40 | 3 | ClassType = int 4 | 5 | def generic_method[T](self, value: ClassType) -> T: - | ^^^^^^^^^ + | ^^^^^^^^^ Clicking here 6 | return value | + info: Found 1 declaration + --> main.py:3:5 + | + 2 | class MyClass: + 3 | ClassType = int + | --------- + 4 | + 5 | def generic_method[T](self, value: ClassType) -> T: + | "); } @@ -2070,20 +2048,20 @@ class MyClass: ); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> main.py:2:20 - | - 2 | def my_function(x, y, z=10): - | ^ - 3 | return x + y + z - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:5:25 | 3 | return x + y + z 4 | 5 | result = my_function(1, y=2, z=3) - | ^ + | ^ Clicking here + | + info: Found 1 declaration + --> main.py:2:20 + | + 2 | def my_function(x, y, z=10): + | - + 3 | return x + y + z | "); } @@ -2110,39 +2088,26 @@ class MyClass: // Should navigate to the parameter in both matching overloads assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration - --> main.py:5:24 - | - 4 | @overload - 5 | def process(data: str, format: str) -> str: ... - | ^^^^^^ - 6 | - 7 | @overload - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:14:27 | 13 | # Call the overloaded function 14 | result = process("hello", format="json") - | ^^^^^^ + | ^^^^^^ Clicking here | - - info[goto-declaration]: Declaration - --> main.py:8:24 + info: Found 2 declarations + --> main.py:5:24 | + 4 | @overload + 5 | def process(data: str, format: str) -> str: ... + | ------ + 6 | 7 | @overload 8 | def process(data: int, format: int) -> int: ... - | ^^^^^^ + | ------ 9 | 10 | def process(data, format): | - info: Source - --> main.py:14:27 - | - 13 | # Call the overloaded function - 14 | result = process("hello", format="json") - | ^^^^^^ - | "#); } @@ -2179,38 +2144,24 @@ def ab(a: str): ... 
.build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:4:1 + | + 2 | from mymodule import ab + 3 | + 4 | ab(1) + | ^^ Clicking here + | + info: Found 2 declarations --> mymodule.pyi:5:5 | 4 | @overload 5 | def ab(a: int): ... - | ^^ + | -- 6 | - 7 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1) - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 - | 7 | @overload 8 | def ab(a: str): ... - | ^^ - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1) - | ^^ + | -- | "); } @@ -2248,38 +2199,24 @@ def ab(a: str): ... .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:4:1 + | + 2 | from mymodule import ab + 3 | + 4 | ab("hello") + | ^^ Clicking here + | + info: Found 2 declarations --> mymodule.pyi:5:5 | 4 | @overload 5 | def ab(a: int): ... - | ^^ + | -- 6 | - 7 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab("hello") - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 - | 7 | @overload 8 | def ab(a: str): ... - | ^^ - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab("hello") - | ^^ + | -- | "#); } @@ -2317,38 +2254,24 @@ def ab(a: int): ... .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:4:1 + | + 2 | from mymodule import ab + 3 | + 4 | ab(1, 2) + | ^^ Clicking here + | + info: Found 2 declarations --> mymodule.pyi:5:5 | 4 | @overload 5 | def ab(a: int, b: int): ... - | ^^ + | -- 6 | - 7 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, 2) - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 - | 7 | @overload 8 | def ab(a: int): ... - | ^^ - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, 2) - | ^^ + | -- | "); } @@ -2386,38 +2309,24 @@ def ab(a: int): ... .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:4:1 + | + 2 | from mymodule import ab + 3 | + 4 | ab(1) + | ^^ Clicking here + | + info: Found 2 declarations --> mymodule.pyi:5:5 | 4 | @overload 5 | def ab(a: int, b: int): ... - | ^^ + | -- 6 | - 7 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1) - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 - | 7 | @overload 8 | def ab(a: int): ... - | ^^ - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1) - | ^^ + | -- | "); } @@ -2458,57 +2367,29 @@ def ab(a: int, *, c: int): ... .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mymodule.pyi:5:5 - | - 4 | @overload - 5 | def ab(a: int): ... - | ^^ - 6 | - 7 | @overload - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1, b=2) - | ^^ + | ^^ Clicking here | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 + info: Found 3 declarations + --> mymodule.pyi:5:5 | + 4 | @overload + 5 | def ab(a: int): ... + | -- + 6 | 7 | @overload 8 | def ab(a: int, *, b: int): ... 
- | ^^ + | -- 9 | - 10 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, b=2) - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:11:5 - | 10 | @overload 11 | def ab(a: int, *, c: int): ... - | ^^ + | -- | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, b=2) - | ^^ - | "); } @@ -2548,57 +2429,29 @@ def ab(a: int, *, c: int): ... .build(); assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mymodule.pyi:5:5 - | - 4 | @overload - 5 | def ab(a: int): ... - | ^^ - 6 | - 7 | @overload - | - info: Source + info[goto-declaration]: Go to declaration --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1, c=2) - | ^^ + | ^^ Clicking here | - - info[goto-declaration]: Declaration - --> mymodule.pyi:8:5 + info: Found 3 declarations + --> mymodule.pyi:5:5 | + 4 | @overload + 5 | def ab(a: int): ... + | -- + 6 | 7 | @overload 8 | def ab(a: int, *, b: int): ... - | ^^ + | -- 9 | - 10 | @overload - | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, c=2) - | ^^ - | - - info[goto-declaration]: Declaration - --> mymodule.pyi:11:5 - | 10 | @overload 11 | def ab(a: int, *, c: int): ... - | ^^ + | -- | - info: Source - --> main.py:4:1 - | - 2 | from mymodule import ab - 3 | - 4 | ab(1, c=2) - | ^^ - | "); } @@ -2627,21 +2480,21 @@ def ab(a: int, *, c: int): ... // which is correct but unhelpful. Unfortunately even if it only claimed the LHS identifier it // would highlight `subpkg.submod` which is strictly better but still isn't what we want. assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mypackage/__init__.py:2:1 - | - 2 | from .subpkg.submod import val - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - 3 | - 4 | x = subpkg - | - info: Source + info[goto-declaration]: Go to declaration --> mypackage/__init__.py:4:5 | 2 | from .subpkg.submod import val 3 | 4 | x = subpkg - | ^^^^^^ + | ^^^^^^ Clicking here + | + info: Found 1 declaration + --> mypackage/__init__.py:2:1 + | + 2 | from .subpkg.submod import val + | ------------------------------ + 3 | + 4 | x = subpkg | "); } @@ -2671,18 +2524,18 @@ def ab(a: int, *, c: int): ... // `subpkg = mypackage.subpkg`. As in, it's both defining a local `subpkg` and // loading the module `mypackage.subpkg`, so, it's understandable to get confused! assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mypackage/subpkg/__init__.py:1:1 - | - | - info: Source + info[goto-declaration]: Go to declaration --> mypackage/__init__.py:2:7 | 2 | from .subpkg.submod import val - | ^^^^^^ + | ^^^^^^ Clicking here 3 | 4 | x = subpkg | + info: Found 1 declaration + --> mypackage/subpkg/__init__.py:1:1 + | + | "); } @@ -2732,21 +2585,21 @@ def ab(a: int, *, c: int): ... // Going to the submod module is correct! assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mypackage/subpkg/submod.py:1:1 - | - 1 | - | ^ - 2 | val: int = 0 - | - info: Source + info[goto-declaration]: Go to declaration --> mypackage/__init__.py:2:14 | 2 | from .subpkg.submod import val - | ^^^^^^ + | ^^^^^^ Clicking here 3 | 4 | x = submod | + info: Found 1 declaration + --> mypackage/subpkg/submod.py:1:1 + | + 1 | + | - + 2 | val: int = 0 + | "); } @@ -2771,21 +2624,21 @@ def ab(a: int, *, c: int): ... // Going to the subpkg module is correct! 
assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mypackage/subpkg/__init__.py:1:1 - | - 1 | - | ^ - 2 | subpkg: int = 10 - | - info: Source + info[goto-declaration]: Go to declaration --> mypackage/__init__.py:2:7 | 2 | from .subpkg import subpkg - | ^^^^^^ + | ^^^^^^ Clicking here 3 | 4 | x = subpkg | + info: Found 1 declaration + --> mypackage/subpkg/__init__.py:1:1 + | + 1 | + | - + 2 | subpkg: int = 10 + | "); } @@ -2810,20 +2663,20 @@ def ab(a: int, *, c: int): ... // Going to the subpkg `int` is correct! assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration - --> mypackage/subpkg/__init__.py:2:1 - | - 2 | subpkg: int = 10 - | ^^^^^^ - | - info: Source + info[goto-declaration]: Go to declaration --> mypackage/__init__.py:2:21 | 2 | from .subpkg import subpkg - | ^^^^^^ + | ^^^^^^ Clicking here 3 | 4 | x = subpkg | + info: Found 1 declaration + --> mypackage/subpkg/__init__.py:2:1 + | + 2 | subpkg: int = 10 + | ------ + | "); } @@ -2860,36 +2713,26 @@ def ab(a: int, *, c: int): ... // that *immediately* overwrites the `ImportFromSubmodule`'s definition // This span seemingly doesn't appear at all!? Is it getting hidden by the LHS span? assert_snapshot!(test.goto_declaration(), @r" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> mypackage/__init__.py:4:5 + | + 2 | from .subpkg import subpkg + 3 | + 4 | x = subpkg + | ^^^^^^ Clicking here + | + info: Found 2 declarations --> mypackage/__init__.py:2:1 | 2 | from .subpkg import subpkg - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | -------------------------- 3 | 4 | x = subpkg | - info: Source - --> mypackage/__init__.py:4:5 - | - 2 | from .subpkg import subpkg - 3 | - 4 | x = subpkg - | ^^^^^^ - | - - info[goto-declaration]: Declaration - --> mypackage/subpkg/__init__.py:2:1 + ::: mypackage/subpkg/__init__.py:2:1 | 2 | subpkg: int = 10 - | ^^^^^^ - | - info: Source - --> mypackage/__init__.py:4:5 - | - 2 | from .subpkg import subpkg - 3 | - 4 | x = subpkg - | ^^^^^^ + | ------ | "); } @@ -2913,71 +2756,38 @@ def ab(a: int, *, c: int): ... .build(); assert_snapshot!(test.goto_declaration(), @r#" - info[goto-declaration]: Declaration + info[goto-declaration]: Go to declaration + --> main.py:6:7 + | + 4 | a: int = 10 + 5 | + 6 | print(a) + | ^ Clicking here + 7 | + 8 | a: bool = True + | + info: Found 3 declarations --> main.py:2:1 | 2 | a: str = "test" - | ^ + | - 3 | 4 | a: int = 10 - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 + | - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True - | - - info[goto-declaration]: Declaration - --> main.py:4:1 - | - 2 | a: str = "test" - 3 | - 4 | a: int = 10 - | ^ - 5 | - 6 | print(a) - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True - | - - info[goto-declaration]: Declaration - --> main.py:8:1 - | 6 | print(a) 7 | 8 | a: bool = True - | ^ - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True + | - | "#); } impl CursorTest { fn goto_declaration(&self) -> String { - let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset) - else { + let Some(targets) = salsa::attach(&self.db, || { + goto_declaration(&self.db, self.cursor.file, self.cursor.offset) + }) else { return "No goto target found".to_string(); }; @@ -2985,47 +2795,10 @@ def ab(a: int, *, c: int): ... 
return "No declarations found".to_string(); } - let source = targets.range; - self.render_diagnostics( - targets - .into_iter() - .map(|target| GotoDeclarationDiagnostic::new(source, &target)), - ) - } - } - - struct GotoDeclarationDiagnostic { - source: FileRange, - target: FileRange, - } - - impl GotoDeclarationDiagnostic { - fn new(source: FileRange, target: &NavigationTarget) -> Self { - Self { - source, - target: FileRange::new(target.file(), target.focus_range()), - } - } - } - - impl IntoDiagnostic for GotoDeclarationDiagnostic { - fn into_diagnostic(self) -> Diagnostic { - let mut source = SubDiagnostic::new(SubDiagnosticSeverity::Info, "Source"); - source.annotate(Annotation::primary( - Span::from(self.source.file()).with_range(self.source.range()), - )); - - let mut main = Diagnostic::new( - DiagnosticId::Lint(LintName::of("goto-declaration")), - Severity::Info, - "Declaration".to_string(), - ); - main.annotate(Annotation::primary( - Span::from(self.target.file()).with_range(self.target.range()), - )); - main.sub(source); - - main + self.render_diagnostics([crate::goto_definition::test::GotoDiagnostic::new( + crate::goto_definition::test::GotoAction::Declaration, + targets, + )]) } } } diff --git a/crates/ty_ide/src/goto_definition.rs b/crates/ty_ide/src/goto_definition.rs index 00e08957cb..e942e8040d 100644 --- a/crates/ty_ide/src/goto_definition.rs +++ b/crates/ty_ide/src/goto_definition.rs @@ -30,15 +30,15 @@ pub fn goto_definition( } #[cfg(test)] -mod test { +pub(super) mod test { + use crate::tests::{CursorTest, IntoDiagnostic}; - use crate::{NavigationTarget, goto_definition}; + use crate::{NavigationTargets, RangedValue, goto_definition}; use insta::assert_snapshot; use ruff_db::diagnostic::{ Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span, SubDiagnostic, SubDiagnosticSeverity, }; - use ruff_db::files::FileRange; use ruff_text_size::Ranged; /// goto-definition on a module should go to the .py not the .pyi @@ -70,19 +70,19 @@ def my_function(): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:1:1 - | - 1 | - | ^ - 2 | def my_function(): - 3 | return "hello" - | - info: Source + info[goto-definition]: Go to definition --> main.py:2:6 | 2 | from mymodule import my_function - | ^^^^^^^^ + | ^^^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:1:1 + | + 1 | + | - + 2 | def my_function(): + 3 | return "hello" | "#); } @@ -114,20 +114,20 @@ def my_function(): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:1:1 - | - 1 | - | ^ - 2 | def my_function(): - 3 | return "hello" - | - info: Source + info[goto-definition]: Go to definition --> main.py:3:5 | 2 | import mymodule 3 | x = mymodule - | ^^^^^^^^ + | ^^^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:1:1 + | + 1 | + | - + 2 | def my_function(): + 3 | return "hello" | "#); } @@ -164,19 +164,19 @@ def other_function(): ... 
.build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def my_function(): - | ^^^^^^^^^^^ - 3 | return "hello" - | - info: Source + info[goto-definition]: Go to definition --> main.py:3:7 | 2 | from mymodule import my_function 3 | print(my_function()) - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def my_function(): + | ----------- + 3 | return "hello" | "#); } @@ -206,21 +206,21 @@ def other_function(): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def my_function(): - | ^^^^^^^^^^^ - 3 | return "hello" - | - info: Source + info[goto-definition]: Go to definition --> mymodule.pyi:2:5 | 2 | def my_function(): ... - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ Clicking here 3 | 4 | def other_function(): ... | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def my_function(): + | ----------- + 3 | return "hello" + | "#); } @@ -266,54 +266,28 @@ def other_function(): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:3:7 + | + 2 | from mymodule import my_function + 3 | print(my_function()) + | ^^^^^^^^^^^ Clicking here + | + info: Found 3 definitions --> mymodule.py:2:5 | 2 | def my_function(): - | ^^^^^^^^^^^ - 3 | return "hello" - | - info: Source - --> main.py:3:7 - | - 2 | from mymodule import my_function - 3 | print(my_function()) - | ^^^^^^^^^^^ - | - - info[goto-definition]: Definition - --> mymodule.py:5:5 - | + | ----------- 3 | return "hello" 4 | 5 | def my_function(): - | ^^^^^^^^^^^ - 6 | return "hello again" - | - info: Source - --> main.py:3:7 - | - 2 | from mymodule import my_function - 3 | print(my_function()) - | ^^^^^^^^^^^ - | - - info[goto-definition]: Definition - --> mymodule.py:8:5 - | + | ----------- 6 | return "hello again" 7 | 8 | def my_function(): - | ^^^^^^^^^^^ + | ----------- 9 | return "we can't keep doing this" | - info: Source - --> main.py:3:7 - | - 2 | from mymodule import my_function - 3 | print(my_function()) - | ^^^^^^^^^^^ - | "#); } @@ -353,20 +327,20 @@ class MyOtherClass: .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> mymodule.py:2:7 - | - 2 | class MyClass: - | ^^^^^^^ - 3 | def __init__(self, val): - 4 | self.val = val - | - info: Source + info[goto-definition]: Go to definition --> main.py:3:5 | 2 | from mymodule import MyClass 3 | x = MyClass - | ^^^^^^^ + | ^^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:7 + | + 2 | class MyClass: + | ------- + 3 | def __init__(self, val): + 4 | self.val = val | "); } @@ -400,21 +374,21 @@ class MyOtherClass: .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> mymodule.py:2:7 - | - 2 | class MyClass: - | ^^^^^^^ - 3 | def __init__(self, val): - 4 | self.val = val - | - info: Source + info[goto-definition]: Go to definition --> mymodule.pyi:2:7 | 2 | class MyClass: - | ^^^^^^^ + | ^^^^^^^ Clicking here 3 | def __init__(self, val: bool): ... 
| + info: Found 1 definition + --> mymodule.py:2:7 + | + 2 | class MyClass: + | ------- + 3 | def __init__(self, val): + 4 | self.val = val + | "); } @@ -454,37 +428,22 @@ class MyOtherClass: .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:3:5 + | + 2 | from mymodule import MyClass + 3 | x = MyClass(0) + | ^^^^^^^ Clicking here + | + info: Found 2 definitions --> mymodule.py:2:7 | 2 | class MyClass: - | ^^^^^^^ + | ------- 3 | def __init__(self, val): + | -------- 4 | self.val = val | - info: Source - --> main.py:3:5 - | - 2 | from mymodule import MyClass - 3 | x = MyClass(0) - | ^^^^^^^ - | - - info[goto-definition]: Definition - --> mymodule.py:3:9 - | - 2 | class MyClass: - 3 | def __init__(self, val): - | ^^^^^^^^ - 4 | self.val = val - | - info: Source - --> main.py:3:5 - | - 2 | from mymodule import MyClass - 3 | x = MyClass(0) - | ^^^^^^^ - | "); } @@ -528,22 +487,22 @@ class MyOtherClass: .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> mymodule.py:5:9 - | - 3 | def __init__(self, val): - 4 | self.val = val - 5 | def action(self): - | ^^^^^^ - 6 | print(self.val) - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:3 | 2 | from mymodule import MyClass 3 | x = MyClass(0) 4 | x.action() - | ^^^^^^ + | ^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:5:9 + | + 3 | def __init__(self, val): + 4 | self.val = val + 5 | def action(self): + | ------ + 6 | print(self.val) | "); } @@ -587,22 +546,22 @@ class MyOtherClass: .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:3:13 + | + 2 | from mymodule import MyClass + 3 | x = MyClass.action() + | ^^^^^^ Clicking here + | + info: Found 1 definition --> mymodule.py:5:9 | 3 | def __init__(self, val): 4 | self.val = val 5 | def action(): - | ^^^^^^ + | ------ 6 | print("hi!") | - info: Source - --> main.py:3:13 - | - 2 | from mymodule import MyClass - 3 | x = MyClass.action() - | ^^^^^^ - | "#); } @@ -631,17 +590,17 @@ class MyClass: ... .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> mymodule.py:2:7 - | - 2 | class MyClass: ... - | ^^^^^^^ - | - info: Source + info[goto-definition]: Go to definition --> main.py:2:22 | 2 | from mymodule import MyClass - | ^^^^^^^ + | ^^^^^^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:7 + | + 2 | class MyClass: ... + | ------- | "); } @@ -665,22 +624,22 @@ my_func(my_other_func(ab=5, y=2), 0) .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:2:13 - | - 2 | def my_func(ab, y, z = None): ... - | ^^ - 3 | def my_other_func(ab, y): ... - | - info: Source + info[goto-definition]: Go to definition --> main.py:5:23 | 3 | def my_other_func(ab, y): ... 4 | 5 | my_other_func(my_func(ab=5, y=2), 0) - | ^^ + | ^^ Clicking here 6 | my_func(my_other_func(ab=5, y=2), 0) | + info: Found 1 definition + --> main.py:2:13 + | + 2 | def my_func(ab, y, z = None): ... + | -- + 3 | def my_other_func(ab, y): ... + | "); } @@ -703,21 +662,21 @@ my_func(my_other_func(ab=5, y=2), 0) .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:19 - | - 2 | def my_func(ab, y, z = None): ... - 3 | def my_other_func(ab, y): ... 
- | ^^ - 4 | - 5 | my_other_func(my_func(ab=5, y=2), 0) - | - info: Source + info[goto-definition]: Go to definition --> main.py:6:23 | 5 | my_other_func(my_func(ab=5, y=2), 0) 6 | my_func(my_other_func(ab=5, y=2), 0) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> main.py:3:19 + | + 2 | def my_func(ab, y, z = None): ... + 3 | def my_other_func(ab, y): ... + | -- + 4 | + 5 | my_other_func(my_func(ab=5, y=2), 0) | "); } @@ -741,22 +700,22 @@ my_func(my_other_func(ab=5, y=2), 0) .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:2:13 - | - 2 | def my_func(ab, y): ... - | ^^ - 3 | def my_other_func(ab, y): ... - | - info: Source + info[goto-definition]: Go to definition --> main.py:5:23 | 3 | def my_other_func(ab, y): ... 4 | 5 | my_other_func(my_func(ab=5, y=2), 0) - | ^^ + | ^^ Clicking here 6 | my_func(my_other_func(ab=5, y=2), 0) | + info: Found 1 definition + --> main.py:2:13 + | + 2 | def my_func(ab, y): ... + | -- + 3 | def my_other_func(ab, y): ... + | "); } @@ -779,21 +738,21 @@ my_func(my_other_func(ab=5, y=2), 0) .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:19 - | - 2 | def my_func(ab, y): ... - 3 | def my_other_func(ab, y): ... - | ^^ - 4 | - 5 | my_other_func(my_func(ab=5, y=2), 0) - | - info: Source + info[goto-definition]: Go to definition --> main.py:6:23 | 5 | my_other_func(my_func(ab=5, y=2), 0) 6 | my_func(my_other_func(ab=5, y=2), 0) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> main.py:3:19 + | + 2 | def my_func(ab, y): ... + 3 | def my_other_func(ab, y): ... + | -- + 4 | + 5 | my_other_func(my_func(ab=5, y=2), 0) | "); } @@ -831,20 +790,20 @@ def ab(a: str): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a): + | -- + 3 | """the real implementation!""" | "#); } @@ -882,20 +841,20 @@ def ab(a: str): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab("hello") - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a): + | -- + 3 | """the real implementation!""" | "#); } @@ -933,20 +892,20 @@ def ab(a: int): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a, b = None): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1, 2) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a, b = None): + | -- + 3 | """the real implementation!""" | "#); } @@ -984,20 +943,20 @@ def ab(a: int): ... 
.build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a, b = None): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a, b = None): + | -- + 3 | """the real implementation!""" | "#); } @@ -1038,20 +997,20 @@ def ab(a: int, *, c: int): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a, *, b = None, c = None): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1, b=2) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a, *, b = None, c = None): + | -- + 3 | """the real implementation!""" | "#); } @@ -1092,20 +1051,20 @@ def ab(a: int, *, c: int): ... .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition - --> mymodule.py:2:5 - | - 2 | def ab(a, *, b = None, c = None): - | ^^ - 3 | """the real implementation!""" - | - info: Source + info[goto-definition]: Go to definition --> main.py:4:1 | 2 | from mymodule import ab 3 | 4 | ab(1, c=2) - | ^^ + | ^^ Clicking here + | + info: Found 1 definition + --> mymodule.py:2:5 + | + 2 | def ab(a, *, b = None, c = None): + | -- + 3 | """the real implementation!""" | "#); } @@ -1130,22 +1089,22 @@ a + b .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __add__(self, other): - | ^^^^^^^ - 4 | return Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:10:3 | 8 | b = Test() 9 | 10 | a + b - | ^ + | ^ Clicking here | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __add__(self, other): + | ------- + 4 | return Test() + | "); } @@ -1167,21 +1126,21 @@ B() + A() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class A: - 3 | def __radd__(self, other) -> A: - | ^^^^^^^^ - 4 | return self - | - info: Source + info[goto-definition]: Go to definition --> main.py:8:5 | 6 | class B: ... 
7 | 8 | B() + A() - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class A: + 3 | def __radd__(self, other) -> A: + | -------- + 4 | return self | "); } @@ -1206,22 +1165,22 @@ a+b .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __add__(self, other): - | ^^^^^^^ - 4 | return Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:10:2 | 8 | b = Test() 9 | 10 | a+b - | ^ + | ^ Clicking here | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __add__(self, other): + | ------- + 4 | return Test() + | "); } @@ -1245,22 +1204,22 @@ a+b .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:8:1 - | - 7 | a = Test() - 8 | b = Test() - | ^ - 9 | - 10 | a+b - | - info: Source + info[goto-definition]: Go to definition --> main.py:10:3 | 8 | b = Test() 9 | 10 | a+b - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:8:1 + | + 7 | a = Test() + 8 | b = Test() + | - + 9 | + 10 | a+b | "); } @@ -1304,22 +1263,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __invert__(self) -> 'Test': ... - | ^^^^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | ~a - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __invert__(self) -> 'Test': ... + | ---------- + 4 | + 5 | a = Test() | "); } @@ -1342,22 +1301,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __invert__(self, extra_arg) -> 'Test': ... - | ^^^^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | ~a - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __invert__(self, extra_arg) -> 'Test': ... + | ---------- + 4 | + 5 | a = Test() | "); } @@ -1379,22 +1338,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __invert__(self) -> 'Test': ... - | ^^^^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | ~ a - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __invert__(self) -> 'Test': ... + | ---------- + 4 | + 5 | a = Test() | "); } @@ -1416,23 +1375,23 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:5:1 - | - 3 | def __invert__(self) -> 'Test': ... - 4 | - 5 | a = Test() - | ^ - 6 | - 7 | -a - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:2 | 5 | a = Test() 6 | 7 | -a - | ^ + | ^ Clicking here + | + info: Found 1 definition + --> main.py:5:1 + | + 3 | def __invert__(self) -> 'Test': ... + 4 | + 5 | a = Test() + | - + 6 | + 7 | -a | "); } @@ -1454,22 +1413,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __bool__(self) -> bool: ... 
- | ^^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | not a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __bool__(self) -> bool: ... + | -------- + 4 | + 5 | a = Test() | "); } @@ -1491,22 +1450,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __len__(self) -> 42: ... - | ^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | not a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __len__(self) -> 42: ... + | ------- + 4 | + 5 | a = Test() | "); } @@ -1532,21 +1491,21 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __bool__(self, extra_arg) -> bool: ... - | ^^^^^^^^ - 4 | def __len__(self) -> 42: ... - | - info: Source + info[goto-definition]: Go to definition --> main.py:8:1 | 6 | a = Test() 7 | 8 | not a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __bool__(self, extra_arg) -> bool: ... + | -------- + 4 | def __len__(self) -> 42: ... | "); } @@ -1572,22 +1531,22 @@ a = Test() .build(); assert_snapshot!(test.goto_definition(), @r" - info[goto-definition]: Definition - --> main.py:3:9 - | - 2 | class Test: - 3 | def __len__(self, extra_arg) -> 42: ... - | ^^^^^^^ - 4 | - 5 | a = Test() - | - info: Source + info[goto-definition]: Go to definition --> main.py:7:1 | 5 | a = Test() 6 | 7 | not a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 definition + --> main.py:3:9 + | + 2 | class Test: + 3 | def __len__(self, extra_arg) -> 42: ... 
+ | ------- + 4 | + 5 | a = Test() | "); } @@ -1604,36 +1563,28 @@ a: float = 3.14 .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:2:4 + | + 2 | a: float = 3.14 + | ^^^^^ Clicking here + | + info: Found 2 definitions --> stdlib/builtins.pyi:348:7 | 347 | @disjoint_base 348 | class int: - | ^^^ + | --- 349 | """int([x]) -> integer 350 | int(x, base=10) -> integer | - info: Source - --> main.py:2:4 - | - 2 | a: float = 3.14 - | ^^^^^ - | - - info[goto-definition]: Definition - --> stdlib/builtins.pyi:661:7 + ::: stdlib/builtins.pyi:661:7 | 660 | @disjoint_base 661 | class float: - | ^^^^^ + | ----- 662 | """Convert a string or number to a floating-point number, if possible.""" | - info: Source - --> main.py:2:4 - | - 2 | a: float = 3.14 - | ^^^^^ - | "#); } @@ -1649,51 +1600,35 @@ a: complex = 3.14 .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:2:4 + | + 2 | a: complex = 3.14 + | ^^^^^^^ Clicking here + | + info: Found 3 definitions --> stdlib/builtins.pyi:348:7 | 347 | @disjoint_base 348 | class int: - | ^^^ + | --- 349 | """int([x]) -> integer 350 | int(x, base=10) -> integer | - info: Source - --> main.py:2:4 - | - 2 | a: complex = 3.14 - | ^^^^^^^ - | - - info[goto-definition]: Definition - --> stdlib/builtins.pyi:661:7 + ::: stdlib/builtins.pyi:661:7 | 660 | @disjoint_base 661 | class float: - | ^^^^^ + | ----- 662 | """Convert a string or number to a floating-point number, if possible.""" | - info: Source - --> main.py:2:4 - | - 2 | a: complex = 3.14 - | ^^^^^^^ - | - - info[goto-definition]: Definition - --> stdlib/builtins.pyi:822:7 + ::: stdlib/builtins.pyi:822:7 | 821 | @disjoint_base 822 | class complex: - | ^^^^^^^ + | ------- 823 | """Create a complex number from a string or numbers. 
| - info: Source - --> main.py:2:4 - | - 2 | a: complex = 3.14 - | ^^^^^^^ - | "#); } @@ -1733,71 +1668,38 @@ TracebackType .build(); assert_snapshot!(test.goto_definition(), @r#" - info[goto-definition]: Definition + info[goto-definition]: Go to definition + --> main.py:6:7 + | + 4 | a: int = 10 + 5 | + 6 | print(a) + | ^ Clicking here + 7 | + 8 | a: bool = True + | + info: Found 3 definitions --> main.py:2:1 | 2 | a: str = "test" - | ^ + | - 3 | 4 | a: int = 10 - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 + | - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True - | - - info[goto-definition]: Definition - --> main.py:4:1 - | - 2 | a: str = "test" - 3 | - 4 | a: int = 10 - | ^ - 5 | - 6 | print(a) - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True - | - - info[goto-definition]: Definition - --> main.py:8:1 - | 6 | print(a) 7 | 8 | a: bool = True - | ^ - | - info: Source - --> main.py:6:7 - | - 4 | a: int = 10 - 5 | - 6 | print(a) - | ^ - 7 | - 8 | a: bool = True + | - | "#); } impl CursorTest { fn goto_definition(&self) -> String { - let Some(targets) = goto_definition(&self.db, self.cursor.file, self.cursor.offset) - else { + let Some(targets) = salsa::attach(&self.db, || { + goto_definition(&self.db, self.cursor.file, self.cursor.offset) + }) else { return "No goto target found".to_string(); }; @@ -1805,47 +1707,86 @@ TracebackType return "No definitions found".to_string(); } - let source = targets.range; - self.render_diagnostics( - targets - .into_iter() - .map(|target| GotoDefinitionDiagnostic::new(source, &target)), - ) + self.render_diagnostics([GotoDiagnostic::new(GotoAction::Definition, targets)]) } } - struct GotoDefinitionDiagnostic { - source: FileRange, - target: FileRange, + pub(crate) struct GotoDiagnostic { + action: GotoAction, + targets: RangedValue, } - impl GotoDefinitionDiagnostic { - fn new(source: FileRange, target: &NavigationTarget) -> Self { - Self { - source, - target: FileRange::new(target.file(), target.focus_range()), - } + impl GotoDiagnostic { + pub(crate) fn new(action: GotoAction, targets: RangedValue) -> Self { + Self { action, targets } } } - impl IntoDiagnostic for GotoDefinitionDiagnostic { + impl IntoDiagnostic for GotoDiagnostic { fn into_diagnostic(self) -> Diagnostic { - let mut source = SubDiagnostic::new(SubDiagnosticSeverity::Info, "Source"); - source.annotate(Annotation::primary( - Span::from(self.source.file()).with_range(self.source.range()), - )); - + let source = self.targets.range; let mut main = Diagnostic::new( - DiagnosticId::Lint(LintName::of("goto-definition")), + DiagnosticId::Lint(LintName::of(self.action.name())), Severity::Info, - "Definition".to_string(), + self.action.label().to_string(), ); - main.annotate(Annotation::primary( - Span::from(self.target.file()).with_range(self.target.range()), - )); - main.sub(source); + + main.annotate( + Annotation::primary(Span::from(source.file()).with_range(source.range())) + .message("Clicking here"), + ); + + let mut sub = SubDiagnostic::new( + SubDiagnosticSeverity::Info, + format_args!( + "Found {} {}{}", + self.targets.len(), + self.action.item_label(), + if self.targets.len() == 1 { "" } else { "s" } + ), + ); + + for target in self.targets { + sub.annotate(Annotation::secondary( + Span::from(target.file()).with_range(target.focus_range()), + )); + } + + main.sub(sub); main } } + + pub(crate) enum GotoAction { + Definition, + Declaration, + TypeDefinition, + } + + impl GotoAction { + fn name(&self) -> &'static 
str { + match self { + GotoAction::Definition => "goto-definition", + GotoAction::Declaration => "goto-declaration", + GotoAction::TypeDefinition => "goto-type definition", + } + } + + fn label(&self) -> &'static str { + match self { + GotoAction::Definition => "Go to definition", + GotoAction::Declaration => "Go to declaration", + GotoAction::TypeDefinition => "Go to type definition", + } + } + + fn item_label(&self) -> &'static str { + match self { + GotoAction::Definition => "definition", + GotoAction::Declaration => "declaration", + GotoAction::TypeDefinition => "type definition", + } + } + } } diff --git a/crates/ty_ide/src/goto_type_definition.rs b/crates/ty_ide/src/goto_type_definition.rs index 53cc98413d..3d7f25f81f 100644 --- a/crates/ty_ide/src/goto_type_definition.rs +++ b/crates/ty_ide/src/goto_type_definition.rs @@ -28,15 +28,9 @@ pub fn goto_type_definition( #[cfg(test)] mod tests { - use crate::tests::{CursorTest, IntoDiagnostic, cursor_test}; - use crate::{NavigationTarget, goto_type_definition}; + use crate::goto_type_definition; + use crate::tests::{CursorTest, cursor_test}; use insta::assert_snapshot; - use ruff_db::diagnostic::{ - Annotation, Diagnostic, DiagnosticId, LintName, Severity, Span, SubDiagnostic, - SubDiagnosticSeverity, - }; - use ruff_db::files::FileRange; - use ruff_text_size::Ranged; #[test] fn goto_type_of_expression_with_class_type() { @@ -49,21 +43,21 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:7 - | - 2 | class Test: ... - | ^^^^ - 3 | - 4 | ab = Test() - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | class Test: ... 3 | 4 | ab = Test() - | ^^ + | ^^ Clicking here + | + info: Found 1 type definition + --> main.py:2:7 + | + 2 | class Test: ... + | ---- + 3 | + 4 | ab = Test() | "); } @@ -79,23 +73,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> stdlib/typing.pyi:351:1 - | - 349 | Final: _SpecialForm - 350 | - 351 | Literal: _SpecialForm - | ^^^^^^^ - 352 | TypedDict: _SpecialForm - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | from typing import Literal 3 | 4 | ab = Literal - | ^^ + | ^^ Clicking here | + info: Found 1 type definition + --> stdlib/typing.pyi:351:1 + | + 349 | Final: _SpecialForm + 350 | + 351 | Literal: _SpecialForm + | ------- + 352 | TypedDict: _SpecialForm + | "); } @@ -112,23 +106,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/typing.pyi:166:7 - | - 164 | # from _typeshed import AnnotationForm - 165 | - 166 | class Any: - | ^^^ - 167 | """Special type indicating an unconstrained type. - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | from typing import Any 3 | 4 | ab = Any - | ^^ + | ^^ Clicking here | + info: Found 1 type definition + --> stdlib/typing.pyi:166:7 + | + 164 | # from _typeshed import AnnotationForm + 165 | + 166 | class Any: + | --- + 167 | """Special type indicating an unconstrained type. + | "#); } @@ -144,24 +138,24 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> stdlib/typing.pyi:781:1 - | - 779 | def __class_getitem__(cls, args: TypeVar | tuple[TypeVar, ...]) -> _Final: ... 
- 780 | - 781 | Generic: type[_Generic] - | ^^^^^^^ - 782 | - 783 | class _ProtocolMeta(ABCMeta): - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | from typing import Generic 3 | 4 | ab = Generic - | ^^ + | ^^ Clicking here | + info: Found 1 type definition + --> stdlib/typing.pyi:781:1 + | + 779 | def __class_getitem__(cls, args: TypeVar | tuple[TypeVar, ...]) -> _Final: ... + 780 | + 781 | Generic: type[_Generic] + | ------- + 782 | + 783 | class _ProtocolMeta(ABCMeta): + | "); } @@ -176,23 +170,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> stdlib/ty_extensions.pyi:21:1 - | - 19 | # Types - 20 | Unknown = object() - 21 | AlwaysTruthy = object() - | ^^^^^^^^^^^^ - 22 | AlwaysFalsy = object() - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | from ty_extensions import AlwaysTruthy 3 | 4 | ab = AlwaysTruthy - | ^^ + | ^^ Clicking here | + info: Found 1 type definition + --> stdlib/ty_extensions.pyi:21:1 + | + 19 | # Types + 20 | Unknown = object() + 21 | AlwaysTruthy = object() + | ------------ + 22 | AlwaysFalsy = object() + | "); } @@ -209,21 +203,21 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:5 - | - 2 | def foo(a, b): ... - | ^^^ - 3 | - 4 | ab = foo - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:6:1 | 4 | ab = foo 5 | 6 | ab - | ^^ + | ^^ Clicking here + | + info: Found 1 type definition + --> main.py:2:5 + | + 2 | def foo(a, b): ... + | --- + 3 | + 4 | ab = foo | "); } @@ -247,41 +241,25 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:12:1 + | + 10 | a = bar + 11 | + 12 | a + | ^ Clicking here + | + info: Found 2 type definitions --> main.py:3:5 | 3 | def foo(a, b): ... - | ^^^ + | --- 4 | 5 | def bar(a, b): ... - | - info: Source - --> main.py:12:1 - | - 10 | a = bar - 11 | - 12 | a - | ^ - | - - info[goto-type-definition]: Type definition - --> main.py:5:5 - | - 3 | def foo(a, b): ... - 4 | - 5 | def bar(a, b): ... 
- | ^^^ + | --- 6 | 7 | if random.choice(): | - info: Source - --> main.py:12:1 - | - 10 | a = bar - 11 | - 12 | a - | ^ - | "); } @@ -296,17 +274,17 @@ mod tests { test.write_file("lib.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib.py:1:1 - | - 1 | a = 10 - | ^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:8 | 2 | import lib - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> lib.py:1:1 + | + 1 | a = 10 + | ------ | "); } @@ -323,17 +301,17 @@ mod tests { test.write_file("lib/submod.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/__init__.py:1:1 - | - 1 | b = 7 - | ^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:8 | 2 | import lib.submod - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> lib/__init__.py:1:1 + | + 1 | b = 7 + | ----- | "); } @@ -350,17 +328,17 @@ mod tests { test.write_file("lib/submod.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/submod.py:1:1 - | - 1 | a = 10 - | ^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:12 | 2 | import lib.submod - | ^^^^^^ + | ^^^^^^ Clicking here + | + info: Found 1 type definition + --> lib/submod.py:1:1 + | + 1 | a = 10 + | ------ | "); } @@ -376,17 +354,17 @@ mod tests { test.write_file("lib.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib.py:1:1 - | - 1 | a = 10 - | ^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:6 | 2 | from lib import a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> lib.py:1:1 + | + 1 | a = 10 + | ------ | "); } @@ -403,17 +381,17 @@ mod tests { test.write_file("lib/submod.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/__init__.py:1:1 - | - 1 | b = 7 - | ^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:6 | 2 | from lib.submod import a - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> lib/__init__.py:1:1 + | + 1 | b = 7 + | ----- | "); } @@ -430,17 +408,17 @@ mod tests { test.write_file("lib/submod.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/submod.py:1:1 - | - 1 | a = 10 - | ^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:10 | 2 | from lib.submod import a - | ^^^^^^ + | ^^^^^^ Clicking here + | + info: Found 1 type definition + --> lib/submod.py:1:1 + | + 1 | a = 10 + | ------ | "); } @@ -466,19 +444,19 @@ mod tests { .unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/sub/bot/botmod.py:1:1 - | - 1 | botmod = 31 - | ^^^^^^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> lib/sub/__init__.py:2:11 | 2 | from .bot.botmod import * - | ^^^^^^ + | ^^^^^^ Clicking here 3 | sub = 2 | + info: Found 1 type definition + --> lib/sub/bot/botmod.py:1:1 + | + 1 | botmod = 31 + | ----------- + | "); } @@ -503,19 +481,19 @@ mod tests { .unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - 
info[goto-type-definition]: Type definition - --> lib/sub/bot/__init__.py:1:1 - | - 1 | bot = 3 - | ^^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> lib/sub/__init__.py:2:7 | 2 | from .bot.botmod import * - | ^^^ + | ^^^ Clicking here 3 | sub = 2 | + info: Found 1 type definition + --> lib/sub/bot/__init__.py:1:1 + | + 1 | bot = 3 + | ------- + | "); } @@ -540,19 +518,19 @@ mod tests { .unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib/sub/bot/__init__.py:1:1 - | - 1 | bot = 3 - | ^^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> lib/sub/__init__.py:2:7 | 2 | from .bot.botmod import * - | ^^^ + | ^^^ Clicking here 3 | sub = 2 | + info: Found 1 type definition + --> lib/sub/bot/__init__.py:1:1 + | + 1 | bot = 3 + | ------- + | "); } @@ -592,19 +570,19 @@ mod tests { test.write_file("lib.py", "a = 10").unwrap(); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> lib.py:1:1 - | - 1 | a = 10 - | ^^^^^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | import lib 3 | 4 | lib - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> lib.py:1:1 + | + 1 | a = 10 + | ------ | "); } @@ -620,23 +598,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | a: str = "test" 3 | 4 | a - | ^ + | ^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str + | "#); } #[test] @@ -648,21 +626,21 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:2:10 + | + 2 | a: str = "test" + | ^^^^^^ Clicking here + | + info: Found 1 type definition --> stdlib/builtins.pyi:915:7 | 914 | @disjoint_base 915 | class str(Sequence[str]): - | ^^^ + | --- 916 | """str(object='') -> str 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str | - info: Source - --> main.py:2:10 - | - 2 | a: str = "test" - | ^^^^^^ - | "#); } @@ -675,17 +653,17 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:12 - | - 2 | type Alias[T: int = bool] = list[T] - | ^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:34 | 2 | type Alias[T: int = bool] = list[T] - | ^ + | ^ Clicking here + | + info: Found 1 type definition + --> main.py:2:12 + | + 2 | type Alias[T: int = bool] = list[T] + | - | "); } @@ -699,17 +677,17 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:14 - | - 2 | type Alias[**P = [int, str]] = Callable[P, int] - | ^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:41 | 2 | type Alias[**P = [int, str]] = Callable[P, int] - | ^ + | ^ Clicking here + | + info: Found 1 type definition + --> main.py:2:14 + | + 2 | type Alias[**P = 
[int, str]] = Callable[P, int] + | - | "); } @@ -756,23 +734,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:2:5 + | + 2 | a: "MyClass" = 1 + | ^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 type definition --> main.py:4:7 | 2 | a: "MyClass" = 1 3 | 4 | class MyClass: - | ^^^^^^^ + | ------- 5 | """some docs""" | - info: Source - --> main.py:2:5 - | - 2 | a: "MyClass" = 1 - | ^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -802,41 +780,31 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> main.py:4:7 - | - 2 | a: "None | MyClass" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ - 5 | """some docs""" - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:4 | 2 | a: "None | MyClass" = 1 - | ^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^ Clicking here 3 | 4 | class MyClass: | - - info[goto-type-definition]: Type definition - --> stdlib/types.pyi:950:11 + info: Found 2 type definitions + --> main.py:4:7 + | + 2 | a: "None | MyClass" = 1 + 3 | + 4 | class MyClass: + | ------- + 5 | """some docs""" + | + ::: stdlib/types.pyi:950:11 | 948 | if sys.version_info >= (3, 10): 949 | @final 950 | class NoneType: - | ^^^^^^^^ + | -------- 951 | """The type of the None singleton.""" | - info: Source - --> main.py:2:4 - | - 2 | a: "None | MyClass" = 1 - | ^^^^^^^^^^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -866,41 +834,31 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> main.py:4:7 - | - 2 | a: "None | MyClass" = 1 - 3 | - 4 | class MyClass: - | ^^^^^^^ - 5 | """some docs""" - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:4 | 2 | a: "None | MyClass" = 1 - | ^^^^^^^^^^^^^^^^ + | ^^^^^^^^^^^^^^^^ Clicking here 3 | 4 | class MyClass: | - - info[goto-type-definition]: Type definition - --> stdlib/types.pyi:950:11 + info: Found 2 type definitions + --> main.py:4:7 + | + 2 | a: "None | MyClass" = 1 + 3 | + 4 | class MyClass: + | ------- + 5 | """some docs""" + | + ::: stdlib/types.pyi:950:11 | 948 | if sys.version_info >= (3, 10): 949 | @final 950 | class NoneType: - | ^^^^^^^^ + | -------- 951 | """The type of the None singleton.""" | - info: Source - --> main.py:2:4 - | - 2 | a: "None | MyClass" = 1 - | ^^^^^^^^^^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -916,23 +874,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:2:4 + | + 2 | a: "MyClass |" = 1 + | ^^^^^^^^^^^ Clicking here + 3 | + 4 | class MyClass: + | + info: Found 1 type definition --> stdlib/ty_extensions.pyi:20:1 | 19 | # Types 20 | Unknown = object() - | ^^^^^^^ + | ------- 21 | AlwaysTruthy = object() 22 | AlwaysFalsy = object() | - info: Source - --> main.py:2:4 - | - 2 | a: "MyClass |" = 1 - | ^^^^^^^^^^^ - 3 | - 4 | class MyClass: - | "#); } @@ -973,21 +931,21 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:2:6 + | + 2 | ab: "ab" + | ^^ Clicking here + | + info: Found 1 type definition --> stdlib/ty_extensions.pyi:20:1 | 19 | # Types 20 | Unknown = object() - | ^^^^^^^ + | ------- 21 | AlwaysTruthy = object() 22 | 
AlwaysFalsy = object() | - info: Source - --> main.py:2:6 - | - 2 | ab: "ab" - | ^^ - | "#); } @@ -1000,21 +958,21 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> main.py:2:5 + | + 2 | x: "foobar" + | ^^^^^^ Clicking here + | + info: Found 1 type definition --> stdlib/ty_extensions.pyi:20:1 | 19 | # Types 20 | Unknown = object() - | ^^^^^^^ + | ------- 21 | AlwaysTruthy = object() 22 | AlwaysFalsy = object() | - info: Source - --> main.py:2:5 - | - 2 | x: "foobar" - | ^^^^^^ - | "#); } @@ -1160,23 +1118,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> main.py:2:7 - | - 2 | class Click: - | ^^^^^ - 3 | __match_args__ = ("position", "button") - 4 | def __init__(self, pos, btn): - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:10:14 | 8 | def my_func(event: Click): 9 | match event: 10 | case Click(x, button=ab): - | ^^^^^ + | ^^^^^ Clicking here 11 | x = ab | + info: Found 1 type definition + --> main.py:2:7 + | + 2 | class Click: + | ----- + 3 | __match_args__ = ("position", "button") + 4 | def __init__(self, pos, btn): + | "#); } @@ -1209,17 +1167,17 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | ^^ Clicking here | - info: Source + info: Found 1 type definition --> main.py:2:13 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | -- | "); } @@ -1233,17 +1191,17 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:13 - | - 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:2:37 | 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] - | ^^ + | ^^ Clicking here + | + info: Found 1 type definition + --> main.py:2:13 + | + 2 | type Alias1[AB: int = bool] = tuple[AB, list[AB]] + | -- | "); } @@ -1305,23 +1263,23 @@ mod tests { ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:6 | 2 | def test(a: str): ... 3 | 4 | test(a= "123") - | ^ + | ^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str + | "#); } @@ -1339,23 +1297,23 @@ mod tests { // the keyword is typed as a string. It's only the passed argument that // is an int. Navigating to `str` would match pyright's behavior. assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:348:7 - | - 347 | @disjoint_base - 348 | class int: - | ^^^ - 349 | """int([x]) -> integer - 350 | int(x, base=10) -> integer - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:6 | 2 | def test(a: str): ... 
3 | 4 | test(a= 123) - | ^ + | ^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:348:7 + | + 347 | @disjoint_base + 348 | class int: + | --- + 349 | """int([x]) -> integer + 350 | int(x, base=10) -> integer + | "#); } @@ -1372,23 +1330,23 @@ f(**kwargs) ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:2920:7 - | - 2919 | @disjoint_base - 2920 | class dict(MutableMapping[_KT, _VT]): - | ^^^^ - 2921 | """dict() -> new empty dictionary - 2922 | dict(mapping) -> new dictionary initialized from a mapping object's - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:6:5 | 4 | kwargs = { "name": "test"} 5 | 6 | f(**kwargs) - | ^^^^^^ + | ^^^^^^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:2920:7 + | + 2919 | @disjoint_base + 2920 | class dict(MutableMapping[_KT, _VT]): + | ---- + 2921 | """dict() -> new empty dictionary + 2922 | dict(mapping) -> new dictionary initialized from a mapping object's + | "#); } @@ -1410,25 +1368,25 @@ def outer(): // Should find the variable declaration in the outer scope, not the nonlocal statement assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:8:16 | 6 | nonlocal x 7 | x = "modified" 8 | return x # Should find the nonlocal x declaration in outer scope - | ^ + | ^ Clicking here 9 | 10 | return inner | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str + | "#); } @@ -1467,23 +1425,23 @@ def function(): // Should find the global variable declaration, not the global statement assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:7:12 | 5 | global global_var 6 | global_var = "modified" 7 | return global_var # Should find the global variable declaration - | ^^^^^^^^^^ + | ^^^^^^^^^^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str + | "#); } @@ -1514,22 +1472,22 @@ def function(): ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:3:5 | 2 | def foo(a: str): 3 | a - | ^ + | ^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> 
str + | "#); } @@ -1547,20 +1505,20 @@ def function(): ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:7 - | - 2 | class X: - | ^ - 3 | def foo(a, b): ... - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:7:1 | 5 | x = X() 6 | 7 | x.foo() - | ^ + | ^ Clicking here + | + info: Found 1 type definition + --> main.py:2:7 + | + 2 | class X: + | - + 3 | def foo(a, b): ... | "); } @@ -1576,21 +1534,21 @@ def function(): ); assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> main.py:2:5 - | - 2 | def foo(a, b): ... - | ^^^ - 3 | - 4 | foo() - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:1 | 2 | def foo(a, b): ... 3 | 4 | foo() - | ^^^ + | ^^^ Clicking here + | + info: Found 1 type definition + --> main.py:2:5 + | + 2 | def foo(a, b): ... + | --- + 3 | + 4 | foo() | "); } @@ -1606,23 +1564,23 @@ def function(): ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:915:7 - | - 914 | @disjoint_base - 915 | class str(Sequence[str]): - | ^^^ - 916 | """str(object='') -> str - 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:4:15 | 2 | def foo(a: str | None, b): 3 | if a is not None: 4 | print(a) - | ^ + | ^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:915:7 + | + 914 | @disjoint_base + 915 | class str(Sequence[str]): + | --- + 916 | """str(object='') -> str + 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str + | "#); } @@ -1636,39 +1594,30 @@ def function(): ); assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/types.pyi:950:11 - | - 948 | if sys.version_info >= (3, 10): - 949 | @final - 950 | class NoneType: - | ^^^^^^^^ - 951 | """The type of the None singleton.""" - | - info: Source + info[goto-type definition]: Go to type definition --> main.py:3:5 | 2 | def foo(a: str | None, b): 3 | a - | ^ + | ^ Clicking here | - - info[goto-type-definition]: Type definition + info: Found 2 type definitions --> stdlib/builtins.pyi:915:7 | 914 | @disjoint_base 915 | class str(Sequence[str]): - | ^^^ + | --- 916 | """str(object='') -> str 917 | str(bytes_or_buffer[, encoding[, errors]]) -> str | - info: Source - --> main.py:3:5 - | - 2 | def foo(a: str | None, b): - 3 | a - | ^ - | + ::: stdlib/types.pyi:950:11 + | + 948 | if sys.version_info >= (3, 10): + 949 | @final + 950 | class NoneType: + | -------- + 951 | """The type of the None singleton.""" + | "#); } @@ -1694,18 +1643,18 @@ def function(): // The module is the correct type definition assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> mypackage/subpkg/__init__.py:1:1 - | - | - info: Source + info[goto-type definition]: Go to type definition --> mypackage/__init__.py:4:5 | 2 | from .subpkg.submod import val 3 | 4 | x = subpkg - | ^^^^^^ + | ^^^^^^ Clicking here | + info: Found 1 type definition + --> mypackage/subpkg/__init__.py:1:1 + | + | "); } @@ -1731,18 +1680,18 @@ def function(): // The module is the correct type definition assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> mypackage/subpkg/__init__.py:1:1 - | - | - info: Source + info[goto-type definition]: Go to type definition --> mypackage/__init__.py:2:7 | 2 
| from .subpkg.submod import val - | ^^^^^^ + | ^^^^^^ Clicking here 3 | 4 | x = subpkg | + info: Found 1 type definition + --> mypackage/subpkg/__init__.py:1:1 + | + | "); } @@ -1768,23 +1717,23 @@ def function(): // Unknown is correct, `submod` is not in scope assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition - --> stdlib/ty_extensions.pyi:20:1 - | - 19 | # Types - 20 | Unknown = object() - | ^^^^^^^ - 21 | AlwaysTruthy = object() - 22 | AlwaysFalsy = object() - | - info: Source + info[goto-type definition]: Go to type definition --> mypackage/__init__.py:4:5 | 2 | from .subpkg.submod import val 3 | 4 | x = submod - | ^^^^^^ + | ^^^^^^ Clicking here | + info: Found 1 type definition + --> stdlib/ty_extensions.pyi:20:1 + | + 19 | # Types + 20 | Unknown = object() + | ------- + 21 | AlwaysTruthy = object() + 22 | AlwaysFalsy = object() + | "); } @@ -1810,20 +1759,20 @@ def function(): // The module is correct assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> mypackage/__init__.py:2:14 + | + 2 | from .subpkg.submod import val + | ^^^^^^ Clicking here + 3 | + 4 | x = submod + | + info: Found 1 type definition --> mypackage/subpkg/submod.py:1:1 | 1 | / 2 | | val: int = 0 - | |_____________^ - | - info: Source - --> mypackage/__init__.py:2:14 - | - 2 | from .subpkg.submod import val - | ^^^^^^ - 3 | - 4 | x = submod + | |_____________- | "); } @@ -1849,20 +1798,20 @@ def function(): // The module is correct assert_snapshot!(test.goto_type_definition(), @r" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> mypackage/__init__.py:2:7 + | + 2 | from .subpkg import subpkg + | ^^^^^^ Clicking here + 3 | + 4 | x = subpkg + | + info: Found 1 type definition --> mypackage/subpkg/__init__.py:1:1 | 1 | / 2 | | subpkg: int = 10 - | |_________________^ - | - info: Source - --> mypackage/__init__.py:2:7 - | - 2 | from .subpkg import subpkg - | ^^^^^^ - 3 | - 4 | x = subpkg + | |_________________- | "); } @@ -1888,23 +1837,23 @@ def function(): // `int` is correct assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition + info[goto-type definition]: Go to type definition + --> mypackage/__init__.py:2:21 + | + 2 | from .subpkg import subpkg + | ^^^^^^ Clicking here + 3 | + 4 | x = subpkg + | + info: Found 1 type definition --> stdlib/builtins.pyi:348:7 | 347 | @disjoint_base 348 | class int: - | ^^^ + | --- 349 | """int([x]) -> integer 350 | int(x, base=10) -> integer | - info: Source - --> mypackage/__init__.py:2:21 - | - 2 | from .subpkg import subpkg - | ^^^^^^ - 3 | - 4 | x = subpkg - | "#); } @@ -1929,31 +1878,31 @@ def function(): // `int` is correct assert_snapshot!(test.goto_type_definition(), @r#" - info[goto-type-definition]: Type definition - --> stdlib/builtins.pyi:348:7 - | - 347 | @disjoint_base - 348 | class int: - | ^^^ - 349 | """int([x]) -> integer - 350 | int(x, base=10) -> integer - | - info: Source + info[goto-type definition]: Go to type definition --> mypackage/__init__.py:4:5 | 2 | from .subpkg import subpkg 3 | 4 | x = subpkg - | ^^^^^^ + | ^^^^^^ Clicking here | + info: Found 1 type definition + --> stdlib/builtins.pyi:348:7 + | + 347 | @disjoint_base + 348 | class int: + | --- + 349 | """int([x]) -> integer + 350 | int(x, base=10) -> integer + | "#); } impl CursorTest { fn goto_type_definition(&self) -> String { - let Some(targets) = + let 
Some(targets) = salsa::attach(&self.db, || { goto_type_definition(&self.db, self.cursor.file, self.cursor.offset) - else { + }) else { return "No goto target found".to_string(); }; @@ -1961,47 +1910,10 @@ def function(): return "No type definitions found".to_string(); } - let source = targets.range; - self.render_diagnostics( - targets - .into_iter() - .map(|target| GotoTypeDefinitionDiagnostic::new(source, &target)), - ) - } - } - - struct GotoTypeDefinitionDiagnostic { - source: FileRange, - target: FileRange, - } - - impl GotoTypeDefinitionDiagnostic { - fn new(source: FileRange, target: &NavigationTarget) -> Self { - Self { - source, - target: FileRange::new(target.file(), target.focus_range()), - } - } - } - - impl IntoDiagnostic for GotoTypeDefinitionDiagnostic { - fn into_diagnostic(self) -> Diagnostic { - let mut source = SubDiagnostic::new(SubDiagnosticSeverity::Info, "Source"); - source.annotate(Annotation::primary( - Span::from(self.source.file()).with_range(self.source.range()), - )); - - let mut main = Diagnostic::new( - DiagnosticId::Lint(LintName::of("goto-type-definition")), - Severity::Info, - "Type definition".to_string(), - ); - main.annotate(Annotation::primary( - Span::from(self.target.file()).with_range(self.target.range()), - )); - main.sub(source); - - main + self.render_diagnostics([crate::goto_definition::test::GotoDiagnostic::new( + crate::goto_definition::test::GotoAction::TypeDefinition, + targets, + )]) } } } diff --git a/crates/ty_ide/src/hover.rs b/crates/ty_ide/src/hover.rs index 8f9add508a..8430a46edc 100644 --- a/crates/ty_ide/src/hover.rs +++ b/crates/ty_ide/src/hover.rs @@ -3610,6 +3610,20 @@ def function(): "); } + #[test] + fn hover_tuple_assignment_target() { + let test = CursorTest::builder() + .source( + "test.py", + r#" + (x, y) = "test", 10 + "#, + ) + .build(); + + assert_snapshot!(test.hover(), @"Hover provided no content"); + } + impl CursorTest { fn hover(&self) -> String { use std::fmt::Write; diff --git a/crates/ty_ide/src/inlay_hints.rs b/crates/ty_ide/src/inlay_hints.rs index 1d26d3cdd2..d0742f58ec 100644 --- a/crates/ty_ide/src/inlay_hints.rs +++ b/crates/ty_ide/src/inlay_hints.rs @@ -19,11 +19,22 @@ pub struct InlayHint { } impl InlayHint { - fn variable_type(expr: &Expr, ty: Type, db: &dyn Db, allow_edits: bool) -> Self { + fn variable_type( + expr: &Expr, + rhs: &Expr, + ty: Type, + db: &dyn Db, + allow_edits: bool, + ) -> Option { let position = expr.range().end(); // Render the type to a string, and get subspans for all the types that make it up let details = ty.display(db).to_string_parts(); + // Filter out a reptitive hints like `x: T = T()` + if call_matches_name(rhs, &details.label) { + return None; + } + // Ok so the idea here is that we potentially have a random soup of spans here, // and each byte of the string can have at most one target associate with it. 
// Thankfully, they were generally pushed in print order, with the inner smaller types @@ -73,12 +84,12 @@ impl InlayHint { vec![] }; - Self { + Some(Self { position, kind: InlayHintKind::Type, label: InlayHintLabel { parts: label_parts }, text_edits, - } + }) } fn call_argument_name( @@ -250,7 +261,7 @@ struct InlayHintVisitor<'a, 'db> { db: &'db dyn Db, model: SemanticModel<'db>, hints: Vec, - in_assignment: bool, + assignment_rhs: Option<&'a Expr>, range: TextRange, settings: &'a InlayHintSettings, in_no_edits_allowed: bool, @@ -262,21 +273,21 @@ impl<'a, 'db> InlayHintVisitor<'a, 'db> { db, model: SemanticModel::new(db, file), hints: Vec::new(), - in_assignment: false, + assignment_rhs: None, range, settings, in_no_edits_allowed: false, } } - fn add_type_hint(&mut self, expr: &Expr, ty: Type<'db>, allow_edits: bool) { + fn add_type_hint(&mut self, expr: &Expr, rhs: &Expr, ty: Type<'db>, allow_edits: bool) { if !self.settings.variable_types { return; } - let inlay_hint = InlayHint::variable_type(expr, ty, self.db, allow_edits); - - self.hints.push(inlay_hint); + if let Some(inlay_hint) = InlayHint::variable_type(expr, rhs, ty, self.db, allow_edits) { + self.hints.push(inlay_hint); + } } fn add_call_argument_name( @@ -299,8 +310,8 @@ impl<'a, 'db> InlayHintVisitor<'a, 'db> { } } -impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { - fn enter_node(&mut self, node: AnyNodeRef<'_>) -> TraversalSignal { +impl<'a> SourceOrderVisitor<'a> for InlayHintVisitor<'a, '_> { + fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal { if self.range.intersect(node.range()).is_some() { TraversalSignal::Traverse } else { @@ -308,7 +319,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { } } - fn visit_stmt(&mut self, stmt: &Stmt) { + fn visit_stmt(&mut self, stmt: &'a Stmt) { let node = AnyNodeRef::from(stmt); if !self.enter_node(node).is_traverse() { @@ -317,7 +328,9 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { match stmt { Stmt::Assign(assign) => { - self.in_assignment = !type_hint_is_excessive_for_expr(&assign.value); + if !type_hint_is_excessive_for_expr(&assign.value) { + self.assignment_rhs = Some(&*assign.value); + } if !annotations_are_valid_syntax(assign) { self.in_no_edits_allowed = true; } @@ -325,7 +338,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { self.visit_expr(target); } self.in_no_edits_allowed = false; - self.in_assignment = false; + self.assignment_rhs = None; self.visit_expr(&assign.value); @@ -344,22 +357,24 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { source_order::walk_stmt(self, stmt); } - fn visit_expr(&mut self, expr: &'_ Expr) { + fn visit_expr(&mut self, expr: &'a Expr) { match expr { Expr::Name(name) => { - if self.in_assignment { + if let Some(rhs) = self.assignment_rhs { if name.ctx.is_store() { - let ty = expr.inferred_type(&self.model); - self.add_type_hint(expr, ty, !self.in_no_edits_allowed); + if let Some(ty) = expr.inferred_type(&self.model) { + self.add_type_hint(expr, rhs, ty, !self.in_no_edits_allowed); + } } } source_order::walk_expr(self, expr); } Expr::Attribute(attribute) => { - if self.in_assignment { + if let Some(rhs) = self.assignment_rhs { if attribute.ctx.is_store() { - let ty = expr.inferred_type(&self.model); - self.add_type_hint(expr, ty, !self.in_no_edits_allowed); + if let Some(ty) = expr.inferred_type(&self.model) { + self.add_type_hint(expr, rhs, ty, !self.in_no_edits_allowed); + } } } source_order::walk_expr(self, expr); @@ -416,6 +431,26 @@ fn 
arg_matches_name(arg_or_keyword: &ArgOrKeyword, name: &str) -> bool { } } +/// Given a function call, check if the expression is the "same name" +/// as the function being called. +/// +/// This allows us to filter out reptitive inlay hints like `x: T = T(...)`. +/// While still allowing non-trivial ones like `x: T[U] = T()`. +fn call_matches_name(expr: &Expr, name: &str) -> bool { + // Only care about function calls + let Expr::Call(call) = expr else { + return false; + }; + + match &*call.func { + // `x: T = T()` is a match + Expr::Name(expr_name) => expr_name.id.as_str() == name, + // `x: T = a.T()` is a match + Expr::Attribute(expr_attribute) => expr_attribute.attr.as_str() == name, + _ => false, + } +} + /// Given an expression that's the RHS of an assignment, would it be excessive to /// emit an inlay type hint for the variable assigned to it? /// @@ -1829,35 +1864,16 @@ mod tests { ", ); - assert_snapshot!(test.inlay_hints(), @r#" + assert_snapshot!(test.inlay_hints(), @r" class A: def __init__(self, y): - self.x[: int] = int(1) + self.x = int(1) self.y[: Unknown] = y - a[: A] = A([y=]2) - a.y[: int] = int(3) + a = A([y=]2) + a.y = int(3) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> stdlib/builtins.pyi:348:7 - | - 347 | @disjoint_base - 348 | class int: - | ^^^ - 349 | """int([x]) -> integer - 350 | int(x, base=10) -> integer - | - info: Source - --> main2.py:4:18 - | - 2 | class A: - 3 | def __init__(self, y): - 4 | self.x[: int] = int(1) - | ^^^ - 5 | self.y[: Unknown] = y - | - info[inlay-hint-location]: Inlay Hint Target --> stdlib/ty_extensions.pyi:20:1 | @@ -1871,29 +1887,11 @@ mod tests { --> main2.py:5:18 | 3 | def __init__(self, y): - 4 | self.x[: int] = int(1) + 4 | self.x = int(1) 5 | self.y[: Unknown] = y | ^^^^^^^ 6 | - 7 | a[: A] = A([y=]2) - | - - info[inlay-hint-location]: Inlay Hint Target - --> main.py:2:7 - | - 2 | class A: - | ^ - 3 | def __init__(self, y): - 4 | self.x = int(1) - | - info: Source - --> main2.py:7:5 - | - 5 | self.y[: Unknown] = y - 6 | - 7 | a[: A] = A([y=]2) - | ^ - 8 | a.y[: int] = int(3) + 7 | a = A([y=]2) | info[inlay-hint-location]: Inlay Hint Target @@ -1906,30 +1904,13 @@ mod tests { 5 | self.y = y | info: Source - --> main2.py:7:13 + --> main2.py:7:8 | 5 | self.y[: Unknown] = y 6 | - 7 | a[: A] = A([y=]2) - | ^ - 8 | a.y[: int] = int(3) - | - - info[inlay-hint-location]: Inlay Hint Target - --> stdlib/builtins.pyi:348:7 - | - 347 | @disjoint_base - 348 | class int: - | ^^^ - 349 | """int([x]) -> integer - 350 | int(x, base=10) -> integer - | - info: Source - --> main2.py:8:7 - | - 7 | a[: A] = A([y=]2) - 8 | a.y[: int] = int(3) - | ^^^ + 7 | a = A([y=]2) + | ^ + 8 | a.y = int(3) | --------------------------------------------- @@ -1938,12 +1919,12 @@ mod tests { class A: def __init__(self, y): - self.x: int = int(1) + self.x = int(1) self.y: Unknown = y - a: A = A(2) - a.y: int = int(3) - "#); + a = A(2) + a.y = int(3) + "); } #[test] @@ -2937,31 +2918,12 @@ mod tests { def __init__(self): self.x: int = 1 - x[: MyClass] = MyClass() + x = MyClass() y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) a[: MyClass], b[: MyClass] = MyClass(), MyClass() c[: MyClass], d[: MyClass] = (MyClass(), MyClass()) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> main.py:2:7 - | - 2 | class MyClass: - | ^^^^^^^ - 3 | def __init__(self): - 4 | self.x: int = 1 - | - info: Source - --> main2.py:6:5 - | - 4 | self.x: int = 1 - 5 | - 6 | x[: MyClass] = 
MyClass() - | ^^^^^^^ - 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) - 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() - | - info[inlay-hint-location]: Inlay Hint Target --> stdlib/builtins.pyi:2695:7 | @@ -2973,7 +2935,7 @@ mod tests { info: Source --> main2.py:7:5 | - 6 | x[: MyClass] = MyClass() + 6 | x = MyClass() 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) | ^^^^^ 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() @@ -2991,7 +2953,7 @@ mod tests { info: Source --> main2.py:7:11 | - 6 | x[: MyClass] = MyClass() + 6 | x = MyClass() 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) | ^^^^^^^ 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() @@ -3009,7 +2971,7 @@ mod tests { info: Source --> main2.py:7:20 | - 6 | x[: MyClass] = MyClass() + 6 | x = MyClass() 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) | ^^^^^^^ 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() @@ -3027,7 +2989,7 @@ mod tests { info: Source --> main2.py:8:5 | - 6 | x[: MyClass] = MyClass() + 6 | x = MyClass() 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() | ^^^^^^^ @@ -3045,7 +3007,7 @@ mod tests { info: Source --> main2.py:8:19 | - 6 | x[: MyClass] = MyClass() + 6 | x = MyClass() 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() | ^^^^^^^ @@ -3094,7 +3056,7 @@ mod tests { def __init__(self): self.x: int = 1 - x: MyClass = MyClass() + x = MyClass() y: tuple[MyClass, MyClass] = (MyClass(), MyClass()) a, b = MyClass(), MyClass() c, d = (MyClass(), MyClass()) @@ -4097,31 +4059,11 @@ mod tests { def __init__(self): self.x: int = 1 self.y: int = 2 - val[: MyClass] = MyClass() + val = MyClass() foo(val.x) foo([x=]val.y) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> main.py:3:7 - | - 2 | def foo(x: int): pass - 3 | class MyClass: - | ^^^^^^^ - 4 | def __init__(self): - 5 | self.x: int = 1 - | - info: Source - --> main2.py:7:7 - | - 5 | self.x: int = 1 - 6 | self.y: int = 2 - 7 | val[: MyClass] = MyClass() - | ^^^^^^^ - 8 | - 9 | foo(val.x) - | - info[inlay-hint-location]: Inlay Hint Target --> main.py:2:9 | @@ -4137,20 +4079,6 @@ mod tests { 10 | foo([x=]val.y) | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - def foo(x: int): pass - class MyClass: - def __init__(self): - self.x: int = 1 - self.y: int = 2 - val: MyClass = MyClass() - - foo(val.x) - foo(val.y) "); } @@ -4176,31 +4104,11 @@ mod tests { def __init__(self): self.x: int = 1 self.y: int = 2 - x[: MyClass] = MyClass() + x = MyClass() foo(x.x) foo([x=]x.y) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> main.py:3:7 - | - 2 | def foo(x: int): pass - 3 | class MyClass: - | ^^^^^^^ - 4 | def __init__(self): - 5 | self.x: int = 1 - | - info: Source - --> main2.py:7:5 - | - 5 | self.x: int = 1 - 6 | self.y: int = 2 - 7 | x[: MyClass] = MyClass() - | ^^^^^^^ - 8 | - 9 | foo(x.x) - | - info[inlay-hint-location]: Inlay Hint Target --> main.py:2:9 | @@ -4216,20 +4124,6 @@ mod tests { 10 | foo([x=]x.y) | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - def foo(x: int): pass - class MyClass: - def __init__(self): - self.x: int = 1 - self.y: int = 2 - x: MyClass = MyClass() - - foo(x.x) - foo(x.y) "); } @@ -4258,31 +4152,11 @@ mod tests { return 1 def y() -> 
int: return 2 - val[: MyClass] = MyClass() + val = MyClass() foo(val.x()) foo([x=]val.y()) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> main.py:3:7 - | - 2 | def foo(x: int): pass - 3 | class MyClass: - | ^^^^^^^ - 4 | def __init__(self): - 5 | def x() -> int: - | - info: Source - --> main2.py:9:7 - | - 7 | def y() -> int: - 8 | return 2 - 9 | val[: MyClass] = MyClass() - | ^^^^^^^ - 10 | - 11 | foo(val.x()) - | - info[inlay-hint-location]: Inlay Hint Target --> main.py:2:9 | @@ -4298,22 +4172,6 @@ mod tests { 12 | foo([x=]val.y()) | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - def foo(x: int): pass - class MyClass: - def __init__(self): - def x() -> int: - return 1 - def y() -> int: - return 2 - val: MyClass = MyClass() - - foo(val.x()) - foo(val.y()) "); } @@ -4346,31 +4204,11 @@ mod tests { return 1 def y() -> List[int]: return 2 - val[: MyClass] = MyClass() + val = MyClass() foo(val.x()[0]) foo([x=]val.y()[1]) --------------------------------------------- - info[inlay-hint-location]: Inlay Hint Target - --> main.py:5:7 - | - 4 | def foo(x: int): pass - 5 | class MyClass: - | ^^^^^^^ - 6 | def __init__(self): - 7 | def x() -> List[int]: - | - info: Source - --> main2.py:11:7 - | - 9 | def y() -> List[int]: - 10 | return 2 - 11 | val[: MyClass] = MyClass() - | ^^^^^^^ - 12 | - 13 | foo(val.x()[0]) - | - info[inlay-hint-location]: Inlay Hint Target --> main.py:4:9 | @@ -4388,24 +4226,6 @@ mod tests { 14 | foo([x=]val.y()[1]) | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - from typing import List - - def foo(x: int): pass - class MyClass: - def __init__(self): - def x() -> List[int]: - return 1 - def y() -> List[int]: - return 2 - val: MyClass = MyClass() - - foo(val.x()[0]) - foo(val.y()[1]) "); } @@ -4697,7 +4517,7 @@ mod tests { class Foo: def __init__(self, x: int): pass Foo([x=]1) - f[: Foo] = Foo([x=]1) + f = Foo([x=]1) --------------------------------------------- info[inlay-hint-location]: Inlay Hint Target --> main.py:3:24 @@ -4715,24 +4535,7 @@ mod tests { 3 | def __init__(self, x: int): pass 4 | Foo([x=]1) | ^ - 5 | f[: Foo] = Foo([x=]1) - | - - info[inlay-hint-location]: Inlay Hint Target - --> main.py:2:7 - | - 2 | class Foo: - | ^^^ - 3 | def __init__(self, x: int): pass - 4 | Foo(1) - | - info: Source - --> main2.py:5:5 - | - 3 | def __init__(self, x: int): pass - 4 | Foo([x=]1) - 5 | f[: Foo] = Foo([x=]1) - | ^^^ + 5 | f = Foo([x=]1) | info[inlay-hint-location]: Inlay Hint Target @@ -4745,22 +4548,13 @@ mod tests { 5 | f = Foo(1) | info: Source - --> main2.py:5:17 + --> main2.py:5:10 | 3 | def __init__(self, x: int): pass 4 | Foo([x=]1) - 5 | f[: Foo] = Foo([x=]1) - | ^ + 5 | f = Foo([x=]1) + | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - class Foo: - def __init__(self, x: int): pass - Foo(1) - f: Foo = Foo(1) "); } @@ -4778,7 +4572,7 @@ mod tests { class Foo: def __new__(cls, x: int): pass Foo([x=]1) - f[: Foo] = Foo([x=]1) + f = Foo([x=]1) --------------------------------------------- info[inlay-hint-location]: Inlay Hint Target --> main.py:3:22 @@ -4796,24 +4590,7 @@ mod tests { 3 | def __new__(cls, x: int): pass 4 | Foo([x=]1) | ^ - 5 | f[: Foo] = Foo([x=]1) - | - - info[inlay-hint-location]: Inlay Hint Target - --> main.py:2:7 - | - 2 | class Foo: - | ^^^ - 3 | def __new__(cls, x: int): pass - 4 | Foo(1) - | - 
info: Source - --> main2.py:5:5 - | - 3 | def __new__(cls, x: int): pass - 4 | Foo([x=]1) - 5 | f[: Foo] = Foo([x=]1) - | ^^^ + 5 | f = Foo([x=]1) | info[inlay-hint-location]: Inlay Hint Target @@ -4826,22 +4603,13 @@ mod tests { 5 | f = Foo(1) | info: Source - --> main2.py:5:17 + --> main2.py:5:10 | 3 | def __new__(cls, x: int): pass 4 | Foo([x=]1) - 5 | f[: Foo] = Foo([x=]1) - | ^ + 5 | f = Foo([x=]1) + | ^ | - - --------------------------------------------- - info[inlay-hint-edit]: File after edits - info: Source - - class Foo: - def __new__(cls, x: int): pass - Foo(1) - f: Foo = Foo(1) "); } diff --git a/crates/ty_ide/src/lib.rs b/crates/ty_ide/src/lib.rs index 28989dcf8f..7d65984f8e 100644 --- a/crates/ty_ide/src/lib.rs +++ b/crates/ty_ide/src/lib.rs @@ -230,6 +230,11 @@ impl NavigationTargets { fn is_empty(&self) -> bool { self.0.is_empty() } + + #[cfg(test)] + fn len(&self) -> usize { + self.0.len() + } } impl IntoIterator for NavigationTargets { diff --git a/crates/ty_ide/src/rename.rs b/crates/ty_ide/src/rename.rs index fe51f06615..ecea0ffc36 100644 --- a/crates/ty_ide/src/rename.rs +++ b/crates/ty_ide/src/rename.rs @@ -98,7 +98,9 @@ mod tests { impl CursorTest { fn prepare_rename(&self) -> String { - let Some(range) = can_rename(&self.db, self.cursor.file, self.cursor.offset) else { + let Some(range) = salsa::attach(&self.db, || { + can_rename(&self.db, self.cursor.file, self.cursor.offset) + }) else { return "Cannot rename".to_string(); }; @@ -106,13 +108,13 @@ mod tests { } fn rename(&self, new_name: &str) -> String { - let Some(_) = can_rename(&self.db, self.cursor.file, self.cursor.offset) else { - return "Cannot rename".to_string(); - }; + let rename_results = salsa::attach(&self.db, || { + can_rename(&self.db, self.cursor.file, self.cursor.offset)?; - let Some(rename_results) = rename(&self.db, self.cursor.file, self.cursor.offset, new_name) - else { + }); + + let Some(rename_results) = rename_results else { return "Cannot rename".to_string(); }; diff --git a/crates/ty_ide/src/semantic_tokens.rs b/crates/ty_ide/src/semantic_tokens.rs index 5667d1506f..79b37265aa 100644 --- a/crates/ty_ide/src/semantic_tokens.rs +++ b/crates/ty_ide/src/semantic_tokens.rs @@ -273,7 +273,7 @@ impl<'db> SemanticTokenVisitor<'db> { } // Fall back to type-based classification. 
- let ty = name.inferred_type(self.model);
+ let ty = name.inferred_type(self.model).unwrap_or(Type::unknown());
 let name_str = name.id.as_str();
 self.classify_from_type_and_name_str(ty, name_str)
 }
@@ -302,7 +302,9 @@ impl<'db> SemanticTokenVisitor<'db> {
 let parsed = parsed_module(db, definition.file(db));
 let ty = parameter.node(&parsed.load(db)).inferred_type(&model);
- if let Type::TypeVar(type_var) = ty {
+ if let Some(ty) = ty
+ && let Type::TypeVar(type_var) = ty
+ {
 match type_var.typevar(db).kind(db) {
 TypeVarKind::TypingSelf => {
 return Some((SemanticTokenType::SelfParameter, modifiers));
 }
@@ -344,9 +346,9 @@ impl<'db> SemanticTokenVisitor<'db> {
 _ => None,
 };
- if let Some(value) = value {
- let value_ty = value.inferred_type(&model);
-
+ if let Some(value) = value
+ && let Some(value_ty) = value.inferred_type(&model)
+ {
 if value_ty.is_class_literal()
 || value_ty.is_subclass_of()
 || value_ty.is_generic_alias()
@@ -710,12 +712,12 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
 for alias in &import.names {
 if let Some(asname) = &alias.asname {
 // For aliased imports (from X import Y as Z), classify Z based on what Y is
- let ty = alias.inferred_type(self.model);
+ let ty = alias.inferred_type(self.model).unwrap_or(Type::unknown());
 let (token_type, modifiers) = self.classify_from_alias_type(ty, asname);
 self.add_token(asname, token_type, modifiers);
 } else {
 // For direct imports (from X import Y), use semantic classification
- let ty = alias.inferred_type(self.model);
+ let ty = alias.inferred_type(self.model).unwrap_or(Type::unknown());
 let (token_type, modifiers) =
 self.classify_from_alias_type(ty, &alias.name);
 self.add_token(&alias.name, token_type, modifiers);
@@ -835,7 +837,7 @@ impl SourceOrderVisitor<'_> for SemanticTokenVisitor<'_> {
 self.visit_expr(&attr.value);

 // Then add token for the attribute name (e.g., 'path' in 'os.path')
- let ty = expr.inferred_type(self.model);
+ let ty = expr.inferred_type(self.model).unwrap_or(Type::unknown());
 let (token_type, modifiers) = Self::classify_from_type_for_attribute(ty, &attr.attr);
 self.add_token(&attr.attr, token_type, modifiers);
diff --git a/crates/ty_ide/src/signature_help.rs b/crates/ty_ide/src/signature_help.rs
index d79f298dd6..a4746f1563 100644
--- a/crates/ty_ide/src/signature_help.rs
+++ b/crates/ty_ide/src/signature_help.rs
@@ -20,6 +20,7 @@ use ty_python_semantic::semantic_index::definition::Definition;
 use ty_python_semantic::types::ide_support::{
 CallSignatureDetails, call_signature_details, find_active_signature_from_details,
 };
+use ty_python_semantic::types::{ParameterKind, Type};

 // TODO: We may want to add special-case handling for calls to constructors
 // so the class docstring is used in place of (or inaddition to) any docstring
@@ -27,25 +28,29 @@
 /// Information about a function parameter
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub struct ParameterDetails {
+pub struct ParameterDetails<'db> {
 /// The parameter name (e.g., "param1")
 pub name: String,
 /// The parameter label in the signature (e.g., "param1: str")
 pub label: String,
+ /// The annotated type of the parameter, if any
+ pub ty: Option<Type<'db>>,
 /// Documentation specific to the parameter, typically extracted from the
 /// function's docstring
 pub documentation: Option,
+ /// True if the parameter is positional-only.
+ pub is_positional_only: bool,
 }

 /// Information about a function signature
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub struct SignatureDetails {
+pub struct SignatureDetails<'db> {
 /// Text representation of the full signature (including input parameters and return type).
 pub label: String,
 /// Documentation for the signature, typically from the function's docstring.
 pub documentation: Option,
 /// Information about each of the parameters in left-to-right order.
- pub parameters: Vec<ParameterDetails>,
+ pub parameters: Vec<ParameterDetails<'db>>,
 /// Index of the parameter that corresponds to the argument where the
 /// user's cursor is currently positioned.
 pub active_parameter: Option,
@@ -53,18 +58,18 @@
 /// Signature help information for function calls
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub struct SignatureHelpInfo {
+pub struct SignatureHelpInfo<'db> {
 /// Information about each of the signatures for the function call. We
 /// need to handle multiple because of unions, overloads, and composite
 /// calls like constructors (which invoke both __new__ and __init__).
- pub signatures: Vec<SignatureDetails>,
+ pub signatures: Vec<SignatureDetails<'db>>,
 /// Index of the "active signature" which is the first signature where
 /// all arguments that are currently present in the code map to parameters.
 pub active_signature: Option,
 }

 /// Signature help information for function calls at the given position
-pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo> {
+pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo<'_>> {
 let parsed = parsed_module(db, file).load(db);

 // Get the call expression at the given position.
@@ -166,11 +171,11 @@ fn get_argument_index(call_expr: &ast::ExprCall, offset: TextSize) -> usize {
 }

 /// Create signature details from `CallSignatureDetails`.
-fn create_signature_details_from_call_signature_details(
+fn create_signature_details_from_call_signature_details<'db>(
 db: &dyn crate::Db,
- details: &CallSignatureDetails,
+ details: &CallSignatureDetails<'db>,
 current_arg_index: usize,
-) -> SignatureDetails {
+) -> SignatureDetails<'db> {
 let signature_label = details.label.clone();

 let documentation = get_callable_documentation(db, details.definition);
@@ -200,6 +205,8 @@ fn create_signature_details_from_call_signature_details(
 &signature_label,
 documentation.as_ref(),
 &details.parameter_names,
+ &details.parameter_kinds,
+ &details.parameter_types,
 );
 SignatureDetails {
 label: signature_label,
@@ -218,12 +225,14 @@ fn get_callable_documentation(
 }

 /// Create `ParameterDetails` objects from parameter label offsets.
-fn create_parameters_from_offsets(
+fn create_parameters_from_offsets<'db>(
 parameter_offsets: &[TextRange],
 signature_label: &str,
 docstring: Option<&Docstring>,
 parameter_names: &[String],
-) -> Vec<ParameterDetails> {
+ parameter_kinds: &[ParameterKind],
+ parameter_types: &[Option<Type<'db>>],
+) -> Vec<ParameterDetails<'db>> {
 // Extract parameter documentation from the function's docstring if available.
 let param_docs = if let Some(docstring) = docstring {
 docstring.parameter_documentation()
@@ -245,11 +254,18 @@ fn create_parameters_from_offsets(
 // Get the parameter name for documentation lookup.
 let param_name = parameter_names.get(i).map(String::as_str).unwrap_or("");
+ let is_positional_only = matches!(
+ parameter_kinds.get(i),
+ Some(ParameterKind::PositionalOnly { ..
}) + ); + let ty = parameter_types.get(i).copied().flatten(); ParameterDetails { name: param_name.to_string(), label, + ty, documentation: param_docs.get(param_name).cloned(), + is_positional_only, } }) .collect() @@ -1173,7 +1189,7 @@ def ab(a: int, *, c: int): } impl CursorTest { - fn signature_help(&self) -> Option { + fn signature_help(&self) -> Option> { crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset) } diff --git a/crates/ty_ide/src/workspace_symbols.rs b/crates/ty_ide/src/workspace_symbols.rs index a9e5e78820..3224c50baf 100644 --- a/crates/ty_ide/src/workspace_symbols.rs +++ b/crates/ty_ide/src/workspace_symbols.rs @@ -1,4 +1,4 @@ -use crate::symbols::{QueryPattern, SymbolInfo, symbols_for_file_global_only}; +use crate::symbols::{QueryPattern, SymbolInfo, symbols_for_file}; use ruff_db::files::File; use ty_project::Db; @@ -26,7 +26,7 @@ pub fn workspace_symbols(db: &dyn Db, query: &str) -> Vec { for file in files.iter() { let db = db.dyn_clone(); s.spawn(move |_| { - for (_, symbol) in symbols_for_file_global_only(&*db, *file).search(query) { + for (_, symbol) in symbols_for_file(&*db, *file).search(query) { // It seems like we could do better here than // locking `results` for every single symbol, // but this works pretty well as it is. @@ -64,7 +64,7 @@ mod tests { }; #[test] - fn test_workspace_symbols_multi_file() { + fn workspace_symbols_multi_file() { let test = CursorTest::builder() .source( "utils.py", @@ -126,6 +126,30 @@ API_BASE_URL = 'https://api.example.com' "); } + #[test] + fn members() { + let test = CursorTest::builder() + .source( + "utils.py", + " +class Test: + def from_path(): ... +", + ) + .build(); + + assert_snapshot!(test.workspace_symbols("from"), @r" + info[workspace-symbols]: WorkspaceSymbolInfo + --> utils.py:3:9 + | + 2 | class Test: + 3 | def from_path(): ... + | ^^^^^^^^^ + | + info: Method from_path + "); + } + impl CursorTest { fn workspace_symbols(&self, query: &str) -> String { let symbols = workspace_symbols(&self.db, query); diff --git a/crates/ty_project/src/walk.rs b/crates/ty_project/src/walk.rs index 8c9958c416..16a73dd65f 100644 --- a/crates/ty_project/src/walk.rs +++ b/crates/ty_project/src/walk.rs @@ -202,11 +202,16 @@ impl<'a> ProjectFilesWalker<'a> { } } else { // Ignore any non python files to avoid creating too many entries in `Files`. - if entry - .path() - .extension() - .and_then(PySourceType::try_from_extension) - .is_none() + // Unless the file is explicitly passed, we then always assume it's a python file. 
+ let source_type = entry.path().extension().and_then(PySourceType::try_from_extension).or_else(|| { + if entry.depth() == 0 { + Some(PySourceType::Python) + } else { + db.system().source_type(entry.path()) + } + }); + + if source_type.is_none() { return WalkState::Continue; } diff --git a/crates/ty_python_semantic/Cargo.toml b/crates/ty_python_semantic/Cargo.toml index 140eec33be..957126cf3a 100644 --- a/crates/ty_python_semantic/Cargo.toml +++ b/crates/ty_python_semantic/Cargo.toml @@ -33,11 +33,11 @@ camino = { workspace = true } colored = { workspace = true } compact_str = { workspace = true } drop_bomb = { workspace = true } -get-size2 = { workspace = true, features = ["indexmap"]} +get-size2 = { workspace = true, features = ["indexmap", "ordermap"]} indexmap = { workspace = true } itertools = { workspace = true } ordermap = { workspace = true } -salsa = { workspace = true, features = ["compact_str"] } +salsa = { workspace = true, features = ["compact_str", "ordermap"] } thiserror = { workspace = true } tracing = { workspace = true } rustc-hash = { workspace = true } diff --git a/crates/ty_python_semantic/mdtest.py b/crates/ty_python_semantic/mdtest.py index c3260637cc..2acc6f452b 100644 --- a/crates/ty_python_semantic/mdtest.py +++ b/crates/ty_python_semantic/mdtest.py @@ -37,14 +37,16 @@ class MDTestRunner: mdtest_executable: Path | None console: Console filters: list[str] + enable_external: bool - def __init__(self, filters: list[str] | None = None) -> None: + def __init__(self, filters: list[str] | None, enable_external: bool) -> None: self.mdtest_executable = None self.console = Console() self.filters = [ f.removesuffix(".md").replace("/", "_").replace("-", "_") for f in (filters or []) ] + self.enable_external = enable_external def _run_cargo_test(self, *, message_format: Literal["human", "json"]) -> str: return subprocess.check_output( @@ -120,6 +122,7 @@ class MDTestRunner: CLICOLOR_FORCE="1", INSTA_FORCE_PASS="1", INSTA_OUTPUT="none", + MDTEST_EXTERNAL="1" if self.enable_external else "0", ), capture_output=capture_output, text=True, @@ -266,11 +269,19 @@ def main() -> None: nargs="*", help="Partial paths or mangled names, e.g., 'loops/for.md' or 'loops_for'", ) + parser.add_argument( + "--enable-external", + "-e", + action="store_true", + help="Enable tests with external dependencies", + ) args = parser.parse_args() try: - runner = MDTestRunner(filters=args.filters) + runner = MDTestRunner( + filters=args.filters, enable_external=args.enable_external + ) runner.watch() except KeyboardInterrupt: print() diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md index 39a88cff49..ab55503691 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/new_types.md @@ -146,9 +146,10 @@ Foo = NewType(name, int) reveal_type(Foo) # revealed: ``` -## The second argument must be a class type or another newtype +## The base must be a class type or another newtype -Other typing constructs like `Union` are not allowed. +Other typing constructs like `Union` are not _generally_ allowed. (However, see the next section for +a couple special cases.) 
```py from typing_extensions import NewType @@ -167,6 +168,61 @@ on top of that: Foo = NewType("Foo", 42) ``` +## `float` and `complex` special cases + +`float` and `complex` are subject to a special case in the typing spec, which we currently interpret +to mean that `float` in type position is `int | float`, and `complex` in type position is +`int | float | complex`. This is awkward for `NewType`, because as we just tested above, unions +aren't generally valid `NewType` bases. However, `float` and `complex` _are_ valid `NewType` bases, +and we accept the unions they expand into. + +```py +from typing import NewType + +Foo = NewType("Foo", float) +Foo(3.14) +Foo(42) +Foo("hello") # error: [invalid-argument-type] "Argument is incorrect: Expected `int | float`, found `Literal["hello"]`" + +reveal_type(Foo(3.14).__class__) # revealed: type[int] | type[float] +reveal_type(Foo(42).__class__) # revealed: type[int] | type[float] + +Bar = NewType("Bar", complex) +Bar(1 + 2j) +Bar(3.14) +Bar(42) +Bar("goodbye") # error: [invalid-argument-type] + +reveal_type(Bar(1 + 2j).__class__) # revealed: type[int] | type[float] | type[complex] +reveal_type(Bar(3.14).__class__) # revealed: type[int] | type[float] | type[complex] +reveal_type(Bar(42).__class__) # revealed: type[int] | type[float] | type[complex] +``` + +We don't currently try to distinguish between an implicit union (e.g. `float`) and the equivalent +explicit union (e.g. `int | float`), so these two explicit unions are also allowed. But again, most +unions are not allowed: + +```py +Baz = NewType("Baz", int | float) +Baz = NewType("Baz", int | float | complex) +Baz = NewType("Baz", int | str) # error: [invalid-newtype] "invalid base for `typing.NewType`" +``` + +Similarly, a `NewType` of `float` or `complex` is valid as a `Callable` of the corresponding union +type: + +```py +from collections.abc import Callable + +def f(_: Callable[[int | float], Foo]): ... + +f(Foo) + +def g(_: Callable[[int | float | complex], Bar]): ... + +g(Bar) +``` + ## A `NewType` definition must be a simple variable assignment ```py @@ -179,7 +235,7 @@ N: NewType = NewType("N", int) # error: [invalid-newtype] "A `NewType` definiti Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's important that we don't go into infinite recursive loops and crash with a stack overflow. In fact, -this is *why* base type evaluation is deferred; otherwise Salsa itself would crash. +this is _why_ base type evaluation is deferred; otherwise Salsa itself would crash. 
```py from typing_extensions import NewType, reveal_type, cast diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/self.md b/crates/ty_python_semantic/resources/mdtest/annotations/self.md index 57690b3dc3..016cc848b8 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/self.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/self.md @@ -63,6 +63,12 @@ python-version = "3.12" from typing import Self class A: + def __init__(self): + reveal_type(self) # revealed: Self@__init__ + + def __init_subclass__(cls, default_name, **kwargs): + reveal_type(cls) # revealed: type[Self@__init_subclass__] + def implicit_self(self) -> Self: reveal_type(self) # revealed: Self@implicit_self @@ -91,8 +97,7 @@ class A: @classmethod def a_classmethod(cls) -> Self: - # TODO: This should be type[Self@bar] - reveal_type(cls) # revealed: Unknown + reveal_type(cls) # revealed: type[Self@a_classmethod] return cls() @staticmethod @@ -277,8 +282,10 @@ reveal_type(C().method()) # revealed: C ## Class Methods +### Explicit + ```py -from typing import Self, TypeVar +from typing import Self class Shape: def foo(self: Self) -> Self: @@ -293,6 +300,64 @@ class Circle(Shape): ... reveal_type(Shape().foo()) # revealed: Shape reveal_type(Shape.bar()) # revealed: Shape + +reveal_type(Circle().foo()) # revealed: Circle +reveal_type(Circle.bar()) # revealed: Circle +``` + +### Implicit + +```py +from typing import Self + +class Shape: + def foo(self) -> Self: + return self + + @classmethod + def bar(cls) -> Self: + reveal_type(cls) # revealed: type[Self@bar] + return cls() + +class Circle(Shape): ... + +reveal_type(Shape().foo()) # revealed: Shape +reveal_type(Shape.bar()) # revealed: Shape + +reveal_type(Circle().foo()) # revealed: Circle +reveal_type(Circle.bar()) # revealed: Circle +``` + +### Implicit in generic class + +```py +from typing import Self + +class GenericShape[T]: + def foo(self) -> Self: + return self + + @classmethod + def bar(cls) -> Self: + reveal_type(cls) # revealed: type[Self@bar] + return cls() + + @classmethod + def baz[U](cls, u: U) -> "GenericShape[U]": + reveal_type(cls) # revealed: type[Self@baz] + return cls() + +class GenericCircle[T](GenericShape[T]): ... 
+ +reveal_type(GenericShape().foo()) # revealed: GenericShape[Unknown] +reveal_type(GenericShape.bar()) # revealed: GenericShape[Unknown] +reveal_type(GenericShape[int].bar()) # revealed: GenericShape[int] +reveal_type(GenericShape.baz(1)) # revealed: GenericShape[Literal[1]] + +reveal_type(GenericCircle().foo()) # revealed: GenericCircle[Unknown] +reveal_type(GenericCircle.bar()) # revealed: GenericCircle[Unknown] +reveal_type(GenericCircle[int].bar()) # revealed: GenericCircle[int] +reveal_type(GenericCircle.baz(1)) # revealed: GenericShape[Literal[1]] ``` ## Attributes diff --git a/crates/ty_python_semantic/resources/mdtest/annotations/string.md b/crates/ty_python_semantic/resources/mdtest/annotations/string.md index 5777070441..db152ae907 100644 --- a/crates/ty_python_semantic/resources/mdtest/annotations/string.md +++ b/crates/ty_python_semantic/resources/mdtest/annotations/string.md @@ -156,6 +156,9 @@ a: "1 or 2" b: "(x := 1)" # error: [invalid-type-form] c: "1 + 2" +# Regression test for https://github.com/astral-sh/ty/issues/1847 +# error: [invalid-type-form] +c2: "a*(i for i in [])" d: "lambda x: x" e: "x if True else y" f: "{'a': 1, 'b': 2}" diff --git a/crates/ty_python_semantic/resources/mdtest/assignment/augmented.md b/crates/ty_python_semantic/resources/mdtest/assignment/augmented.md index d800c86c6a..cec8c6de43 100644 --- a/crates/ty_python_semantic/resources/mdtest/assignment/augmented.md +++ b/crates/ty_python_semantic/resources/mdtest/assignment/augmented.md @@ -44,7 +44,7 @@ class C: return 42 x = C() -# error: [unsupported-operator] "Operator `-=` is unsupported between objects of type `C` and `Literal[1]`" +# error: [unsupported-operator] "Operator `-=` is not supported between objects of type `C` and `Literal[1]`" x -= 1 reveal_type(x) # revealed: int @@ -79,7 +79,7 @@ def _(flag: bool): f = Foo() - # error: [unsupported-operator] "Operator `+=` is unsupported between objects of type `Foo` and `Literal["Hello, world!"]`" + # error: [unsupported-operator] "Operator `+=` is not supported between objects of type `Foo` and `Literal["Hello, world!"]`" f += "Hello, world!" reveal_type(f) # revealed: int | Unknown diff --git a/crates/ty_python_semantic/resources/mdtest/binary/classes.md b/crates/ty_python_semantic/resources/mdtest/binary/classes.md index 4a3580a8de..e0da23bf50 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/classes.md @@ -27,7 +27,7 @@ python-version = "3.9" class A: ... class B: ... -# error: "Operator `|` is unsupported between objects of type `` and ``" +# error: "Operator `|` is not supported between objects of type `` and ``" reveal_type(A | B) # revealed: Unknown ``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/custom.md b/crates/ty_python_semantic/resources/mdtest/binary/custom.md index d2587b7a75..9bd0852253 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/custom.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/custom.md @@ -79,59 +79,59 @@ reveal_type(Sub() & Sub()) # revealed: Literal["&"] reveal_type(Sub() // Sub()) # revealed: Literal["//"] # No does not implement any of the dunder methods. 
-# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `No` and `No`" reveal_type(No() + No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `-` is not supported between objects of type `No` and `No`" reveal_type(No() - No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `*` is not supported between objects of type `No` and `No`" reveal_type(No() * No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `@` is not supported between objects of type `No` and `No`" reveal_type(No() @ No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `/` is not supported between objects of type `No` and `No`" reveal_type(No() / No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `%` is not supported between objects of type `No` and `No`" reveal_type(No() % No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `**` is not supported between objects of type `No` and `No`" reveal_type(No() ** No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `No` and `No`" reveal_type(No() << No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `No` and `No`" reveal_type(No() >> No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `|` is not supported between objects of type `No` and `No`" reveal_type(No() | No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `^` is not supported between objects of type `No` and `No`" reveal_type(No() ^ No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `&` is not supported between objects of type `No` and `No`" reveal_type(No() & No()) # revealed: Unknown -# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `No` and `No`" +# error: [unsupported-operator] "Operator `//` is not supported between objects of type `No` and `No`" reveal_type(No() // No()) # revealed: Unknown # Yes does not implement any of the reflected dunder methods. 
-# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `No` and `Yes`" reveal_type(No() + Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `-` is not supported between objects of type `No` and `Yes`" reveal_type(No() - Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `*` is not supported between objects of type `No` and `Yes`" reveal_type(No() * Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `@` is not supported between objects of type `No` and `Yes`" reveal_type(No() @ Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `/` is not supported between objects of type `No` and `Yes`" reveal_type(No() / Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `%` is not supported between objects of type `No` and `Yes`" reveal_type(No() % Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `**` is not supported between objects of type `No` and `Yes`" reveal_type(No() ** Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `No` and `Yes`" reveal_type(No() << Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `No` and `Yes`" reveal_type(No() >> Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `|` is not supported between objects of type `No` and `Yes`" reveal_type(No() | Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `^` is not supported between objects of type `No` and `Yes`" reveal_type(No() ^ Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `&` is not supported between objects of type `No` and `Yes`" reveal_type(No() & Yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `No` and `Yes`" +# error: [unsupported-operator] "Operator `//` is not supported between objects of type `No` and `Yes`" reveal_type(No() // Yes()) # revealed: Unknown ``` @@ -307,11 +307,11 @@ class Yes: class Sub(Yes): ... class No: ... 
-# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `` and ``" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `` and ``" reveal_type(Yes + Yes) # revealed: Unknown -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `` and ``" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `` and ``" reveal_type(Sub + Sub) # revealed: Unknown -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `` and ``" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `` and ``" reveal_type(No + No) # revealed: Unknown ``` @@ -336,11 +336,11 @@ def sub() -> type[Sub]: def no() -> type[No]: return No -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[Yes]` and `type[Yes]`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Yes]` and `type[Yes]`" reveal_type(yes() + yes()) # revealed: Unknown -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[Sub]` and `type[Sub]`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Sub]` and `type[Sub]`" reveal_type(sub() + sub()) # revealed: Unknown -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `type[No]` and `type[No]`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[No]` and `type[No]`" reveal_type(no() + no()) # revealed: Unknown ``` @@ -350,30 +350,30 @@ reveal_type(no() + no()) # revealed: Unknown def f(): pass -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f + f) # revealed: Unknown -# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `-` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f - f) # revealed: Unknown -# error: [unsupported-operator] "Operator `*` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `*` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f * f) # revealed: Unknown -# error: [unsupported-operator] "Operator `@` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `@` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f @ f) # revealed: Unknown -# error: [unsupported-operator] "Operator `/` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `/` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f / f) # revealed: Unknown -# error: [unsupported-operator] "Operator `%` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `%` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f % f) # revealed: Unknown 
-# error: [unsupported-operator] "Operator `**` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `**` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f**f) # revealed: Unknown -# error: [unsupported-operator] "Operator `<<` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f << f) # revealed: Unknown -# error: [unsupported-operator] "Operator `>>` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f >> f) # revealed: Unknown -# error: [unsupported-operator] "Operator `|` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `|` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f | f) # revealed: Unknown -# error: [unsupported-operator] "Operator `^` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `^` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f ^ f) # revealed: Unknown -# error: [unsupported-operator] "Operator `&` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `&` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f & f) # revealed: Unknown -# error: [unsupported-operator] "Operator `//` is unsupported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" +# error: [unsupported-operator] "Operator `//` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`" reveal_type(f // f) # revealed: Unknown ``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/instances.md b/crates/ty_python_semantic/resources/mdtest/binary/instances.md index c981a570b0..c1cc6f924e 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/instances.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/instances.md @@ -386,7 +386,7 @@ class A(metaclass=Meta): ... class B(metaclass=Meta): ... 
reveal_type(A + B) # revealed: int -# error: [unsupported-operator] "Operator `-` is unsupported between objects of type `` and ``" +# error: [unsupported-operator] "Operator `-` is not supported between objects of type `` and ``" reveal_type(A - B) # revealed: Unknown reveal_type(A < B) # revealed: bool @@ -412,7 +412,7 @@ class A: def __init__(self): self.__add__ = add_impl -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `A` and `A`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `A` and `A`" # revealed: Unknown reveal_type(A() + A()) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/binary/integers.md b/crates/ty_python_semantic/resources/mdtest/binary/integers.md index a021a15ae1..401c09d756 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/integers.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/integers.md @@ -13,7 +13,7 @@ reveal_type(3 | 4) # revealed: Literal[7] reveal_type(5 & 6) # revealed: Literal[4] reveal_type(7 ^ 2) # revealed: Literal[5] -# error: [unsupported-operator] "Operator `+` is unsupported between objects of type `Literal[2]` and `Literal["f"]`" +# error: [unsupported-operator] "Operator `+` is not supported between objects of type `Literal[2]` and `Literal["f"]`" reveal_type(2 + "f") # revealed: Unknown def lhs(x: int): diff --git a/crates/ty_python_semantic/resources/mdtest/binary/unions.md b/crates/ty_python_semantic/resources/mdtest/binary/unions.md index 1ec0794cc4..1b980170d4 100644 --- a/crates/ty_python_semantic/resources/mdtest/binary/unions.md +++ b/crates/ty_python_semantic/resources/mdtest/binary/unions.md @@ -5,9 +5,9 @@ combinations of types: ```py def f1(i: int, u: int | None): - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int` and `int | None`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `int` and `int | None`" reveal_type(i + u) # revealed: Unknown - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | None` and `int`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | None` and `int`" reveal_type(u + i) # revealed: Unknown ``` @@ -18,7 +18,7 @@ cannot be added, because that would require addition of `int` and `str` or vice def f2(i: int, s: str, int_or_str: int | str): i + i s + s - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | str` and `int | str`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | str` and `int | str`" reveal_type(int_or_str + int_or_str) # revealed: Unknown ``` diff --git a/crates/ty_python_semantic/resources/mdtest/call/methods.md b/crates/ty_python_semantic/resources/mdtest/call/methods.md index 054f6d6a6a..0536ded1e6 100644 --- a/crates/ty_python_semantic/resources/mdtest/call/methods.md +++ b/crates/ty_python_semantic/resources/mdtest/call/methods.md @@ -607,7 +607,7 @@ class X: def __init__(self, val: int): ... 
 def make_another(self) -> Self:
 reveal_type(self.__new__) # revealed: def __new__(cls) -> Self@__new__
- return self.__new__(X)
+ return self.__new__(type(self))
 ```

 ## Builtin functions and methods
diff --git a/crates/ty_python_semantic/resources/mdtest/call/overloads.md b/crates/ty_python_semantic/resources/mdtest/call/overloads.md
index e6ef48276a..72672b31ed 100644
--- a/crates/ty_python_semantic/resources/mdtest/call/overloads.md
+++ b/crates/ty_python_semantic/resources/mdtest/call/overloads.md
@@ -925,7 +925,7 @@ def _(t: tuple[int, str] | tuple[int, str, int]) -> None:
 f(*t) # error: [no-matching-overload]
 ```

-## Filtering based on variaidic arguments
+## Filtering based on variadic arguments

 This is step 4 of the overload call evaluation algorithm which specifies that:

@@ -1469,6 +1469,79 @@ def _(arg: list[Any]):
 reveal_type(f4(*arg)) # revealed: Unknown
 ```

+### Variadic argument with generics
+
+`overloaded.pyi`:
+
+```pyi
+from typing import Any, TypeVar, overload
+
+T1 = TypeVar("T1")
+T2 = TypeVar("T2")
+T3 = TypeVar("T3")
+
+@overload
+def f1(x: T1, /) -> tuple[T1]: ...
+@overload
+def f1(x1: T1, x2: T2, /) -> tuple[T1, T2]: ...
+@overload
+def f1(x1: T1, x2: T2, x3: T3, /) -> tuple[T1, T2, T3]: ...
+@overload
+def f1(*args: Any) -> tuple[Any, ...]: ...
+
+@overload
+def f2(x1: T1) -> tuple[T1]: ...
+@overload
+def f2(x1: T1, x2: T2) -> tuple[T1, T2]: ...
+@overload
+def f2(*args: Any, **kwargs: Any) -> tuple[Any, ...]: ...
+
+@overload
+def f3(x: T1) -> tuple[T1]: ...
+@overload
+def f3(x1: T1, x2: T2) -> tuple[T1, T2]: ...
+@overload
+def f3(*args: Any) -> tuple[Any, ...]: ...
+@overload
+def f3(**kwargs: Any) -> dict[str, Any]: ...
+```
+
+```py
+from overloaded import f1, f2, f3
+from typing import Any
+
+# These calls only match the last overload
+reveal_type(f1()) # revealed: tuple[Any, ...]
+reveal_type(f1(1, 2, 3, 4)) # revealed: tuple[Any, ...]
+
+# These calls match multiple overloads, but step 5 filters out all the remaining overloads
+# except the most specific one in terms of the number of arguments.
+reveal_type(f1(1)) # revealed: tuple[Literal[1]]
+reveal_type(f1(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
+reveal_type(f1(1, 2, 3)) # revealed: tuple[Literal[1], Literal[2], Literal[3]]
+
+def _(args1: list[int], args2: list[Any]):
+ reveal_type(f1(*args1)) # revealed: tuple[Any, ...]
+ reveal_type(f1(*args2)) # revealed: tuple[Any, ...]
+
+reveal_type(f2()) # revealed: tuple[Any, ...]
+reveal_type(f2(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
+# TODO: Should be `tuple[Literal[1], Literal[2]]`
+reveal_type(f2(x1=1, x2=2)) # revealed: Unknown
+# TODO: Should be `tuple[Literal[2], Literal[1]]`
+reveal_type(f2(x2=1, x1=2)) # revealed: Unknown
+reveal_type(f2(1, 2, z=3)) # revealed: tuple[Any, ...]
+
+reveal_type(f3(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
+reveal_type(f3(1, 2, 3)) # revealed: tuple[Any, ...]
+# TODO: Should be `tuple[Literal[1], Literal[2]]` +reveal_type(f3(x1=1, x2=2)) # revealed: Unknown +reveal_type(f3(z=1)) # revealed: dict[str, Any] + +# error: [no-matching-overload] +reveal_type(f3(1, 2, x=3)) # revealed: Unknown +``` + ### Non-participating fully-static parameter Ref: diff --git a/crates/ty_python_semantic/resources/mdtest/class/super.md b/crates/ty_python_semantic/resources/mdtest/class/super.md index 750f589125..4c3d09560f 100644 --- a/crates/ty_python_semantic/resources/mdtest/class/super.md +++ b/crates/ty_python_semantic/resources/mdtest/class/super.md @@ -174,8 +174,7 @@ class B(A): @classmethod def f(cls): - # TODO: Once `cls` is supported, this should be `, >` - reveal_type(super()) # revealed: , Unknown> + reveal_type(super()) # revealed: , > super().f() super(B, B(42)).__init__(42) diff --git a/crates/ty_python_semantic/resources/mdtest/conditional/if_expression.md b/crates/ty_python_semantic/resources/mdtest/conditional/if_expression.md index 48b912cce1..082e0d43db 100644 --- a/crates/ty_python_semantic/resources/mdtest/conditional/if_expression.md +++ b/crates/ty_python_semantic/resources/mdtest/conditional/if_expression.md @@ -42,6 +42,6 @@ def _(flag: bool): class NotBoolable: __bool__: int = 3 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" 3 if NotBoolable() else 4 ``` diff --git a/crates/ty_python_semantic/resources/mdtest/conditional/if_statement.md b/crates/ty_python_semantic/resources/mdtest/conditional/if_statement.md index c7a8c7732b..f55dc41160 100644 --- a/crates/ty_python_semantic/resources/mdtest/conditional/if_statement.md +++ b/crates/ty_python_semantic/resources/mdtest/conditional/if_statement.md @@ -154,10 +154,10 @@ def _(flag: bool): class NotBoolable: __bool__: int = 3 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" if NotBoolable(): ... -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" elif NotBoolable(): ... 
``` diff --git a/crates/ty_python_semantic/resources/mdtest/conditional/match.md b/crates/ty_python_semantic/resources/mdtest/conditional/match.md index c0df166e03..492ca8ef53 100644 --- a/crates/ty_python_semantic/resources/mdtest/conditional/match.md +++ b/crates/ty_python_semantic/resources/mdtest/conditional/match.md @@ -378,7 +378,7 @@ class NotBoolable: def _(target: int, flag: NotBoolable): y = 1 match target: - # error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" + # error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" case 1 if flag: y = 2 case 2: diff --git a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md index fa6f76de75..7c64175658 100644 --- a/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md +++ b/crates/ty_python_semantic/resources/mdtest/dataclasses/dataclasses.md @@ -701,6 +701,111 @@ Employee("Alice", e_id=1) Employee("Alice", 1) # error: [too-many-positional-arguments] ``` +### Inherited fields with class-level `kw_only` + +When a child dataclass uses `@dataclass(kw_only=True)`, the `kw_only` setting should only apply to +fields defined in the child class, not to inherited fields from parent classes. + +This is a regression test for . + +```toml +[environment] +python-version = "3.10" +``` + +```py +from dataclasses import dataclass + +@dataclass +class Inner: + inner: int + +@dataclass(kw_only=True) +class Outer(Inner): + outer: int + +# Inherited field `inner` is positional, new field `outer` is keyword-only +reveal_type(Outer.__init__) # revealed: (self: Outer, inner: int, *, outer: int) -> None + +Outer(0, outer=5) # OK +Outer(inner=0, outer=5) # Also OK +# error: [missing-argument] +# error: [too-many-positional-arguments] +Outer(0, 5) +``` + +This also works when the parent class uses the `KW_ONLY` sentinel: + +```py +from dataclasses import dataclass, KW_ONLY + +@dataclass +class Parent: + a: int + _: KW_ONLY + b: str + +@dataclass(kw_only=True) +class Child(Parent): + c: bytes + +# `a` is positional (from parent), `b` is keyword-only (from parent's KW_ONLY), +# `c` is keyword-only (from child's kw_only=True) +reveal_type(Child.__init__) # revealed: (self: Child, a: int, *, b: str, c: bytes) -> None + +Child(1, b="hello", c=b"world") # OK +# error: [missing-argument] "No arguments provided for required parameters `b`, `c`" +# error: [too-many-positional-arguments] +Child(1, "hello", b"world") +``` + +And when the child class uses the `KW_ONLY` sentinel while inheriting from a parent: + +```py +from dataclasses import dataclass, KW_ONLY + +@dataclass +class Base: + x: int + +@dataclass +class Derived(Base): + y: str + _: KW_ONLY + z: bytes + +# `x` and `y` are positional, `z` is keyword-only (from Derived's KW_ONLY) +reveal_type(Derived.__init__) # revealed: (self: Derived, x: int, y: str, *, z: bytes) -> None + +Derived(1, "hello", z=b"world") # OK +# error: [missing-argument] +# error: [too-many-positional-arguments] +Derived(1, "hello", b"world") +``` + +The reverse case also works: when a parent has `kw_only=True` but the child doesn't, the parent's +fields stay keyword-only while the child's fields are positional: + +```py +from dataclasses import dataclass + +@dataclass(kw_only=True) +class KwOnlyParent: + parent_field: int + +@dataclass +class PositionalChild(KwOnlyParent): + child_field: str + +# `child_field` is positional (child's default), 
`parent_field` stays keyword-only +reveal_type(PositionalChild.__init__) # revealed: (self: PositionalChild, child_field: str, *, parent_field: int) -> None + +PositionalChild("hello", parent_field=1) # OK +# error: [missing-argument] +# error: [too-many-positional-arguments] +PositionalChild("hello", 1) +``` + ### `slots` If a dataclass is defined with `slots=True`, the `__slots__` attribute is generated as a tuple. It diff --git a/crates/ty_python_semantic/resources/mdtest/expression/assert.md b/crates/ty_python_semantic/resources/mdtest/expression/assert.md index ddb429a576..6fd9e7700f 100644 --- a/crates/ty_python_semantic/resources/mdtest/expression/assert.md +++ b/crates/ty_python_semantic/resources/mdtest/expression/assert.md @@ -4,6 +4,6 @@ class NotBoolable: __bool__: int = 3 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" assert NotBoolable() ``` diff --git a/crates/ty_python_semantic/resources/mdtest/expression/boolean.md b/crates/ty_python_semantic/resources/mdtest/expression/boolean.md index 413acf8e39..67ba0f4f05 100644 --- a/crates/ty_python_semantic/resources/mdtest/expression/boolean.md +++ b/crates/ty_python_semantic/resources/mdtest/expression/boolean.md @@ -232,7 +232,7 @@ if NotBoolable(): class NotBoolable: __bool__: None = None -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" if NotBoolable(): ... ``` @@ -244,7 +244,7 @@ def test(cond: bool): class NotBoolable: __bool__: int | None = None if cond else 3 - # error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" + # error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" if NotBoolable(): ... ``` @@ -258,7 +258,7 @@ def test(cond: bool): a = 10 if cond else NotBoolable() - # error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `Literal[10] | NotBoolable`" + # error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `Literal[10] | NotBoolable`" if a: ... ``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/README.md b/crates/ty_python_semantic/resources/mdtest/external/README.md new file mode 100644 index 0000000000..a54c31c862 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/README.md @@ -0,0 +1,4 @@ +# mdtests with external dependencies + +This directory contains mdtests that make use of external packages. See the mdtest `README.md` for +more information. 
diff --git a/crates/ty_python_semantic/resources/mdtest/external/attrs.md b/crates/ty_python_semantic/resources/mdtest/external/attrs.md new file mode 100644 index 0000000000..3b4bc342a6 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/attrs.md @@ -0,0 +1,78 @@ +# attrs + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["attrs==25.4.0"] +``` + +## Basic class (`attr`) + +```py +import attr + +@attr.s +class User: + id: int = attr.ib() + name: str = attr.ib() + +user = User(id=1, name="John Doe") + +reveal_type(user.id) # revealed: int +reveal_type(user.name) # revealed: str +``` + +## Basic class (`define`) + +```py +from attrs import define, field + +@define +class User: + id: int = field() + internal_name: str = field(alias="name") + +user = User(id=1, name="John Doe") +reveal_type(user.id) # revealed: int +reveal_type(user.internal_name) # revealed: str +``` + +## Usage of `field` parameters + +```py +from attrs import define, field + +@define +class Product: + id: int = field(init=False) + name: str = field() + price_cent: int = field(kw_only=True) + +reveal_type(Product.__init__) # revealed: (self: Product, name: str, *, price_cent: int) -> None +``` + +## Dedicated support for the `default` decorator? + +We currently do not support this: + +```py +from attrs import define, field + +@define +class Person: + id: int = field() + name: str = field() + + # error: [call-non-callable] "Object of type `_MISSING_TYPE` is not callable" + @id.default + def _default_id(self) -> int: + raise NotImplementedError + +# error: [missing-argument] "No argument provided for required parameter `id`" +person = Person(name="Alice") +reveal_type(person.id) # revealed: int +reveal_type(person.name) # revealed: str +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/numpy.md b/crates/ty_python_semantic/resources/mdtest/external/numpy.md new file mode 100644 index 0000000000..39bfa6d110 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/numpy.md @@ -0,0 +1,23 @@ +# numpy + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["numpy==2.3.0"] +``` + +## Basic usage + +```py +import numpy as np + +xs = np.array([1, 2, 3]) +reveal_type(xs) # revealed: ndarray[tuple[Any, ...], dtype[Any]] + +xs = np.array([1.0, 2.0, 3.0], dtype=np.float64) +# TODO: should be `ndarray[tuple[Any, ...], dtype[float64]]` +reveal_type(xs) # revealed: ndarray[tuple[Any, ...], dtype[Unknown]] +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/pydantic.md b/crates/ty_python_semantic/resources/mdtest/external/pydantic.md new file mode 100644 index 0000000000..6fb82840f5 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/pydantic.md @@ -0,0 +1,48 @@ +# Pydantic + +```toml +[environment] +python-version = "3.12" +python-platform = "linux" + +[project] +dependencies = ["pydantic==2.12.2"] +``` + +## Basic model + +```py +from pydantic import BaseModel + +class User(BaseModel): + id: int + name: str + +reveal_type(User.__init__) # revealed: (self: User, *, id: int, name: str) -> None + +user = User(id=1, name="John Doe") +reveal_type(user.id) # revealed: int +reveal_type(user.name) # revealed: str + +# error: [missing-argument] "No argument provided for required parameter `name`" +invalid_user = User(id=2) +``` + +## Usage of `Field` + +```py +from pydantic import BaseModel, Field + +class Product(BaseModel): + id: int = 
Field(init=False) + name: str = Field(..., kw_only=False, min_length=1) + internal_price_cent: int = Field(..., gt=0, alias="price_cent") + +reveal_type(Product.__init__) # revealed: (self: Product, name: str = Any, *, price_cent: int = Any) -> None + +product = Product("Laptop", price_cent=999_00) + +reveal_type(product.id) # revealed: int +reveal_type(product.name) # revealed: str +reveal_type(product.internal_price_cent) # revealed: int +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/pytest.md b/crates/ty_python_semantic/resources/mdtest/external/pytest.md new file mode 100644 index 0000000000..823ef4d162 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/pytest.md @@ -0,0 +1,27 @@ +# pytest + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["pytest==9.0.1"] +``` + +## `pytest.fail` + +Make sure that we recognize `pytest.fail` calls as terminal: + +```py +import pytest + +def some_runtime_condition() -> bool: + return True + +def test_something(): + if not some_runtime_condition(): + pytest.fail("Runtime condition failed") + + no_error_here_this_is_unreachable +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/sqlalchemy.md b/crates/ty_python_semantic/resources/mdtest/external/sqlalchemy.md new file mode 100644 index 0000000000..43fff45058 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/sqlalchemy.md @@ -0,0 +1,195 @@ +# SQLAlchemy + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["SQLAlchemy==2.0.44"] +``` + +## ORM Model + +This test makes sure that ty understands SQLAlchemy's `dataclass_transform` setup: + +```py +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + +class Base(DeclarativeBase): + pass + +class User(Base): + __tablename__ = "user" + + id: Mapped[int] = mapped_column(primary_key=True, init=False) + internal_name: Mapped[str] = mapped_column(alias="name") + +user = User(name="John Doe") +reveal_type(user.id) # revealed: int +reveal_type(user.internal_name) # revealed: str +``` + +Unfortunately, SQLAlchemy overrides `__init__` and explicitly accepts all combinations of keyword +arguments. 
This is why we currently cannot flag invalid constructor calls: + +```py +reveal_type(User.__init__) # revealed: def __init__(self, **kw: Any) -> Unknown + +# TODO: this should ideally be an error +invalid_user = User(invalid_arg=42) +``` + +## Basic query example + +First, set up a `Session`: + +```py +from sqlalchemy import select, Integer, Text, Boolean +from sqlalchemy.orm import Session +from sqlalchemy.orm import DeclarativeBase +from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy import create_engine + +engine = create_engine("sqlite://example.db") +session = Session(engine) +``` + +And define a simple model: + +```py +class Base(DeclarativeBase): + pass + +class User(Base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[str] = mapped_column(Text) + is_admin: Mapped[bool] = mapped_column(Boolean, default=False) +``` + +Finally, we can execute queries: + +```py +stmt = select(User) +reveal_type(stmt) # revealed: Select[tuple[User]] + +users = session.scalars(stmt).all() +reveal_type(users) # revealed: Sequence[User] + +for row in session.execute(stmt): + reveal_type(row) # revealed: Row[tuple[User]] + +stmt = select(User).where(User.name == "Alice") +alice1 = session.scalars(stmt).first() +reveal_type(alice1) # revealed: User | None + +alice2 = session.scalar(stmt) +reveal_type(alice2) # revealed: User | None + +result = session.execute(stmt) +row = result.one_or_none() +assert row is not None +(alice3,) = row._tuple() +reveal_type(alice3) # revealed: User +``` + +This also works with more complex queries: + +```py +stmt = select(User).where(User.is_admin == True).order_by(User.name).limit(10) +admin_users = session.scalars(stmt).all() +reveal_type(admin_users) # revealed: Sequence[User] +``` + +We can also specify particular columns to select: + +```py +stmt = select(User.id, User.name) +reveal_type(stmt) # revealed: Select[tuple[int, str]] + +ids_and_names = session.execute(stmt).all() +reveal_type(ids_and_names) # revealed: Sequence[Row[tuple[int, str]]] + +for row in session.execute(stmt): + reveal_type(row) # revealed: Row[tuple[int, str]] + +for user_id, name in session.execute(stmt).tuples(): + reveal_type(user_id) # revealed: int + reveal_type(name) # revealed: str + +result = session.execute(stmt) +row = result.one_or_none() +assert row is not None +(user_id, name) = row._tuple() +reveal_type(user_id) # revealed: int +reveal_type(name) # revealed: str + +stmt = select(User.id).where(User.name == "Alice") + +reveal_type(stmt) # revealed: Select[tuple[int]] + +alice_id = session.scalars(stmt).first() +reveal_type(alice_id) # revealed: int | None + +alice_id = session.scalar(stmt) +reveal_type(alice_id) # revealed: int | None +``` + +Using the legacy `query` API also works: + +```py +users_legacy = session.query(User).all() +reveal_type(users_legacy) # revealed: list[User] + +query = session.query(User) +reveal_type(query) # revealed: Query[User] + +reveal_type(query.all()) # revealed: list[User] + +for row in query: + reveal_type(row) # revealed: User +``` + +And similarly when specifying particular columns: + +```py +query = session.query(User.id, User.name) +reveal_type(query) # revealed: RowReturningQuery[tuple[int, str]] + +reveal_type(query.all()) # revealed: list[Row[tuple[int, str]]] + +for row in query: + reveal_type(row) # revealed: Row[tuple[int, str]] +``` + +## Async API + +The async API is supported as well: + +```py +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy import 
select, Integer, Text +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column + +class Base(DeclarativeBase): + pass + +class User(Base): + __tablename__ = "users" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + name: Mapped[str] = mapped_column(Text) + +async def test_async(session: AsyncSession): + stmt = select(User).where(User.name == "Alice") + alice = await session.scalar(stmt) + reveal_type(alice) # revealed: User | None + + stmt = select(User.id, User.name) + result = await session.execute(stmt) + for user_id, name in result.tuples(): + reveal_type(user_id) # revealed: int + reveal_type(name) # revealed: str +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/sqlmodel.md b/crates/ty_python_semantic/resources/mdtest/external/sqlmodel.md new file mode 100644 index 0000000000..54ab9012c2 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/sqlmodel.md @@ -0,0 +1,34 @@ +# SQLModel + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = [ + "sqlmodel==0.0.27", + # TODO: remove this pin, once we have a lockfile + "sqlalchemy==2.0.44" +] +``` + +## Basic model + +```py +from sqlmodel import SQLModel + +class User(SQLModel): + id: int + name: str + +user = User(id=1, name="John Doe") +reveal_type(user.id) # revealed: int +reveal_type(user.name) # revealed: str + +# TODO: this should not mention `__pydantic_self__`, and have proper parameters defined by the fields +reveal_type(User.__init__) # revealed: def __init__(__pydantic_self__, **data: Any) -> None + +# TODO: this should be an error +User() +``` diff --git a/crates/ty_python_semantic/resources/mdtest/external/strawberry.md b/crates/ty_python_semantic/resources/mdtest/external/strawberry.md new file mode 100644 index 0000000000..849b50aa74 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/external/strawberry.md @@ -0,0 +1,27 @@ +# Strawberry GraphQL + +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["strawberry-graphql==0.283.3"] +``` + +## Basic model + +```py +import strawberry + +@strawberry.type +class User: + id: int + role: str = strawberry.field(default="user") + +reveal_type(User.__init__) # revealed: (self: User, *, id: int, role: str = Any) -> None + +user = User(id=1) +reveal_type(user.id) # revealed: int +reveal_type(user.role) # revealed: str +``` diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md index bc6ccdb7c1..30d6a89ec0 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/classes.md @@ -860,9 +860,6 @@ reveal_type(Sub) # revealed: U = TypeVar("U") class Base2(Generic[T, U]): ... - -# TODO: no error -# error: [unsupported-base] "Unsupported class base with type ` | `" class Sub2(Base2["Sub2", U]): ... ``` @@ -888,8 +885,6 @@ from typing_extensions import Generic, TypeVar T = TypeVar("T") -# TODO: no error "Unsupported class base with type ` | `" -# error: [unsupported-base] class Derived(list[Derived[T]], Generic[T]): ... 
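# With self-referential generic bases like `list[Derived[T]]` now resolving
# correctly, no `unsupported-base` false positive is expected for the class
# definition above.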
``` diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md index 2fce911026..f883d1fb1a 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/functions.md @@ -277,7 +277,7 @@ T = TypeVar("T", int, str) def same_constrained_types(t1: T, t2: T) -> T: # TODO: no error - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `T@same_constrained_types` and `T@same_constrained_types`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `T@same_constrained_types` and `T@same_constrained_types`" return t1 + t2 ``` @@ -287,7 +287,7 @@ and an `int` and a `str` cannot be added together: ```py def unions_are_different(t1: int | str, t2: int | str) -> int | str: - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | str` and `int | str`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | str` and `int | str`" return t1 + t2 ``` @@ -337,6 +337,44 @@ reveal_type(union_and_nonunion_params(3, 1)) # revealed: Literal[1] reveal_type(union_and_nonunion_params("a", 1)) # revealed: Literal["a", 1] ``` +This also works if the typevar has a bound: + +```py +T_str = TypeVar("T_str", bound=str) + +def accepts_t_or_int(x: T_str | int) -> T_str: + raise NotImplementedError + +reveal_type(accepts_t_or_int("a")) # revealed: Literal["a"] +reveal_type(accepts_t_or_int(1)) # revealed: Unknown + +class Unrelated: ... + +# error: [invalid-argument-type] "Argument type `Unrelated` does not satisfy upper bound `str` of type variable `T_str`" +reveal_type(accepts_t_or_int(Unrelated())) # revealed: Unknown +``` + +```py +T_str = TypeVar("T_str", bound=str) + +def accepts_t_or_list_of_t(x: T_str | list[T_str]) -> T_str: + raise NotImplementedError + +reveal_type(accepts_t_or_list_of_t("a")) # revealed: Literal["a"] +# error: [invalid-argument-type] "Argument type `Literal[1]` does not satisfy upper bound `str` of type variable `T_str`" +reveal_type(accepts_t_or_list_of_t(1)) # revealed: Unknown + +def _(list_ofstr: list[str], list_of_int: list[int]): + reveal_type(accepts_t_or_list_of_t(list_ofstr)) # revealed: str + + # TODO: the error message here could be improved by referring to the second union element + # error: [invalid-argument-type] "Argument type `list[int]` does not satisfy upper bound `str` of type variable `T_str`" + reveal_type(accepts_t_or_list_of_t(list_of_int)) # revealed: Unknown +``` + +Here, we make sure that `S` is solved as `Literal[1]` instead of a union of the two literals, which +would also be a valid solution: + ```py S = TypeVar("S") @@ -347,6 +385,138 @@ reveal_type(tuple_param("a", ("a", 1))) # revealed: tuple[Literal["a"], Literal reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]] ``` +When a union parameter contains generic classes like `P[T] | Q[T]`, we can infer the typevar from +the actual argument even for non-final classes. 
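As a rough illustration (a hypothetical signature, not one of the test assertions; the actual test
cases follow below), this is the kind of union-typed parameter that benefits from such inference:

```py
from typing import TypeVar

_U = TypeVar("_U")

def first_of(xs: list[_U] | tuple[_U, ...]) -> _U:
    # `_U` can be inferred from whichever union member the argument matches.
    raise NotImplementedError
```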
+ +```py +from typing import TypeVar, Generic + +T = TypeVar("T") + +class P(Generic[T]): + x: T + +class Q(Generic[T]): + x: T + +def extract_t(x: P[T] | Q[T]) -> T: + raise NotImplementedError + +reveal_type(extract_t(P[int]())) # revealed: int +reveal_type(extract_t(Q[str]())) # revealed: str +``` + +Passing anything else results in an error: + +```py +# error: [invalid-argument-type] +reveal_type(extract_t([1, 2])) # revealed: Unknown +``` + +This also works when different union elements have different typevars: + +```py +S = TypeVar("S") + +def extract_both(x: P[T] | Q[S]) -> tuple[T, S]: + raise NotImplementedError + +reveal_type(extract_both(P[int]())) # revealed: tuple[int, Unknown] +reveal_type(extract_both(Q[str]())) # revealed: tuple[Unknown, str] +``` + +Inference also works when passing subclasses of the generic classes in the union. + +```py +class SubP(P[T]): + pass + +class SubQ(Q[T]): + pass + +reveal_type(extract_t(SubP[int]())) # revealed: int +reveal_type(extract_t(SubQ[str]())) # revealed: str + +reveal_type(extract_both(SubP[int]())) # revealed: tuple[int, Unknown] +reveal_type(extract_both(SubQ[str]())) # revealed: tuple[Unknown, str] +``` + +When a type is a subclass of both `P` and `Q` with different specializations, we cannot infer a +single type for `T` in `extract_t`, because `P` and `Q` are invariant. However, we can still infer +both types in a call to `extract_both`: + +```py +class PandQ(P[int], Q[str]): + pass + +# TODO: Ideally, we would return `Unknown` here. +# error: [invalid-argument-type] +reveal_type(extract_t(PandQ())) # revealed: int | str + +reveal_type(extract_both(PandQ())) # revealed: tuple[int, str] +``` + +When non-generic types are part of the union, we can still infer typevars for the remaining generic +types: + +```py +def extract_optional_t(x: None | P[T]) -> T: + raise NotImplementedError + +reveal_type(extract_optional_t(None)) # revealed: Unknown +reveal_type(extract_optional_t(P[int]())) # revealed: int +``` + +Passing anything else results in an error: + +```py +# error: [invalid-argument-type] +reveal_type(extract_optional_t(Q[str]())) # revealed: Unknown +``` + +If the union contains contains parent and child of a generic class, we ideally pick the union +element that is more precise: + +```py +class Base(Generic[T]): + x: T + +class Sub(Base[T]): ... + +def f(t: Base[T] | Sub[T | None]) -> T: + raise NotImplementedError + +reveal_type(f(Base[int]())) # revealed: int +# TODO: Should ideally be `str` +reveal_type(f(Sub[str | None]())) # revealed: str | None +``` + +If we have a case like the following, where only one of the union elements matches due to the +typevar bound, we do not emit a specialization error: + +```py +from typing import TypeVar + +I_int = TypeVar("I_int", bound=int) +S_str = TypeVar("S_str", bound=str) + +class P(Generic[T]): + value: T + +def f(t: P[I_int] | P[S_str]) -> tuple[I_int, S_str]: + raise NotImplementedError + +reveal_type(f(P[int]())) # revealed: tuple[int, Unknown] +reveal_type(f(P[str]())) # revealed: tuple[Unknown, str] +``` + +However, if we pass something that does not match _any_ union element, we do emit an error: + +```py +# error: [invalid-argument-type] +reveal_type(f(P[bytes]())) # revealed: tuple[Unknown, Unknown] +``` + ## Inferring nested generic function calls We can infer type assignments in nested calls to multiple generic functions. 
If they use the same diff --git a/crates/ty_python_semantic/resources/mdtest/generics/legacy/paramspec.md b/crates/ty_python_semantic/resources/mdtest/generics/legacy/paramspec.md index efa9a37d0f..ec3d608506 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/legacy/paramspec.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/legacy/paramspec.md @@ -102,6 +102,38 @@ Other values are invalid. P4 = ParamSpec("P4", default=int) ``` +### `default` parameter in `typing_extensions.ParamSpec` + +```toml +[environment] +python-version = "3.12" +``` + +The `default` parameter to `ParamSpec` is available from `typing_extensions` in Python 3.12 and +earlier. + +```py +from typing import ParamSpec +from typing_extensions import ParamSpec as ExtParamSpec + +# This shouldn't emit a diagnostic +P1 = ExtParamSpec("P1", default=[int, str]) + +# But, this should +# error: [invalid-paramspec] "The `default` parameter of `typing.ParamSpec` was added in Python 3.13" +P2 = ParamSpec("P2", default=[int, str]) +``` + +And, it allows the same set of values as `typing.ParamSpec`. + +```py +P3 = ExtParamSpec("P3", default=...) +P4 = ExtParamSpec("P4", default=P3) + +# error: [invalid-paramspec] +P5 = ExtParamSpec("P5", default=int) +``` + ### Forward references in stub files Stubs natively support forward references, so patterns that would raise `NameError` at runtime are diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md index 8af1b948ee..cb8f7dbc90 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/functions.md @@ -246,7 +246,7 @@ methods that are compatible with the return type, so the `return` expression is ```py def same_constrained_types[T: (int, str)](t1: T, t2: T) -> T: # TODO: no error - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `T@same_constrained_types` and `T@same_constrained_types`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `T@same_constrained_types` and `T@same_constrained_types`" return t1 + t2 ``` @@ -256,7 +256,7 @@ and an `int` and a `str` cannot be added together: ```py def unions_are_different(t1: int | str, t2: int | str) -> int | str: - # error: [unsupported-operator] "Operator `+` is unsupported between objects of type `int | str` and `int | str`" + # error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | str` and `int | str`" return t1 + t2 ``` @@ -302,6 +302,38 @@ reveal_type(union_and_nonunion_params(3, 1)) # revealed: Literal[1] reveal_type(union_and_nonunion_params("a", 1)) # revealed: Literal["a", 1] ``` +This also works if the typevar has a bound: + +```py +def accepts_t_or_int[T_str: str](x: T_str | int) -> T_str: + raise NotImplementedError + +reveal_type(accepts_t_or_int("a")) # revealed: Literal["a"] +reveal_type(accepts_t_or_int(1)) # revealed: Unknown + +class Unrelated: ... 
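# `Unrelated` is not assignable to the `int` member of the union, and it does not
# satisfy the `str` upper bound of `T_str`, so no specialization can be inferred
# for the call below; it is rejected instead.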
+ +# error: [invalid-argument-type] "Argument type `Unrelated` does not satisfy upper bound `str` of type variable `T_str`" +reveal_type(accepts_t_or_int(Unrelated())) # revealed: Unknown + +def accepts_t_or_list_of_t[T: str](x: T | list[T]) -> T: + raise NotImplementedError + +reveal_type(accepts_t_or_list_of_t("a")) # revealed: Literal["a"] +# error: [invalid-argument-type] "Argument type `Literal[1]` does not satisfy upper bound `str` of type variable `T`" +reveal_type(accepts_t_or_list_of_t(1)) # revealed: Unknown + +def _(list_ofstr: list[str], list_of_int: list[int]): + reveal_type(accepts_t_or_list_of_t(list_ofstr)) # revealed: str + + # TODO: the error message here could be improved by referring to the second union element + # error: [invalid-argument-type] "Argument type `list[int]` does not satisfy upper bound `str` of type variable `T`" + reveal_type(accepts_t_or_list_of_t(list_of_int)) # revealed: Unknown +``` + +Here, we make sure that `S` is solved as `Literal[1]` instead of a union of the two literals, which +would also be a valid solution: + ```py def tuple_param[T, S](x: T | S, y: tuple[T, S]) -> tuple[T, S]: return y @@ -310,6 +342,127 @@ reveal_type(tuple_param("a", ("a", 1))) # revealed: tuple[Literal["a"], Literal reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]] ``` +When a union parameter contains generic classes like `P[T] | Q[T]`, we can infer the typevar from +the actual argument even for non-final classes. + +```py +class P[T]: + x: T # invariant + +class Q[T]: + x: T # invariant + +def extract_t[T](x: P[T] | Q[T]) -> T: + raise NotImplementedError + +reveal_type(extract_t(P[int]())) # revealed: int +reveal_type(extract_t(Q[str]())) # revealed: str +``` + +Passing anything else results in an error: + +```py +# error: [invalid-argument-type] +reveal_type(extract_t([1, 2])) # revealed: Unknown +``` + +This also works when different union elements have different typevars: + +```py +def extract_both[T, S](x: P[T] | Q[S]) -> tuple[T, S]: + raise NotImplementedError + +reveal_type(extract_both(P[int]())) # revealed: tuple[int, Unknown] +reveal_type(extract_both(Q[str]())) # revealed: tuple[Unknown, str] +``` + +Inference also works when passing subclasses of the generic classes in the union. + +```py +class SubP[T](P[T]): + pass + +class SubQ[T](Q[T]): + pass + +reveal_type(extract_t(SubP[int]())) # revealed: int +reveal_type(extract_t(SubQ[str]())) # revealed: str + +reveal_type(extract_both(SubP[int]())) # revealed: tuple[int, Unknown] +reveal_type(extract_both(SubQ[str]())) # revealed: tuple[Unknown, str] +``` + +When a type is a subclass of both `P` and `Q` with different specializations, we cannot infer a +single type for `T` in `extract_t`, because `P` and `Q` are invariant. However, we can still infer +both types in a call to `extract_both`: + +```py +class PandQ(P[int], Q[str]): + pass + +# TODO: Ideally, we would return `Unknown` here. 
+# error: [invalid-argument-type] +reveal_type(extract_t(PandQ())) # revealed: int | str + +reveal_type(extract_both(PandQ())) # revealed: tuple[int, str] +``` + +When non-generic types are part of the union, we can still infer typevars for the remaining generic +types: + +```py +def extract_optional_t[T](x: None | P[T]) -> T: + raise NotImplementedError + +reveal_type(extract_optional_t(None)) # revealed: Unknown +reveal_type(extract_optional_t(P[int]())) # revealed: int +``` + +Passing anything else results in an error: + +```py +# error: [invalid-argument-type] +reveal_type(extract_optional_t(Q[str]())) # revealed: Unknown +``` + +If the union contains contains parent and child of a generic class, we ideally pick the union +element that is more precise: + +```py +class Base[T]: + x: T + +class Sub[T](Base[T]): ... + +def f[T](t: Base[T] | Sub[T | None]) -> T: + raise NotImplementedError + +reveal_type(f(Base[int]())) # revealed: int +# TODO: Should ideally be `str` +reveal_type(f(Sub[str | None]())) # revealed: str | None +``` + +If we have a case like the following, where only one of the union elements matches due to the +typevar bound, we do not emit a specialization error: + +```py +class P[T]: + value: T + +def f[I: int, S: str](t: P[I] | P[S]) -> tuple[I, S]: + raise NotImplementedError + +reveal_type(f(P[int]())) # revealed: tuple[int, Unknown] +reveal_type(f(P[str]())) # revealed: tuple[Unknown, str] +``` + +However, if we pass something that does not match _any_ union element, we do emit an error: + +```py +# error: [invalid-argument-type] +reveal_type(f(P[bytes]())) # revealed: tuple[Unknown, Unknown] +``` + ## Inferring nested generic function calls We can infer type assignments in nested calls to multiple generic functions. If they use the same diff --git a/crates/ty_python_semantic/resources/mdtest/generics/pep695/paramspec.md b/crates/ty_python_semantic/resources/mdtest/generics/pep695/paramspec.md index 2d0a4c287d..92e1e64116 100644 --- a/crates/ty_python_semantic/resources/mdtest/generics/pep695/paramspec.md +++ b/crates/ty_python_semantic/resources/mdtest/generics/pep695/paramspec.md @@ -417,16 +417,13 @@ The `converter` function act as a decorator here: def f3(x: int, y: str) -> int: return 1 -# TODO: This should reveal `(x: int, y: str) -> bool` but there's a cycle: https://github.com/astral-sh/ty/issues/1729 -reveal_type(f3) # revealed: ((x: int, y: str) -> bool) | ((x: Divergent, y: Divergent) -> bool) +reveal_type(f3) # revealed: (x: int, y: str) -> bool reveal_type(f3(1, "a")) # revealed: bool reveal_type(f3(x=1, y="a")) # revealed: bool reveal_type(f3(1, y="a")) # revealed: bool reveal_type(f3(y="a", x=1)) # revealed: bool -# TODO: There should only be one error but the type of `f3` is a union: https://github.com/astral-sh/ty/issues/1729 -# error: [missing-argument] "No argument provided for required parameter `y`" # error: [missing-argument] "No argument provided for required parameter `y`" f3(1) # error: [invalid-argument-type] "Argument is incorrect: Expected `int`, found `Literal["a"]`" diff --git a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md index 3637970d9e..c2569023f1 100644 --- a/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md +++ b/crates/ty_python_semantic/resources/mdtest/implicit_type_aliases.md @@ -214,7 +214,7 @@ def _(int_or_int: IntOrInt, list_of_int_or_list_of_int: ListOfIntOrListOfInt): `NoneType` has no special or-operator behavior, 
so this is an error: ```py -None | None # error: [unsupported-operator] "Operator `|` is unsupported between objects of type `None` and `None`" +None | None # error: [unsupported-operator] "Operator `|` is not supported between objects of type `None` and `None`" ``` When constructing something nonsensical like `int | 1`, we emit a diagnostic for the expression diff --git a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md index 9594492982..cac8006cd6 100644 --- a/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md +++ b/crates/ty_python_semantic/resources/mdtest/invalid_syntax.md @@ -128,3 +128,16 @@ InvalidEmptyUnion = Union[] def _(u: InvalidEmptyUnion): reveal_type(u) # revealed: Unknown ``` + +### `typing.Annotated` + +```py +from typing import Annotated + +# error: [invalid-syntax] "Expected index or slice expression" +# error: [invalid-type-form] "Special form `typing.Annotated` expected at least 2 arguments (one type and at least one metadata element)" +InvalidEmptyAnnotated = Annotated[] + +def _(a: InvalidEmptyAnnotated): + reveal_type(a) # revealed: Unknown +``` diff --git a/crates/ty_python_semantic/resources/mdtest/loops/while_loop.md b/crates/ty_python_semantic/resources/mdtest/loops/while_loop.md index 5a5784b85d..41e48b1404 100644 --- a/crates/ty_python_semantic/resources/mdtest/loops/while_loop.md +++ b/crates/ty_python_semantic/resources/mdtest/loops/while_loop.md @@ -123,7 +123,7 @@ def _(flag: bool, flag2: bool): class NotBoolable: __bool__: int = 3 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `NotBoolable`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `NotBoolable`" while NotBoolable(): ... 
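# The `while` condition requires converting `NotBoolable()` to `bool`, but
# `__bool__` here is declared as a plain `int` attribute rather than a method,
# so the conversion is rejected, producing the error expected above.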
``` diff --git a/crates/ty_python_semantic/resources/mdtest/named_tuple.md b/crates/ty_python_semantic/resources/mdtest/named_tuple.md index ab1dafa187..5fd71b988b 100644 --- a/crates/ty_python_semantic/resources/mdtest/named_tuple.md +++ b/crates/ty_python_semantic/resources/mdtest/named_tuple.md @@ -271,8 +271,7 @@ reveal_type(Person._make) # revealed: bound method ._make(itera reveal_type(Person._asdict) # revealed: def _asdict(self) -> dict[str, Any] reveal_type(Person._replace) # revealed: def _replace(self, **kwargs: Any) -> Self@_replace -# TODO: should be `Person` once we support implicit type of `self` -reveal_type(Person._make(("Alice", 42))) # revealed: Unknown +reveal_type(Person._make(("Alice", 42))) # revealed: Person person = Person("Alice", 42) diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md index ed9964274a..6c45c19ef2 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/issubclass.md @@ -220,6 +220,48 @@ def f(x: type[int | str | bytes | range]): reveal_type(x) # revealed: ``` +## `classinfo` is a generic final class + +```toml +[environment] +python-version = "3.12" +``` + +When we check a generic `@final` class against `type[GenericFinal]`, we can conclude that the check +always succeeds: + +```py +from typing import final + +@final +class GenericFinal[T]: + x: T # invariant + +def f(x: type[GenericFinal]): + reveal_type(x) # revealed: + + if issubclass(x, GenericFinal): + reveal_type(x) # revealed: + else: + reveal_type(x) # revealed: Never +``` + +This also works if the typevar has an upper bound: + +```py +@final +class BoundedGenericFinal[T: int]: + x: T # invariant + +def g(x: type[BoundedGenericFinal]): + reveal_type(x) # revealed: + + if issubclass(x, BoundedGenericFinal): + reveal_type(x) # revealed: + else: + reveal_type(x) # revealed: Never +``` + ## Special cases ### Emit a diagnostic if the first argument is of wrong type diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/truthiness.md b/crates/ty_python_semantic/resources/mdtest/narrow/truthiness.md index d6df7c3276..cb9f0c4545 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/truthiness.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/truthiness.md @@ -270,7 +270,7 @@ def _( if af: reveal_type(af) # revealed: type[AmbiguousClass] & ~AlwaysFalsy - # error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `MetaDeferred`" + # error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `MetaDeferred`" if d: # TODO: Should be `Unknown` reveal_type(d) # revealed: type[DeferredClass] & ~AlwaysFalsy diff --git a/crates/ty_python_semantic/resources/mdtest/narrow/type.md b/crates/ty_python_semantic/resources/mdtest/narrow/type.md index 3cf1aa23db..de962d2075 100644 --- a/crates/ty_python_semantic/resources/mdtest/narrow/type.md +++ b/crates/ty_python_semantic/resources/mdtest/narrow/type.md @@ -92,8 +92,7 @@ def f(x: A[int] | B): reveal_type(x) # revealed: A[int] | B if type(x) is A: - # TODO: this should be `A[int]`, but `A[int] | B` would be better than `Never` - reveal_type(x) # revealed: Never + reveal_type(x) # revealed: A[int] else: reveal_type(x) # revealed: A[int] | B @@ -111,8 +110,7 @@ def f(x: A[int] | B): if type(x) is not A: reveal_type(x) # revealed: A[int] | B else: - # TODO: this should be `A[int]`, but `A[int] | B` would be better than `Never` - 
reveal_type(x) # revealed: Never + reveal_type(x) # revealed: A[int] if type(x) is not B: reveal_type(x) # revealed: A[int] | B diff --git a/crates/ty_python_semantic/resources/mdtest/protocols.md b/crates/ty_python_semantic/resources/mdtest/protocols.md index 28069bd07c..40b180fb35 100644 --- a/crates/ty_python_semantic/resources/mdtest/protocols.md +++ b/crates/ty_python_semantic/resources/mdtest/protocols.md @@ -3010,6 +3010,31 @@ class Bar(Protocol[S]): z: S | Bar[S] ``` +### Recursive generic protocols with growing specializations + +This snippet caused a stack overflow in because the +type parameter grows with each recursive call (`C[set[T]]` leads to `C[set[set[T]]]`, then +`C[set[set[set[T]]]]`, etc.): + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Protocol + +class C[T](Protocol): + a: "C[set[T]]" + +def takes_c(c: C[set[int]]) -> None: ... +def f(c: C[int]) -> None: + # The key thing is that we don't stack overflow while checking this. + # The cycle detection assumes compatibility when it detects potential + # infinite recursion between protocol specializations. + takes_c(c) +``` + ### Recursive legacy generic protocol ```py diff --git a/crates/ty_python_semantic/resources/mdtest/regression/paramspec_on_python39.md b/crates/ty_python_semantic/resources/mdtest/regression/paramspec_on_python39.md new file mode 100644 index 0000000000..1760669cf0 --- /dev/null +++ b/crates/ty_python_semantic/resources/mdtest/regression/paramspec_on_python39.md @@ -0,0 +1,19 @@ +# `ParamSpec` regression on 3.9 + +```toml +[environment] +python-version = "3.9" +``` + +This used to panic when run on Python 3.9 because `ParamSpec` was introduced in Python 3.10 and the +diagnostic message for `invalid-exception-caught` expects to construct `typing.ParamSpec`. 
+ +```py +# error: [invalid-syntax] +def foo[**P]() -> None: + try: + pass + # error: [invalid-exception-caught] "Invalid object caught in an exception handler: Object has type `typing.ParamSpec`" + except P: + pass +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/annotations.md_-_Assignment_with_anno…_-_PEP-604_in_non-type-…_-_Earlier_versions_(f2859c9800f37c7).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/annotations.md_-_Assignment_with_anno…_-_PEP-604_in_non-type-…_-_Earlier_versions_(f2859c9800f37c7).snap index 3cfe98c8f7..83b964676a 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/annotations.md_-_Assignment_with_anno…_-_PEP-604_in_non-type-…_-_Earlier_versions_(f2859c9800f37c7).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/annotations.md_-_Assignment_with_anno…_-_PEP-604_in_non-type-…_-_Earlier_versions_(f2859c9800f37c7).snap @@ -19,7 +19,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/assignment/annotations.m # Diagnostics ``` -error[unsupported-operator]: Operator `|` is unsupported between objects of type `` and `` +error[unsupported-operator]: Operator `|` is not supported between objects of type `` and `` --> src/mdtest_snippet.py:2:12 | 1 | # error: [unsupported-operator] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/instances.md_-_Binary_operations_on…_-_Operations_involving…_(492b1163b8163c05).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/instances.md_-_Binary_operations_on…_-_Operations_involving…_(492b1163b8163c05).snap index ac85e00fa0..2fba47e078 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/instances.md_-_Binary_operations_on…_-_Operations_involving…_(492b1163b8163c05).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/instances.md_-_Binary_operations_on…_-_Operations_involving…_(492b1163b8163c05).snap @@ -24,7 +24,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/binary/instances.md # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:7:8 | 6 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/membership_test.md_-_Comparison___Membersh…_-_Return_type_that_doe…_(feccf6b9da1e7cd3).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/membership_test.md_-_Comparison___Membersh…_-_Return_type_that_doe…_(feccf6b9da1e7cd3).snap index d6698b03f4..26415ace68 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/membership_test.md_-_Comparison___Membersh…_-_Return_type_that_doe…_(feccf6b9da1e7cd3).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/membership_test.md_-_Comparison___Membersh…_-_Return_type_that_doe…_(feccf6b9da1e7cd3).snap @@ -28,7 +28,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/comparison/instances/mem # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:9:1 | 8 | # error: [unsupported-bool-conversion] @@ -43,7 +43,7 @@ info: rule `unsupported-bool-conversion` is enabled by default ``` ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not 
supported for type `NotBoolable` --> src/mdtest_snippet.py:11:1 | 9 | 10 in WithContains() diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/not.md_-_Unary_not_-_Object_that_implemen…_(ab3f546bf004e24d).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/not.md_-_Unary_not_-_Object_that_implemen…_(ab3f546bf004e24d).snap index defb8528ec..f5e88ebed0 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/not.md_-_Unary_not_-_Object_that_implemen…_(ab3f546bf004e24d).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/not.md_-_Unary_not_-_Object_that_implemen…_(ab3f546bf004e24d).snap @@ -22,7 +22,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/unary/not.md # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:5:1 | 4 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/rich_comparison.md_-_Comparison___Rich_Com…_-_Chained_comparisons_…_(c391c13e2abc18a0).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/rich_comparison.md_-_Comparison___Rich_Com…_-_Chained_comparisons_…_(c391c13e2abc18a0).snap index e40ecc8361..f62a90156b 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/rich_comparison.md_-_Comparison___Rich_Com…_-_Chained_comparisons_…_(c391c13e2abc18a0).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/rich_comparison.md_-_Comparison___Rich_Com…_-_Chained_comparisons_…_(c391c13e2abc18a0).snap @@ -33,7 +33,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/comparison/instances/ric # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:12:1 | 11 | # error: [unsupported-bool-conversion] @@ -48,7 +48,7 @@ info: rule `unsupported-bool-conversion` is enabled by default ``` ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:14:1 | 12 | 10 < Comparable() < 20 diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Implicit_Super_Objec…_(f9e5e48e3a4a4c12).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Implicit_Super_Objec…_(f9e5e48e3a4a4c12).snap index 75eefd748b..8a51223e6d 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Implicit_Super_Objec…_(f9e5e48e3a4a4c12).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/super.md_-_Super_-_Basic_Usage_-_Implicit_Super_Objec…_(f9e5e48e3a4a4c12).snap @@ -27,135 +27,134 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/class/super.md 13 | 14 | @classmethod 15 | def f(cls): - 16 | # TODO: Once `cls` is supported, this should be `, >` - 17 | reveal_type(super()) # revealed: , Unknown> - 18 | super().f() - 19 | - 20 | super(B, B(42)).__init__(42) - 21 | super(B, B).f() - 22 | import enum - 23 | from typing import Any, Self, Never, Protocol, Callable - 24 | from ty_extensions import Intersection - 25 | - 26 | class BuilderMeta(type): - 27 | def __new__( - 28 | cls: type[Any], - 29 | name: str, - 30 | bases: tuple[type, ...], - 
31 | dct: dict[str, Any], - 32 | ) -> BuilderMeta: - 33 | # revealed: , Any> - 34 | s = reveal_type(super()) - 35 | # revealed: Any - 36 | return reveal_type(s.__new__(cls, name, bases, dct)) - 37 | - 38 | class BuilderMeta2(type): - 39 | def __new__( - 40 | cls: type[BuilderMeta2], - 41 | name: str, - 42 | bases: tuple[type, ...], - 43 | dct: dict[str, Any], - 44 | ) -> BuilderMeta2: - 45 | # revealed: , > - 46 | s = reveal_type(super()) - 47 | return reveal_type(s.__new__(cls, name, bases, dct)) # revealed: BuilderMeta2 - 48 | - 49 | class Foo[T]: - 50 | x: T - 51 | - 52 | def method(self: Any): - 53 | reveal_type(super()) # revealed: , Any> - 54 | - 55 | if isinstance(self, Foo): - 56 | reveal_type(super()) # revealed: , Any> - 57 | - 58 | def method2(self: Foo[T]): - 59 | # revealed: , Foo[T@Foo]> - 60 | reveal_type(super()) - 61 | - 62 | def method3(self: Foo): - 63 | # revealed: , Foo[Unknown]> - 64 | reveal_type(super()) - 65 | - 66 | def method4(self: Self): - 67 | # revealed: , Foo[T@Foo]> - 68 | reveal_type(super()) - 69 | - 70 | def method5[S: Foo[int]](self: S, other: S) -> S: - 71 | # revealed: , Foo[int]> - 72 | reveal_type(super()) - 73 | return self - 74 | - 75 | def method6[S: (Foo[int], Foo[str])](self: S, other: S) -> S: - 76 | # revealed: , Foo[int]> | , Foo[str]> - 77 | reveal_type(super()) - 78 | return self - 79 | - 80 | def method7[S](self: S, other: S) -> S: - 81 | # error: [invalid-super-argument] - 82 | # revealed: Unknown - 83 | reveal_type(super()) - 84 | return self - 85 | - 86 | def method8[S: int](self: S, other: S) -> S: - 87 | # error: [invalid-super-argument] - 88 | # revealed: Unknown - 89 | reveal_type(super()) - 90 | return self - 91 | - 92 | def method9[S: (int, str)](self: S, other: S) -> S: - 93 | # error: [invalid-super-argument] - 94 | # revealed: Unknown - 95 | reveal_type(super()) - 96 | return self - 97 | - 98 | def method10[S: Callable[..., str]](self: S, other: S) -> S: - 99 | # error: [invalid-super-argument] -100 | # revealed: Unknown -101 | reveal_type(super()) -102 | return self -103 | -104 | type Alias = Bar -105 | -106 | class Bar: -107 | def method(self: Alias): -108 | # revealed: , Bar> -109 | reveal_type(super()) -110 | -111 | def pls_dont_call_me(self: Never): -112 | # revealed: , Unknown> -113 | reveal_type(super()) -114 | -115 | def only_call_me_on_callable_subclasses(self: Intersection[Bar, Callable[..., object]]): -116 | # revealed: , Bar> -117 | reveal_type(super()) -118 | -119 | class P(Protocol): -120 | def method(self: P): -121 | # revealed: , P> -122 | reveal_type(super()) -123 | -124 | class E(enum.Enum): -125 | X = 1 -126 | -127 | def method(self: E): -128 | match self: -129 | case E.X: -130 | # revealed: , E> -131 | reveal_type(super()) + 16 | reveal_type(super()) # revealed: , > + 17 | super().f() + 18 | + 19 | super(B, B(42)).__init__(42) + 20 | super(B, B).f() + 21 | import enum + 22 | from typing import Any, Self, Never, Protocol, Callable + 23 | from ty_extensions import Intersection + 24 | + 25 | class BuilderMeta(type): + 26 | def __new__( + 27 | cls: type[Any], + 28 | name: str, + 29 | bases: tuple[type, ...], + 30 | dct: dict[str, Any], + 31 | ) -> BuilderMeta: + 32 | # revealed: , Any> + 33 | s = reveal_type(super()) + 34 | # revealed: Any + 35 | return reveal_type(s.__new__(cls, name, bases, dct)) + 36 | + 37 | class BuilderMeta2(type): + 38 | def __new__( + 39 | cls: type[BuilderMeta2], + 40 | name: str, + 41 | bases: tuple[type, ...], + 42 | dct: dict[str, Any], + 43 | ) -> BuilderMeta2: + 44 | # revealed: , 
> + 45 | s = reveal_type(super()) + 46 | return reveal_type(s.__new__(cls, name, bases, dct)) # revealed: BuilderMeta2 + 47 | + 48 | class Foo[T]: + 49 | x: T + 50 | + 51 | def method(self: Any): + 52 | reveal_type(super()) # revealed: , Any> + 53 | + 54 | if isinstance(self, Foo): + 55 | reveal_type(super()) # revealed: , Any> + 56 | + 57 | def method2(self: Foo[T]): + 58 | # revealed: , Foo[T@Foo]> + 59 | reveal_type(super()) + 60 | + 61 | def method3(self: Foo): + 62 | # revealed: , Foo[Unknown]> + 63 | reveal_type(super()) + 64 | + 65 | def method4(self: Self): + 66 | # revealed: , Foo[T@Foo]> + 67 | reveal_type(super()) + 68 | + 69 | def method5[S: Foo[int]](self: S, other: S) -> S: + 70 | # revealed: , Foo[int]> + 71 | reveal_type(super()) + 72 | return self + 73 | + 74 | def method6[S: (Foo[int], Foo[str])](self: S, other: S) -> S: + 75 | # revealed: , Foo[int]> | , Foo[str]> + 76 | reveal_type(super()) + 77 | return self + 78 | + 79 | def method7[S](self: S, other: S) -> S: + 80 | # error: [invalid-super-argument] + 81 | # revealed: Unknown + 82 | reveal_type(super()) + 83 | return self + 84 | + 85 | def method8[S: int](self: S, other: S) -> S: + 86 | # error: [invalid-super-argument] + 87 | # revealed: Unknown + 88 | reveal_type(super()) + 89 | return self + 90 | + 91 | def method9[S: (int, str)](self: S, other: S) -> S: + 92 | # error: [invalid-super-argument] + 93 | # revealed: Unknown + 94 | reveal_type(super()) + 95 | return self + 96 | + 97 | def method10[S: Callable[..., str]](self: S, other: S) -> S: + 98 | # error: [invalid-super-argument] + 99 | # revealed: Unknown +100 | reveal_type(super()) +101 | return self +102 | +103 | type Alias = Bar +104 | +105 | class Bar: +106 | def method(self: Alias): +107 | # revealed: , Bar> +108 | reveal_type(super()) +109 | +110 | def pls_dont_call_me(self: Never): +111 | # revealed: , Unknown> +112 | reveal_type(super()) +113 | +114 | def only_call_me_on_callable_subclasses(self: Intersection[Bar, Callable[..., object]]): +115 | # revealed: , Bar> +116 | reveal_type(super()) +117 | +118 | class P(Protocol): +119 | def method(self: P): +120 | # revealed: , P> +121 | reveal_type(super()) +122 | +123 | class E(enum.Enum): +124 | X = 1 +125 | +126 | def method(self: E): +127 | match self: +128 | case E.X: +129 | # revealed: , E> +130 | reveal_type(super()) ``` # Diagnostics ``` error[invalid-super-argument]: `S@method7` is not an instance or subclass of `` in `super(, S@method7)` call - --> src/mdtest_snippet.py:83:21 + --> src/mdtest_snippet.py:82:21 | -81 | # error: [invalid-super-argument] -82 | # revealed: Unknown -83 | reveal_type(super()) +80 | # error: [invalid-super-argument] +81 | # revealed: Unknown +82 | reveal_type(super()) | ^^^^^^^ -84 | return self +83 | return self | info: Type variable `S` has `object` as its implicit upper bound info: `object` is not an instance or subclass of `` @@ -166,13 +165,13 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `S@method8` is not an instance or subclass of `` in `super(, S@method8)` call - --> src/mdtest_snippet.py:89:21 + --> src/mdtest_snippet.py:88:21 | -87 | # error: [invalid-super-argument] -88 | # revealed: Unknown -89 | reveal_type(super()) +86 | # error: [invalid-super-argument] +87 | # revealed: Unknown +88 | reveal_type(super()) | ^^^^^^^ -90 | return self +89 | return self | info: Type variable `S` has upper bound `int` info: `int` is not an instance or subclass of `` @@ -182,13 +181,13 @@ info: rule `invalid-super-argument` is 
enabled by default ``` error[invalid-super-argument]: `S@method9` is not an instance or subclass of `` in `super(, S@method9)` call - --> src/mdtest_snippet.py:95:21 + --> src/mdtest_snippet.py:94:21 | -93 | # error: [invalid-super-argument] -94 | # revealed: Unknown -95 | reveal_type(super()) +92 | # error: [invalid-super-argument] +93 | # revealed: Unknown +94 | reveal_type(super()) | ^^^^^^^ -96 | return self +95 | return self | info: Type variable `S` has constraints `int, str` info: `int | str` is not an instance or subclass of `` @@ -198,13 +197,13 @@ info: rule `invalid-super-argument` is enabled by default ``` error[invalid-super-argument]: `S@method10` is a type variable with an abstract/structural type as its bounds or constraints, in `super(, S@method10)` call - --> src/mdtest_snippet.py:101:21 + --> src/mdtest_snippet.py:100:21 | - 99 | # error: [invalid-super-argument] -100 | # revealed: Unknown -101 | reveal_type(super()) + 98 | # error: [invalid-super-argument] + 99 | # revealed: Unknown +100 | reveal_type(super()) | ^^^^^^^ -102 | return self +101 | return self | info: Type variable `S` has upper bound `(...) -> str` info: rule `invalid-super-argument` is enabled by default diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Chained_comparisons_…_(f45f1da2f8ca693d).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Chained_comparisons_…_(f45f1da2f8ca693d).snap index e8fe6a5285..52bb93b61e 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Chained_comparisons_…_(f45f1da2f8ca693d).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Chained_comparisons_…_(f45f1da2f8ca693d).snap @@ -34,7 +34,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/comparison/tuples.md # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable | Literal[False]` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable | Literal[False]` --> src/mdtest_snippet.py:15:1 | 14 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Equality_with_elemen…_(39b614d4707c0661).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Equality_with_elemen…_(39b614d4707c0661).snap index 2c9fbd885a..c530be195b 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Equality_with_elemen…_(39b614d4707c0661).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/tuples.md_-_Comparison___Tuples_-_Equality_with_elemen…_(39b614d4707c0661).snap @@ -58,7 +58,7 @@ info: rule `invalid-method-override` is enabled by default ``` ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:10:1 | 9 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Redundant_cast_warni…_(75ac240a2d1f7108).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Redundant_cast_warni…_(75ac240a2d1f7108).snap new file mode 100644 index 0000000000..16f2235fc9 --- /dev/null +++ 
b/crates/ty_python_semantic/resources/mdtest/snapshots/typed_dict.md_-_`TypedDict`_-_Redundant_cast_warni…_(75ac240a2d1f7108).snap @@ -0,0 +1,55 @@ +--- +source: crates/ty_test/src/lib.rs +expression: snapshot +--- +--- +mdtest name: typed_dict.md - `TypedDict` - Redundant cast warnings +mdtest path: crates/ty_python_semantic/resources/mdtest/typed_dict.md +--- + +# Python source files + +## mdtest_snippet.py + +``` + 1 | from typing import TypedDict, cast + 2 | + 3 | class Foo2(TypedDict): + 4 | x: int + 5 | + 6 | class Bar2(TypedDict): + 7 | x: int + 8 | + 9 | foo: Foo2 = {"x": 1} +10 | _ = cast(Foo2, foo) # error: [redundant-cast] +11 | _ = cast(Bar2, foo) # error: [redundant-cast] +``` + +# Diagnostics + +``` +warning[redundant-cast]: Value is already of type `Foo2` + --> src/mdtest_snippet.py:10:5 + | + 9 | foo: Foo2 = {"x": 1} +10 | _ = cast(Foo2, foo) # error: [redundant-cast] + | ^^^^^^^^^^^^^^^ +11 | _ = cast(Bar2, foo) # error: [redundant-cast] + | +info: rule `redundant-cast` is enabled by default + +``` + +``` +warning[redundant-cast]: Value is already of type `Bar2` + --> src/mdtest_snippet.py:11:5 + | + 9 | foo: Foo2 = {"x": 1} +10 | _ = cast(Foo2, foo) # error: [redundant-cast] +11 | _ = cast(Bar2, foo) # error: [redundant-cast] + | ^^^^^^^^^^^^^^^ + | +info: `Bar2` is equivalent to `Foo2` +info: rule `redundant-cast` is enabled by default + +``` diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_att…_(2721d40bf12fe8b7).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_att…_(2721d40bf12fe8b7).snap index 343aaccc77..bcb4ffb0c6 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_att…_(2721d40bf12fe8b7).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_att…_(2721d40bf12fe8b7).snap @@ -24,7 +24,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/unsupported_ # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:7:8 | 6 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(15636dc4074e5335).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(15636dc4074e5335).snap index 09343ef5d6..b3a86fac93 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(15636dc4074e5335).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(15636dc4074e5335).snap @@ -25,7 +25,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/unsupported_ # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:8:8 | 7 | # error: [unsupported-bool-conversion] diff --git 
a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(ce8b8da49eaf4cda).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(ce8b8da49eaf4cda).snap index 9957e4c64f..0e35519ee4 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(ce8b8da49eaf4cda).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Has_a_`__bool__`_met…_(ce8b8da49eaf4cda).snap @@ -25,7 +25,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/unsupported_ # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for type `NotBoolable` +error[unsupported-bool-conversion]: Boolean conversion is not supported for type `NotBoolable` --> src/mdtest_snippet.py:8:8 | 7 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Part_of_a_union_wher…_(7cca8063ea43c1a).snap b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Part_of_a_union_wher…_(7cca8063ea43c1a).snap index 22d0cc6ada..d86b17b1dc 100644 --- a/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Part_of_a_union_wher…_(7cca8063ea43c1a).snap +++ b/crates/ty_python_semantic/resources/mdtest/snapshots/unsupported_bool_con…_-_Different_ways_that_…_-_Part_of_a_union_wher…_(7cca8063ea43c1a).snap @@ -32,7 +32,7 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/diagnostics/unsupported_ # Diagnostics ``` -error[unsupported-bool-conversion]: Boolean conversion is unsupported for union `NotBoolable1 | NotBoolable2 | NotBoolable3` because `NotBoolable1` doesn't implement `__bool__` correctly +error[unsupported-bool-conversion]: Boolean conversion is not supported for union `NotBoolable1 | NotBoolable2 | NotBoolable3` because `NotBoolable1` doesn't implement `__bool__` correctly --> src/mdtest_snippet.py:15:8 | 14 | # error: [unsupported-bool-conversion] diff --git a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md index 9cb9ca40f4..09315697da 100644 --- a/crates/ty_python_semantic/resources/mdtest/ty_extensions.md +++ b/crates/ty_python_semantic/resources/mdtest/ty_extensions.md @@ -237,7 +237,7 @@ class InvalidBoolDunder: def __bool__(self) -> int: return 1 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `InvalidBoolDunder`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `InvalidBoolDunder`" static_assert(InvalidBoolDunder()) ``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_of/basic.md b/crates/ty_python_semantic/resources/mdtest/type_of/basic.md index 4e3d9e9f07..6d747cb6f1 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_of/basic.md +++ b/crates/ty_python_semantic/resources/mdtest/type_of/basic.md @@ -152,61 +152,6 @@ class Foo(type[int]): ... reveal_mro(Foo) # revealed: (, , ) ``` -## Display of generic `type[]` types - -```toml -[environment] -python-version = "3.12" -``` - -```py -from typing import Generic, TypeVar - -class Foo[T]: ... - -S = TypeVar("S") - -class Bar(Generic[S]): ... 
- -def _(x: Foo[int], y: Bar[str], z: list[bytes]): - reveal_type(type(x)) # revealed: type[Foo[int]] - reveal_type(type(y)) # revealed: type[Bar[str]] - reveal_type(type(z)) # revealed: type[list[bytes]] -``` - -## Checking generic `type[]` types - -```toml -[environment] -python-version = "3.12" -``` - -```py -class C[T]: - pass - -class D[T]: - pass - -var: type[C[int]] = C[int] -var: type[C[int]] = D[int] # error: [invalid-assignment] "Object of type `` is not assignable to `type[C[int]]`" -``` - -However, generic `Protocol` classes are still TODO: - -```py -from typing import Protocol - -class Proto[U](Protocol): - def some_method(self): ... - -# TODO: should be error: [invalid-assignment] -var: type[Proto[int]] = C[int] - -def _(p: type[Proto[int]]): - reveal_type(p) # revealed: type[@Todo(type[T] for protocols)] -``` - ## `@final` classes `type[]` types are eagerly converted to class-literal types if a class decorated with `@final` is @@ -215,7 +160,7 @@ same also applies to enum classes with members, which are implicitly final: ```toml [environment] -python-version = "3.10" +python-version = "3.12" ``` ```py @@ -235,3 +180,177 @@ def _(x: type[Foo], y: type[EllipsisType], z: type[Answer]): reveal_type(y) # revealed: reveal_type(z) # revealed: ``` + +## Subtyping `@final` classes + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import final, Any +from ty_extensions import is_assignable_to, is_subtype_of, is_disjoint_from, static_assert + +class Biv[T]: ... + +class Cov[T]: + def pop(self) -> T: + raise NotImplementedError + +class Contra[T]: + def push(self, value: T) -> None: + pass + +class Inv[T]: + x: T + +@final +class BivSub[T](Biv[T]): ... + +@final +class CovSub[T](Cov[T]): ... + +@final +class ContraSub[T](Contra[T]): ... + +@final +class InvSub[T](Inv[T]): ... + +def _[T, U](): + static_assert(is_subtype_of(type[BivSub[T]], type[BivSub[U]])) + static_assert(not is_disjoint_from(type[BivSub[U]], type[BivSub[T]])) + + # `T` and `U` could specialize to the same type. + static_assert(not is_subtype_of(type[CovSub[T]], type[CovSub[U]])) + static_assert(not is_disjoint_from(type[CovSub[U]], type[CovSub[T]])) + + static_assert(not is_subtype_of(type[ContraSub[T]], type[ContraSub[U]])) + static_assert(not is_disjoint_from(type[ContraSub[U]], type[ContraSub[T]])) + + static_assert(not is_subtype_of(type[InvSub[T]], type[InvSub[U]])) + static_assert(not is_disjoint_from(type[InvSub[U]], type[InvSub[T]])) + +def _(): + static_assert(is_subtype_of(type[BivSub[bool]], type[BivSub[int]])) + static_assert(is_subtype_of(type[BivSub[int]], type[BivSub[bool]])) + static_assert(not is_disjoint_from(type[BivSub[bool]], type[BivSub[int]])) + # `BivSub[int]` and `BivSub[str]` are mutual subtypes. + static_assert(not is_disjoint_from(type[BivSub[int]], type[BivSub[str]])) + + static_assert(is_subtype_of(type[CovSub[bool]], type[CovSub[int]])) + static_assert(not is_subtype_of(type[CovSub[int]], type[CovSub[bool]])) + static_assert(not is_disjoint_from(type[CovSub[bool]], type[CovSub[int]])) + # `CovSub[Never]` is a subtype of both `CovSub[int]` and `CovSub[str]`. + static_assert(not is_disjoint_from(type[CovSub[int]], type[CovSub[str]])) + + static_assert(not is_subtype_of(type[ContraSub[bool]], type[ContraSub[int]])) + static_assert(is_subtype_of(type[ContraSub[int]], type[ContraSub[bool]])) + static_assert(not is_disjoint_from(type[ContraSub[bool]], type[ContraSub[int]])) + # `ContraSub[int | str]` is a subtype of both `ContraSub[int]` and `ContraSub[str]`. 
+ static_assert(not is_disjoint_from(type[ContraSub[int]], type[ContraSub[str]])) + + static_assert(not is_subtype_of(type[InvSub[bool]], type[InvSub[int]])) + static_assert(not is_subtype_of(type[InvSub[int]], type[InvSub[bool]])) + static_assert(is_disjoint_from(type[InvSub[int]], type[InvSub[str]])) + # TODO: These are disjoint. + static_assert(not is_disjoint_from(type[InvSub[bool]], type[InvSub[int]])) + +def _[T](): + static_assert(is_subtype_of(type[BivSub[T]], type[BivSub[Any]])) + static_assert(is_subtype_of(type[BivSub[Any]], type[BivSub[T]])) + static_assert(is_assignable_to(type[BivSub[T]], type[BivSub[Any]])) + static_assert(is_assignable_to(type[BivSub[Any]], type[BivSub[T]])) + static_assert(not is_disjoint_from(type[BivSub[T]], type[BivSub[Any]])) + + static_assert(not is_subtype_of(type[CovSub[T]], type[CovSub[Any]])) + static_assert(not is_subtype_of(type[CovSub[Any]], type[CovSub[T]])) + static_assert(is_assignable_to(type[CovSub[T]], type[CovSub[Any]])) + static_assert(is_assignable_to(type[CovSub[Any]], type[CovSub[T]])) + static_assert(not is_disjoint_from(type[CovSub[T]], type[CovSub[Any]])) + + static_assert(not is_subtype_of(type[ContraSub[T]], type[ContraSub[Any]])) + static_assert(not is_subtype_of(type[ContraSub[Any]], type[ContraSub[T]])) + static_assert(is_assignable_to(type[ContraSub[T]], type[ContraSub[Any]])) + static_assert(is_assignable_to(type[ContraSub[Any]], type[ContraSub[T]])) + static_assert(not is_disjoint_from(type[ContraSub[T]], type[ContraSub[Any]])) + + static_assert(not is_subtype_of(type[InvSub[T]], type[InvSub[Any]])) + static_assert(not is_subtype_of(type[InvSub[Any]], type[InvSub[T]])) + static_assert(is_assignable_to(type[InvSub[T]], type[InvSub[Any]])) + static_assert(is_assignable_to(type[InvSub[Any]], type[InvSub[T]])) + static_assert(not is_disjoint_from(type[InvSub[T]], type[InvSub[Any]])) + +def _[T, U](): + static_assert(is_subtype_of(type[BivSub[T]], type[Biv[T]])) + static_assert(not is_subtype_of(type[Biv[T]], type[BivSub[T]])) + static_assert(not is_disjoint_from(type[BivSub[T]], type[Biv[T]])) + static_assert(not is_disjoint_from(type[BivSub[U]], type[Biv[T]])) + static_assert(not is_disjoint_from(type[BivSub[U]], type[Biv[U]])) + + static_assert(is_subtype_of(type[CovSub[T]], type[Cov[T]])) + static_assert(not is_subtype_of(type[Cov[T]], type[CovSub[T]])) + static_assert(not is_disjoint_from(type[CovSub[T]], type[Cov[T]])) + static_assert(not is_disjoint_from(type[CovSub[U]], type[Cov[T]])) + static_assert(not is_disjoint_from(type[CovSub[U]], type[Cov[U]])) + + static_assert(is_subtype_of(type[ContraSub[T]], type[Contra[T]])) + static_assert(not is_subtype_of(type[Contra[T]], type[ContraSub[T]])) + static_assert(not is_disjoint_from(type[ContraSub[T]], type[Contra[T]])) + static_assert(not is_disjoint_from(type[ContraSub[U]], type[Contra[T]])) + static_assert(not is_disjoint_from(type[ContraSub[U]], type[Contra[U]])) + + static_assert(is_subtype_of(type[InvSub[T]], type[Inv[T]])) + static_assert(not is_subtype_of(type[Inv[T]], type[InvSub[T]])) + static_assert(not is_disjoint_from(type[InvSub[T]], type[Inv[T]])) + static_assert(not is_disjoint_from(type[InvSub[U]], type[Inv[T]])) + static_assert(not is_disjoint_from(type[InvSub[U]], type[Inv[U]])) + +def _(): + static_assert(is_subtype_of(type[BivSub[bool]], type[Biv[int]])) + static_assert(is_subtype_of(type[BivSub[int]], type[Biv[bool]])) + static_assert(not is_disjoint_from(type[BivSub[bool]], type[Biv[int]])) + static_assert(not is_disjoint_from(type[BivSub[int]], 
type[Biv[bool]])) + + static_assert(is_subtype_of(type[CovSub[bool]], type[Cov[int]])) + static_assert(not is_subtype_of(type[CovSub[int]], type[Cov[bool]])) + static_assert(not is_disjoint_from(type[CovSub[bool]], type[Cov[int]])) + static_assert(not is_disjoint_from(type[CovSub[int]], type[Cov[bool]])) + + static_assert(not is_subtype_of(type[ContraSub[bool]], type[Contra[int]])) + static_assert(is_subtype_of(type[ContraSub[int]], type[Contra[bool]])) + static_assert(not is_disjoint_from(type[ContraSub[int]], type[Contra[bool]])) + static_assert(not is_disjoint_from(type[ContraSub[bool]], type[Contra[int]])) + + static_assert(not is_subtype_of(type[InvSub[bool]], type[Inv[int]])) + static_assert(not is_subtype_of(type[InvSub[int]], type[Inv[bool]])) + # TODO: These are disjoint. + static_assert(not is_disjoint_from(type[InvSub[bool]], type[Inv[int]])) + # TODO: These are disjoint. + static_assert(not is_disjoint_from(type[InvSub[int]], type[Inv[bool]])) + +def _[T](): + static_assert(is_subtype_of(type[BivSub[T]], type[Biv[Any]])) + static_assert(is_subtype_of(type[BivSub[Any]], type[Biv[T]])) + static_assert(is_assignable_to(type[BivSub[T]], type[Biv[Any]])) + static_assert(is_assignable_to(type[BivSub[Any]], type[Biv[T]])) + static_assert(not is_disjoint_from(type[BivSub[T]], type[Biv[Any]])) + + static_assert(not is_subtype_of(type[CovSub[T]], type[Cov[Any]])) + static_assert(not is_subtype_of(type[CovSub[Any]], type[Cov[T]])) + static_assert(is_assignable_to(type[CovSub[T]], type[Cov[Any]])) + static_assert(is_assignable_to(type[CovSub[Any]], type[Cov[T]])) + static_assert(not is_disjoint_from(type[CovSub[T]], type[Cov[Any]])) + + static_assert(not is_subtype_of(type[ContraSub[T]], type[Contra[Any]])) + static_assert(not is_subtype_of(type[ContraSub[Any]], type[Contra[T]])) + static_assert(is_assignable_to(type[ContraSub[T]], type[Contra[Any]])) + static_assert(is_assignable_to(type[ContraSub[Any]], type[Contra[T]])) + static_assert(not is_disjoint_from(type[ContraSub[T]], type[Contra[Any]])) + + static_assert(not is_subtype_of(type[InvSub[T]], type[Inv[Any]])) + static_assert(not is_subtype_of(type[InvSub[Any]], type[Inv[T]])) + static_assert(is_assignable_to(type[InvSub[T]], type[Inv[Any]])) + static_assert(is_assignable_to(type[InvSub[Any]], type[Inv[T]])) + static_assert(not is_disjoint_from(type[InvSub[T]], type[Inv[Any]])) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_of/generics.md b/crates/ty_python_semantic/resources/mdtest/type_of/generics.md index 3c8f157dad..198390adb9 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_of/generics.md +++ b/crates/ty_python_semantic/resources/mdtest/type_of/generics.md @@ -274,3 +274,180 @@ class Foo[T]: ... # error: [invalid-parameter-default] "Default value of type `` is not assignable to annotated parameter type `type[T@f]`" def f[T: Foo[Any]](x: type[T] = Foo): ... ``` + +## Display of generic `type[]` types + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import Generic, TypeVar + +class Foo[T]: ... + +S = TypeVar("S") + +class Bar(Generic[S]): ... 
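+# Both the PEP 695 generic class `Foo` and the legacy `Generic[S]` class `Bar`
+# should have their specializations preserved in the revealed `type[...]` types below.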
+ +def _(x: Foo[int], y: Bar[str], z: list[bytes]): + reveal_type(type(x)) # revealed: type[Foo[int]] + reveal_type(type(y)) # revealed: type[Bar[str]] + reveal_type(type(z)) # revealed: type[list[bytes]] +``` + +## Checking generic `type[]` types + +```toml +[environment] +python-version = "3.12" +``` + +```py +class C[T]: + pass + +class D[T]: + pass + +var: type[C[int]] = C[int] +var: type[C[int]] = D[int] # error: [invalid-assignment] "Object of type `` is not assignable to `type[C[int]]`" +``` + +However, generic `Protocol` classes are still TODO: + +```py +from typing import Protocol + +class Proto[U](Protocol): + def some_method(self): ... + +# TODO: should be error: [invalid-assignment] +var: type[Proto[int]] = C[int] + +def _(p: type[Proto[int]]): + reveal_type(p) # revealed: type[@Todo(type[T] for protocols)] +``` + +## Generic `@final` classes + +```toml +[environment] +python-version = "3.13" +``` + +An unspecialized generic final class object is assignable to its default-specialized `type[]` type +(which is actually internally simplified to a GenericAlias type, since there cannot be subclasses.) + +```py +from typing import final + +@final +class P[T]: + x: T + +def expects_type_p(x: type[P]): + pass + +def expects_type_p_of_int(x: type[P[int]]): + pass + +# OK, the default specialization of `P` is assignable to `type[P[Unknown]]` +expects_type_p(P) + +# Also OK, because `P[int]` and `P[str]` are both assignable to `P[Unknown]` +expects_type_p(P[int]) +expects_type_p(P[str]) + +# Also OK, because the default specialization is `P[Unknown]` which is assignable to `P[int]` +expects_type_p_of_int(P) +expects_type_p_of_int(P[int]) + +# Not OK, because `P[str]` is not assignable to `P[int]` +expects_type_p_of_int(P[str]) # error: [invalid-argument-type] +``` + +The same principles apply when typevar defaults are used, but the results are a bit different +because the default-specialization is no longer a forgiving `Unknown` type: + +```py +@final +class P[T = str]: + x: T + +def expects_type_p(x: type[P]): + pass + +def expects_type_p_of_int(x: type[P[int]]): + pass + +def expects_type_p_of_str(x: type[P[str]]): + pass + +# OK, the default specialization is now `P[str]`, but we have the default specialization on both +# sides, so it is assignable. +expects_type_p(P) + +# Also OK if the explicit specialization lines up with the default, in either direction: +expects_type_p(P[str]) +expects_type_p_of_str(P) +expects_type_p_of_str(P[str]) + +# Not OK if the specializations don't line up: +expects_type_p(P[int]) # error: [invalid-argument-type] +expects_type_p_of_int(P[str]) # error: [invalid-argument-type] +expects_type_p_of_int(P) # error: [invalid-argument-type] +expects_type_p_of_str(P[int]) # error: [invalid-argument-type] +``` + +This also works with `ParamSpec`: + +```py +@final +class C[**P]: ... + +def expects_type_c(f: type[C]): ... +def expects_type_c_of_int_and_str(x: type[C[int, str]]): ... 
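+# Note: since `C` has a single `ParamSpec` parameter, `C[int, str]` is shorthand
+# for specializing `**P` to the parameter list `[int, str]`.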
+ +# OK, the unspecialized `C` is assignable to `type[C[...]]` +expects_type_c(C) + +# Also OK, any specialization is assignable to the unspecialized `C` +expects_type_c(C[int]) +expects_type_c(C[str, int, bytes]) + +# Ok, the unspecialized `C` is assignable to `type[C[int, str]]` +expects_type_c_of_int_and_str(C) + +# Also OK, the specialized `C[int, str]` is assignable to `type[C[int, str]]` +expects_type_c_of_int_and_str(C[int, str]) + +# TODO: these should be errors +expects_type_c_of_int_and_str(C[str]) +expects_type_c_of_int_and_str(C[int, str, bytes]) +expects_type_c_of_int_and_str(C[str, int]) +``` + +And with a `ParamSpec` that has a default: + +```py +@final +class C[**P = [int, str]]: ... + +def expects_type_c_default(f: type[C]): ... +def expects_type_c_default_of_int(f: type[C[int]]): ... +def expects_type_c_default_of_int_str(f: type[C[int, str]]): ... + +expects_type_c_default(C) +expects_type_c_default(C[int, str]) +expects_type_c_default_of_int(C) +expects_type_c_default_of_int(C[int]) +expects_type_c_default_of_int_str(C) +expects_type_c_default_of_int_str(C[int, str]) + +# TODO: these should be errors +expects_type_c_default(C[int]) +expects_type_c_default_of_int(C[str]) +expects_type_c_default_of_int_str(C[str, int]) +``` diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md index f83bee977c..ec4a31a711 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/constraints.md @@ -126,7 +126,7 @@ strict subtype of the lower bound, a strict supertype of the upper bound, or inc ```py from typing import Any, final, Never, Sequence -from ty_extensions import ConstraintSet, static_assert +from ty_extensions import ConstraintSet, Not, static_assert class Super: ... class Base(Super): ... @@ -207,6 +207,15 @@ def _[T]() -> None: static_assert(constraints == expected) ``` +A negated _type_ is not the same thing as a negated _range_. + +```py +def _[T]() -> None: + negated_type = ConstraintSet.range(Never, T, Not[int]) + negated_constraint = ~ConstraintSet.range(Never, T, int) + static_assert(negated_type != negated_constraint) +``` + ## Intersection The intersection of two constraint sets requires that the constraints in both sets hold. 
In many diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md index f9f85641f7..f2e38485c5 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_assignable_to.md @@ -1338,6 +1338,40 @@ def g3(obj: Foo[tuple[A]]): f3(obj) ``` +## Generic aliases + +```py +from typing import final +from ty_extensions import static_assert, is_assignable_to, TypeOf + +class GenericClass[T]: + x: T # invariant + +static_assert(is_assignable_to(TypeOf[GenericClass], type[GenericClass])) +static_assert(is_assignable_to(TypeOf[GenericClass[int]], type[GenericClass])) +static_assert(is_assignable_to(TypeOf[GenericClass], type[GenericClass[int]])) +static_assert(is_assignable_to(TypeOf[GenericClass[int]], type[GenericClass[int]])) +static_assert(not is_assignable_to(TypeOf[GenericClass[str]], type[GenericClass[int]])) + +class GenericClassIntBound[T: int]: + x: T # invariant + +static_assert(is_assignable_to(TypeOf[GenericClassIntBound], type[GenericClassIntBound])) +static_assert(is_assignable_to(TypeOf[GenericClassIntBound[int]], type[GenericClassIntBound])) +static_assert(is_assignable_to(TypeOf[GenericClassIntBound], type[GenericClassIntBound[int]])) +static_assert(is_assignable_to(TypeOf[GenericClassIntBound[int]], type[GenericClassIntBound[int]])) + +@final +class GenericFinalClass[T]: + x: T # invariant + +static_assert(is_assignable_to(TypeOf[GenericFinalClass], type[GenericFinalClass])) +static_assert(is_assignable_to(TypeOf[GenericFinalClass[int]], type[GenericFinalClass])) +static_assert(is_assignable_to(TypeOf[GenericFinalClass], type[GenericFinalClass[int]])) +static_assert(is_assignable_to(TypeOf[GenericFinalClass[int]], type[GenericFinalClass[int]])) +static_assert(not is_assignable_to(TypeOf[GenericFinalClass[str]], type[GenericFinalClass[int]])) +``` + ## `TypeGuard` and `TypeIs` `TypeGuard[...]` and `TypeIs[...]` are always assignable to `bool`. diff --git a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md index db4b0f5f98..d4aa7db231 100644 --- a/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md +++ b/crates/ty_python_semantic/resources/mdtest/type_properties/is_disjoint_from.md @@ -95,7 +95,7 @@ python-version = "3.12" ``` ```py -from typing import final +from typing import Any, final from ty_extensions import static_assert, is_disjoint_from @final @@ -106,9 +106,12 @@ class Foo[T]: class A: ... class B: ... +static_assert(not is_disjoint_from(A, B)) static_assert(not is_disjoint_from(Foo[A], Foo[B])) +static_assert(not is_disjoint_from(Foo[A], Foo[Any])) +static_assert(not is_disjoint_from(Foo[Any], Foo[B])) -# TODO: `int` and `str` are disjoint bases, so these should be disjoint. +# `Foo[Never]` is a subtype of both `Foo[int]` and `Foo[str]`. 
static_assert(not is_disjoint_from(Foo[int], Foo[str])) ``` @@ -666,6 +669,45 @@ static_assert(is_disjoint_from(Path, tuple[Path | None, str, int])) static_assert(is_disjoint_from(Path, Path2)) ``` +## Generic aliases + +```toml +[environment] +python-version = "3.12" +``` + +```py +from typing import final +from ty_extensions import static_assert, is_disjoint_from, TypeOf + +class GenericClass[T]: + x: T # invariant + +static_assert(not is_disjoint_from(TypeOf[GenericClass], type[GenericClass])) +static_assert(not is_disjoint_from(TypeOf[GenericClass[int]], type[GenericClass])) +static_assert(not is_disjoint_from(TypeOf[GenericClass], type[GenericClass[int]])) +static_assert(not is_disjoint_from(TypeOf[GenericClass[int]], type[GenericClass[int]])) +static_assert(is_disjoint_from(TypeOf[GenericClass[str]], type[GenericClass[int]])) + +class GenericClassIntBound[T: int]: + x: T # invariant + +static_assert(not is_disjoint_from(TypeOf[GenericClassIntBound], type[GenericClassIntBound])) +static_assert(not is_disjoint_from(TypeOf[GenericClassIntBound[int]], type[GenericClassIntBound])) +static_assert(not is_disjoint_from(TypeOf[GenericClassIntBound], type[GenericClassIntBound[int]])) +static_assert(not is_disjoint_from(TypeOf[GenericClassIntBound[int]], type[GenericClassIntBound[int]])) + +@final +class GenericFinalClass[T]: + x: T # invariant + +static_assert(not is_disjoint_from(TypeOf[GenericFinalClass], type[GenericFinalClass])) +static_assert(not is_disjoint_from(TypeOf[GenericFinalClass[int]], type[GenericFinalClass])) +static_assert(not is_disjoint_from(TypeOf[GenericFinalClass], type[GenericFinalClass[int]])) +static_assert(not is_disjoint_from(TypeOf[GenericFinalClass[int]], type[GenericFinalClass[int]])) +static_assert(is_disjoint_from(TypeOf[GenericFinalClass[str]], type[GenericFinalClass[int]])) +``` + ## Callables No two callable types are disjoint because there exists a non-empty callable type diff --git a/crates/ty_python_semantic/resources/mdtest/typed_dict.md b/crates/ty_python_semantic/resources/mdtest/typed_dict.md index aadf8249ae..ad15f28250 100644 --- a/crates/ty_python_semantic/resources/mdtest/typed_dict.md +++ b/crates/ty_python_semantic/resources/mdtest/typed_dict.md @@ -868,6 +868,172 @@ def _(o1: Outer1, o2: Outer2, o3: Outer3, o4: Outer4): static_assert(is_subtype_of(Outer4, Outer4)) ``` +## Structural equivalence + +Two `TypedDict`s with equivalent fields are equivalent types. This includes fields with gradual +types: + +```py +from typing_extensions import Any, TypedDict, ReadOnly, assert_type +from ty_extensions import is_assignable_to, is_equivalent_to, static_assert + +class Foo(TypedDict): + x: int + y: Any + +# exactly the same fields +class Bar(TypedDict): + x: int + y: Any + +# the same fields but in a different order +class Baz(TypedDict): + y: Any + x: int + +static_assert(is_assignable_to(Foo, Bar)) +static_assert(is_equivalent_to(Foo, Bar)) +static_assert(is_assignable_to(Foo, Baz)) +static_assert(is_equivalent_to(Foo, Baz)) + +foo: Foo = {"x": 1, "y": "hello"} +assert_type(foo, Foo) +assert_type(foo, Bar) +assert_type(foo, Baz) +``` + +Equivalent `TypedDict`s within unions can also produce equivalent unions, which currently relies on +"normalization" machinery: + +```py +def f(var: Foo | int): + assert_type(var, Foo | int) + assert_type(var, Bar | int) + assert_type(var, Baz | int) + # TODO: Union simplification compares `TypedDict`s by name/identity to avoid cycles. This assert + # should also pass once that's fixed. 
+ assert_type(var, Foo | Bar | Baz | int) # error: [type-assertion-failure] +``` + +Here are several cases that are not equivalent. In particular, assignability does not imply +equivalence: + +```py +class FewerFields(TypedDict): + x: int + +static_assert(is_assignable_to(Foo, FewerFields)) +static_assert(not is_equivalent_to(Foo, FewerFields)) + +class DifferentMutability(TypedDict): + x: int + y: ReadOnly[Any] + +static_assert(is_assignable_to(Foo, DifferentMutability)) +static_assert(not is_equivalent_to(Foo, DifferentMutability)) + +class MoreFields(TypedDict): + x: int + y: Any + z: str + +static_assert(not is_assignable_to(Foo, MoreFields)) +static_assert(not is_equivalent_to(Foo, MoreFields)) + +class DifferentFieldStaticType(TypedDict): + x: str + y: Any + +static_assert(not is_assignable_to(Foo, DifferentFieldStaticType)) +static_assert(not is_equivalent_to(Foo, DifferentFieldStaticType)) + +class DifferentFieldGradualType(TypedDict): + x: int + y: Any | str + +static_assert(is_assignable_to(Foo, DifferentFieldGradualType)) +static_assert(not is_equivalent_to(Foo, DifferentFieldGradualType)) +``` + +## Structural equivalence understands the interaction between `Required`/`NotRequired` and `total` + +```py +from ty_extensions import static_assert, is_equivalent_to +from typing_extensions import TypedDict, Required, NotRequired + +class Foo1(TypedDict, total=False): + x: int + y: str + +class Foo2(TypedDict): + y: NotRequired[str] + x: NotRequired[int] + +static_assert(is_equivalent_to(Foo1, Foo2)) +static_assert(is_equivalent_to(Foo1 | int, int | Foo2)) + +class Bar1(TypedDict, total=False): + x: int + y: Required[str] + +class Bar2(TypedDict): + y: str + x: NotRequired[int] + +static_assert(is_equivalent_to(Bar1, Bar2)) +static_assert(is_equivalent_to(Bar1 | int, int | Bar2)) +``` + +## Assignability and equivalence work with recursive `TypedDict`s + +```py +from typing_extensions import TypedDict +from ty_extensions import static_assert, is_assignable_to, is_equivalent_to + +class Node1(TypedDict): + value: int + next: "Node1" | None + +class Node2(TypedDict): + value: int + next: "Node2" | None + +static_assert(is_assignable_to(Node1, Node2)) +static_assert(is_equivalent_to(Node1, Node2)) + +class Person1(TypedDict): + name: str + friends: list["Person1"] + +class Person2(TypedDict): + name: str + friends: list["Person2"] + +static_assert(is_assignable_to(Person1, Person2)) +static_assert(is_equivalent_to(Person1, Person2)) +``` + +## Redundant cast warnings + + + +Casting between equivalent types produces a redundant cast warning. 
When the types have different +names, the warning makes that clear: + +```py +from typing import TypedDict, cast + +class Foo2(TypedDict): + x: int + +class Bar2(TypedDict): + x: int + +foo: Foo2 = {"x": 1} +_ = cast(Foo2, foo) # error: [redundant-cast] +_ = cast(Bar2, foo) # error: [redundant-cast] +``` + ## Key-based access ### Reading diff --git a/crates/ty_python_semantic/resources/mdtest/unary/custom.md b/crates/ty_python_semantic/resources/mdtest/unary/custom.md index 1544e42890..2b2eb3a619 100644 --- a/crates/ty_python_semantic/resources/mdtest/unary/custom.md +++ b/crates/ty_python_semantic/resources/mdtest/unary/custom.md @@ -24,11 +24,11 @@ reveal_type(+Sub()) # revealed: bool reveal_type(-Sub()) # revealed: str reveal_type(~Sub()) # revealed: int -# error: [unsupported-operator] "Unary operator `+` is unsupported for type `No`" +# error: [unsupported-operator] "Unary operator `+` is not supported for type `No`" reveal_type(+No()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type `No`" +# error: [unsupported-operator] "Unary operator `-` is not supported for type `No`" reveal_type(-No()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type `No`" +# error: [unsupported-operator] "Unary operator `~` is not supported for type `No`" reveal_type(~No()) # revealed: Unknown ``` @@ -52,25 +52,25 @@ class Yes: class Sub(Yes): ... class No: ... -# error: [unsupported-operator] "Unary operator `+` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `+` is not supported for type ``" reveal_type(+Yes) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `-` is not supported for type ``" reveal_type(-Yes) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `~` is not supported for type ``" reveal_type(~Yes) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `+` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `+` is not supported for type ``" reveal_type(+Sub) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `-` is not supported for type ``" reveal_type(-Sub) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `~` is not supported for type ``" reveal_type(~Sub) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `+` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `+` is not supported for type ``" reveal_type(+No) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `-` is not supported for type ``" reveal_type(-No) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `~` is not supported for type ``" reveal_type(~No) # revealed: Unknown ``` @@ -80,11 +80,11 @@ reveal_type(~No) # revealed: Unknown def f(): pass -# error: [unsupported-operator] "Unary operator `+` is unsupported for type `def f() -> Unknown`" +# error: [unsupported-operator] "Unary operator `+` is not supported for type `def f() -> Unknown`" 
reveal_type(+f) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type `def f() -> Unknown`" +# error: [unsupported-operator] "Unary operator `-` is not supported for type `def f() -> Unknown`" reveal_type(-f) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type `def f() -> Unknown`" +# error: [unsupported-operator] "Unary operator `~` is not supported for type `def f() -> Unknown`" reveal_type(~f) # revealed: Unknown ``` @@ -113,25 +113,25 @@ def sub() -> type[Sub]: def no() -> type[No]: return No -# error: [unsupported-operator] "Unary operator `+` is unsupported for type `type[Yes]`" +# error: [unsupported-operator] "Unary operator `+` is not supported for type `type[Yes]`" reveal_type(+yes()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type `type[Yes]`" +# error: [unsupported-operator] "Unary operator `-` is not supported for type `type[Yes]`" reveal_type(-yes()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type `type[Yes]`" +# error: [unsupported-operator] "Unary operator `~` is not supported for type `type[Yes]`" reveal_type(~yes()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `+` is unsupported for type `type[Sub]`" +# error: [unsupported-operator] "Unary operator `+` is not supported for type `type[Sub]`" reveal_type(+sub()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type `type[Sub]`" +# error: [unsupported-operator] "Unary operator `-` is not supported for type `type[Sub]`" reveal_type(-sub()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type `type[Sub]`" +# error: [unsupported-operator] "Unary operator `~` is not supported for type `type[Sub]`" reveal_type(~sub()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `+` is unsupported for type `type[No]`" +# error: [unsupported-operator] "Unary operator `+` is not supported for type `type[No]`" reveal_type(+no()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type `type[No]`" +# error: [unsupported-operator] "Unary operator `-` is not supported for type `type[No]`" reveal_type(-no()) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type `type[No]`" +# error: [unsupported-operator] "Unary operator `~` is not supported for type `type[No]`" reveal_type(~no()) # revealed: Unknown ``` @@ -160,10 +160,10 @@ reveal_type(+Sub) # revealed: bool reveal_type(-Sub) # revealed: str reveal_type(~Sub) # revealed: int -# error: [unsupported-operator] "Unary operator `+` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `+` is not supported for type ``" reveal_type(+No) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `-` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `-` is not supported for type ``" reveal_type(-No) # revealed: Unknown -# error: [unsupported-operator] "Unary operator `~` is unsupported for type ``" +# error: [unsupported-operator] "Unary operator `~` is not supported for type ``" reveal_type(~No) # revealed: Unknown ``` diff --git a/crates/ty_python_semantic/resources/mdtest/unary/invert_add_usub.md b/crates/ty_python_semantic/resources/mdtest/unary/invert_add_usub.md index 6b07cb1d2e..100176e1cb 100644 --- 
a/crates/ty_python_semantic/resources/mdtest/unary/invert_add_usub.md +++ b/crates/ty_python_semantic/resources/mdtest/unary/invert_add_usub.md @@ -27,7 +27,7 @@ reveal_type(~a) # revealed: Literal[True] class NoDunder: ... b = NoDunder() -+b # error: [unsupported-operator] "Unary operator `+` is unsupported for type `NoDunder`" --b # error: [unsupported-operator] "Unary operator `-` is unsupported for type `NoDunder`" -~b # error: [unsupported-operator] "Unary operator `~` is unsupported for type `NoDunder`" ++b # error: [unsupported-operator] "Unary operator `+` is not supported for type `NoDunder`" +-b # error: [unsupported-operator] "Unary operator `-` is not supported for type `NoDunder`" +~b # error: [unsupported-operator] "Unary operator `~` is not supported for type `NoDunder`" ``` diff --git a/crates/ty_python_semantic/resources/mdtest/unary/not.md b/crates/ty_python_semantic/resources/mdtest/unary/not.md index e01796a9f7..e0cb63d2b5 100644 --- a/crates/ty_python_semantic/resources/mdtest/unary/not.md +++ b/crates/ty_python_semantic/resources/mdtest/unary/not.md @@ -187,7 +187,7 @@ class MethodBoolInvalid: def __bool__(self) -> int: return 0 -# error: [unsupported-bool-conversion] "Boolean conversion is unsupported for type `MethodBoolInvalid`" +# error: [unsupported-bool-conversion] "Boolean conversion is not supported for type `MethodBoolInvalid`" # revealed: bool reveal_type(not MethodBoolInvalid()) diff --git a/crates/ty_python_semantic/resources/mdtest/with/async.md b/crates/ty_python_semantic/resources/mdtest/with/async.md index 2a0d7165de..9802c85c4e 100644 --- a/crates/ty_python_semantic/resources/mdtest/with/async.md +++ b/crates/ty_python_semantic/resources/mdtest/with/async.md @@ -212,13 +212,46 @@ class Session: ... async def connect() -> AsyncGenerator[Session]: yield Session() -# TODO: this should be `() -> _AsyncGeneratorContextManager[Session, None]` -reveal_type(connect) # revealed: () -> _AsyncGeneratorContextManager[Unknown, None] +# revealed: () -> _AsyncGeneratorContextManager[Session, None] +reveal_type(connect) async def main(): async with connect() as session: - # TODO: should be `Session` - reveal_type(session) # revealed: Unknown + reveal_type(session) # revealed: Session +``` + +This also works with `AsyncIterator` return types: + +```py +from typing import AsyncIterator + +@asynccontextmanager +async def connect_iterator() -> AsyncIterator[Session]: + yield Session() + +# revealed: () -> _AsyncGeneratorContextManager[Session, None] +reveal_type(connect_iterator) + +async def main_iterator(): + async with connect_iterator() as session: + reveal_type(session) # revealed: Session +``` + +And with `AsyncGeneratorType` return types: + +```py +from types import AsyncGeneratorType + +@asynccontextmanager +async def connect_async_generator() -> AsyncGeneratorType[Session]: + yield Session() + +# revealed: () -> _AsyncGeneratorContextManager[Session, None] +reveal_type(connect_async_generator) + +async def main_async_generator(): + async with connect_async_generator() as session: + reveal_type(session) # revealed: Session ``` ## `asyncio.timeout` diff --git a/crates/ty_python_semantic/src/diagnostic.rs b/crates/ty_python_semantic/src/diagnostic.rs index b8ff2861ea..a7857b102e 100644 --- a/crates/ty_python_semantic/src/diagnostic.rs +++ b/crates/ty_python_semantic/src/diagnostic.rs @@ -58,9 +58,8 @@ pub fn add_inferred_python_version_hint_to_diagnostic( SubDiagnosticSeverity::Info, format_args!("Python {version} was assumed when {action}"), ); - 
sub_diagnostic.annotate(Annotation::primary(span).message(format_args!( - "Python {version} assumed due to this configuration setting" - ))); + sub_diagnostic + .annotate(Annotation::primary(span).message("Python version configuration")); diagnostic.sub(sub_diagnostic); } else { diagnostic.info(format_args!( @@ -76,10 +75,8 @@ pub fn add_inferred_python_version_hint_to_diagnostic( "Python {version} was assumed when {action} because of your virtual environment" ), ); - sub_diagnostic.annotate( - Annotation::primary(span) - .message("Python version inferred from virtual environment metadata file"), - ); + sub_diagnostic + .annotate(Annotation::primary(span).message("Virtual environment metadata")); // TODO: it would also be nice to tell them how we resolved their virtual environment... diagnostic.sub(sub_diagnostic); } else { diff --git a/crates/ty_python_semantic/src/module_resolver/module.rs b/crates/ty_python_semantic/src/module_resolver/module.rs index 118c2aff45..31c3ba6eef 100644 --- a/crates/ty_python_semantic/src/module_resolver/module.rs +++ b/crates/ty_python_semantic/src/module_resolver/module.rs @@ -319,6 +319,7 @@ pub enum KnownModule { Tempfile, Pathlib, Abc, + Contextlib, Dataclasses, Collections, Inspect, @@ -351,6 +352,7 @@ impl KnownModule { Self::Tempfile => "tempfile", Self::Pathlib => "pathlib", Self::Abc => "abc", + Self::Contextlib => "contextlib", Self::Dataclasses => "dataclasses", Self::Collections => "collections", Self::Inspect => "inspect", diff --git a/crates/ty_python_semantic/src/module_resolver/resolver.rs b/crates/ty_python_semantic/src/module_resolver/resolver.rs index 123b4ac31e..60b565a564 100644 --- a/crates/ty_python_semantic/src/module_resolver/resolver.rs +++ b/crates/ty_python_semantic/src/module_resolver/resolver.rs @@ -265,7 +265,14 @@ fn desperately_resolve_module<'db>( let _span = tracing::trace_span!("desperately_resolve_module", %name).entered(); let Some(resolved) = desperately_resolve_name(db, importing_file, name, mode) else { - tracing::debug!("Module `{name}` not found while looking in parent dirs"); + let extra = match module_name.mode(db) { + ModuleResolveMode::StubsAllowed => "neither stub nor real module file", + ModuleResolveMode::StubsNotAllowed => "stubs not allowed", + ModuleResolveMode::StubsNotAllowedSomeShadowingAllowed => { + "stubs not allowed but some shadowing allowed" + } + }; + tracing::debug!("Module `{name}` not found while looking in parent dirs ({extra})"); return None; }; diff --git a/crates/ty_python_semantic/src/place.rs b/crates/ty_python_semantic/src/place.rs index 98f7a2b8e4..a1319a3250 100644 --- a/crates/ty_python_semantic/src/place.rs +++ b/crates/ty_python_semantic/src/place.rs @@ -783,7 +783,7 @@ pub(crate) fn place_by_id<'db>( let declarations = match considered_definitions { ConsideredDefinitions::EndOfScope => use_def.end_of_scope_declarations(place_id), - ConsideredDefinitions::AllReachable => use_def.all_reachable_declarations(place_id), + ConsideredDefinitions::AllReachable => use_def.reachable_declarations(place_id), }; let declared = place_from_declarations_impl(db, declarations, requires_explicit_reexport) @@ -791,7 +791,7 @@ pub(crate) fn place_by_id<'db>( let all_considered_bindings = || match considered_definitions { ConsideredDefinitions::EndOfScope => use_def.end_of_scope_bindings(place_id), - ConsideredDefinitions::AllReachable => use_def.all_reachable_bindings(place_id), + ConsideredDefinitions::AllReachable => use_def.reachable_bindings(place_id), }; // If a symbol is undeclared, but 
qualified with `typing.Final`, we use the right-hand side diff --git a/crates/ty_python_semantic/src/semantic_index.rs b/crates/ty_python_semantic/src/semantic_index.rs index f4ab765f08..a38b0a7ded 100644 --- a/crates/ty_python_semantic/src/semantic_index.rs +++ b/crates/ty_python_semantic/src/semantic_index.rs @@ -113,10 +113,7 @@ pub(crate) fn attribute_assignments<'db, 's>( let place_table = index.place_table(function_scope_id); let member = place_table.member_id_by_instance_attribute_name(name)?; let use_def = &index.use_def_maps[function_scope_id]; - Some(( - use_def.all_reachable_member_bindings(member), - function_scope_id, - )) + Some((use_def.reachable_member_bindings(member), function_scope_id)) }) } @@ -138,7 +135,7 @@ pub(crate) fn attribute_declarations<'db, 's>( let member = place_table.member_id_by_instance_attribute_name(name)?; let use_def = &index.use_def_maps[function_scope_id]; Some(( - use_def.all_reachable_member_declarations(member), + use_def.reachable_member_declarations(member), function_scope_id, )) }) diff --git a/crates/ty_python_semantic/src/semantic_index/use_def.rs b/crates/ty_python_semantic/src/semantic_index/use_def.rs index a7c7520806..dbd26595fd 100644 --- a/crates/ty_python_semantic/src/semantic_index/use_def.rs +++ b/crates/ty_python_semantic/src/semantic_index/use_def.rs @@ -453,17 +453,17 @@ impl<'db> UseDefMap<'db> { ) } - pub(crate) fn all_reachable_bindings( + pub(crate) fn reachable_bindings( &self, place: ScopedPlaceId, ) -> BindingWithConstraintsIterator<'_, 'db> { match place { - ScopedPlaceId::Symbol(symbol) => self.all_reachable_symbol_bindings(symbol), - ScopedPlaceId::Member(member) => self.all_reachable_member_bindings(member), + ScopedPlaceId::Symbol(symbol) => self.reachable_symbol_bindings(symbol), + ScopedPlaceId::Member(member) => self.reachable_member_bindings(member), } } - pub(crate) fn all_reachable_symbol_bindings( + pub(crate) fn reachable_symbol_bindings( &self, symbol: ScopedSymbolId, ) -> BindingWithConstraintsIterator<'_, 'db> { @@ -471,7 +471,7 @@ impl<'db> UseDefMap<'db> { self.bindings_iterator(bindings, BoundnessAnalysis::AssumeBound) } - pub(crate) fn all_reachable_member_bindings( + pub(crate) fn reachable_member_bindings( &self, symbol: ScopedMemberId, ) -> BindingWithConstraintsIterator<'_, 'db> { @@ -547,7 +547,7 @@ impl<'db> UseDefMap<'db> { self.declarations_iterator(declarations, BoundnessAnalysis::BasedOnUnboundVisibility) } - pub(crate) fn all_reachable_symbol_declarations( + pub(crate) fn reachable_symbol_declarations( &self, symbol: ScopedSymbolId, ) -> DeclarationsIterator<'_, 'db> { @@ -555,7 +555,7 @@ impl<'db> UseDefMap<'db> { self.declarations_iterator(declarations, BoundnessAnalysis::AssumeBound) } - pub(crate) fn all_reachable_member_declarations( + pub(crate) fn reachable_member_declarations( &self, member: ScopedMemberId, ) -> DeclarationsIterator<'_, 'db> { @@ -563,13 +563,13 @@ impl<'db> UseDefMap<'db> { self.declarations_iterator(declarations, BoundnessAnalysis::AssumeBound) } - pub(crate) fn all_reachable_declarations( + pub(crate) fn reachable_declarations( &self, place: ScopedPlaceId, ) -> DeclarationsIterator<'_, 'db> { match place { - ScopedPlaceId::Symbol(symbol) => self.all_reachable_symbol_declarations(symbol), - ScopedPlaceId::Member(member) => self.all_reachable_member_declarations(member), + ScopedPlaceId::Symbol(symbol) => self.reachable_symbol_declarations(symbol), + ScopedPlaceId::Member(member) => self.reachable_member_declarations(member), } } @@ -590,6 +590,30 @@ impl<'db> 
UseDefMap<'db> { .map(|symbol_id| (symbol_id, self.end_of_scope_symbol_bindings(symbol_id))) } + pub(crate) fn all_reachable_symbols<'map>( + &'map self, + ) -> impl Iterator< + Item = ( + ScopedSymbolId, + DeclarationsIterator<'map, 'db>, + BindingWithConstraintsIterator<'map, 'db>, + ), + > + 'map { + self.reachable_definitions_by_symbol.iter_enumerated().map( + |(symbol_id, reachable_definitions)| { + let declarations = self.declarations_iterator( + &reachable_definitions.declarations, + BoundnessAnalysis::AssumeBound, + ); + let bindings = self.bindings_iterator( + &reachable_definitions.bindings, + BoundnessAnalysis::AssumeBound, + ); + (symbol_id, declarations, bindings) + }, + ) + } + /// This function is intended to be called only once inside `TypeInferenceBuilder::infer_function_body`. pub(crate) fn can_implicitly_return_none(&self, db: &dyn crate::Db) -> bool { !self diff --git a/crates/ty_python_semantic/src/semantic_model.rs b/crates/ty_python_semantic/src/semantic_model.rs index 4dc8a59bab..fb3895a0e0 100644 --- a/crates/ty_python_semantic/src/semantic_model.rs +++ b/crates/ty_python_semantic/src/semantic_model.rs @@ -11,7 +11,7 @@ use crate::module_resolver::{KnownModule, Module, list_modules, resolve_module}; use crate::semantic_index::definition::Definition; use crate::semantic_index::scope::FileScopeId; use crate::semantic_index::semantic_index; -use crate::types::list_members::{Member, all_members, all_members_of_scope}; +use crate::types::list_members::{Member, all_members, all_reachable_members}; use crate::types::{Type, binding_type, infer_scope_types}; use crate::{Db, resolve_real_shadowable_module}; @@ -76,7 +76,7 @@ impl<'db> SemanticModel<'db> { for (file_scope, _) in index.ancestor_scopes(file_scope) { for memberdef in - all_members_of_scope(self.db, file_scope.to_scope_id(self.db, self.file)) + all_reachable_members(self.db, file_scope.to_scope_id(self.db, self.file)) { members.insert( memberdef.member.name, @@ -196,7 +196,10 @@ impl<'db> SemanticModel<'db> { /// Returns completions for symbols available in a `object.` context. pub fn attribute_completions(&self, node: &ast::ExprAttribute) -> Vec> { - let ty = node.value.inferred_type(self); + let Some(ty) = node.value.inferred_type(self) else { + return Vec::new(); + }; + all_members(self.db, ty) .into_iter() .map(|member| Completion { @@ -221,7 +224,7 @@ impl<'db> SemanticModel<'db> { let mut completions = vec![]; for (file_scope, _) in index.ancestor_scopes(file_scope) { completions.extend( - all_members_of_scope(self.db, file_scope.to_scope_id(self.db, self.file)).map( + all_reachable_members(self.db, file_scope.to_scope_id(self.db, self.file)).map( |memberdef| Completion { name: memberdef.member.name, ty: Some(memberdef.member.ty), @@ -236,11 +239,45 @@ impl<'db> SemanticModel<'db> { completions } - /// Get the scope of the given node (handles string annotations) + /// Returns the scope in which `node` is defined (handles string annotations). 
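+    /// Returns `None` if no scope can be resolved for the given node.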
pub fn scope(&self, node: ast::AnyNodeRef<'_>) -> Option { let index = semantic_index(self.db, self.file); match self.node_in_ast(node) { ast::AnyNodeRef::Identifier(identifier) => index.try_expression_scope_id(identifier), + + // Nodes implementing `HasDefinition` + ast::AnyNodeRef::StmtFunctionDef(function) => Some( + function + .definition(self) + .scope(self.db) + .file_scope_id(self.db), + ), + ast::AnyNodeRef::StmtClassDef(class) => { + Some(class.definition(self).scope(self.db).file_scope_id(self.db)) + } + ast::AnyNodeRef::Parameter(parameter) => Some( + parameter + .definition(self) + .scope(self.db) + .file_scope_id(self.db), + ), + ast::AnyNodeRef::ParameterWithDefault(parameter) => Some( + parameter + .definition(self) + .scope(self.db) + .file_scope_id(self.db), + ), + ast::AnyNodeRef::ExceptHandlerExceptHandler(handler) => Some( + handler + .definition(self) + .scope(self.db) + .file_scope_id(self.db), + ), + ast::AnyNodeRef::TypeParamTypeVar(var) => { + Some(var.definition(self).scope(self.db).file_scope_id(self.db)) + } + + // Fallback node => match node.as_expr_ref() { // If we couldn't identify a specific // expression that we're in, then just @@ -400,7 +437,7 @@ pub trait HasType { /// /// ## Panics /// May panic if `self` is from another file than `model`. - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db>; + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option>; } pub trait HasDefinition { @@ -412,18 +449,16 @@ pub trait HasDefinition { } impl HasType for ast::ExprRef<'_> { - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> { + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { let index = semantic_index(model.db, model.file); // TODO(#1637): semantic tokens is making this crash even with // `try_expr_ref_in_ast` guarding this, for now just use `try_expression_scope_id`. // The problematic input is `x: "float` (with a dangling quote). I imagine the issue // is we're too eagerly setting `is_string_annotation` in inference. - let Some(file_scope) = index.try_expression_scope_id(&model.expr_ref_in_ast(*self)) else { - return Type::unknown(); - }; + let file_scope = index.try_expression_scope_id(&model.expr_ref_in_ast(*self))?; let scope = file_scope.to_scope_id(model.db, model.file); - infer_scope_types(model.db, scope).expression_type(*self) + infer_scope_types(model.db, scope).try_expression_type(*self) } } @@ -431,7 +466,7 @@ macro_rules! impl_expression_has_type { ($ty: ty) => { impl HasType for $ty { #[inline] - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> { + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { let expression_ref = ExprRef::from(self); expression_ref.inferred_type(model) } @@ -474,7 +509,7 @@ impl_expression_has_type!(ast::ExprSlice); impl_expression_has_type!(ast::ExprIpyEscapeCommand); impl HasType for ast::Expr { - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> { + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { match self { Expr::BoolOp(inner) => inner.inferred_type(model), Expr::Named(inner) => inner.inferred_type(model), @@ -525,9 +560,9 @@ macro_rules! 
impl_binding_has_ty_def { impl HasType for $ty { #[inline] - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> { + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { let binding = HasDefinition::definition(self, model); - binding_type(model.db, binding) + Some(binding_type(model.db, binding)) } } }; @@ -541,12 +576,12 @@ impl_binding_has_ty_def!(ast::ExceptHandlerExceptHandler); impl_binding_has_ty_def!(ast::TypeParamTypeVar); impl HasType for ast::Alias { - fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Type<'db> { + fn inferred_type<'db>(&self, model: &SemanticModel<'db>) -> Option> { if &self.name == "*" { - return Type::Never; + return Some(Type::Never); } let index = semantic_index(model.db, model.file); - binding_type(model.db, index.expect_single_definition(self)) + Some(binding_type(model.db, index.expect_single_definition(self))) } } @@ -584,7 +619,7 @@ mod tests { let function = ast.suite()[0].as_function_def_stmt().unwrap(); let model = SemanticModel::new(&db, foo); - let ty = function.inferred_type(&model); + let ty = function.inferred_type(&model).unwrap(); assert!(ty.is_function_literal()); @@ -603,7 +638,7 @@ mod tests { let class = ast.suite()[0].as_class_def_stmt().unwrap(); let model = SemanticModel::new(&db, foo); - let ty = class.inferred_type(&model); + let ty = class.inferred_type(&model).unwrap(); assert!(ty.is_class_literal()); @@ -624,7 +659,7 @@ mod tests { let import = ast.suite()[0].as_import_from_stmt().unwrap(); let alias = &import.names[0]; let model = SemanticModel::new(&db, bar); - let ty = alias.inferred_type(&model); + let ty = alias.inferred_type(&model).unwrap(); assert!(ty.is_class_literal()); diff --git a/crates/ty_python_semantic/src/types.rs b/crates/ty_python_semantic/src/types.rs index c93a6d2cc9..19322fb279 100644 --- a/crates/ty_python_semantic/src/types.rs +++ b/crates/ty_python_semantic/src/types.rs @@ -30,6 +30,7 @@ pub(crate) use self::infer::{ TypeContext, infer_deferred_types, infer_definition_types, infer_expression_type, infer_expression_types, infer_scope_types, static_expression_truthiness, }; +pub use self::signatures::ParameterKind; pub(crate) use self::signatures::{CallableSignature, Signature}; pub(crate) use self::subclass_of::{SubclassOfInner, SubclassOfType}; pub use crate::diagnostic::add_inferred_python_version_hint_to_diagnostic; @@ -877,7 +878,7 @@ impl<'db> Type<'db> { Self::Dynamic(DynamicType::Any) } - pub(crate) const fn unknown() -> Self { + pub const fn unknown() -> Self { Self::Dynamic(DynamicType::Unknown) } @@ -911,8 +912,19 @@ impl<'db> Type<'db> { previous: Self, cycle: &salsa::Cycle, ) -> Self { - UnionType::from_elements_cycle_recovery(db, [self, previous]) - .recursive_type_normalized(db, cycle) + // Avoid unioning two generic aliases of the same class together; this union will never + // simplify and is likely to cause downstream problems. This introduces the theoretical + // possibility of cycle oscillation involving such types (because we are not strictly + // widening the type on each iteration), but so far we have not seen an example of that. 
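+        // Keep the current value when both cycle iterations produced generic aliases
+        // of the same origin class; otherwise fall back to the usual union-based recovery.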
+ match (previous, self) { + (Type::GenericAlias(prev_alias), Type::GenericAlias(curr_alias)) + if prev_alias.origin(db) == curr_alias.origin(db) => + { + self + } + _ => UnionType::from_elements_cycle_recovery(db, [self, previous]), + } + .recursive_type_normalized(db, cycle) } fn is_none(&self, db: &'db dyn Db) -> bool { @@ -1284,7 +1296,7 @@ impl<'db> Type<'db> { } } - pub(crate) const fn is_union(&self) -> bool { + pub(crate) const fn is_union(self) -> bool { matches!(self, Type::Union(_)) } @@ -1300,6 +1312,16 @@ impl<'db> Type<'db> { self.as_union().expect("Expected a Type::Union variant") } + /// Returns whether this is a "real" intersection type. (Negated types are represented by an + /// intersection containing a single negative branch, which this method does _not_ consider a + /// "real" intersection.) + pub(crate) fn is_nontrivial_intersection(self, db: &'db dyn Db) -> bool { + match self { + Type::Intersection(intersection) => !intersection.is_simple_negation(db), + _ => false, + } + } + pub(crate) const fn as_function_literal(self) -> Option> { match self { Type::FunctionLiteral(function_type) => Some(function_type), @@ -1493,6 +1515,7 @@ impl<'db> Type<'db> { /// - Strips the types of default values from parameters in `Callable` types: only whether a parameter /// *has* or *does not have* a default value is relevant to whether two `Callable` types are equivalent. /// - Converts class-based protocols into synthesized protocols + /// - Converts class-based typeddicts into synthesized typeddicts #[must_use] pub(crate) fn normalized(self, db: &'db dyn Db) -> Self { self.normalized_impl(db, &NormalizedVisitor::default()) @@ -1551,10 +1574,9 @@ impl<'db> Type<'db> { // Always normalize single-member enums to their class instance (`Literal[Single.VALUE]` => `Single`) enum_literal.enum_class_instance(db) } - Type::TypedDict(_) => { - // TODO: Normalize TypedDicts - self - } + Type::TypedDict(typed_dict) => visitor.visit(self, || { + Type::TypedDict(typed_dict.normalized_impl(db, visitor)) + }), Type::TypeAlias(alias) => alias.value_type(db).normalized_impl(db, visitor), Type::NewTypeInstance(newtype) => { visitor.visit(self, || { @@ -1796,7 +1818,7 @@ impl<'db> Type<'db> { Type::GenericAlias(alias) => Some(ClassType::Generic(alias).into_callable(db)), Type::NewTypeInstance(newtype) => { - Type::instance(db, newtype.base_class_type(db)).try_upcast_to_callable(db) + newtype.concrete_base_type(db).try_upcast_to_callable(db) } // TODO: This is unsound so in future we can consider an opt-in option to disable it. @@ -2737,6 +2759,34 @@ impl<'db> Type<'db> { ) }) .unwrap_or_else(|| ConstraintSet::from(relation.is_assignability())), + + // Similarly, `` is assignable to `` (a generic-alias type) + // if the default specialization of `C` is assignable to `C[...]`. This scenario occurs + // with final generic types, where `type[C[...]]` is simplified to the generic-alias + // type ``, due to the fact that `C[...]` has no subclasses. + (Type::ClassLiteral(class), Type::GenericAlias(target_alias)) => { + class.default_specialization(db).has_relation_to_impl( + db, + ClassType::Generic(target_alias), + inferable, + relation, + relation_visitor, + disjointness_visitor, + ) + } + + // For generic aliases, we delegate to the underlying class type. 
+ (Type::GenericAlias(self_alias), Type::GenericAlias(target_alias)) => { + ClassType::Generic(self_alias).has_relation_to_impl( + db, + ClassType::Generic(target_alias), + inferable, + relation, + relation_visitor, + disjointness_visitor, + ) + } + (Type::GenericAlias(alias), Type::SubclassOf(target_subclass_ty)) => target_subclass_ty .subclass_of() .into_class(db) @@ -2889,17 +2939,16 @@ impl<'db> Type<'db> { self_newtype.has_relation_to_impl(db, target_newtype) } - ( - Type::NewTypeInstance(self_newtype), - Type::NominalInstance(target_nominal_instance), - ) => self_newtype.base_class_type(db).has_relation_to_impl( - db, - target_nominal_instance.class(db), - inferable, - relation, - relation_visitor, - disjointness_visitor, - ), + (Type::NewTypeInstance(self_newtype), _) => { + self_newtype.concrete_base_type(db).has_relation_to_impl( + db, + target, + inferable, + relation, + relation_visitor, + disjointness_visitor, + ) + } (Type::PropertyInstance(_), _) => { KnownClass::Property.to_instance(db).has_relation_to_impl( @@ -2920,10 +2969,9 @@ impl<'db> Type<'db> { disjointness_visitor, ), - // Other than the special cases enumerated above, nominal-instance types, and - // newtype-instance types are never subtypes of any other variants + // Other than the special cases enumerated above, nominal-instance types are never + // subtypes of any other variants (Type::NominalInstance(_), _) => ConstraintSet::from(false), - (Type::NewTypeInstance(_), _) => ConstraintSet::from(false), } } @@ -3047,6 +3095,10 @@ impl<'db> Type<'db> { left.is_equivalent_to_impl(db, right, inferable, visitor) } + (Type::TypedDict(left), Type::TypedDict(right)) => visitor.visit((self, other), || { + left.is_equivalent_to_impl(db, right, inferable, visitor) + }), + _ => ConstraintSet::from(false), } } @@ -3347,7 +3399,6 @@ impl<'db> Type<'db> { | Type::WrapperDescriptor(..) | Type::ModuleLiteral(..) | Type::ClassLiteral(..) - | Type::GenericAlias(..) | Type::SpecialForm(..) | Type::KnownInstance(..)), right @ (Type::BooleanLiteral(..) @@ -3361,7 +3412,6 @@ impl<'db> Type<'db> { | Type::WrapperDescriptor(..) | Type::ModuleLiteral(..) | Type::ClassLiteral(..) - | Type::GenericAlias(..) | Type::SpecialForm(..) 
| Type::KnownInstance(..)), ) => ConstraintSet::from(left != right), @@ -3544,13 +3594,39 @@ impl<'db> Type<'db> { ConstraintSet::from(true) } + (Type::GenericAlias(left_alias), Type::GenericAlias(right_alias)) => { + ConstraintSet::from(left_alias.origin(db) != right_alias.origin(db)).or(db, || { + left_alias.specialization(db).is_disjoint_from_impl( + db, + right_alias.specialization(db), + inferable, + disjointness_visitor, + relation_visitor, + ) + }) + } + + (Type::ClassLiteral(class_literal), other @ Type::GenericAlias(_)) + | (other @ Type::GenericAlias(_), Type::ClassLiteral(class_literal)) => class_literal + .default_specialization(db) + .into_generic_alias() + .when_none_or(|alias| { + other.is_disjoint_from_impl( + db, + Type::GenericAlias(alias), + inferable, + disjointness_visitor, + relation_visitor, + ) + }), + (Type::SubclassOf(subclass_of_ty), Type::ClassLiteral(class_b)) | (Type::ClassLiteral(class_b), Type::SubclassOf(subclass_of_ty)) => { match subclass_of_ty.subclass_of() { SubclassOfInner::Dynamic(_) => ConstraintSet::from(false), - SubclassOfInner::Class(class_a) => { - class_b.when_subclass_of(db, None, class_a).negate(db) - } + SubclassOfInner::Class(class_a) => ConstraintSet::from( + !class_a.could_exist_in_mro_of(db, ClassType::NonGeneric(class_b)), + ), SubclassOfInner::TypeVar(_) => unreachable!(), } } @@ -3559,9 +3635,9 @@ impl<'db> Type<'db> { | (Type::GenericAlias(alias_b), Type::SubclassOf(subclass_of_ty)) => { match subclass_of_ty.subclass_of() { SubclassOfInner::Dynamic(_) => ConstraintSet::from(false), - SubclassOfInner::Class(class_a) => ClassType::from(alias_b) - .when_subclass_of(db, class_a, inferable) - .negate(db), + SubclassOfInner::Class(class_a) => ConstraintSet::from( + !class_a.could_exist_in_mro_of(db, ClassType::Generic(alias_b)), + ), SubclassOfInner::TypeVar(_) => unreachable!(), } } @@ -3824,7 +3900,7 @@ impl<'db> Type<'db> { left.is_disjoint_from_impl(db, right) } (Type::NewTypeInstance(newtype), other) | (other, Type::NewTypeInstance(newtype)) => { - Type::instance(db, newtype.base_class_type(db)).is_disjoint_from_impl( + newtype.concrete_base_type(db).is_disjoint_from_impl( db, other, inferable, @@ -3855,6 +3931,8 @@ impl<'db> Type<'db> { relation_visitor, ) } + + (Type::GenericAlias(_), _) | (_, Type::GenericAlias(_)) => ConstraintSet::from(true), } } @@ -4053,9 +4131,7 @@ impl<'db> Type<'db> { Type::TypeIs(type_is) => type_is.is_bound(db), Type::TypedDict(_) => false, Type::TypeAlias(alias) => alias.value_type(db).is_singleton(db), - Type::NewTypeInstance(newtype) => { - Type::instance(db, newtype.base_class_type(db)).is_singleton(db) - } + Type::NewTypeInstance(newtype) => newtype.concrete_base_type(db).is_singleton(db), } } @@ -4106,9 +4182,7 @@ impl<'db> Type<'db> { } Type::NominalInstance(instance) => instance.is_single_valued(db), - Type::NewTypeInstance(newtype) => { - Type::instance(db, newtype.base_class_type(db)).is_single_valued(db) - } + Type::NewTypeInstance(newtype) => newtype.concrete_base_type(db).is_single_valued(db), Type::BoundSuper(_) => { // At runtime two super instances never compare equal, even if their arguments are identical. 
@@ -4360,7 +4434,9 @@ impl<'db> Type<'db> { Type::Dynamic(_) | Type::Never => Place::bound(self).into(), Type::NominalInstance(instance) => instance.class(db).instance_member(db, name), - Type::NewTypeInstance(newtype) => newtype.base_class_type(db).instance_member(db, name), + Type::NewTypeInstance(newtype) => { + newtype.concrete_base_type(db).instance_member(db, name) + } Type::ProtocolInstance(protocol) => protocol.instance_member(db, name), @@ -5476,8 +5552,11 @@ impl<'db> Type<'db> { .value_type(db) .try_bool_impl(db, allow_short_circuit, visitor) })?, - Type::NewTypeInstance(newtype) => Type::instance(db, newtype.base_class_type(db)) - .try_bool_impl(db, allow_short_circuit, visitor)?, + Type::NewTypeInstance(newtype) => { + newtype + .concrete_base_type(db) + .try_bool_impl(db, allow_short_circuit, visitor)? + } }; Ok(truthiness) @@ -6445,7 +6524,7 @@ impl<'db> Type<'db> { match ty { Type::NominalInstance(nominal) => nominal.tuple_spec(db), - Type::NewTypeInstance(newtype) => non_async_special_case(db, Type::instance(db, newtype.base_class_type(db))), + Type::NewTypeInstance(newtype) => non_async_special_case(db, newtype.concrete_base_type(db)), Type::GenericAlias(alias) if alias.origin(db).is_tuple(db) => { Some(Cow::Owned(TupleSpec::homogeneous(todo_type!( "*tuple[] annotations" @@ -7352,7 +7431,9 @@ impl<'db> Type<'db> { }); }; - Ok(typing_self(db, scope_id, typevar_binding_context, class).unwrap_or(*self)) + Ok(typing_self(db, scope_id, typevar_binding_context, class) + .map(Type::TypeVar) + .unwrap_or(*self)) } // We ensure that `typing.TypeAlias` used in the expected position (annotating an // annotated assignment statement) doesn't reach here. Using it in any other type @@ -7548,9 +7629,15 @@ impl<'db> Type<'db> { Type::ProtocolInstance(protocol) => protocol.to_meta_type(db), // `TypedDict` instances are instances of `dict` at runtime, but its important that we // understand a more specific meta type in order to correctly handle `__getitem__`. - Type::TypedDict(typed_dict) => SubclassOfType::from(db, typed_dict.defining_class()), + Type::TypedDict(typed_dict) => match typed_dict { + TypedDictType::Class(class) => SubclassOfType::from(db, class), + TypedDictType::Synthesized(_) => SubclassOfType::from( + db, + todo_type!("TypedDict synthesized meta-type").expect_dynamic(), + ), + }, Type::TypeAlias(alias) => alias.value_type(db).to_meta_type(db), - Type::NewTypeInstance(newtype) => Type::from(newtype.base_class_type(db)), + Type::NewTypeInstance(newtype) => newtype.concrete_base_type(db).to_meta_type(db), } } @@ -8257,7 +8344,7 @@ impl<'db> Type<'db> { }, Self::TypedDict(typed_dict) => { - Some(TypeDefinition::Class(typed_dict.defining_class().definition(db))) + typed_dict.definition(db).map(TypeDefinition::Class) } Self::Union(_) | Self::Intersection(_) => None, @@ -8756,9 +8843,7 @@ fn walk_known_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( visitor.visit_callable_type(db, callable); } KnownInstanceType::NewType(newtype) => { - if let ClassType::Generic(generic_alias) = newtype.base_class_type(db) { - visitor.visit_generic_alias_type(db, generic_alias); - } + visitor.visit_type(db, newtype.concrete_base_type(db)); } } } @@ -11530,7 +11615,7 @@ impl<'db> BoolError<'db> { not_boolable_type, .. 
} => { let mut diag = builder.into_diagnostic(format_args!( - "Boolean conversion is unsupported for type `{}`", + "Boolean conversion is not supported for type `{}`", not_boolable_type.display(context.db()) )); let mut sub = SubDiagnostic::new( @@ -11555,7 +11640,7 @@ impl<'db> BoolError<'db> { return_type, } => { let mut diag = builder.into_diagnostic(format_args!( - "Boolean conversion is unsupported for type `{not_boolable}`", + "Boolean conversion is not supported for type `{not_boolable}`", not_boolable = not_boolable_type.display(context.db()), )); let mut sub = SubDiagnostic::new( @@ -11581,7 +11666,7 @@ impl<'db> BoolError<'db> { } Self::NotCallable { not_boolable_type } => { let mut diag = builder.into_diagnostic(format_args!( - "Boolean conversion is unsupported for type `{}`", + "Boolean conversion is not supported for type `{}`", not_boolable_type.display(context.db()) )); let sub = SubDiagnostic::new( @@ -11604,7 +11689,7 @@ impl<'db> BoolError<'db> { .unwrap(); builder.into_diagnostic(format_args!( - "Boolean conversion is unsupported for union `{}` \ + "Boolean conversion is not supported for union `{}` \ because `{}` doesn't implement `__bool__` correctly", Type::Union(*union).display(context.db()), first_error.not_boolable_type().display(context.db()), @@ -11613,7 +11698,7 @@ impl<'db> BoolError<'db> { Self::Other { not_boolable_type } => { builder.into_diagnostic(format_args!( - "Boolean conversion is unsupported for type `{}`; \ + "Boolean conversion is not supported for type `{}`; \ it incorrectly implements `__bool__`", not_boolable_type.display(context.db()) )); @@ -13863,6 +13948,39 @@ impl<'db> UnionType<'db> { ConstraintSet::from(sorted_self == other.normalized(db)) } + + /// Identify some specific unions of known classes, currently the ones that `float` and + /// `complex` expand into in type position. 
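+    /// Returns `None` for unions that do not match one of these known shapes.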
+ pub(crate) fn known(self, db: &'db dyn Db) -> Option { + let mut has_int = false; + let mut has_float = false; + let mut has_complex = false; + for element in self.elements(db) { + if let Type::NominalInstance(nominal) = element + && let Some(known) = nominal.known_class(db) + { + match known { + KnownClass::Int => has_int = true, + KnownClass::Float => has_float = true, + KnownClass::Complex => has_complex = true, + _ => return None, + } + } else { + return None; + } + } + match (has_int, has_float, has_complex) { + (true, true, false) => Some(KnownUnion::Float), + (true, true, true) => Some(KnownUnion::Complex), + _ => None, + } + } +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(crate) enum KnownUnion { + Float, // `int | float` + Complex, // `int | float | complex` } #[salsa::interned(debug, heap_size=IntersectionType::heap_size)] @@ -14141,10 +14259,18 @@ impl<'db> IntersectionType<'db> { self.positive(db).iter().copied() } + pub fn iter_negative(self, db: &'db dyn Db) -> impl Iterator> { + self.negative(db).iter().copied() + } + pub(crate) fn has_one_element(self, db: &'db dyn Db) -> bool { (self.positive(db).len() + self.negative(db).len()) == 1 } + pub(crate) fn is_simple_negation(self, db: &'db dyn Db) -> bool { + self.positive(db).is_empty() && self.negative(db).len() == 1 + } + fn heap_size((positive, negative): &(FxOrderSet>, FxOrderSet>)) -> usize { ruff_memory_usage::order_set_heap_size(positive) + ruff_memory_usage::order_set_heap_size(negative) diff --git a/crates/ty_python_semantic/src/types/bound_super.rs b/crates/ty_python_semantic/src/types/bound_super.rs index 442ae0d0b9..cea3bc3e54 100644 --- a/crates/ty_python_semantic/src/types/bound_super.rs +++ b/crates/ty_python_semantic/src/types/bound_super.rs @@ -429,7 +429,7 @@ impl<'db> BoundSuperType<'db> { ); } Type::NewTypeInstance(newtype) => { - return delegate_to(Type::instance(db, newtype.base_class_type(db))); + return delegate_to(newtype.concrete_base_type(db)); } Type::Callable(callable) if callable.is_function_like(db) => { return delegate_to(KnownClass::FunctionType.to_instance(db)); diff --git a/crates/ty_python_semantic/src/types/call/arguments.rs b/crates/ty_python_semantic/src/types/call/arguments.rs index cc8f377271..ae377785b8 100644 --- a/crates/ty_python_semantic/src/types/call/arguments.rs +++ b/crates/ty_python_semantic/src/types/call/arguments.rs @@ -1,4 +1,5 @@ use std::borrow::Cow; +use std::fmt::Display; use itertools::{Either, Itertools}; use ruff_python_ast as ast; @@ -228,7 +229,7 @@ impl<'a, 'db> CallArguments<'a, 'db> { if expansion_size > MAX_EXPANSIONS { tracing::debug!( "Skipping argument type expansion as it would exceed the \ - maximum number of expansions ({MAX_EXPANSIONS})" + maximum number of expansions ({MAX_EXPANSIONS})" ); return Some(State::LimitReached(index)); } @@ -263,6 +264,52 @@ impl<'a, 'db> CallArguments<'a, 'db> { State::Expanding(ExpandingState::Expanded(expanded)) => Expansion::Expanded(expanded), }) } + + pub(super) fn display(&self, db: &'db dyn Db) -> impl Display { + struct DisplayCallArguments<'a, 'db> { + call_arguments: &'a CallArguments<'a, 'db>, + db: &'db dyn Db, + } + + impl std::fmt::Display for DisplayCallArguments<'_, '_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("(")?; + for (index, (argument, ty)) in self.call_arguments.iter().enumerate() { + if index > 0 { + write!(f, ", ")?; + } + match argument { + Argument::Synthetic => write!( + f, + "self: {}", + ty.unwrap_or_else(Type::unknown).display(self.db) 
+ )?, + Argument::Positional => { + write!(f, "{}", ty.unwrap_or_else(Type::unknown).display(self.db))?; + } + Argument::Variadic => { + write!(f, "*{}", ty.unwrap_or_else(Type::unknown).display(self.db))?; + } + Argument::Keyword(name) => write!( + f, + "{}={}", + name, + ty.unwrap_or_else(Type::unknown).display(self.db) + )?, + Argument::Keywords => { + write!(f, "**{}", ty.unwrap_or_else(Type::unknown).display(self.db))?; + } + } + } + f.write_str(")") + } + } + + DisplayCallArguments { + call_arguments: self, + db, + } + } } /// Represents a single element of the expansion process for argument types for [`expand`]. diff --git a/crates/ty_python_semantic/src/types/call/bind.rs b/crates/ty_python_semantic/src/types/call/bind.rs index 10e9ddfca3..e81d26d8b8 100644 --- a/crates/ty_python_semantic/src/types/call/bind.rs +++ b/crates/ty_python_semantic/src/types/call/bind.rs @@ -2,6 +2,13 @@ //! arguments against the parameters of the callable. Like with //! [signatures][crate::types::signatures], we have to handle the fact that the callable might be a //! union of types, each of which might contain multiple overloads. +//! +//! ### Tracing +//! +//! This module is instrumented with debug-level `tracing` messages. You can set the `TY_LOG` +//! environment variable to see this output when testing locally. `tracing` log messages typically +//! have a `target` field, which is the name of the module the message appears in — in this case, +//! `ty_python_semantic::types::call::bind`. use std::borrow::Cow; use std::collections::HashSet; @@ -17,7 +24,8 @@ use super::{Argument, CallArguments, CallError, CallErrorKind, InferContext, Sig use crate::Program; use crate::db::Db; use crate::dunder_all::dunder_all_names; -use crate::place::{Definedness, Place}; +use crate::module_resolver::KnownModule; +use crate::place::{Definedness, Place, known_module_symbol}; use crate::types::call::arguments::{Expansion, is_expandable_type}; use crate::types::constraints::ConstraintSet; use crate::types::diagnostic::{ @@ -36,13 +44,14 @@ use crate::types::generics::{ use crate::types::signatures::{Parameter, ParameterForm, ParameterKind, Parameters}; use crate::types::tuple::{TupleLength, TupleSpec, TupleType}; use crate::types::{ - BoundMethodType, BoundTypeVarIdentity, BoundTypeVarInstance, CallableSignature, + BoundMethodType, BoundTypeVarIdentity, BoundTypeVarInstance, CallableSignature, CallableType, CallableTypeKind, ClassLiteral, DATACLASS_FLAGS, DataclassFlags, DataclassParams, FieldInstance, KnownBoundMethodType, KnownClass, KnownInstanceType, MemberLookupPolicy, NominalInstanceType, PropertyInstanceType, SpecialFormType, TrackedConstraintSet, TypeAliasType, TypeContext, TypeVarVariance, UnionBuilder, UnionType, WrapperDescriptorKind, enums, list_members, todo_type, }; +use crate::unpack::EvaluationMode; use ruff_db::diagnostic::{Annotation, Diagnostic, SubDiagnostic, SubDiagnosticSeverity}; use ruff_python_ast::{self as ast, ArgOrKeyword, PythonVersion}; @@ -934,6 +943,18 @@ impl<'db> Bindings<'db> { } } + // TODO: Remove this special handling once we have full support for + // generic protocols in the solver. 
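The special case introduced below targets `contextlib.asynccontextmanager`. A hedged sketch of the user-level inference it is meant to recover while generic-protocol solving is incomplete:

```python
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager

@asynccontextmanager
async def connect(url: str) -> AsyncIterator[str]:
    yield url  # the yielded type becomes the context manager's value type

async def main() -> None:
    async with connect("db://local") as conn:
        # `connect(...)` is treated as returning `_AsyncGeneratorContextManager[str]`,
        # so `conn` should be inferred as `str`.
        print(conn)
```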
+ Some(KnownFunction::AsyncContextManager) => { + if let [Some(callable)] = overload.parameter_types() { + if let Some(return_ty) = + asynccontextmanager_return_type(db, *callable) + { + overload.set_return_type(return_ty); + } + } + } + Some(KnownFunction::IsProtocol) => { if let [Some(ty)] = overload.parameter_types() { // We evaluate this to `Literal[True]` only if the runtime function `typing.is_protocol` @@ -1582,6 +1603,19 @@ impl<'db> CallableBinding<'db> { // before checking. let argument_types = argument_types.with_self(self.bound_type); + let _span = tracing::trace_span!( + "CallableBinding::check_types", + arguments = %argument_types.display(db), + signature = %self.signature_type.display(db), + ) + .entered(); + + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 1", + ); + // Step 1: Check the result of the arity check which is done by `match_parameters` let matching_overload_indexes = match self.matching_overload_index() { MatchingOverloadIndex::None => { @@ -1612,6 +1646,12 @@ impl<'db> CallableBinding<'db> { overload.check_types(db, argument_types.as_ref(), call_expression_tcx); } + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 2", + ); + match self.matching_overload_index() { MatchingOverloadIndex::None => { // If all overloads result in errors, proceed to step 3. @@ -1624,6 +1664,12 @@ impl<'db> CallableBinding<'db> { // If two or more candidate overloads remain, proceed to step 4. self.filter_overloads_containing_variadic(&indexes); + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 4", + ); + match self.matching_overload_index() { MatchingOverloadIndex::None => { // This shouldn't be possible because step 4 can only filter out overloads @@ -1642,6 +1688,12 @@ impl<'db> CallableBinding<'db> { argument_types.as_ref(), &indexes, ); + + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 5", + ); } } @@ -1744,12 +1796,24 @@ impl<'db> CallableBinding<'db> { overload.match_parameters(db, expanded_arguments, &mut argument_forms); } + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 1", + ); + merged_argument_forms.merge(&argument_forms); for (_, overload) in self.matching_overloads_mut() { overload.check_types(db, expanded_arguments, call_expression_tcx); } + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 2", + ); + let return_type = match self.matching_overload_index() { MatchingOverloadIndex::None => None, MatchingOverloadIndex::Single(index) => { @@ -1758,6 +1822,12 @@ impl<'db> CallableBinding<'db> { MatchingOverloadIndex::Multiple(matching_overload_indexes) => { self.filter_overloads_containing_variadic(&matching_overload_indexes); + tracing::trace!( + target: "ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 4", + ); + match self.matching_overload_index() { MatchingOverloadIndex::None => { tracing::debug!( @@ -1772,6 +1842,13 @@ impl<'db> CallableBinding<'db> { expanded_arguments, &indexes, ); + + tracing::trace!( + target: 
"ty_python_semantic::types::call::bind", + matching_overload_index = ?self.matching_overload_index(), + "after step 5", + ); + Some(self.return_type()) } } @@ -1926,12 +2003,37 @@ impl<'db> CallableBinding<'db> { .take(max_parameter_count) .collect::>(); + // The following loop is trying to construct a tuple of argument types that correspond to + // the participating parameter indexes. Considering the following example: + // + // ```python + // @overload + // def f(x: Literal[1], y: Literal[2]) -> tuple[int, int]: ... + // @overload + // def f(*args: Any) -> tuple[Any, ...]: ... + // + // f(1, 2) + // ``` + // + // Here, only the first parameter participates in the filtering process because only one + // overload has the second parameter. So, while going through the argument types, the + // second argument needs to be skipped but for the second overload both arguments map to + // the first parameter and that parameter is considered for the filtering process. This + // flag is to handle that special case of many-to-one mapping from arguments to parameters. + let mut variadic_parameter_handled = false; + for (argument_index, argument_type) in arguments.iter_types().enumerate() { + if variadic_parameter_handled { + continue; + } for overload_index in matching_overload_indexes { let overload = &self.overloads[*overload_index]; for (parameter_index, variadic_argument_type) in overload.argument_matches[argument_index].iter() { + if overload.signature.parameters()[parameter_index].is_variadic() { + variadic_parameter_handled = true; + } if !participating_parameter_indexes.contains(¶meter_index) { continue; } @@ -4533,3 +4635,55 @@ impl fmt::Display for FunctionKind { // An example of a routine with many many overloads: // https://github.com/henribru/google-api-python-client-stubs/blob/master/googleapiclient-stubs/discovery.pyi const MAXIMUM_OVERLOADS: usize = 50; + +/// Infer the return type for a call to `asynccontextmanager`. +/// +/// The `@asynccontextmanager` decorator transforms a function that returns (a subtype of) `AsyncIterator[T]` +/// into a function that returns `_AsyncGeneratorContextManager[T]`. +/// +/// TODO: This function only handles the most basic case. It should be removed once we have +/// full support for generic protocols in the solver. +fn asynccontextmanager_return_type<'db>(db: &'db dyn Db, func_ty: Type<'db>) -> Option> { + let bindings = func_ty.bindings(db); + let binding = bindings + .single_element()? + .overloads + .iter() + .exactly_one() + .ok()?; + let signature = &binding.signature; + let return_ty = signature.return_ty?; + + let yield_ty = return_ty + .try_iterate_with_mode(db, EvaluationMode::Async) + .ok()? + .homogeneous_element_type(db); + + if yield_ty.is_divergent() + || signature + .parameters() + .iter() + .any(|param| param.annotated_type().is_some_and(|ty| ty.is_divergent())) + { + return Some(yield_ty); + } + + let context_manager = + known_module_symbol(db, KnownModule::Contextlib, "_AsyncGeneratorContextManager") + .place + .ignore_possibly_undefined()? 
+ .as_class_literal()?; + + let context_manager = context_manager.apply_specialization(db, |generic_context| { + generic_context.specialize_partial(db, [Some(yield_ty), None]) + }); + + let new_return_ty = Type::from(context_manager).to_instance(db)?; + let new_signature = Signature::new(signature.parameters().clone(), Some(new_return_ty)); + + Some(Type::Callable(CallableType::new( + db, + CallableSignature::single(new_signature), + CallableTypeKind::FunctionLike, + ))) +} diff --git a/crates/ty_python_semantic/src/types/class.rs b/crates/ty_python_semantic/src/types/class.rs index 514b312313..32962ea128 100644 --- a/crates/ty_python_semantic/src/types/class.rs +++ b/crates/ty_python_semantic/src/types/class.rs @@ -21,7 +21,9 @@ use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; use crate::types::context::InferContext; use crate::types::diagnostic::{INVALID_TYPE_ALIAS_TYPE, SUPER_CALL_IN_NAMED_TUPLE_METHOD}; use crate::types::enums::enum_metadata; -use crate::types::function::{DataclassTransformerParams, KnownFunction}; +use crate::types::function::{ + DataclassTransformerFlags, DataclassTransformerParams, KnownFunction, +}; use crate::types::generics::{ GenericContext, InferableTypeVars, Specialization, walk_generic_context, walk_specialization, }; @@ -716,6 +718,31 @@ impl<'db> ClassType<'db> { .find_map(|base| base.as_disjoint_base(db)) } + /// Return `true` if this class could exist in the MRO of `other`. + pub(super) fn could_exist_in_mro_of(self, db: &'db dyn Db, other: Self) -> bool { + other + .iter_mro(db) + .filter_map(ClassBase::into_class) + .any(|class| match (self, class) { + (ClassType::NonGeneric(this_class), ClassType::NonGeneric(other_class)) => { + this_class == other_class + } + (ClassType::Generic(this_alias), ClassType::Generic(other_alias)) => { + this_alias.origin(db) == other_alias.origin(db) + && !this_alias + .specialization(db) + .is_disjoint_from( + db, + other_alias.specialization(db), + InferableTypeVars::None, + ) + .is_always_satisfied(db) + } + (ClassType::NonGeneric(_), ClassType::Generic(_)) + | (ClassType::Generic(_), ClassType::NonGeneric(_)) => false, + }) + } + /// Return `true` if this class could coexist in an MRO with `other`. /// /// For two given classes `A` and `B`, it is often possible to say for sure @@ -727,16 +754,11 @@ impl<'db> ClassType<'db> { } if self.is_final(db) { - return self - .iter_mro(db) - .filter_map(ClassBase::into_class) - .any(|class| class.class_literal(db).0 == other.class_literal(db).0); + return other.could_exist_in_mro_of(db, self); } + if other.is_final(db) { - return other - .iter_mro(db) - .filter_map(ClassBase::into_class) - .any(|class| class.class_literal(db).0 == self.class_literal(db).0); + return self.could_exist_in_mro_of(db, other); } // Two disjoint bases can only coexist in an MRO if one is a subclass of the other. @@ -1889,15 +1911,6 @@ impl<'db> ClassLiteral<'db> { .contains(&ClassBase::Class(other)) } - pub(super) fn when_subclass_of( - self, - db: &'db dyn Db, - specialization: Option>, - other: ClassType<'db>, - ) -> ConstraintSet<'db> { - ConstraintSet::from(self.is_subclass_of(db, specialization, other)) - } - /// Return `true` if this class constitutes a typed dict specification (inherits from /// `typing.TypedDict`, either directly or indirectly). #[salsa::tracked(cycle_initial=is_typed_dict_cycle_initial, @@ -2347,6 +2360,8 @@ impl<'db> ClassLiteral<'db> { }; // Dataclass transformer flags can be overwritten using class arguments. 
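The transformer flags handled in this section include a keyword-only default. A hypothetical sketch of how `dataclass_transform(kw_only_default=True)` surfaces in user code; the `model` decorator is an illustrative stand-in, not an API from this codebase:

```python
import dataclasses
from typing import dataclass_transform

@dataclass_transform(kw_only_default=True)
def model[T](cls: type[T]) -> type[T]:
    # Toy transformer: apply the real dataclass machinery with keyword-only fields.
    return dataclasses.dataclass(kw_only=True)(cls)

@model
class Config:
    host: str
    port: int = 0

Config(host="localhost")   # ok: fields default to keyword-only under this transformer
# Config("localhost")      # error: `host` is keyword-only
```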
+ // TODO this should be done more generally, not just in `own_synthesized_member`, so that + // `dataclass_params` always reflects the transformer params. if let Some(transformer_params) = transformer_params.as_mut() { if let Some(class_def) = self.definition(db).kind(db).as_class() { let module = parsed_module(db, self.file(db)).load(db); @@ -2376,6 +2391,8 @@ impl<'db> ClassLiteral<'db> { let has_dataclass_param = |param| { dataclass_params.is_some_and(|params| params.flags(db).contains(param)) + // TODO if we were correctly initializing `dataclass_params` from the + // transformer params, this fallback shouldn't be needed here. || transformer_params.is_some_and(|params| params.flags(db).contains(param)) }; @@ -2455,8 +2472,7 @@ impl<'db> ClassLiteral<'db> { } } - let is_kw_only = name == "__replace__" - || kw_only.unwrap_or(has_dataclass_param(DataclassFlags::KW_ONLY)); + let is_kw_only = name == "__replace__" || kw_only.unwrap_or(false); // Use the alias name if provided, otherwise use the field name let parameter_name = @@ -3175,6 +3191,27 @@ impl<'db> ClassLiteral<'db> { } } + // Resolve the kw_only to the class-level default. This ensures that when fields + // are inherited by child classes, they use their defining class's kw_only default. + if let FieldKind::Dataclass { + kw_only: ref mut kw @ None, + .. + } = field.kind + { + let class_kw_only_default = self + .dataclass_params(db) + .is_some_and(|params| params.flags(db).contains(DataclassFlags::KW_ONLY)) + // TODO this next part should not be necessary, if we were properly + // initializing `dataclass_params` from the dataclass-transform params, for + // metaclass and base-class-based dataclass-transformers. + || matches!( + field_policy, + CodeGeneratorKind::DataclassLike(Some(transformer_params)) + if transformer_params.flags(db).contains(DataclassTransformerFlags::KW_ONLY_DEFAULT) + ); + *kw = Some(class_kw_only_default); + } + attributes.insert(symbol.name().clone(), field); } } @@ -3420,7 +3457,7 @@ impl<'db> ClassLiteral<'db> { .symbol_id(&method_def.node(&module).name) .unwrap(); class_map - .all_reachable_symbol_bindings(method_place) + .reachable_symbol_bindings(method_place) .find_map(|bind| { (bind.binding.is_defined_and(|def| def == method)) .then(|| class_map.binding_reachability(db, &bind)) @@ -4168,6 +4205,8 @@ pub enum KnownClass { SpecialForm, TypeVar, ParamSpec, + // typing_extensions.ParamSpec + ExtensionsParamSpec, // must be distinct from typing.ParamSpec, backports new features ParamSpecArgs, ParamSpecKwargs, ProtocolMeta, @@ -4239,6 +4278,7 @@ impl KnownClass { | Self::TypeVar | Self::ExtensionsTypeVar | Self::ParamSpec + | Self::ExtensionsParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::TypeVarTuple @@ -4371,6 +4411,7 @@ impl KnownClass { | KnownClass::TypeVar | KnownClass::ExtensionsTypeVar | KnownClass::ParamSpec + | KnownClass::ExtensionsParamSpec | KnownClass::ParamSpecArgs | KnownClass::ParamSpecKwargs | KnownClass::TypeVarTuple @@ -4457,6 +4498,7 @@ impl KnownClass { | KnownClass::TypeVar | KnownClass::ExtensionsTypeVar | KnownClass::ParamSpec + | KnownClass::ExtensionsParamSpec | KnownClass::ParamSpecArgs | KnownClass::ParamSpecKwargs | KnownClass::TypeVarTuple @@ -4543,6 +4585,7 @@ impl KnownClass { | KnownClass::TypeVar | KnownClass::ExtensionsTypeVar | KnownClass::ParamSpec + | KnownClass::ExtensionsParamSpec | KnownClass::ParamSpecArgs | KnownClass::ParamSpecKwargs | KnownClass::TypeVarTuple @@ -4634,6 +4677,7 @@ impl KnownClass { | Self::TypeVar | Self::ExtensionsTypeVar | 
Self::ParamSpec + | Self::ExtensionsParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::TypeVarTuple @@ -4733,6 +4777,7 @@ impl KnownClass { | KnownClass::TypeVar | KnownClass::ExtensionsTypeVar | KnownClass::ParamSpec + | KnownClass::ExtensionsParamSpec | KnownClass::ParamSpecArgs | KnownClass::ParamSpecKwargs | KnownClass::ProtocolMeta @@ -4806,6 +4851,7 @@ impl KnownClass { Self::TypeVar => "TypeVar", Self::ExtensionsTypeVar => "TypeVar", Self::ParamSpec => "ParamSpec", + Self::ExtensionsParamSpec => "ParamSpec", Self::ParamSpecArgs => "ParamSpecArgs", Self::ParamSpecKwargs => "ParamSpecKwargs", Self::TypeVarTuple => "TypeVarTuple", @@ -5139,11 +5185,18 @@ impl KnownClass { Self::TypeAliasType | Self::ExtensionsTypeVar | Self::TypeVarTuple - | Self::ParamSpec + | Self::ExtensionsParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::Deprecated | Self::NewType => KnownModule::TypingExtensions, + Self::ParamSpec => { + if Program::get(db).python_version(db) >= PythonVersion::PY310 { + KnownModule::Typing + } else { + KnownModule::TypingExtensions + } + } Self::NoDefaultType => { let python_version = Program::get(db).python_version(db); @@ -5247,6 +5300,7 @@ impl KnownClass { | Self::TypeVar | Self::ExtensionsTypeVar | Self::ParamSpec + | Self::ExtensionsParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::TypeVarTuple @@ -5337,6 +5391,7 @@ impl KnownClass { | Self::TypeVar | Self::ExtensionsTypeVar | Self::ParamSpec + | Self::ExtensionsParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::TypeVarTuple @@ -5420,7 +5475,7 @@ impl KnownClass { "Iterable" => &[Self::Iterable], "Iterator" => &[Self::Iterator], "Mapping" => &[Self::Mapping], - "ParamSpec" => &[Self::ParamSpec], + "ParamSpec" => &[Self::ParamSpec, Self::ExtensionsParamSpec], "ParamSpecArgs" => &[Self::ParamSpecArgs], "ParamSpecKwargs" => &[Self::ParamSpecKwargs], "TypeVarTuple" => &[Self::TypeVarTuple], @@ -5542,6 +5597,8 @@ impl KnownClass { | Self::TypedDictFallback | Self::TypeVar | Self::ExtensionsTypeVar + | Self::ParamSpec + | Self::ExtensionsParamSpec | Self::NamedTupleLike | Self::ConstraintSet | Self::GenericContext @@ -5555,7 +5612,6 @@ impl KnownClass { | Self::TypeAliasType | Self::NoDefaultType | Self::SupportsIndex - | Self::ParamSpec | Self::ParamSpecArgs | Self::ParamSpecKwargs | Self::TypeVarTuple @@ -5970,6 +6026,7 @@ mod tests { KnownClass::Member | KnownClass::Nonmember | KnownClass::StrEnum => { PythonVersion::PY311 } + KnownClass::ParamSpec => PythonVersion::PY310, _ => PythonVersion::PY37, }; (class, version_added) diff --git a/crates/ty_python_semantic/src/types/class_base.rs b/crates/ty_python_semantic/src/types/class_base.rs index c5ce573d3b..26b490fa3b 100644 --- a/crates/ty_python_semantic/src/types/class_base.rs +++ b/crates/ty_python_semantic/src/types/class_base.rs @@ -152,11 +152,9 @@ impl<'db> ClassBase<'db> { Type::TypeAlias(alias) => Self::try_from_type(db, alias.value_type(db), subclass), - Type::NewTypeInstance(newtype) => ClassBase::try_from_type( - db, - Type::instance(db, newtype.base_class_type(db)), - subclass, - ), + Type::NewTypeInstance(newtype) => { + ClassBase::try_from_type(db, newtype.concrete_base_type(db), subclass) + } Type::PropertyInstance(_) | Type::BooleanLiteral(_) diff --git a/crates/ty_python_semantic/src/types/constraints.rs b/crates/ty_python_semantic/src/types/constraints.rs index 4ec970403b..289e134324 100644 --- a/crates/ty_python_semantic/src/types/constraints.rs +++ b/crates/ty_python_semantic/src/types/constraints.rs @@ 
-435,6 +435,11 @@ impl<'db> ConstraintSet<'db> { pub(crate) fn display(self, db: &'db dyn Db) -> impl Display { self.node.simplify_for_display(db).display(db) } + + #[expect(dead_code)] // Keep this around for debugging purposes + pub(crate) fn display_graph(self, db: &'db dyn Db, prefix: &dyn Display) -> impl Display { + self.node.display_graph(db, prefix) + } } impl From for ConstraintSet<'_> { @@ -458,6 +463,19 @@ impl<'db> BoundTypeVarInstance<'db> { } } +#[derive(Clone, Copy, Debug)] +enum IntersectionResult<'db> { + Simplified(ConstrainedTypeVar<'db>), + CannotSimplify, + Disjoint, +} + +impl IntersectionResult<'_> { + fn is_disjoint(self) -> bool { + matches!(self, IntersectionResult::Disjoint) + } +} + /// An individual constraint in a constraint set. This restricts a single typevar to be within a /// lower and upper bound. #[salsa::interned(debug, heap_size=ruff_memory_usage::heap_size)] @@ -484,6 +502,46 @@ impl<'db> ConstrainedTypeVar<'db> { debug_assert_eq!(lower, lower.bottom_materialization(db)); debug_assert_eq!(upper, upper.top_materialization(db)); + // It's not useful for an upper bound to be an intersection type, or for a lower bound to + // be a union type. Because the following equivalences hold, we can break these bounds + // apart and create an equivalent BDD with more nodes but simpler constraints. (Fewer, + // simpler constraints mean that our sequent maps won't grow pathologically large.) + // + // T ≤ (α & β) ⇔ (T ≤ α) ∧ (T ≤ β) + // T ≤ (¬α & ¬β) ⇔ (T ≤ ¬α) ∧ (T ≤ ¬β) + // (α | β) ≤ T ⇔ (α ≤ T) ∧ (β ≤ T) + if let Type::Union(lower_union) = lower { + let mut result = Node::AlwaysTrue; + for lower_element in lower_union.elements(db) { + result = result.and( + db, + ConstrainedTypeVar::new_node(db, typevar, *lower_element, upper), + ); + } + return result; + } + // A negated type ¬α is represented as an intersection with no positive elements, and a + // single negative element. We _don't_ want to treat that an "intersection" for the + // purposes of simplifying upper bounds. + if let Type::Intersection(upper_intersection) = upper + && !upper_intersection.is_simple_negation(db) + { + let mut result = Node::AlwaysTrue; + for upper_element in upper_intersection.iter_positive(db) { + result = result.and( + db, + ConstrainedTypeVar::new_node(db, typevar, lower, upper_element), + ); + } + for upper_element in upper_intersection.iter_negative(db) { + result = result.and( + db, + ConstrainedTypeVar::new_node(db, typevar, lower, upper_element.negate(db)), + ); + } + return result; + } + // Two identical typevars must always solve to the same type, so it is not useful to have // an upper or lower bound that is the typevar being constrained. match lower { @@ -659,7 +717,7 @@ impl<'db> ConstrainedTypeVar<'db> { } /// Returns the intersection of two range constraints, or `None` if the intersection is empty. - fn intersect(self, db: &'db dyn Db, other: Self) -> Option { + fn intersect(self, db: &'db dyn Db, other: Self) -> IntersectionResult<'db> { // (s₁ ≤ α ≤ t₁) ∧ (s₂ ≤ α ≤ t₂) = (s₁ ∪ s₂) ≤ α ≤ (t₁ ∩ t₂)) let lower = UnionType::from_elements(db, [self.lower(db), other.lower(db)]); let upper = IntersectionType::from_elements(db, [self.upper(db), other.upper(db)]); @@ -667,10 +725,14 @@ impl<'db> ConstrainedTypeVar<'db> { // If `lower ≰ upper`, then the intersection is empty, since there is no type that is both // greater than `lower`, and less than `upper`. 
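A worked instance of the range-constraint intersection rule above, with concrete types (editorial illustration):

    (bool ≤ α ≤ int) ∧ (int ≤ α ≤ object)  =  ((bool ∪ int) ≤ α ≤ (int ∩ object))  =  (int ≤ α ≤ int)

so `α` must be exactly `int`. By contrast, `(str ≤ α ≤ str) ∧ (int ≤ α ≤ int)` has lower bound `str ∪ int` and upper bound `str ∩ int`; since `str ∪ int ≰ str ∩ int`, no type satisfies both constraints and the intersection is empty (`Disjoint`).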
if !lower.is_subtype_of(db, upper) { - return None; + return IntersectionResult::Disjoint; } - Some(Self::new(db, self.typevar(db), lower, upper)) + if lower.is_union() || upper.is_nontrivial_intersection(db) { + return IntersectionResult::CannotSimplify; + } + + IntersectionResult::Simplified(Self::new(db, self.typevar(db), lower, upper)) } fn display(self, db: &'db dyn Db) -> impl Display { @@ -794,6 +856,9 @@ impl<'db> Node<'db> { root_constraint.ordering(db) > constraint.ordering(db) }) ); + if if_true == Node::AlwaysFalse && if_false == Node::AlwaysFalse { + return Node::AlwaysFalse; + } Self::Interior(InteriorNode::new(db, constraint, if_true, if_false)) } @@ -1526,7 +1591,6 @@ impl<'db> Node<'db> { /// │ └─₀ never /// └─₀ never /// ``` - #[cfg_attr(not(test), expect(dead_code))] // Keep this around for debugging purposes fn display_graph(self, db: &'db dyn Db, prefix: &dyn Display) -> impl Display { struct DisplayNode<'a, 'db> { db: &'db dyn Db, @@ -2034,7 +2098,7 @@ impl<'db> InteriorNode<'db> { // constraints is empty, and others that we can make when the intersection is // non-empty. match left_constraint.intersect(db, right_constraint) { - Some(intersection_constraint) => { + IntersectionResult::Simplified(intersection_constraint) => { let intersection_constraint = intersection_constraint.normalized(db); // If the intersection is non-empty, we need to create a new constraint to @@ -2117,7 +2181,11 @@ impl<'db> InteriorNode<'db> { ); } - None => { + // If the intersection doesn't simplify to a single clause, we shouldn't update the + // BDD. + IntersectionResult::CannotSimplify => {} + + IntersectionResult::Disjoint => { // All of the below hold because we just proved that the intersection of left // and right is empty. @@ -2242,7 +2310,9 @@ impl<'db> ConstraintAssignment<'db> { ( ConstraintAssignment::Positive(self_constraint), ConstraintAssignment::Negative(other_constraint), - ) => self_constraint.intersect(db, other_constraint).is_none(), + ) => self_constraint + .intersect(db, other_constraint) + .is_disjoint(), // It's theoretically possible for a negative constraint to imply a positive constraint // if the positive constraint is always satisfied (`Never ≤ T ≤ object`). But we never @@ -2686,7 +2756,7 @@ impl<'db> SequentMap<'db> { } match left_constraint.intersect(db, right_constraint) { - Some(intersection_constraint) => { + IntersectionResult::Simplified(intersection_constraint) => { tracing::debug!( target: "ty_python_semantic::types::constraints::SequentMap", left = %left_constraint.display(db), @@ -2704,7 +2774,13 @@ impl<'db> SequentMap<'db> { self.add_single_implication(db, intersection_constraint, right_constraint); self.enqueue_constraint(intersection_constraint); } - None => { + + // The sequent map only needs to include constraints that might appear in a BDD. If the + // intersection does not collapse to a single constraint, then there's no new + // constraint that we need to add to the sequent map. 
+ IntersectionResult::CannotSimplify => {} + + IntersectionResult::Disjoint => { tracing::debug!( target: "ty_python_semantic::types::constraints::SequentMap", left = %left_constraint.display(db), @@ -3446,9 +3522,7 @@ mod tests { │ └─₀ (U = bool) │ ┡━₁ always │ └─₀ never - └─₀ (U = str) - ┡━₁ never - └─₀ never + └─₀ never "#} .trim_end(); diff --git a/crates/ty_python_semantic/src/types/context.rs b/crates/ty_python_semantic/src/types/context.rs index 7eb84e0d01..662b38b302 100644 --- a/crates/ty_python_semantic/src/types/context.rs +++ b/crates/ty_python_semantic/src/types/context.rs @@ -177,8 +177,8 @@ impl<'db, 'ast> InferContext<'db, 'ast> { std::mem::replace(&mut self.multi_inference, multi_inference) } - pub(super) fn set_in_no_type_check(&mut self, no_type_check: InNoTypeCheck) { - self.no_type_check = no_type_check; + pub(super) fn set_in_no_type_check(&mut self, no_type_check: InNoTypeCheck) -> InNoTypeCheck { + std::mem::replace(&mut self.no_type_check, no_type_check) } fn is_in_no_type_check(&self) -> bool { diff --git a/crates/ty_python_semantic/src/types/cyclic.rs b/crates/ty_python_semantic/src/types/cyclic.rs index 344881303a..6f179b1a72 100644 --- a/crates/ty_python_semantic/src/types/cyclic.rs +++ b/crates/ty_python_semantic/src/types/cyclic.rs @@ -19,7 +19,7 @@ //! of the Rust types implementing protocols also call `visitor.visit`. The best way to avoid this //! is to prefer always calling `visitor.visit` only in the main recursive method on `Type`. -use std::cell::RefCell; +use std::cell::{Cell, RefCell}; use std::cmp::Eq; use std::hash::Hash; use std::marker::PhantomData; @@ -29,6 +29,22 @@ use rustc_hash::FxHashMap; use crate::FxIndexSet; use crate::types::Type; +/// Maximum recursion depth for cycle detection. +/// +/// This is a safety limit to prevent stack overflow when checking recursive generic protocols +/// that create infinitely growing type specializations. For example: +/// +/// ```python +/// class C[T](Protocol): +/// a: 'C[set[T]]' +/// ``` +/// +/// When checking `C[set[int]]` against e.g. `C[Unknown]`, member `a` requires checking +/// `C[set[set[int]]]`, which in turn requires checking `C[set[set[set[int]]]]`, etc. Each level +/// creates a unique cache key, so the standard cycle detection doesn't catch it. The depth limit +/// ensures we bail out before hitting a stack overflow. +const MAX_RECURSION_DEPTH: u32 = 64; + pub(crate) type TypeTransformer<'db, Tag> = CycleDetector, Type<'db>>; impl Default for TypeTransformer<'_, Tag> { @@ -58,6 +74,10 @@ pub struct CycleDetector { /// sort-of defeat the point of a cache if we did!) cache: RefCell>, + /// Current recursion depth. Used to prevent stack overflow if recursive generic types create + /// infinitely growing type specializations that don't trigger exact-match cycle detection. + depth: Cell, + fallback: R, _tag: PhantomData, @@ -68,6 +88,7 @@ impl CycleDetector { CycleDetector { seen: RefCell::new(FxIndexSet::default()), cache: RefCell::new(FxHashMap::default()), + depth: Cell::new(0), fallback, _tag: PhantomData, } @@ -83,7 +104,18 @@ impl CycleDetector { return self.fallback.clone(); } + // Check depth limit to prevent stack overflow from recursive generic types + // with growing specializations (e.g., C[set[T]] -> C[set[set[T]]] -> ...) 
+ let current_depth = self.depth.get(); + if current_depth >= MAX_RECURSION_DEPTH { + self.seen.borrow_mut().pop(); + return self.fallback.clone(); + } + self.depth.set(current_depth + 1); + let ret = func(); + + self.depth.set(current_depth); self.seen.borrow_mut().pop(); self.cache.borrow_mut().insert(item, ret.clone()); @@ -100,7 +132,18 @@ impl CycleDetector { return Some(self.fallback.clone()); } + // Check depth limit to prevent stack overflow from recursive generic protocols + // with growing specializations (e.g., C[set[T]] -> C[set[set[T]]] -> ...) + let current_depth = self.depth.get(); + if current_depth >= MAX_RECURSION_DEPTH { + self.seen.borrow_mut().pop(); + return Some(self.fallback.clone()); + } + self.depth.set(current_depth + 1); + let ret = func()?; + + self.depth.set(current_depth); self.seen.borrow_mut().pop(); self.cache.borrow_mut().insert(item, ret.clone()); diff --git a/crates/ty_python_semantic/src/types/diagnostic.rs b/crates/ty_python_semantic/src/types/diagnostic.rs index 2706787998..23bb5b1a16 100644 --- a/crates/ty_python_semantic/src/types/diagnostic.rs +++ b/crates/ty_python_semantic/src/types/diagnostic.rs @@ -14,9 +14,7 @@ use crate::semantic_index::place::{PlaceTable, ScopedPlaceId}; use crate::semantic_index::{global_scope, place_table, use_def_map}; use crate::suppression::FileSuppressionId; use crate::types::call::CallError; -use crate::types::class::{ - CodeGeneratorKind, DisjointBase, DisjointBaseKind, Field, MethodDecorator, -}; +use crate::types::class::{CodeGeneratorKind, DisjointBase, DisjointBaseKind, MethodDecorator}; use crate::types::function::{FunctionDecorators, FunctionType, KnownFunction, OverloadLiteral}; use crate::types::infer::UnsupportedComparisonError; use crate::types::overrides::MethodKind; @@ -26,6 +24,7 @@ use crate::types::string_annotation::{ RAW_STRING_TYPE_ANNOTATION, }; use crate::types::tuple::TupleSpec; +use crate::types::typed_dict::TypedDictSchema; use crate::types::{ BoundTypeVarInstance, ClassType, DynamicType, LintDiagnosticGuard, Protocol, ProtocolInstanceType, SpecialFormType, SubclassOfInner, Type, TypeContext, binding_type, @@ -3471,7 +3470,7 @@ pub(crate) fn report_invalid_key_on_typed_dict<'db>( typed_dict_ty: Type<'db>, full_object_ty: Option>, key_ty: Type<'db>, - items: &FxIndexMap>, + items: &TypedDictSchema<'db>, ) { let db = context.db(); if let Some(builder) = context.report_lint(&INVALID_KEY, key_node) { diff --git a/crates/ty_python_semantic/src/types/display.rs b/crates/ty_python_semantic/src/types/display.rs index 19ed71bbff..39e5a1caee 100644 --- a/crates/ty_python_semantic/src/types/display.rs +++ b/crates/ty_python_semantic/src/types/display.rs @@ -25,8 +25,8 @@ use crate::types::visitor::TypeVisitor; use crate::types::{ BoundTypeVarIdentity, CallableType, CallableTypeKind, IntersectionType, KnownBoundMethodType, KnownClass, KnownInstanceType, MaterializationKind, Protocol, ProtocolInstanceType, - SpecialFormType, StringLiteralType, SubclassOfInner, Type, UnionType, WrapperDescriptorKind, - visitor, + SpecialFormType, StringLiteralType, SubclassOfInner, Type, TypedDictType, UnionType, + WrapperDescriptorKind, visitor, }; /// Settings for displaying types and signatures @@ -900,12 +900,24 @@ impl<'db> FmtDetailed<'db> for DisplayRepresentation<'db> { } f.write_str("]") } - Type::TypedDict(typed_dict) => typed_dict - .defining_class() + Type::TypedDict(TypedDictType::Class(defining_class)) => defining_class .class_literal(self.db) .0 .display_with(self.db, self.settings.clone()) .fmt_detailed(f), 
+ Type::TypedDict(TypedDictType::Synthesized(synthesized)) => { + f.set_invalid_syntax(); + f.write_str("') + } Type::TypeAlias(alias) => { f.write_str(alias.name(self.db))?; match alias.specialization(self.db) { @@ -2373,7 +2385,10 @@ mod tests { use crate::Db; use crate::db::tests::setup_db; use crate::place::typing_extensions_symbol; - use crate::types::{KnownClass, Parameter, Parameters, Signature, Type}; + use crate::types::typed_dict::{ + SynthesizedTypedDictType, TypedDictFieldBuilder, TypedDictSchema, + }; + use crate::types::{KnownClass, Parameter, Parameters, Signature, Type, TypedDictType}; #[test] fn string_literal_display() { @@ -2418,6 +2433,37 @@ mod tests { ); } + #[test] + fn synthesized_typeddict_display() { + let db = setup_db(); + + let mut items = TypedDictSchema::default(); + items.insert( + Name::new("foo"), + TypedDictFieldBuilder::new(Type::IntLiteral(42)) + .required(true) + .build(), + ); + items.insert( + Name::new("bar"), + TypedDictFieldBuilder::new(Type::string_literal(&db, "hello")) + .required(true) + .build(), + ); + + let synthesized = SynthesizedTypedDictType::new(&db, items); + let type_ = Type::TypedDict(TypedDictType::Synthesized(synthesized)); + // Fields are sorted internally, even prior to normalization. + assert_eq!( + type_.display(&db).to_string(), + "", + ); + assert_eq!( + type_.normalized(&db).display(&db).to_string(), + "", + ); + } + fn display_signature<'db>( db: &'db dyn Db, parameters: impl IntoIterator>, diff --git a/crates/ty_python_semantic/src/types/enums.rs b/crates/ty_python_semantic/src/types/enums.rs index 9a91d1da3d..fcc8552b52 100644 --- a/crates/ty_python_semantic/src/types/enums.rs +++ b/crates/ty_python_semantic/src/types/enums.rs @@ -76,7 +76,7 @@ pub(crate) fn enum_metadata<'db>( let mut auto_counter = 0; let ignored_names: Option> = if let Some(ignore) = table.symbol_id("_ignore_") { - let ignore_bindings = use_def_map.all_reachable_symbol_bindings(ignore); + let ignore_bindings = use_def_map.reachable_symbol_bindings(ignore); let ignore_place = place_from_bindings(db, ignore_bindings).place; match ignore_place { diff --git a/crates/ty_python_semantic/src/types/function.rs b/crates/ty_python_semantic/src/types/function.rs index 6b6c798615..e727e6663b 100644 --- a/crates/ty_python_semantic/src/types/function.rs +++ b/crates/ty_python_semantic/src/types/function.rs @@ -73,7 +73,8 @@ use crate::types::diagnostic::{ report_runtime_check_against_non_runtime_checkable_protocol, }; use crate::types::display::DisplaySettings; -use crate::types::generics::{GenericContext, InferableTypeVars}; +use crate::types::generics::{GenericContext, InferableTypeVars, typing_self}; +use crate::types::infer::nearest_enclosing_class; use crate::types::list_members::all_members; use crate::types::narrow::ClassInfoConstraintFunction; use crate::types::signatures::{CallableSignature, Signature}; @@ -82,8 +83,9 @@ use crate::types::{ ApplyTypeMappingVisitor, BoundMethodType, BoundTypeVarInstance, CallableType, CallableTypeKind, ClassBase, ClassLiteral, ClassType, DeprecatedInstance, DynamicType, FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, KnownInstanceType, - NormalizedVisitor, SpecialFormType, Truthiness, Type, TypeContext, TypeMapping, TypeRelation, - UnionBuilder, binding_type, definition_expression_type, walk_signature, + NormalizedVisitor, SpecialFormType, SubclassOfInner, SubclassOfType, Truthiness, Type, + TypeContext, TypeMapping, TypeRelation, UnionBuilder, binding_type, 
definition_expression_type, + infer_definition_types, walk_signature, }; use crate::{Db, FxOrderSet, ModuleName, resolve_module}; @@ -499,13 +501,91 @@ impl<'db> OverloadLiteral<'db> { index, ); - Signature::from_function( + let mut raw_signature = Signature::from_function( db, pep695_ctx, definition, function_stmt_node, has_implicitly_positional_first_parameter, - ) + ); + + let generic_context = raw_signature.generic_context; + raw_signature.add_implicit_self_annotation(db, || { + if self.is_staticmethod(db) { + return None; + } + + // We have not yet added an implicit annotation to the `self` parameter, so any + // typevars that currently appear in the method's generic context come from explicit + // annotations. + let method_has_explicit_self = generic_context + .is_some_and(|context| context.variables(db).any(|v| v.typevar(db).is_self(db))); + + let class_scope_id = definition.scope(db); + let class_scope = index.scope(class_scope_id.file_scope_id(db)); + let class_node = class_scope.node().as_class()?; + let class_def = index.expect_single_definition(class_node); + let Type::ClassLiteral(class_literal) = infer_definition_types(db, class_def) + .declaration_type(class_def) + .inner_type() + else { + return None; + }; + let class_is_generic = class_literal.generic_context(db).is_some(); + let class_is_fallback = class_literal + .known(db) + .is_some_and(KnownClass::is_fallback_class); + + // Normally we implicitly annotate `self` or `cls` with `Self` or `type[Self]`, and + // create a `Self` typevar that we then have to solve for whenever this method is + // called. As an optimization, we can skip creating that typevar in certain situations: + // + // - The method cannot use explicit `Self` in any other parameter annotations, + // or in its return type. If it does, then we really do need specialization + // inference at each call site to see which specific instance type should be + // used in those other parameters / return type. + // + // - The class cannot be generic. If it is, then we might need an actual `Self` + // typevar to help carry through constraints that relate the instance type to + // other typevars in the method signature. + // + // - The class cannot be a "fallback class". A fallback class is used like a mixin, + // and so we need specialization inference to determine the "real" class that the + // fallback is augmenting. (See KnownClass::is_fallback_class for more details.) + if method_has_explicit_self || class_is_generic || class_is_fallback { + let scope_id = definition.scope(db); + let typevar_binding_context = Some(definition); + let index = semantic_index(db, scope_id.file(db)); + let class = nearest_enclosing_class(db, index, scope_id).unwrap(); + + let typing_self = typing_self(db, scope_id, typevar_binding_context, class).expect( + "We should always find the surrounding class \ + for an implicit self: Self annotation", + ); + + if self.is_classmethod(db) { + Some(SubclassOfType::from( + db, + SubclassOfInner::TypeVar(typing_self), + )) + } else { + Some(Type::TypeVar(typing_self)) + } + } else { + // If skip creating the typevar, we use "instance of class" or "subclass of + // class" as the implicit annotation instead. 
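A minimal Python sketch of the distinction drawn in the comments above, assuming standard `Self` semantics: a real `Self` type variable is only needed when the signature itself mentions `Self` (or the class is generic or a fallback class); otherwise the plain instance type suffices:

```python
from typing import Self, reveal_type

class Plain:
    def name(self) -> str:        # no `Self` in the signature and the class is not generic,
        return "plain"            # so `self` can simply be typed as `Plain`

class Chainable:
    def configure(self) -> Self:  # `Self` appears in the return type, so each call site
        return self               # must solve `Self` for the actual receiver type

class Sub(Chainable):
    pass

reveal_type(Sub().configure())    # expected to be revealed as `Sub`, not `Chainable`
```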
+ if self.is_classmethod(db) { + Some(SubclassOfType::from( + db, + SubclassOfInner::Class(ClassType::NonGeneric(class_literal)), + )) + } else { + Some(class_literal.to_non_generic_instance(db)) + } + } + }); + + raw_signature } pub(crate) fn parameter_span( @@ -1028,18 +1108,6 @@ impl<'db> FunctionType<'db> { relation_visitor: &HasRelationToVisitor<'db>, disjointness_visitor: &IsDisjointVisitor<'db>, ) -> ConstraintSet<'db> { - // A function type is the subtype of itself, and not of any other function type. However, - // our representation of a function type includes any specialization that should be applied - // to the signature. Different specializations of the same function type are only subtypes - // of each other if they result in subtype signatures. - if matches!( - relation, - TypeRelation::Subtyping | TypeRelation::Redundancy | TypeRelation::SubtypingAssuming(_) - ) && self.normalized(db) == other.normalized(db) - { - return ConstraintSet::from(true); - } - if self.literal(db) != other.literal(db) { return ConstraintSet::from(false); } @@ -1179,10 +1247,9 @@ fn is_instance_truthiness<'db>( Type::NominalInstance(..) => always_true_if(is_instance(&ty)), - Type::NewTypeInstance(newtype) => always_true_if(is_instance(&Type::instance( - db, - newtype.base_class_type(db), - ))), + Type::NewTypeInstance(newtype) => { + always_true_if(is_instance(&newtype.concrete_base_type(db))) + } Type::BooleanLiteral(..) | Type::BytesLiteral(..) @@ -1315,6 +1382,10 @@ pub enum KnownFunction { #[strum(serialize = "abstractmethod")] AbstractMethod, + /// `contextlib.asynccontextmanager` + #[strum(serialize = "asynccontextmanager")] + AsyncContextManager, + /// `dataclasses.dataclass` Dataclass, /// `dataclasses.field` @@ -1402,6 +1473,9 @@ impl KnownFunction { Self::AbstractMethod => { matches!(module, KnownModule::Abc) } + Self::AsyncContextManager => { + matches!(module, KnownModule::Contextlib) + } Self::Dataclass | Self::Field => { matches!(module, KnownModule::Dataclasses) } @@ -1614,10 +1688,16 @@ impl KnownFunction { && !any_over_type(db, *casted_type, &contains_unknown_or_todo, true) { if let Some(builder) = context.report_lint(&REDUNDANT_CAST, call_expression) { - builder.into_diagnostic(format_args!( - "Value is already of type `{}`", - casted_type.display(db), + let source_display = source_type.display(db).to_string(); + let casted_display = casted_type.display(db).to_string(); + let mut diagnostic = builder.into_diagnostic(format_args!( + "Value is already of type `{casted_display}`", )); + if source_display != casted_display { + diagnostic.info(format_args!( + "`{casted_display}` is equivalent to `{source_display}`", + )); + } } } } @@ -1926,6 +2006,8 @@ pub(crate) mod tests { KnownFunction::AbstractMethod => KnownModule::Abc, + KnownFunction::AsyncContextManager => KnownModule::Contextlib, + KnownFunction::Dataclass | KnownFunction::Field => KnownModule::Dataclasses, KnownFunction::GetattrStatic => KnownModule::Inspect, diff --git a/crates/ty_python_semantic/src/types/generics.rs b/crates/ty_python_semantic/src/types/generics.rs index 7db5f7e7a2..58ccd28ca0 100644 --- a/crates/ty_python_semantic/src/types/generics.rs +++ b/crates/ty_python_semantic/src/types/generics.rs @@ -11,7 +11,7 @@ use crate::semantic_index::scope::{FileScopeId, NodeWithScopeKind, ScopeId}; use crate::semantic_index::{SemanticIndex, semantic_index}; use crate::types::class::ClassType; use crate::types::class_base::ClassBase; -use crate::types::constraints::ConstraintSet; +use 
crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; use crate::types::instance::{Protocol, ProtocolInstanceType}; use crate::types::signatures::{Parameters, ParametersKind}; use crate::types::tuple::{TupleSpec, TupleType, walk_tuple_type}; @@ -86,7 +86,7 @@ pub(crate) fn typing_self<'db>( function_scope_id: ScopeId, typevar_binding_context: Option>, class: ClassLiteral<'db>, -) -> Option> { +) -> Option> { let index = semantic_index(db, function_scope_id.file(db)); let identity = TypeVarIdentity::new( @@ -117,7 +117,6 @@ pub(crate) fn typing_self<'db>( typevar_binding_context, typevar, ) - .map(Type::TypeVar) } #[derive(Clone, Copy, Debug)] @@ -1227,18 +1226,20 @@ impl<'db> Specialization<'db> { let self_materialization_kind = self.materialization_kind(db); let other_materialization_kind = other.materialization_kind(db); - let mut result = ConstraintSet::from(true); - for ((bound_typevar, self_type), other_type) in (generic_context.variables(db)) - .zip(self.types(db)) - .zip(other.types(db)) - { + let types = itertools::izip!( + generic_context.variables(db), + self.types(db), + other.types(db) + ); + + types.when_all(db, |(bound_typevar, self_type, other_type)| { // Subtyping/assignability of each type in the specialization depends on the variance // of the corresponding typevar: // - covariant: verify that self_type <: other_type // - contravariant: verify that other_type <: self_type // - invariant: verify that self_type <: other_type AND other_type <: self_type // - bivariant: skip, can't make subtyping/assignability false - let compatible = match bound_typevar.variance(db) { + match bound_typevar.variance(db) { TypeVarVariance::Invariant => has_relation_in_invariant_position( db, self_type, @@ -1267,13 +1268,82 @@ impl<'db> Specialization<'db> { disjointness_visitor, ), TypeVarVariance::Bivariant => ConstraintSet::from(true), - }; - if result.intersect(db, compatible).is_never_satisfied(db) { - return result; } + }) + } + + pub(crate) fn is_disjoint_from( + self, + db: &'db dyn Db, + other: Self, + inferable: InferableTypeVars<'_, 'db>, + ) -> ConstraintSet<'db> { + self.is_disjoint_from_impl( + db, + other, + inferable, + &IsDisjointVisitor::default(), + &HasRelationToVisitor::default(), + ) + } + + pub(crate) fn is_disjoint_from_impl( + self, + db: &'db dyn Db, + other: Self, + inferable: InferableTypeVars<'_, 'db>, + disjointness_visitor: &IsDisjointVisitor<'db>, + relation_visitor: &HasRelationToVisitor<'db>, + ) -> ConstraintSet<'db> { + let generic_context = self.generic_context(db); + if generic_context != other.generic_context(db) { + return ConstraintSet::from(true); } - result + if let (Some(self_tuple), Some(other_tuple)) = (self.tuple_inner(db), other.tuple_inner(db)) + { + return self_tuple.is_disjoint_from_impl( + db, + other_tuple, + inferable, + disjointness_visitor, + relation_visitor, + ); + } + + let types = itertools::izip!( + generic_context.variables(db), + self.types(db), + other.types(db) + ); + + types.when_all( + db, + |(bound_typevar, self_type, other_type)| match bound_typevar.variance(db) { + // TODO: This check can lead to false negatives. + // + // For example, `Foo[int]` and `Foo[bool]` are disjoint, even though `bool` is a subtype + // of `int`. However, given two non-inferable type variables `T` and `U`, `Foo[T]` and + // `Foo[U]` should not be considered disjoint, as `T` and `U` could be specialized to the + // same type. We don't currently have a good typing relationship to represent this. 
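A sketch of the variance cases handled below, phrased in terms of user-visible types and assuming the usual variance of `list` (invariant) and `Sequence` (covariant):

```python
from collections.abc import Sequence

x: list[int] = [1]
y: list[bool] = [True]     # invariant: `list[int]` and `list[bool]` are treated as disjoint
                           # specializations (subject to the typevar caveat noted above)

a: Sequence[bool] = [True]
b: Sequence[int] = a       # covariant, and `bool` is a subtype of `int`, so covariant
                           # specializations are never considered disjoint here
```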
+ TypeVarVariance::Invariant => self_type.is_disjoint_from_impl( + db, + *other_type, + inferable, + disjointness_visitor, + relation_visitor, + ), + + // If `Foo[T]` is covariant in `T`, `Foo[Never]` is a subtype of `Foo[A]` and `Foo[B]` + TypeVarVariance::Covariant => ConstraintSet::from(false), + + // If `Foo[T]` is contravariant in `T`, `Foo[A | B]` is a subtype of `Foo[A]` and `Foo[B]` + TypeVarVariance::Contravariant => ConstraintSet::from(false), + + // If `Foo[T]` is bivariant in `T`, `Foo[A]` and `Foo[B]` are mutual subtypes. + TypeVarVariance::Bivariant => ConstraintSet::from(false), + }, + ) } pub(crate) fn is_equivalent_to_impl( @@ -1545,35 +1615,66 @@ impl<'db> SpecializationBuilder<'db> { } self.add_type_mapping(*formal_bound_typevar, remaining_actual, polarity, f); } - (Type::Union(formal), _) => { - // Second, if the formal is a union, and precisely one union element is assignable - // from the actual type, then we don't add any type mapping. This handles a case like + (Type::Union(union_formal), _) => { + // Second, if the formal is a union, and the actual type is assignable to precisely + // one union element, then we don't add any type mapping. This handles a case like // // ```py - // def f[T](t: T | None): ... + // def f[T](t: T | None) -> T: ... // - // f(None) + // reveal_type(f(None)) # revealed: Unknown // ``` // // without specializing `T` to `None`. - // - // Otherwise, if precisely one union element _is_ a typevar (not _contains_ a - // typevar), then we add a mapping between that typevar and the actual type. if !actual.is_never() { - let assignable_elements = (formal.elements(self.db).iter()).filter(|ty| { - actual - .when_subtype_of(self.db, **ty, self.inferable) - .is_always_satisfied(self.db) - }); + let assignable_elements = + (union_formal.elements(self.db).iter()).filter(|ty| { + actual + .when_subtype_of(self.db, **ty, self.inferable) + .is_always_satisfied(self.db) + }); if assignable_elements.exactly_one().is_ok() { return Ok(()); } } - let bound_typevars = - (formal.elements(self.db).iter()).filter_map(|ty| ty.as_typevar()); - if let Ok(bound_typevar) = bound_typevars.exactly_one() { - self.add_type_mapping(bound_typevar, actual, polarity, f); + let mut bound_typevars = + (union_formal.elements(self.db).iter()).filter_map(|ty| ty.as_typevar()); + + // TODO: + // Handling more than one bare typevar is something that we can't handle yet. + if bound_typevars.nth(1).is_some() { + return Ok(()); + } + + // Finally, if there are no bare typevars, we try to infer type mappings by + // checking against each union element. This handles cases like + // ```py + // def f[T](t: P[T] | Q[T]) -> T: ... + // + // reveal_type(f(P[str]())) # revealed: str + // reveal_type(f(Q[int]())) # revealed: int + // ``` + let mut first_error = None; + let mut found_matching_element = false; + for formal_element in union_formal.elements(self.db) { + let result = self.infer_map_impl(*formal_element, actual, polarity, &mut f); + if let Err(err) = result { + first_error.get_or_insert(err); + } else { + // The recursive call to `infer_map_impl` may succeed even if the actual type is + // not assignable to the formal element. 
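A self-contained version of the union-element matching described in this block; `P` and `Q` are illustrative placeholder classes, and the expected inferences mirror the example in the comment above:

```python
from typing import reveal_type

class P[T]:
    def __init__(self, value: T) -> None:
        self.value = value

class Q[T]:
    def __init__(self, value: T) -> None:
        self.value = value

def f[T](t: P[T] | Q[T]) -> T:
    return t.value

reveal_type(f(P("hello")))   # expected: `str`, inferred by matching the `P[T]` element
reveal_type(f(Q(42)))        # expected: `int`, inferred by matching the `Q[T]` element
```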
+ if !actual + .when_assignable_to(self.db, *formal_element, self.inferable) + .is_never_satisfied(self.db) + { + found_matching_element = true; + } + } + } + + if !found_matching_element && let Some(error) = first_error { + return Err(error); } } diff --git a/crates/ty_python_semantic/src/types/ide_support.rs b/crates/ty_python_semantic/src/types/ide_support.rs index a74cc82f7e..4087d125c6 100644 --- a/crates/ty_python_semantic/src/types/ide_support.rs +++ b/crates/ty_python_semantic/src/types/ide_support.rs @@ -6,7 +6,7 @@ use crate::semantic_index::definition::Definition; use crate::semantic_index::definition::DefinitionKind; use crate::semantic_index::{attribute_scopes, global_scope, semantic_index, use_def_map}; use crate::types::call::{CallArguments, MatchedArgument}; -use crate::types::signatures::Signature; +use crate::types::signatures::{ParameterKind, Signature}; use crate::types::{CallDunderError, UnionType}; use crate::types::{CallableTypes, ClassBase, KnownClass, Type, TypeContext}; use crate::{Db, DisplaySettings, HasType, SemanticModel}; @@ -86,9 +86,9 @@ pub fn definitions_for_name<'db>( if let Some(global_symbol_id) = global_place_table.symbol_id(name_str) { let global_use_def_map = crate::semantic_index::use_def_map(db, global_scope_id); let global_bindings = - global_use_def_map.all_reachable_symbol_bindings(global_symbol_id); + global_use_def_map.reachable_symbol_bindings(global_symbol_id); let global_declarations = - global_use_def_map.all_reachable_symbol_declarations(global_symbol_id); + global_use_def_map.reachable_symbol_declarations(global_symbol_id); for binding in global_bindings { if let Some(def) = binding.binding.definition() { @@ -114,8 +114,8 @@ pub fn definitions_for_name<'db>( let use_def_map = index.use_def_map(scope_id); // Get all definitions (both bindings and declarations) for this place - let bindings = use_def_map.all_reachable_symbol_bindings(symbol_id); - let declarations = use_def_map.all_reachable_symbol_declarations(symbol_id); + let bindings = use_def_map.reachable_symbol_bindings(symbol_id); + let declarations = use_def_map.reachable_symbol_declarations(symbol_id); for binding in bindings { if let Some(def) = binding.binding.definition() { @@ -155,7 +155,8 @@ pub fn definitions_for_name<'db>( // https://typing.python.org/en/latest/spec/special-types.html#special-cases-for-float-and-complex if matches!(name_str, "float" | "complex") && let Some(expr) = node.expr_name() - && let Some(union) = expr.inferred_type(&SemanticModel::new(db, file)).as_union() + && let Some(ty) = expr.inferred_type(model) + && let Some(union) = ty.as_union() && is_float_or_complex_annotation(db, union, name_str) { return union @@ -234,7 +235,10 @@ pub fn definitions_for_attribute<'db>( let mut resolved = Vec::new(); // Determine the type of the LHS - let lhs_ty = attribute.value.inferred_type(model); + let Some(lhs_ty) = attribute.value.inferred_type(model) else { + return resolved; + }; + let tys = match lhs_ty { Type::Union(union) => union.elements(model.db()).to_vec(), _ => vec![lhs_ty], @@ -294,7 +298,7 @@ pub fn definitions_for_attribute<'db>( let use_def = use_def_map(db, class_scope); // Check declarations first - for decl in use_def.all_reachable_symbol_declarations(place_id) { + for decl in use_def.reachable_symbol_declarations(place_id) { if let Some(def) = decl.declaration.definition() { resolved.extend(resolve_definition( db, @@ -307,7 +311,7 @@ pub fn definitions_for_attribute<'db>( } // If no declarations found, check bindings - for binding in 
use_def.all_reachable_symbol_bindings(place_id) {
+        for binding in use_def.reachable_symbol_bindings(place_id) {
             if let Some(def) = binding.binding.definition() {
                 resolved.extend(resolve_definition(
                     db,
@@ -332,7 +336,7 @@ pub fn definitions_for_attribute<'db>(
         let use_def = index.use_def_map(function_scope_id);
 
         // Check declarations first
-        for decl in use_def.all_reachable_member_declarations(place_id) {
+        for decl in use_def.reachable_member_declarations(place_id) {
             if let Some(def) = decl.declaration.definition() {
                 resolved.extend(resolve_definition(
                     db,
@@ -345,7 +349,7 @@ pub fn definitions_for_attribute<'db>(
         }
 
         // If no declarations found, check bindings
-        for binding in use_def.all_reachable_member_bindings(place_id) {
+        for binding in use_def.reachable_member_bindings(place_id) {
             if let Some(def) = binding.binding.definition() {
                 resolved.extend(resolve_definition(
                     db,
@@ -374,7 +378,9 @@ pub fn definitions_for_keyword_argument<'db>(
     call_expr: &ast::ExprCall,
 ) -> Vec> {
     let db = model.db();
-    let func_type = call_expr.func.inferred_type(model);
+    let Some(func_type) = call_expr.func.inferred_type(model) else {
+        return Vec::new();
+    };
 
     let Some(keyword_name) = keyword.arg.as_ref() else {
         return Vec::new();
@@ -459,6 +465,12 @@ pub struct CallSignatureDetails<'db> {
     /// This provides easy access to parameter names for documentation lookup.
     pub parameter_names: Vec,
 
+    /// Parameter kinds, useful to determine correct autocomplete suggestions.
+    pub parameter_kinds: Vec<ParameterKind<'db>>,
+
+    /// Parameter types, useful to determine correct autocomplete suggestions.
+    pub parameter_types: Vec<Option<Type<'db>>>,
+
     /// The definition where this callable was originally defined (useful for
     /// extracting docstrings).
     pub definition: Option>,
@@ -492,7 +504,9 @@ pub fn call_signature_details<'db>(
     model: &SemanticModel<'db>,
     call_expr: &ast::ExprCall,
 ) -> Vec> {
-    let func_type = call_expr.func.inferred_type(model);
+    let Some(func_type) = call_expr.func.inferred_type(model) else {
+        return Vec::new();
+    };
 
     // Use into_callable to handle all the complex type conversions
     if let Some(callable_type) = func_type
@@ -501,7 +515,9 @@ pub fn call_signature_details<'db>(
     {
         let call_arguments =
             CallArguments::from_arguments(&call_expr.arguments, |_, splatted_value| {
-                splatted_value.inferred_type(model)
+                splatted_value
+                    .inferred_type(model)
+                    .unwrap_or(Type::unknown())
             });
         let bindings = callable_type
            .bindings(model.db())
@@ -517,6 +533,12 @@ pub fn call_signature_details<'db>(
                 let display_details = signature.display(model.db()).to_string_parts();
                 let parameter_label_offsets = display_details.parameter_ranges;
                 let parameter_names = display_details.parameter_names;
+                let (parameter_kinds, parameter_types): (Vec<_>, Vec<Option<_>>) =
+                    signature
+                        .parameters()
+                        .iter()
+                        .map(|param| (param.kind().clone(), param.annotated_type()))
+                        .unzip();
 
                 CallSignatureDetails {
                     definition: signature.definition(),
@@ -524,6 +546,8 @@ pub fn call_signature_details<'db>(
                     label: display_details.label,
                     parameter_label_offsets,
                     parameter_names,
+                    parameter_kinds,
+                    parameter_types,
                     argument_to_parameter_mapping,
                 }
             })
@@ -550,7 +574,7 @@ pub fn call_type_simplified_by_overloads(
     call_expr: &ast::ExprCall,
 ) -> Option {
     let db = model.db();
-    let func_type = call_expr.func.inferred_type(model);
+    let func_type = call_expr.func.inferred_type(model)?;
 
     // Use into_callable to handle all the complex type conversions
     let callable_type = func_type.try_upcast_to_callable(db)?.into_type(db);
@@ -565,7 +589,9 @@ pub fn call_type_simplified_by_overloads(
     // Hand the 
overload resolution system as much type info as we have let args = CallArguments::from_arguments_typed(&call_expr.arguments, |_, splatted_value| { - splatted_value.inferred_type(model) + splatted_value + .inferred_type(model) + .unwrap_or(Type::unknown()) }); // Try to resolve overloads with the arguments/types we have @@ -598,8 +624,8 @@ pub fn definitions_for_bin_op<'db>( model: &SemanticModel<'db>, binary_op: &ast::ExprBinOp, ) -> Option<(Vec>, Type<'db>)> { - let left_ty = binary_op.left.inferred_type(model); - let right_ty = binary_op.right.inferred_type(model); + let left_ty = binary_op.left.inferred_type(model)?; + let right_ty = binary_op.right.inferred_type(model)?; let Ok(bindings) = Type::try_call_bin_op(model.db(), left_ty, binary_op.op, right_ty) else { return None; @@ -625,7 +651,7 @@ pub fn definitions_for_unary_op<'db>( model: &SemanticModel<'db>, unary_op: &ast::ExprUnaryOp, ) -> Option<(Vec>, Type<'db>)> { - let operand_ty = unary_op.operand.inferred_type(model); + let operand_ty = unary_op.operand.inferred_type(model)?; let unary_dunder_method = match unary_op.op { ast::UnaryOp::Invert => "__invert__", @@ -829,6 +855,7 @@ mod resolve_definition { use ruff_db::system::SystemPath; use ruff_db::vendored::VendoredPathBuf; use ruff_python_ast as ast; + use ruff_python_stdlib::sys::is_builtin_module; use rustc_hash::FxHashSet; use tracing::trace; @@ -1082,8 +1109,8 @@ mod resolve_definition { let mut definitions = IndexSet::new(); // Get all definitions (both bindings and declarations) for this place - let bindings = use_def_map.all_reachable_symbol_bindings(symbol_id); - let declarations = use_def_map.all_reachable_symbol_declarations(symbol_id); + let bindings = use_def_map.reachable_symbol_bindings(symbol_id); + let declarations = use_def_map.reachable_symbol_declarations(symbol_id); for binding in bindings { if let Some(def) = binding.binding.definition() { @@ -1146,6 +1173,14 @@ mod resolve_definition { // here because there isn't really an importing file. However this `resolve_real_module` // can be understood as essentially `import .`, which is also what `file_to_module` is, // so this is in fact exactly the file we want to consider the importer. + // + // ... unless we have a builtin module. i.e., A module embedded + // into the interpreter. In which case, all we have are stubs. + // `resolve_real_module` will always return `None` for this case, but + // it will emit false positive logs. And this saves us some work. 
+ if is_builtin_module(db.python_version().minor, stub_module.name(db)) { + return None; + } let real_module = resolve_real_module(db, stub_file_for_module_lookup, stub_module.name(db))?; trace!("Found real module: {}", real_module.name(db)); diff --git a/crates/ty_python_semantic/src/types/infer/builder.rs b/crates/ty_python_semantic/src/types/infer/builder.rs index ee687f651d..57e3df8eb9 100644 --- a/crates/ty_python_semantic/src/types/infer/builder.rs +++ b/crates/ty_python_semantic/src/types/infer/builder.rs @@ -104,13 +104,13 @@ use crate::types::visitor::any_over_type; use crate::types::{ BoundTypeVarInstance, CallDunderError, CallableBinding, CallableType, CallableTypeKind, ClassLiteral, ClassType, DataclassParams, DynamicType, InternedType, IntersectionBuilder, - IntersectionType, KnownClass, KnownInstanceType, LintDiagnosticGuard, MemberLookupPolicy, - MetaclassCandidate, PEP695TypeAliasType, ParamSpecAttrKind, Parameter, ParameterForm, - Parameters, Signature, SpecialFormType, SubclassOfType, TrackedConstraintSet, Truthiness, Type, - TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, TypeVarBoundOrConstraints, - TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, TypeVarIdentity, - TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, UnionType, - UnionTypeInstance, binding_type, infer_scope_types, todo_type, + IntersectionType, KnownClass, KnownInstanceType, KnownUnion, LintDiagnosticGuard, + MemberLookupPolicy, MetaclassCandidate, PEP695TypeAliasType, ParamSpecAttrKind, Parameter, + ParameterForm, Parameters, Signature, SpecialFormType, SubclassOfType, TrackedConstraintSet, + Truthiness, Type, TypeAliasType, TypeAndQualifiers, TypeContext, TypeQualifiers, + TypeVarBoundOrConstraints, TypeVarBoundOrConstraintsEvaluation, TypeVarDefaultEvaluation, + TypeVarIdentity, TypeVarInstance, TypeVarKind, TypeVarVariance, TypedDictType, UnionBuilder, + UnionType, UnionTypeInstance, binding_type, infer_scope_types, todo_type, }; use crate::types::{CallableTypes, overrides}; use crate::types::{ClassBase, add_inferred_python_version_hint_to_diagnostic}; @@ -2241,7 +2241,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { name, type_params, parameters, - returns, + returns: _, body: _, decorator_list, } = function; @@ -2288,21 +2288,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.infer_expression(default, TypeContext::default()); } - // If there are type params, parameters and returns are evaluated in that scope, that is, in - // `infer_function_type_params`, rather than here. + // If there are type params, parameters and returns are evaluated in that scope. Otherwise, + // we always defer the inference of the parameters and returns. That ensures that we do not + // add any spurious salsa cycles when applying decorators below. (Applying a decorator + // requires getting the signature of this function definition, which in turn requires + // (lazily) inferring the parameter and return types.) 
if type_params.is_none() { - if self.defer_annotations() { - self.deferred.insert(definition, self.multi_inference_state); - } else { - let previous_typevar_binding_context = - self.typevar_binding_context.replace(definition); - self.infer_return_type_annotation( - returns.as_deref(), - DeferredExpressionState::None, - ); - self.infer_parameters(parameters); - self.typevar_binding_context = previous_typevar_binding_context; - } + self.deferred.insert(definition, self.multi_inference_state); } let known_function = @@ -2702,21 +2694,23 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let function_node = function_definition.node(self.module()); let function_name = &function_node.name; - // TODO: handle implicit type of `cls` for classmethods - if is_implicit_classmethod(function_name) || is_implicit_staticmethod(function_name) { + if is_implicit_staticmethod(function_name) { return None; } + let mut is_classmethod = is_implicit_classmethod(function_name); let inference = infer_definition_types(db, method_definition); for decorator in &function_node.decorator_list { let decorator_ty = inference.expression_type(&decorator.expression); - if decorator_ty.as_class_literal().is_some_and(|class| { - matches!( - class.known(db), - Some(KnownClass::Classmethod | KnownClass::Staticmethod) - ) - }) { - return None; + if let Some(known_class) = decorator_ty + .as_class_literal() + .and_then(|class| class.known(db)) + { + if known_class == KnownClass::Staticmethod { + return None; + } + + is_classmethod |= known_class == KnownClass::Classmethod; } } @@ -2726,7 +2720,13 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { .inner_type() .as_class_literal()?; - typing_self(db, self.scope(), Some(method_definition), class_literal) + let typing_self = typing_self(db, self.scope(), Some(method_definition), class_literal); + if is_classmethod { + typing_self + .map(|typing_self| SubclassOfType::from(db, SubclassOfInner::TypeVar(typing_self))) + } else { + typing_self.map(Type::TypeVar) + } } /// Set initial declared/inferred types for a `**kwargs` keyword-variadic parameter. @@ -2938,10 +2938,24 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { definition: Definition<'db>, function: &ast::StmtFunctionDef, ) { + let mut prev_in_no_type_check = self.context.set_in_no_type_check(InNoTypeCheck::Yes); + for decorator in &function.decorator_list { + let decorator_type = self.infer_decorator(decorator); + if let Type::FunctionLiteral(function) = decorator_type + && let Some(KnownFunction::NoTypeCheck) = function.known(self.db()) + { + // If the function is decorated with the `no_type_check` decorator, + // we need to suppress any errors that come after the decorators. 
+ prev_in_no_type_check = InNoTypeCheck::Yes; + break; + } + } + self.context.set_in_no_type_check(prev_in_no_type_check); + let previous_typevar_binding_context = self.typevar_binding_context.replace(definition); self.infer_return_type_annotation( function.returns.as_deref(), - DeferredExpressionState::Deferred, + self.defer_annotations().into(), ); self.infer_parameters(function.parameters.as_ref()); self.typevar_binding_context = previous_typevar_binding_context; @@ -5040,9 +5054,15 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ) => { self.infer_legacy_typevar(target, call_expr, definition, typevar_class) } - Some(KnownClass::ParamSpec) => { - self.infer_paramspec(target, call_expr, definition) - } + Some( + paramspec_class @ (KnownClass::ParamSpec + | KnownClass::ExtensionsParamSpec), + ) => self.infer_legacy_paramspec( + target, + call_expr, + definition, + paramspec_class, + ), Some(KnownClass::NewType) => { self.infer_newtype_expression(target, call_expr, definition) } @@ -5087,11 +5107,12 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { target_ty } - fn infer_paramspec( + fn infer_legacy_paramspec( &mut self, target: &ast::Expr, call_expr: &ast::ExprCall, definition: Definition<'db>, + known_class: KnownClass, ) -> Type<'db> { fn error<'db>( context: &InferContext<'db, '_>, @@ -5108,7 +5129,8 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { let db = self.db(); let arguments = &call_expr.arguments; - let assume_all_features = self.in_stub(); + let is_typing_extensions = known_class == KnownClass::ExtensionsParamSpec; + let assume_all_features = self.in_stub() || is_typing_extensions; let python_version = Program::get(db).python_version(db); let have_features_from = |version: PythonVersion| assume_all_features || python_version >= version; @@ -5601,7 +5623,10 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.infer_type_expression(&bound.value); } if let Some(default) = arguments.find_keyword("default") { - if let Some(KnownClass::ParamSpec) = known_class { + if matches!( + known_class, + Some(KnownClass::ParamSpec | KnownClass::ExtensionsParamSpec) + ) { self.infer_paramspec_default(&default.value); } else { self.infer_type_expression(&default.value); @@ -5611,28 +5636,35 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // Infer the deferred base type of a NewType. fn infer_newtype_assignment_deferred(&mut self, arguments: &ast::Arguments) { - match self.infer_type_expression(&arguments.args[1]) { - Type::NominalInstance(_) | Type::NewTypeInstance(_) => {} + let inferred = self.infer_type_expression(&arguments.args[1]); + match inferred { + Type::NominalInstance(_) | Type::NewTypeInstance(_) => return, + // There are exactly two union types allowed as bases for NewType: `int | float` and + // `int | float | complex`. These are allowed because that's what `float` and `complex` + // expand into in type position. We don't currently ask whether the union was implicit + // or explicit, so the explicit version is also allowed. + Type::Union(union_ty) => { + if let Some(KnownUnion::Float | KnownUnion::Complex) = union_ty.known(self.db()) { + return; + } + } // `Unknown` is likely to be the result of an unresolved import or a typo, which will // already get a diagnostic, so don't pile on an extra diagnostic here. 
- Type::Dynamic(DynamicType::Unknown) => {} - other_type => { - if let Some(builder) = self - .context - .report_lint(&INVALID_NEWTYPE, &arguments.args[1]) - { - let mut diag = builder.into_diagnostic("invalid base for `typing.NewType`"); - diag.set_primary_message(format!("type `{}`", other_type.display(self.db()))); - if matches!(other_type, Type::ProtocolInstance(_)) { - diag.info("The base of a `NewType` is not allowed to be a protocol class."); - } else if matches!(other_type, Type::TypedDict(_)) { - diag.info("The base of a `NewType` is not allowed to be a `TypedDict`."); - } else { - diag.info( - "The base of a `NewType` must be a class type or another `NewType`.", - ); - } - } + Type::Dynamic(DynamicType::Unknown) => return, + _ => {} + } + if let Some(builder) = self + .context + .report_lint(&INVALID_NEWTYPE, &arguments.args[1]) + { + let mut diag = builder.into_diagnostic("invalid base for `typing.NewType`"); + diag.set_primary_message(format!("type `{}`", inferred.display(self.db()))); + if matches!(inferred, Type::ProtocolInstance(_)) { + diag.info("The base of a `NewType` is not allowed to be a protocol class."); + } else if matches!(inferred, Type::TypedDict(_)) { + diag.info("The base of a `NewType` is not allowed to be a `TypedDict`."); + } else { + diag.info("The base of a `NewType` must be a class type or another `NewType`."); } } } @@ -5898,7 +5930,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { return; }; builder.into_diagnostic(format_args!( - "Operator `{op}=` is unsupported between objects of type `{}` and `{}`", + "Operator `{op}=` is not supported between objects of type `{}` and `{}`", target_type.display(db), value_type.display(db) )); @@ -8447,7 +8479,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ); } } - Some(KnownClass::ParamSpec) => { + Some(KnownClass::ParamSpec | KnownClass::ExtensionsParamSpec) => { if let Some(builder) = self .context .report_lint(&INVALID_PARAMSPEC, call_expression) @@ -8882,7 +8914,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { // If we're inferring types of deferred expressions, look them up from end-of-scope. 
if self.is_deferred() { let place = if let Some(place_id) = place_table.place_id(expr) { - place_from_bindings(db, use_def.all_reachable_bindings(place_id)).place + place_from_bindings(db, use_def.reachable_bindings(place_id)).place } else { assert!( self.deferred_state.in_string_annotation(), @@ -9667,7 +9699,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { self.context.report_lint(&UNSUPPORTED_OPERATOR, unary) { builder.into_diagnostic(format_args!( - "Unary operator `{op}` is unsupported for type `{}`", + "Unary operator `{op}` is not supported for type `{}`", operand_type.display(self.db()), )); } @@ -9704,7 +9736,7 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { if let Some(builder) = self.context.report_lint(&UNSUPPORTED_OPERATOR, binary) { let mut diag = builder.into_diagnostic(format_args!( - "Operator `{op}` is unsupported between objects of type `{}` and `{}`", + "Operator `{op}` is not supported between objects of type `{}` and `{}`", left_ty.display(db), right_ty.display(db) )); @@ -10832,19 +10864,14 @@ impl<'db, 'ast> TypeInferenceBuilder<'db, 'ast> { ( Type::KnownInstance(KnownInstanceType::ConstraintSet(left)), Type::KnownInstance(KnownInstanceType::ConstraintSet(right)), - ) => { - let result = match op { - ast::CmpOp::Eq => Some( - left.constraints(self.db()).iff(self.db(), right.constraints(self.db())) - ), - ast::CmpOp::NotEq => Some( - left.constraints(self.db()).iff(self.db(), right.constraints(self.db())).negate(self.db()) - ), - _ => None, - }; - result.map(|constraints| Ok(Type::KnownInstance(KnownInstanceType::ConstraintSet( - TrackedConstraintSet::new(self.db(), constraints) - )))) + ) => match op { + ast::CmpOp::Eq => Some(Ok(Type::BooleanLiteral( + left.constraints(self.db()).iff(self.db(), right.constraints(self.db())).is_always_satisfied(self.db()), + ))), + ast::CmpOp::NotEq => Some(Ok(Type::BooleanLiteral( + !left.constraints(self.db()).iff(self.db(), right.constraints(self.db())).is_always_satisfied(self.db()), + ))), + _ => None, } ( diff --git a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs index 690abb901e..c8df4df756 100644 --- a/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs +++ b/crates/ty_python_semantic/src/types/infer/builder/type_expression.rs @@ -155,7 +155,12 @@ impl<'db> TypeInferenceBuilder<'db, '_> { } // anything else is an invalid annotation: op => { - self.infer_binary_expression(binary, TypeContext::default()); + // Avoid inferring the types of invalid binary expressions that have been + // parsed from a string annotation, as they are not present in the semantic + // index. + if !self.deferred_state.in_string_annotation() { + self.infer_binary_expression(binary, TypeContext::default()); + } self.report_invalid_type_expression( expression, format_args!( @@ -1188,7 +1193,11 @@ impl<'db> TypeInferenceBuilder<'db, '_> { ) .in_type_expression(db, self.scope(), None) .unwrap_or_else(|err| err.into_fallback_type(&self.context, subscript, true)); - self.store_expression_type(arguments_slice, ty); + // Only store on the tuple slice; non-tuple cases are handled by + // `infer_subscript_load_impl` via `infer_expression`. 
+ if arguments_slice.is_tuple_expr() { + self.store_expression_type(arguments_slice, ty); + } ty } SpecialFormType::Literal => match self.infer_literal_parameter_type(arguments_slice) { diff --git a/crates/ty_python_semantic/src/types/list_members.rs b/crates/ty_python_semantic/src/types/list_members.rs index eb47745b74..4e4a32c294 100644 --- a/crates/ty_python_semantic/src/types/list_members.rs +++ b/crates/ty_python_semantic/src/types/list_members.rs @@ -25,8 +25,9 @@ use crate::{ }, }; -/// Iterate over all declarations and bindings in the given scope. -pub(crate) fn all_members_of_scope<'db>( +/// Iterate over all declarations and bindings that exist at the end +/// of the given scope. +pub(crate) fn all_end_of_scope_members<'db>( db: &'db dyn Db, scope_id: ScopeId<'db>, ) -> impl Iterator> + 'db { @@ -75,6 +76,60 @@ pub(crate) fn all_members_of_scope<'db>( )) } +/// Iterate over all declarations and bindings that are reachable anywhere +/// in the given scope. +pub(crate) fn all_reachable_members<'db>( + db: &'db dyn Db, + scope_id: ScopeId<'db>, +) -> impl Iterator> + 'db { + let use_def_map = use_def_map(db, scope_id); + let table = place_table(db, scope_id); + + use_def_map + .all_reachable_symbols() + .flat_map(move |(symbol_id, declarations, bindings)| { + let symbol = table.symbol(symbol_id); + + let declaration_place_result = place_from_declarations(db, declarations); + let declaration = + declaration_place_result + .first_declaration + .and_then(|first_reachable_definition| { + let ty = declaration_place_result + .ignore_conflicting_declarations() + .place + .ignore_possibly_undefined()?; + let member = Member { + name: symbol.name().clone(), + ty, + }; + Some(MemberWithDefinition { + member, + first_reachable_definition, + }) + }); + + let place_with_definition = place_from_bindings(db, bindings); + let binding = + place_with_definition + .first_definition + .and_then(|first_reachable_definition| { + let ty = place_with_definition.place.ignore_possibly_undefined()?; + let member = Member { + name: symbol.name().clone(), + ty, + }; + Some(MemberWithDefinition { + member, + first_reachable_definition, + }) + }); + + [declaration, binding] + }) + .flatten() +} + // `__init__`, `__repr__`, `__eq__`, `__ne__` and `__hash__` are always included via `object`, // so we don't need to list them here. const SYNTHETIC_DATACLASS_ATTRIBUTES: &[&str] = &[ @@ -132,7 +187,7 @@ impl<'db> AllMembers<'db> { } Type::NewTypeInstance(newtype) => { - self.extend_with_type(db, Type::instance(db, newtype.base_class_type(db))); + self.extend_with_type(db, newtype.concrete_base_type(db)); } Type::ClassLiteral(class_literal) if class_literal.is_typed_dict(db) => { @@ -359,7 +414,7 @@ impl<'db> AllMembers<'db> { .map(|class| class.class_literal(db).0) { let parent_scope = parent.body_scope(db); - for memberdef in all_members_of_scope(db, parent_scope) { + for memberdef in all_end_of_scope_members(db, parent_scope) { let result = ty.member(db, memberdef.member.name.as_str()); let Some(ty) = result.place.ignore_possibly_undefined() else { continue; @@ -407,7 +462,7 @@ impl<'db> AllMembers<'db> { // class member. This gets us the right type for each // member, e.g., `SomeClass.__delattr__` is not a bound // method, but `instance_of_SomeClass.__delattr__` is. 
- for memberdef in all_members_of_scope(db, class_body_scope) { + for memberdef in all_end_of_scope_members(db, class_body_scope) { let result = ty.member(db, memberdef.member.name.as_str()); let Some(ty) = result.place.ignore_possibly_undefined() else { continue; diff --git a/crates/ty_python_semantic/src/types/newtype.rs b/crates/ty_python_semantic/src/types/newtype.rs index 84a6e18f50..cc6f2cff69 100644 --- a/crates/ty_python_semantic/src/types/newtype.rs +++ b/crates/ty_python_semantic/src/types/newtype.rs @@ -3,7 +3,9 @@ use std::collections::BTreeSet; use crate::Db; use crate::semantic_index::definition::{Definition, DefinitionKind}; use crate::types::constraints::ConstraintSet; -use crate::types::{ClassType, Type, definition_expression_type, visitor}; +use crate::types::{ + ClassType, KnownClass, KnownUnion, Type, UnionType, definition_expression_type, visitor, +}; use ruff_db::parsed::parsed_module; use ruff_python_ast as ast; @@ -80,8 +82,15 @@ impl<'db> NewType<'db> { NewTypeBase::ClassType(nominal_instance_type.class(db)) } Type::NewTypeInstance(newtype) => NewTypeBase::NewType(newtype), - // This branch includes bases that are other typing constructs besides classes and - // other newtypes, for example unions. `NewType("Foo", int | str)` is not allowed. + // There are exactly two union types allowed as bases for NewType: `int | float` and + // `int | float | complex`. These are allowed because that's what `float` and `complex` + // expand into in type position. We don't currently ask whether the union was implicit + // or explicit, so the explicit version is also allowed. + Type::Union(union_type) => match union_type.known(db) { + Some(KnownUnion::Float) => NewTypeBase::Float, + Some(KnownUnion::Complex) => NewTypeBase::Complex, + _ => object_fallback, + }, _ => object_fallback, } } @@ -94,15 +103,16 @@ impl<'db> NewType<'db> { } } - // Walk the `NewTypeBase` chain to find the underlying `ClassType`. There might not be a - // `ClassType` if this `NewType` is cyclical, and we fall back to `object` in that case. - pub fn base_class_type(self, db: &'db dyn Db) -> ClassType<'db> { + // Walk the `NewTypeBase` chain to find the underlying non-newtype `Type`. There might not be + // one if this `NewType` is cyclical, and we fall back to `object` in that case. + pub fn concrete_base_type(self, db: &'db dyn Db) -> Type<'db> { for base in self.iter_bases(db) { - if let NewTypeBase::ClassType(class_type) = base { - return class_type; + match base { + NewTypeBase::NewType(_) => continue, + concrete => return concrete.instance_type(db), } } - ClassType::object(db) + Type::object() } pub(crate) fn is_equivalent_to_impl(self, db: &'db dyn Db, other: Self) -> bool { @@ -179,10 +189,14 @@ impl<'db> NewType<'db> { Some(mapped_base), )); } + // Mapping base class types is used for normalization and applying type mappings, + // neither of which have any effect on `float` or `complex` (which are already + // fully normalized and non-generic), so we don't need to bother calling `f`. + NewTypeBase::Float | NewTypeBase::Complex => {} } } - // If we get here, there is no `ClassType` (because this newtype is cyclic), and we don't - // call `f` at all. + // If we get here, there is no `ClassType` (because this newtype is either float/complex or + // cyclic), and we don't call `f` at all. 
Some(self) } @@ -209,6 +223,12 @@ pub(crate) fn walk_newtype_instance_type<'db, V: visitor::TypeVisitor<'db> + ?Si pub enum NewTypeBase<'db> { ClassType(ClassType<'db>), NewType(NewType<'db>), + // `float` and `complex` are special-cased in type position, where they refer to `int | float` + // and `int | float | complex` respectively. As an extension of that special case, we allow + // them in `NewType` bases, even though unions and other typing constructs normally aren't + // allowed. + Float, + Complex, } impl<'db> NewTypeBase<'db> { @@ -216,6 +236,21 @@ impl<'db> NewTypeBase<'db> { match self { NewTypeBase::ClassType(class_type) => Type::instance(db, class_type), NewTypeBase::NewType(newtype) => Type::NewTypeInstance(newtype), + NewTypeBase::Float => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + ], + ), + NewTypeBase::Complex => UnionType::from_elements( + db, + [ + KnownClass::Int.to_instance(db), + KnownClass::Float.to_instance(db), + KnownClass::Complex.to_instance(db), + ], + ), } } } @@ -246,10 +281,6 @@ impl<'db> Iterator for NewTypeBaseIter<'db> { fn next(&mut self) -> Option { let current = self.current?; match current.base(self.db) { - NewTypeBase::ClassType(base_class_type) => { - self.current = None; - Some(NewTypeBase::ClassType(base_class_type)) - } NewTypeBase::NewType(base_newtype) => { // Doing the insertion only in this branch avoids allocating in the common case. self.seen_before.insert(current); @@ -262,6 +293,10 @@ impl<'db> Iterator for NewTypeBaseIter<'db> { Some(NewTypeBase::NewType(base_newtype)) } } + concrete_base => { + self.current = None; + Some(concrete_base) + } } } } diff --git a/crates/ty_python_semantic/src/types/overrides.rs b/crates/ty_python_semantic/src/types/overrides.rs index 5d6328a606..c56581358c 100644 --- a/crates/ty_python_semantic/src/types/overrides.rs +++ b/crates/ty_python_semantic/src/types/overrides.rs @@ -25,7 +25,7 @@ use crate::{ report_overridden_final_method, }, function::{FunctionDecorators, FunctionType, KnownFunction}, - list_members::{Member, MemberWithDefinition, all_members_of_scope}, + list_members::{Member, MemberWithDefinition, all_end_of_scope_members}, }, }; @@ -54,7 +54,7 @@ pub(super) fn check_class<'db>(context: &InferContext<'db, '_>, class: ClassLite let class_specialized = class.identity_specialization(db); let scope = class.body_scope(db); - let own_class_members: FxHashSet<_> = all_members_of_scope(db, scope).collect(); + let own_class_members: FxHashSet<_> = all_end_of_scope_members(db, scope).collect(); for member in own_class_members { check_class_declaration(context, configuration, class_specialized, scope, &member); @@ -129,7 +129,7 @@ fn check_class_declaration<'db>( && PROHIBITED_NAMEDTUPLE_ATTRS.contains(&member.name.as_str()) && let Some(symbol_id) = place_table(db, class_scope).symbol_id(&member.name) && let Some(bad_definition) = use_def_map(db, class_scope) - .all_reachable_bindings(ScopedPlaceId::Symbol(symbol_id)) + .reachable_bindings(ScopedPlaceId::Symbol(symbol_id)) .filter_map(|binding| binding.binding.definition()) .find(|def| !matches!(def.kind(db), DefinitionKind::AnnotatedAssignment(_))) && let Some(builder) = context.report_lint( diff --git a/crates/ty_python_semantic/src/types/signatures.rs b/crates/ty_python_semantic/src/types/signatures.rs index ffc224c8d5..45c3f81de2 100644 --- a/crates/ty_python_semantic/src/types/signatures.rs +++ b/crates/ty_python_semantic/src/types/signatures.rs @@ -13,23 +13,15 @@ use 
std::{collections::HashMap, slice::Iter}; use itertools::{EitherOrBoth, Itertools}; -use ruff_db::parsed::parsed_module; -use ruff_python_ast::ParameterWithDefault; use smallvec::{SmallVec, smallvec_inline}; -use super::{ - DynamicType, Type, TypeVarVariance, definition_expression_type, infer_definition_types, - semantic_index, -}; -use crate::semantic_index::definition::{Definition, DefinitionKind}; +use super::{DynamicType, Type, TypeVarVariance, definition_expression_type, semantic_index}; +use crate::semantic_index::definition::Definition; use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; -use crate::types::function::{is_implicit_classmethod, is_implicit_staticmethod}; -use crate::types::generics::{ - GenericContext, InferableTypeVars, typing_self, walk_generic_context, -}; -use crate::types::infer::nearest_enclosing_class; +use crate::types::generics::{GenericContext, InferableTypeVars, walk_generic_context}; +use crate::types::infer::{infer_deferred_types, infer_scope_types}; use crate::types::{ - ApplyTypeMappingVisitor, BindingContext, BoundTypeVarInstance, CallableTypeKind, ClassLiteral, + ApplyTypeMappingVisitor, BindingContext, BoundTypeVarInstance, CallableTypeKind, FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, KnownClass, MaterializationKind, NormalizedVisitor, ParamSpecAttrKind, TypeContext, TypeMapping, TypeRelation, VarianceInferable, todo_type, @@ -37,83 +29,30 @@ use crate::types::{ use crate::{Db, FxOrderSet}; use ruff_python_ast::{self as ast, name::Name}; -#[derive(Clone, Copy, Debug)] -#[expect(clippy::struct_excessive_bools)] -struct MethodInformation<'db> { - is_staticmethod: bool, - is_classmethod: bool, - method_may_be_generic: bool, - class_literal: ClassLiteral<'db>, - class_is_generic: bool, -} - -fn infer_method_information<'db>( +/// Infer the type of a parameter or return annotation in a function signature. +/// +/// This is very similar to [`definition_expression_type`], but knows that `TypeInferenceBuilder` +/// will always infer the parameters and return of a function in its PEP-695 typevar scope, if +/// there is one; otherwise they will be inferred in the function definition scope, but will always +/// be deferred. (This prevents spurious salsa cycles when we need the signature of the function +/// while in the middle of inferring its definition scope — for instance, when applying +/// decorators.) 
+fn function_signature_expression_type<'db>( db: &'db dyn Db, definition: Definition<'db>, -) -> Option> { - let DefinitionKind::Function(function_definition) = definition.kind(db) else { - return None; - }; - - let class_scope_id = definition.scope(db); - let file = class_scope_id.file(db); - let module = parsed_module(db, file).load(db); + expression: &ast::Expr, +) -> Type<'db> { + let file = definition.file(db); let index = semantic_index(db, file); - - let class_scope = index.scope(class_scope_id.file_scope_id(db)); - let class_node = class_scope.node().as_class()?; - - let function_node = function_definition.node(&module); - let function_name = &function_node.name; - - let mut is_staticmethod = is_implicit_classmethod(function_name); - let mut is_classmethod = is_implicit_staticmethod(function_name); - - let inference = infer_definition_types(db, definition); - for decorator in &function_node.decorator_list { - let decorator_ty = inference.expression_type(&decorator.expression); - - match decorator_ty - .as_class_literal() - .and_then(|class| class.known(db)) - { - Some(KnownClass::Staticmethod) => { - is_staticmethod = true; - } - Some(KnownClass::Classmethod) => { - is_classmethod = true; - } - _ => {} - } + let file_scope = index.expression_scope_id(expression); + let scope = file_scope.to_scope_id(db, file); + if scope == definition.scope(db) { + // expression is in the function definition scope, but always deferred + infer_deferred_types(db, definition).expression_type(expression) + } else { + // expression is in the PEP-695 type params sub-scope + infer_scope_types(db, scope).expression_type(expression) } - - let method_may_be_generic = match inference.declaration_type(definition).inner_type() { - Type::FunctionLiteral(f) => f.signature(db).overloads.iter().any(|s| { - s.generic_context - .is_some_and(|context| context.variables(db).any(|v| v.typevar(db).is_self(db))) - }), - _ => true, - }; - - let class_def = index.expect_single_definition(class_node); - let (class_literal, class_is_generic) = match infer_definition_types(db, class_def) - .declaration_type(class_def) - .inner_type() - { - Type::ClassLiteral(class_literal) => { - (class_literal, class_literal.generic_context(db).is_some()) - } - Type::GenericAlias(alias) => (alias.origin(db), true), - _ => return None, - }; - - Some(MethodInformation { - is_staticmethod, - is_classmethod, - method_may_be_generic, - class_literal, - class_is_generic, - }) } /// The signature of a single callable. If the callable is overloaded, there is a separate @@ -615,7 +554,7 @@ impl<'db> Signature<'db> { let return_ty = function_node .returns .as_ref() - .map(|returns| definition_expression_type(db, definition, returns.as_ref())); + .map(|returns| function_signature_expression_type(db, definition, returns.as_ref())); let legacy_generic_context = GenericContext::from_function_params(db, definition, ¶meters, return_ty); let full_generic_context = GenericContext::merge_pep695_and_legacy( @@ -771,6 +710,57 @@ impl<'db> Signature<'db> { &self.parameters } + /// Adds an implicit annotation to the first parameter of this signature, if that parameter is + /// positional and does not already have an annotation. We do not check whether that's the + /// right thing to do! The caller must determine whether the first parameter is actually a + /// `self` or `cls` parameter, and must determine the correct type to use as the implicit + /// annotation. 
+ pub(crate) fn add_implicit_self_annotation( + &mut self, + db: &'db dyn Db, + self_type: impl FnOnce() -> Option>, + ) { + if let Some(first_parameter) = self.parameters.value.first_mut() + && first_parameter.is_positional() + && first_parameter.annotated_type.is_none() + && let Some(self_type) = self_type() + { + first_parameter.annotated_type = Some(self_type); + first_parameter.inferred_annotation = true; + + // If we've added an implicit `self` annotation, we might need to update the + // signature's generic context, too. (The generic context should include any synthetic + // typevars created for `typing.Self`, even if the `typing.Self` annotation was added + // implicitly.) + let self_typevar = match self_type { + Type::TypeVar(self_typevar) => Some(self_typevar), + Type::SubclassOf(subclass_of) => subclass_of.into_type_var(), + _ => None, + }; + + if let Some(self_typevar) = self_typevar { + match self.generic_context.as_mut() { + Some(generic_context) + if generic_context + .binds_typevar(db, self_typevar.typevar(db)) + .is_some() => {} + Some(generic_context) => { + *generic_context = GenericContext::from_typevar_instances( + db, + std::iter::once(self_typevar).chain(generic_context.variables(db)), + ); + } + None => { + self.generic_context = Some(GenericContext::from_typevar_instances( + db, + std::iter::once(self_typevar), + )); + } + } + } + } + } + /// Return the definition associated with this signature, if any. pub(crate) fn definition(&self) -> Option> { self.definition @@ -1674,69 +1664,16 @@ impl<'db> Parameters<'db> { }) }; - let method_info = infer_method_information(db, definition); - let is_static_or_classmethod = - method_info.is_some_and(|f| f.is_staticmethod || f.is_classmethod); - - let inferred_annotation = |arg: &ParameterWithDefault| { - if let Some(MethodInformation { - method_may_be_generic, - class_literal, - class_is_generic, - .. - }) = method_info - && !is_static_or_classmethod - && arg.parameter.annotation().is_none() - && parameters.index(arg.name().id()) == Some(0) - { - if method_may_be_generic - || class_is_generic - || class_literal - .known(db) - .is_some_and(KnownClass::is_fallback_class) - { - let scope_id = definition.scope(db); - let typevar_binding_context = Some(definition); - let index = semantic_index(db, scope_id.file(db)); - let class = nearest_enclosing_class(db, index, scope_id).unwrap(); - - Some( - typing_self(db, scope_id, typevar_binding_context, class) - .expect("We should always find the surrounding class for an implicit self: Self annotation"), - ) - } else { - // For methods of non-generic classes that are not otherwise generic (e.g. return `Self` or - // have additional type parameters), the implicit `Self` type of the `self` parameter would - // be the only type variable, so we can just use the class directly. 
- Some(class_literal.to_non_generic_instance(db)) - } - } else { - None - } - }; - let pos_only_param = |param: &ast::ParameterWithDefault| { - if let Some(inferred_annotation_type) = inferred_annotation(param) { - Parameter { - annotated_type: Some(inferred_annotation_type), - inferred_annotation: true, - kind: ParameterKind::PositionalOnly { - name: Some(param.parameter.name.id.clone()), - default_type: default_type(param), - }, - form: ParameterForm::Value, - } - } else { - Parameter::from_node_and_kind( - db, - definition, - ¶m.parameter, - ParameterKind::PositionalOnly { - name: Some(param.parameter.name.id.clone()), - default_type: default_type(param), - }, - ) - } + Parameter::from_node_and_kind( + db, + definition, + ¶m.parameter, + ParameterKind::PositionalOnly { + name: Some(param.parameter.name.id.clone()), + default_type: default_type(param), + }, + ) }; let mut positional_only: Vec = posonlyargs.iter().map(pos_only_param).collect(); @@ -1760,27 +1697,15 @@ impl<'db> Parameters<'db> { } let positional_or_keyword = pos_or_keyword_iter.map(|arg| { - if let Some(inferred_annotation_type) = inferred_annotation(arg) { - Parameter { - annotated_type: Some(inferred_annotation_type), - inferred_annotation: true, - kind: ParameterKind::PositionalOrKeyword { - name: arg.parameter.name.id.clone(), - default_type: default_type(arg), - }, - form: ParameterForm::Value, - } - } else { - Parameter::from_node_and_kind( - db, - definition, - &arg.parameter, - ParameterKind::PositionalOrKeyword { - name: arg.parameter.name.id.clone(), - default_type: default_type(arg), - }, - ) - } + Parameter::from_node_and_kind( + db, + definition, + &arg.parameter, + ParameterKind::PositionalOrKeyword { + name: arg.parameter.name.id.clone(), + default_type: default_type(arg), + }, + ) }); let variadic = vararg.as_ref().map(|arg| { @@ -2197,7 +2122,7 @@ impl<'db> Parameter<'db> { Self { annotated_type: parameter .annotation() - .map(|annotation| definition_expression_type(db, definition, annotation)), + .map(|annotation| function_signature_expression_type(db, definition, annotation)), kind, form: ParameterForm::Value, inferred_annotation: false, @@ -2292,7 +2217,7 @@ impl<'db> Parameter<'db> { } #[derive(Clone, Debug, PartialEq, Eq, Hash, salsa::Update, get_size2::GetSize)] -pub(crate) enum ParameterKind<'db> { +pub enum ParameterKind<'db> { /// Positional-only parameter, e.g. `def f(x, /): ...` PositionalOnly { /// Parameter name. 
diff --git a/crates/ty_python_semantic/src/types/subclass_of.rs b/crates/ty_python_semantic/src/types/subclass_of.rs index 898a82e086..c6bb9d0378 100644 --- a/crates/ty_python_semantic/src/types/subclass_of.rs +++ b/crates/ty_python_semantic/src/types/subclass_of.rs @@ -8,7 +8,7 @@ use crate::types::{ ApplyTypeMappingVisitor, BoundTypeVarInstance, ClassType, DynamicType, FindLegacyTypeVarsVisitor, HasRelationToVisitor, IsDisjointVisitor, KnownClass, MaterializationKind, MemberLookupPolicy, NormalizedVisitor, SpecialFormType, Type, TypeContext, - TypeMapping, TypeRelation, TypeVarBoundOrConstraints, todo_type, + TypeMapping, TypeRelation, TypeVarBoundOrConstraints, TypedDictType, todo_type, }; use crate::{Db, FxOrderSet}; @@ -381,7 +381,12 @@ impl<'db> SubclassOfInner<'db> { pub(crate) fn try_from_instance(db: &'db dyn Db, ty: Type<'db>) -> Option { Some(match ty { Type::NominalInstance(instance) => SubclassOfInner::Class(instance.class(db)), - Type::TypedDict(typed_dict) => SubclassOfInner::Class(typed_dict.defining_class()), + Type::TypedDict(typed_dict) => match typed_dict { + TypedDictType::Class(class) => SubclassOfInner::Class(class), + TypedDictType::Synthesized(_) => SubclassOfInner::Dynamic( + todo_type!("type[T] for synthesized TypedDicts").expect_dynamic(), + ), + }, Type::TypeVar(bound_typevar) => SubclassOfInner::TypeVar(bound_typevar), Type::Dynamic(DynamicType::Any) => SubclassOfInner::Dynamic(DynamicType::Any), Type::Dynamic(DynamicType::Unknown) => SubclassOfInner::Dynamic(DynamicType::Unknown), diff --git a/crates/ty_python_semantic/src/types/tuple.rs b/crates/ty_python_semantic/src/types/tuple.rs index 9b046cd4cf..f0cc3bedc8 100644 --- a/crates/ty_python_semantic/src/types/tuple.rs +++ b/crates/ty_python_semantic/src/types/tuple.rs @@ -288,6 +288,23 @@ impl<'db> TupleType<'db> { ) } + pub(crate) fn is_disjoint_from_impl( + self, + db: &'db dyn Db, + other: Self, + inferable: InferableTypeVars<'_, 'db>, + disjointness_visitor: &IsDisjointVisitor<'db>, + relation_visitor: &HasRelationToVisitor<'db>, + ) -> ConstraintSet<'db> { + self.tuple(db).is_disjoint_from_impl( + db, + other.tuple(db), + inferable, + disjointness_visitor, + relation_visitor, + ) + } + pub(crate) fn is_equivalent_to_impl( self, db: &'db dyn Db, diff --git a/crates/ty_python_semantic/src/types/typed_dict.rs b/crates/ty_python_semantic/src/types/typed_dict.rs index 14ee100d2d..dee49a3afe 100644 --- a/crates/ty_python_semantic/src/types/typed_dict.rs +++ b/crates/ty_python_semantic/src/types/typed_dict.rs @@ -1,3 +1,6 @@ +use std::collections::BTreeMap; +use std::ops::{Deref, DerefMut}; + use bitflags::bitflags; use ruff_db::diagnostic::{Annotation, Diagnostic, Span, SubDiagnostic, SubDiagnosticSeverity}; use ruff_db::parsed::parsed_module; @@ -12,10 +15,15 @@ use super::diagnostic::{ report_missing_typed_dict_key, }; use super::{ApplyTypeMappingVisitor, Type, TypeMapping, visitor}; -use crate::types::constraints::ConstraintSet; +use crate::Db; +use crate::semantic_index::definition::Definition; +use crate::types::class::FieldKind; +use crate::types::constraints::{ConstraintSet, IteratorConstraintsExtension}; use crate::types::generics::InferableTypeVars; -use crate::types::{HasRelationToVisitor, IsDisjointVisitor, TypeContext, TypeRelation}; -use crate::{Db, FxIndexMap}; +use crate::types::{ + HasRelationToVisitor, IsDisjointVisitor, IsEquivalentVisitor, NormalizedVisitor, TypeContext, + TypeRelation, +}; use ordermap::OrderSet; @@ -41,24 +49,60 @@ impl Default for TypedDictParams { /// Type that 
represents the set of all inhabitants (`dict` instances) that conform to /// a given `TypedDict` schema. #[derive(Debug, Copy, Clone, PartialEq, Eq, salsa::Update, Hash, get_size2::GetSize)] -pub struct TypedDictType<'db> { +pub enum TypedDictType<'db> { /// A reference to the class (inheriting from `typing.TypedDict`) that specifies the /// schema of this `TypedDict`. - defining_class: ClassType<'db>, + Class(ClassType<'db>), + /// A `TypedDict` that doesn't correspond to a class definition, either because it's been + /// `normalized`, or because it's been synthesized to represent constraints. + Synthesized(SynthesizedTypedDictType<'db>), } impl<'db> TypedDictType<'db> { pub(crate) fn new(defining_class: ClassType<'db>) -> Self { - Self { defining_class } + Self::Class(defining_class) } - pub(crate) fn defining_class(self) -> ClassType<'db> { - self.defining_class + pub(crate) fn defining_class(self) -> Option> { + match self { + Self::Class(defining_class) => Some(defining_class), + Self::Synthesized(_) => None, + } } - pub(crate) fn items(self, db: &'db dyn Db) -> &'db FxIndexMap> { - let (class_literal, specialization) = self.defining_class.class_literal(db); - class_literal.fields(db, specialization, CodeGeneratorKind::TypedDict) + pub(crate) fn items(self, db: &'db dyn Db) -> &'db TypedDictSchema<'db> { + #[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)] + fn class_based_items<'db>(db: &'db dyn Db, class: ClassType<'db>) -> TypedDictSchema<'db> { + let (class_literal, specialization) = class.class_literal(db); + class_literal + .fields(db, specialization, CodeGeneratorKind::TypedDict) + .into_iter() + .map(|(name, field)| { + let field = match field { + Field { + first_declaration, + declared_ty, + kind: + FieldKind::TypedDict { + is_required, + is_read_only, + }, + } => TypedDictFieldBuilder::new(*declared_ty) + .required(*is_required) + .read_only(*is_read_only) + .first_declaration(*first_declaration) + .build(), + _ => unreachable!("TypedDict field expected"), + }; + (name.clone(), field) + }) + .collect() + } + + match self { + Self::Class(defining_class) => class_based_items(db, defining_class), + Self::Synthesized(synthesized) => synthesized.items(db), + } } pub(crate) fn apply_type_mapping_impl<'a>( @@ -69,12 +113,12 @@ impl<'db> TypedDictType<'db> { visitor: &ApplyTypeMappingVisitor<'db>, ) -> Self { // TODO: Materialization of gradual TypedDicts needs more logic - Self { - defining_class: self.defining_class.apply_type_mapping_impl( - db, - type_mapping, - tcx, - visitor, + match self { + Self::Class(defining_class) => { + Self::Class(defining_class.apply_type_mapping_impl(db, type_mapping, tcx, visitor)) + } + Self::Synthesized(synthesized) => Self::Synthesized( + synthesized.apply_type_mapping_impl(db, type_mapping, tcx, visitor), ), } } @@ -93,9 +137,9 @@ impl<'db> TypedDictType<'db> { // First do a quick nominal check that (if it succeeds) means that we can avoid // materializing the full `TypedDict` schema for either `self` or `target`. // This should be cheaper in many cases, and also helps us avoid some cycles. 
- if self - .defining_class - .is_subclass_of(db, target.defining_class) + if let Some(defining_class) = self.defining_class() + && let Some(target_defining_class) = target.defining_class() + && defining_class.is_subclass_of(db, target_defining_class) { return ConstraintSet::from(true); } @@ -246,6 +290,57 @@ impl<'db> TypedDictType<'db> { } constraints } + + pub fn definition(self, db: &'db dyn Db) -> Option> { + match self { + TypedDictType::Class(defining_class) => Some(defining_class.definition(db)), + TypedDictType::Synthesized(_) => None, + } + } + + pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self { + match self { + TypedDictType::Class(_) => { + let synthesized = SynthesizedTypedDictType::new(db, self.items(db)); + TypedDictType::Synthesized(synthesized.normalized_impl(db, visitor)) + } + TypedDictType::Synthesized(synthesized) => { + TypedDictType::Synthesized(synthesized.normalized_impl(db, visitor)) + } + } + } + + pub(crate) fn is_equivalent_to_impl( + self, + db: &'db dyn Db, + other: TypedDictType<'db>, + inferable: InferableTypeVars<'_, 'db>, + visitor: &IsEquivalentVisitor<'db>, + ) -> ConstraintSet<'db> { + // TODO: `closed` and `extra_items` support will go here. Until then we don't look at the + // params at all, because `total` is already incorporated into `FieldKind`. + + // Since both sides' fields are pre-sorted into `BTreeMap`s, we can iterate over them in + // sorted order instead of paying for a lookup for each field, as long as their lengths are + // the same. + if self.items(db).len() != other.items(db).len() { + return ConstraintSet::from(false); + } + self.items(db).iter().zip(other.items(db)).when_all( + db, + |((name, field), (other_name, other_field))| { + if name != other_name || field.flags != other_field.flags { + return ConstraintSet::from(false); + } + field.declared_ty.is_equivalent_to_impl( + db, + other_field.declared_ty, + inferable, + visitor, + ) + }, + ) + } } pub(crate) fn walk_typed_dict_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( @@ -253,7 +348,16 @@ pub(crate) fn walk_typed_dict_type<'db, V: visitor::TypeVisitor<'db> + ?Sized>( typed_dict: TypedDictType<'db>, visitor: &V, ) { - visitor.visit_type(db, typed_dict.defining_class.into()); + match typed_dict { + TypedDictType::Class(defining_class) => { + visitor.visit_type(db, defining_class.into()); + } + TypedDictType::Synthesized(synthesized) => { + for field in synthesized.items(db).values() { + visitor.visit_type(db, field.declared_ty); + } + } + } } pub(super) fn typed_dict_params_from_class_def(class_stmt: &StmtClassDef) -> TypedDictParams { @@ -631,3 +735,173 @@ pub(super) fn validate_typed_dict_dict_literal<'db>( Err(provided_keys) } } + +#[salsa::interned(debug)] +pub struct SynthesizedTypedDictType<'db> { + #[returns(ref)] + pub(crate) items: TypedDictSchema<'db>, +} + +// The Salsa heap is tracked separately. 
+impl get_size2::GetSize for SynthesizedTypedDictType<'_> {}
+
+impl<'db> SynthesizedTypedDictType<'db> {
+    pub(super) fn apply_type_mapping_impl<'a>(
+        self,
+        db: &'db dyn Db,
+        type_mapping: &TypeMapping<'a, 'db>,
+        tcx: TypeContext<'db>,
+        visitor: &ApplyTypeMappingVisitor<'db>,
+    ) -> Self {
+        let items = self
+            .items(db)
+            .iter()
+            .map(|(name, field)| {
+                let field = field
+                    .clone()
+                    .apply_type_mapping_impl(db, type_mapping, tcx, visitor);
+
+                (name.clone(), field)
+            })
+            .collect::<TypedDictSchema<'db>>();
+
+        SynthesizedTypedDictType::new(db, items)
+    }
+
+    pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
+        let items = self
+            .items(db)
+            .iter()
+            .map(|(name, field)| {
+                let field = field.clone().normalized_impl(db, visitor);
+                (name.clone(), field)
+            })
+            .collect::<TypedDictSchema<'db>>();
+        Self::new(db, items)
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default, get_size2::GetSize, salsa::Update)]
+pub struct TypedDictSchema<'db>(BTreeMap<Name, TypedDictField<'db>>);
+
+impl<'db> Deref for TypedDictSchema<'db> {
+    type Target = BTreeMap<Name, TypedDictField<'db>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl DerefMut for TypedDictSchema<'_> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl<'a> IntoIterator for &'a TypedDictSchema<'_> {
+    type Item = (&'a Name, &'a TypedDictField<'a>);
+    type IntoIter = std::collections::btree_map::Iter<'a, Name, TypedDictField<'a>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.iter()
+    }
+}
+
+impl<'db> FromIterator<(Name, TypedDictField<'db>)> for TypedDictSchema<'db> {
+    fn from_iter<T: IntoIterator<Item = (Name, TypedDictField<'db>)>>(iter: T) -> Self {
+        Self(iter.into_iter().collect())
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, get_size2::GetSize, salsa::Update)]
+pub struct TypedDictField<'db> {
+    pub(super) declared_ty: Type<'db>,
+    flags: TypedDictFieldFlags,
+    first_declaration: Option<Definition<'db>>,
+}
+
+impl<'db> TypedDictField<'db> {
+    pub(crate) const fn is_required(&self) -> bool {
+        self.flags.contains(TypedDictFieldFlags::REQUIRED)
+    }
+
+    pub(crate) const fn is_read_only(&self) -> bool {
+        self.flags.contains(TypedDictFieldFlags::READ_ONLY)
+    }
+
+    pub(crate) fn apply_type_mapping_impl<'a>(
+        self,
+        db: &'db dyn Db,
+        type_mapping: &TypeMapping<'a, 'db>,
+        tcx: TypeContext<'db>,
+        visitor: &ApplyTypeMappingVisitor<'db>,
+    ) -> Self {
+        Self {
+            declared_ty: self
+                .declared_ty
+                .apply_type_mapping_impl(db, type_mapping, tcx, visitor),
+            flags: self.flags,
+            first_declaration: self.first_declaration,
+        }
+    }
+
+    pub(crate) fn normalized_impl(self, db: &'db dyn Db, visitor: &NormalizedVisitor<'db>) -> Self {
+        Self {
+            declared_ty: self.declared_ty.normalized_impl(db, visitor),
+            flags: self.flags,
+            // A normalized typed-dict field does not hold onto the original declaration,
+            // since a normalized typed-dict is an abstract type where equality does not depend
+            // on the source-code definition.
+ first_declaration: None, + } + } +} + +pub(super) struct TypedDictFieldBuilder<'db> { + declared_ty: Type<'db>, + flags: TypedDictFieldFlags, + first_declaration: Option>, +} + +impl<'db> TypedDictFieldBuilder<'db> { + pub(crate) fn new(declared_ty: Type<'db>) -> Self { + Self { + declared_ty, + flags: TypedDictFieldFlags::empty(), + first_declaration: None, + } + } + + pub(crate) fn required(mut self, yes: bool) -> Self { + self.flags.set(TypedDictFieldFlags::REQUIRED, yes); + self + } + + pub(crate) fn read_only(mut self, yes: bool) -> Self { + self.flags.set(TypedDictFieldFlags::READ_ONLY, yes); + self + } + + pub(crate) fn first_declaration(mut self, definition: Option>) -> Self { + self.first_declaration = definition; + self + } + + pub(crate) fn build(self) -> TypedDictField<'db> { + TypedDictField { + declared_ty: self.declared_ty, + flags: self.flags, + first_declaration: self.first_declaration, + } + } +} + +bitflags! { + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, salsa::Update)] + struct TypedDictFieldFlags: u8 { + const REQUIRED = 1 << 0; + const READ_ONLY = 1 << 1; + } +} + +impl get_size2::GetSize for TypedDictFieldFlags {} diff --git a/crates/ty_server/src/capabilities.rs b/crates/ty_server/src/capabilities.rs index 23daa43dee..60b5298280 100644 --- a/crates/ty_server/src/capabilities.rs +++ b/crates/ty_server/src/capabilities.rs @@ -36,6 +36,7 @@ bitflags::bitflags! { const WORKSPACE_CONFIGURATION = 1 << 15; const RENAME_DYNAMIC_REGISTRATION = 1 << 16; const COMPLETION_ITEM_LABEL_DETAILS_SUPPORT = 1 << 17; + const DIAGNOSTIC_RELATED_INFORMATION = 1 << 18; } } @@ -163,6 +164,11 @@ impl ResolvedClientCapabilities { self.contains(Self::DIAGNOSTIC_DYNAMIC_REGISTRATION) } + /// Returns `true` if the client has related information support for diagnostics. + pub(crate) const fn supports_diagnostic_related_information(self) -> bool { + self.contains(Self::DIAGNOSTIC_RELATED_INFORMATION) + } + /// Returns `true` if the client supports dynamic registration for rename capabilities. 
pub(crate) const fn supports_rename_dynamic_registration(self) -> bool { self.contains(Self::RENAME_DYNAMIC_REGISTRATION) @@ -211,15 +217,22 @@ impl ResolvedClientCapabilities { } } - if text_document.is_some_and(|text_document| text_document.diagnostic.is_some()) { + if let Some(diagnostic) = + text_document.and_then(|text_document| text_document.diagnostic.as_ref()) + { flags |= Self::PULL_DIAGNOSTICS; + + if diagnostic.dynamic_registration == Some(true) { + flags |= Self::DIAGNOSTIC_DYNAMIC_REGISTRATION; + } } - if text_document - .and_then(|text_document| text_document.diagnostic.as_ref()?.dynamic_registration) - .unwrap_or_default() + if let Some(publish_diagnostics) = + text_document.and_then(|text_document| text_document.publish_diagnostics.as_ref()) { - flags |= Self::DIAGNOSTIC_DYNAMIC_REGISTRATION; + if publish_diagnostics.related_information == Some(true) { + flags |= Self::DIAGNOSTIC_RELATED_INFORMATION; + } } if text_document diff --git a/crates/ty_server/src/server.rs b/crates/ty_server/src/server.rs index 321a74857e..402800d325 100644 --- a/crates/ty_server/src/server.rs +++ b/crates/ty_server/src/server.rs @@ -3,7 +3,7 @@ use self::schedule::spawn_main_loop; use crate::PositionEncoding; use crate::capabilities::{ResolvedClientCapabilities, server_capabilities}; -use crate::session::{InitializationOptions, Session}; +use crate::session::{InitializationOptions, Session, warn_about_unknown_options}; use anyhow::Context; use lsp_server::Connection; use lsp_types::{ClientCapabilities, InitializeParams, MessageType, Url}; @@ -96,29 +96,7 @@ impl Server { let unknown_options = &initialization_options.options.unknown; if !unknown_options.is_empty() { - // HACK: Old versions of the ty VS Code extension used a custom schema for settings - // which was changed in version 2025.35.0. This is to ensure that users don't receive - // unnecessary warnings when using an older version of the extension. This should be - // removed after a few releases. - if !unknown_options.contains_key("settings") - || !unknown_options.contains_key("globalSettings") - { - tracing::warn!( - "Received unknown options during initialization: {}", - serde_json::to_string_pretty(&unknown_options) - .unwrap_or_else(|_| format!("{unknown_options:?}")) - ); - - client.show_warning_message(format_args!( - "Received unknown options during initialization: '{}'. 
\ - Refer to the logs for more details", - unknown_options - .keys() - .map(String::as_str) - .collect::>() - .join("', '") - )); - } + warn_about_unknown_options(&client, None, unknown_options); } // Get workspace URLs without settings - settings will come from workspace/configuration diff --git a/crates/ty_server/src/server/api/diagnostics.rs b/crates/ty_server/src/server/api/diagnostics.rs index d25e6f5243..60420b4651 100644 --- a/crates/ty_server/src/server/api/diagnostics.rs +++ b/crates/ty_server/src/server/api/diagnostics.rs @@ -16,6 +16,7 @@ use ruff_db::system::SystemPathBuf; use serde::{Deserialize, Serialize}; use ty_project::{Db as _, ProjectDatabase}; +use crate::capabilities::ResolvedClientCapabilities; use crate::document::{FileRangeExt, ToRangeExt}; use crate::session::DocumentHandle; use crate::session::client::Client; @@ -56,7 +57,11 @@ impl Diagnostics { Self::result_id_from_hash(&self.items) } - pub(super) fn to_lsp_diagnostics(&self, db: &ProjectDatabase) -> LspDiagnostics { + pub(super) fn to_lsp_diagnostics( + &self, + db: &ProjectDatabase, + client_capabilities: ResolvedClientCapabilities, + ) -> LspDiagnostics { if let Some(notebook_document) = db.notebook_document(self.file_or_notebook) { let mut cell_diagnostics: FxHashMap> = FxHashMap::default(); @@ -67,7 +72,8 @@ impl Diagnostics { } for diagnostic in &self.items { - let (url, lsp_diagnostic) = to_lsp_diagnostic(db, diagnostic, self.encoding); + let (url, lsp_diagnostic) = + to_lsp_diagnostic(db, diagnostic, self.encoding, client_capabilities); let Some(url) = url else { tracing::warn!("Unable to find notebook cell"); @@ -85,7 +91,9 @@ impl Diagnostics { LspDiagnostics::TextDocument( self.items .iter() - .map(|diagnostic| to_lsp_diagnostic(db, diagnostic, self.encoding).1) + .map(|diagnostic| { + to_lsp_diagnostic(db, diagnostic, self.encoding, client_capabilities).1 + }) .collect(), ) } @@ -181,7 +189,7 @@ pub(super) fn publish_diagnostics(document: &DocumentHandle, session: &Session, }); }; - match diagnostics.to_lsp_diagnostics(db) { + match diagnostics.to_lsp_diagnostics(db, session.client_capabilities()) { LspDiagnostics::TextDocument(diagnostics) => { publish_diagnostics_notification(document.url().clone(), diagnostics); } @@ -212,6 +220,7 @@ pub(crate) fn publish_settings_diagnostics( } let session_encoding = session.position_encoding(); + let client_capabilities = session.client_capabilities(); let state = session.project_state_mut(&AnySystemPath::System(path)); let db = &state.db; let project = db.project(); @@ -253,7 +262,9 @@ pub(crate) fn publish_settings_diagnostics( // Convert diagnostics to LSP format let lsp_diagnostics = file_diagnostics .into_iter() - .map(|diagnostic| to_lsp_diagnostic(db, &diagnostic, session_encoding).1) + .map(|diagnostic| { + to_lsp_diagnostic(db, &diagnostic, session_encoding, client_capabilities).1 + }) .collect::>(); client.send_notification::(PublishDiagnosticsParams { @@ -292,7 +303,11 @@ pub(super) fn to_lsp_diagnostic( db: &dyn Db, diagnostic: &ruff_db::diagnostic::Diagnostic, encoding: PositionEncoding, + client_capabilities: ResolvedClientCapabilities, ) -> (Option, Diagnostic) { + let supports_related_information = + client_capabilities.supports_diagnostic_related_information(); + let location = diagnostic.primary_span().and_then(|span| { let file = span.expect_ty_file(); span.range()? 
@@ -330,30 +345,35 @@ pub(super) fn to_lsp_diagnostic( Some(CodeDescription { href }) }); - let mut related_information = Vec::new(); + let related_information = + if supports_related_information { + let mut related_information = Vec::new(); + related_information.extend(diagnostic.secondary_annotations().filter_map( + |annotation| annotation_to_related_information(db, annotation, encoding), + )); - related_information.extend( - diagnostic - .secondary_annotations() - .filter_map(|annotation| annotation_to_related_information(db, annotation, encoding)), - ); + for sub_diagnostic in diagnostic.sub_diagnostics() { + related_information.extend(sub_diagnostic_to_related_information( + db, + sub_diagnostic, + encoding, + )); - for sub_diagnostic in diagnostic.sub_diagnostics() { - related_information.extend(sub_diagnostic_to_related_information( - db, - sub_diagnostic, - encoding, - )); + related_information.extend( + sub_diagnostic + .annotations() + .iter() + .filter(|annotation| !annotation.is_primary()) + .filter_map(|annotation| { + annotation_to_related_information(db, annotation, encoding) + }), + ); + } - related_information.extend( - sub_diagnostic - .annotations() - .iter() - .filter_map(|annotation| { - annotation_to_related_information(db, annotation, encoding) - }), - ); - } + Some(related_information) + } else { + None + }; let data = DiagnosticData::try_from_diagnostic(db, diagnostic, encoding); @@ -366,8 +386,21 @@ pub(super) fn to_lsp_diagnostic( code: Some(NumberOrString::String(diagnostic.id().to_string())), code_description, source: Some(DIAGNOSTIC_NAME.into()), - message: diagnostic.concise_message().to_string(), - related_information: Some(related_information), + message: if supports_related_information { + // Show both the primary and annotation messages if available, + // because we don't create a related information for the primary message. 
+ if let Some(annotation_message) = diagnostic + .primary_annotation() + .and_then(|annotation| annotation.get_message()) + { + format!("{}: {annotation_message}", diagnostic.primary_message()) + } else { + diagnostic.primary_message().to_string() + } + } else { + diagnostic.concise_message().to_string() + }, + related_information, data: serde_json::to_value(data).ok(), }, ) diff --git a/crates/ty_server/src/server/api/requests/completion.rs b/crates/ty_server/src/server/api/requests/completion.rs index 1055f85b56..540546cf49 100644 --- a/crates/ty_server/src/server/api/requests/completion.rs +++ b/crates/ty_server/src/server/api/requests/completion.rs @@ -8,7 +8,7 @@ use lsp_types::{ }; use ruff_source_file::OneIndexed; use ruff_text_size::Ranged; -use ty_ide::{CompletionKind, CompletionSettings, completion}; +use ty_ide::{CompletionKind, completion}; use ty_project::ProjectDatabase; use crate::document::{PositionExt, ToRangeExt}; @@ -56,10 +56,8 @@ impl BackgroundDocumentRequestHandler for CompletionRequestHandler { ) else { return Ok(None); }; - let settings = CompletionSettings { - auto_import: snapshot.global_settings().is_auto_import_enabled(), - }; - let completions = completion(db, &settings, file, offset); + let settings = snapshot.workspace_settings().completions(); + let completions = completion(db, settings, file, offset); if completions.is_empty() { return Ok(None); } diff --git a/crates/ty_server/src/server/api/requests/diagnostic.rs b/crates/ty_server/src/server/api/requests/diagnostic.rs index ad1b8a4dc0..97f0633c8a 100644 --- a/crates/ty_server/src/server/api/requests/diagnostic.rs +++ b/crates/ty_server/src/server/api/requests/diagnostic.rs @@ -59,7 +59,9 @@ impl BackgroundDocumentRequestHandler for DocumentDiagnosticRequestHandler { result_id: new_id, // SAFETY: Pull diagnostic requests are only called for text documents, not for // notebook documents. - items: diagnostics.to_lsp_diagnostics(db).expect_text_document(), + items: diagnostics + .to_lsp_diagnostics(db, snapshot.resolved_client_capabilities()) + .expect_text_document(), }, }) } diff --git a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs index a9f33880e5..4935735ced 100644 --- a/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs +++ b/crates/ty_server/src/server/api/requests/workspace_diagnostic.rs @@ -19,6 +19,7 @@ use serde::{Deserialize, Serialize}; use ty_project::{ProgressReporter, ProjectDatabase}; use crate::PositionEncoding; +use crate::capabilities::ResolvedClientCapabilities; use crate::document::DocumentKey; use crate::server::api::diagnostics::{Diagnostics, to_lsp_diagnostic}; use crate::server::api::traits::{ @@ -318,6 +319,7 @@ struct ResponseWriter<'a> { mode: ReportingMode, index: &'a Index, position_encoding: PositionEncoding, + client_capabilities: ResolvedClientCapabilities, // It's important that we use `AnySystemPath` over `Url` here because // `file_to_url` isn't guaranteed to return the exact same URL as the one provided // by the client. 
@@ -357,6 +359,7 @@ impl<'a> ResponseWriter<'a> { mode, index, position_encoding, + client_capabilities: snapshot.resolved_client_capabilities(), previous_result_ids, } } @@ -406,7 +409,15 @@ impl<'a> ResponseWriter<'a> { new_id => { let lsp_diagnostics = diagnostics .iter() - .map(|diagnostic| to_lsp_diagnostic(db, diagnostic, self.position_encoding).1) + .map(|diagnostic| { + to_lsp_diagnostic( + db, + diagnostic, + self.position_encoding, + self.client_capabilities, + ) + .1 + }) .collect::>(); WorkspaceDocumentDiagnosticReport::Full(WorkspaceFullDocumentDiagnosticReport { diff --git a/crates/ty_server/src/session.rs b/crates/ty_server/src/session.rs index d97e11ac48..4ebfe7ead0 100644 --- a/crates/ty_server/src/session.rs +++ b/crates/ty_server/src/session.rs @@ -1,6 +1,6 @@ //! Data model, state management, and configuration resolution. -use std::collections::{BTreeMap, HashSet, VecDeque}; +use std::collections::{BTreeMap, HashMap, HashSet, VecDeque}; use std::ops::{Deref, DerefMut}; use std::panic::RefUnwindSafe; use std::sync::Arc; @@ -20,6 +20,7 @@ use lsp_types::{ use ruff_db::Db; use ruff_db::files::{File, system_path_to_file}; use ruff_db::system::{System, SystemPath, SystemPathBuf}; +use ruff_python_ast::PySourceType; use ty_combine::Combine; use ty_project::metadata::Options; use ty_project::watch::{ChangeEvent, CreatedKind}; @@ -467,28 +468,7 @@ impl Session { let unknown_options = &options.unknown; if !unknown_options.is_empty() { - // HACK: This is to ensure that users with an older version of the ty VS Code - // extension don't get warnings about unknown options when they are using a newer - // version of the language server. This should be removed after a few releases. - if !unknown_options.contains_key("importStrategy") - && !unknown_options.contains_key("interpreter") - { - tracing::warn!( - "Received unknown options for workspace `{url}`: {}", - serde_json::to_string_pretty(unknown_options) - .unwrap_or_else(|_| format!("{unknown_options:?}")) - ); - - client.show_warning_message(format!( - "Received unknown options for workspace `{url}`: '{}'. \ - Refer to the logs for more details.", - unknown_options - .keys() - .map(String::as_str) - .collect::>() - .join("', '") - )); - } + warn_about_unknown_options(client, Some(&url), unknown_options); } combined_global_options.combine_with(Some(global)); @@ -1543,7 +1523,8 @@ impl DocumentHandle { pub(crate) fn close(&self, session: &mut Session) -> crate::Result { let is_cell = self.is_cell(); let path = self.notebook_or_file_path(); - session.index_mut().close_document(&self.key())?; + + let removed_document = session.index_mut().close_document(&self.key())?; // Close the text or notebook file in the database but skip this // step for cells because closing a cell doesn't close its notebook. @@ -1556,6 +1537,19 @@ impl DocumentHandle { AnySystemPath::System(system_path) => { if let Some(file) = db.files().try_system(db, system_path) { db.project().close_file(db, file); + + // In case we preferred the language given by the Client + // over the one detected by the file extension, remove the file + // from the project to handle cases where a user changes the language + // of a file (which results in a didClose and didOpen for the same path but with different languages). 
+                    if removed_document.language_id().is_some()
+                        && system_path
+                            .extension()
+                            .and_then(PySourceType::try_from_extension)
+                            .is_none()
+                    {
+                        db.project().remove_file(db, file);
+                    }
                 } else {
                     // This can only fail when the path is a directory or it doesn't exists but the
                     // file should exists for this handler in this branch. This is because every
@@ -1595,3 +1589,29 @@ impl DocumentHandle {
         Ok(requires_clear_diagnostics)
     }
 }
+
+/// Warns about unknown options received by the server.
+///
+/// If `workspace_url` is `Some`, it indicates that the unknown options were received during a
+/// workspace initialization, otherwise they were received during the server initialization.
+pub(super) fn warn_about_unknown_options(
+    client: &Client,
+    workspace_url: Option<&Url>,
+    unknown_options: &HashMap<String, serde_json::Value>,
+) {
+    let message = if let Some(workspace_url) = workspace_url {
+        format!(
+            "Received unknown options for workspace `{workspace_url}`: {}",
+            serde_json::to_string_pretty(unknown_options)
+                .unwrap_or_else(|_| format!("{unknown_options:?}"))
+        )
+    } else {
+        format!(
+            "Received unknown options during initialization: {}",
+            serde_json::to_string_pretty(unknown_options)
+                .unwrap_or_else(|_| format!("{unknown_options:?}"))
+        )
+    };
+    tracing::warn!("{message}");
+    client.show_warning_message(message);
+}
diff --git a/crates/ty_server/src/session/index.rs b/crates/ty_server/src/session/index.rs
index 6a34fb4ea2..95237212cf 100644
--- a/crates/ty_server/src/session/index.rs
+++ b/crates/ty_server/src/session/index.rs
@@ -1,7 +1,7 @@
 use rustc_hash::FxHashMap;
 use std::sync::Arc;
 
-use crate::document::DocumentKey;
+use crate::document::{DocumentKey, LanguageId};
 use crate::session::DocumentHandle;
 use crate::{
     PositionEncoding, TextDocument,
@@ -187,12 +187,12 @@ impl Index {
         handle
     }
 
-    pub(super) fn close_document(&mut self, key: &DocumentKey) -> Result<(), DocumentError> {
-        let Some(_) = self.documents.remove(key) else {
+    pub(super) fn close_document(&mut self, key: &DocumentKey) -> Result<Document, DocumentError> {
+        let Some(document) = self.documents.remove(key) else {
             return Err(DocumentError::NotFound(key.clone()));
         };
 
-        Ok(())
+        Ok(document)
     }
 
     pub(super) fn document_mut(
@@ -229,6 +229,13 @@ impl Document {
         }
     }
 
+    pub(crate) fn language_id(&self) -> Option<LanguageId> {
+        match self {
+            Self::Text(document) => document.language_id(),
+            Self::Notebook(_) => None,
+        }
+    }
+
     pub(crate) fn as_notebook_mut(&mut self) -> Option<&mut NotebookDocument> {
         Some(match self {
             Self::Notebook(notebook) => Arc::make_mut(notebook),
diff --git a/crates/ty_server/src/session/options.rs b/crates/ty_server/src/session/options.rs
index 982dccd484..b202270aae 100644
--- a/crates/ty_server/src/session/options.rs
+++ b/crates/ty_server/src/session/options.rs
@@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use ty_combine::Combine;
 
-use ty_ide::InlayHintSettings;
+use ty_ide::{CompletionSettings, InlayHintSettings};
 use ty_project::metadata::Options as TyOptions;
 use ty_project::metadata::options::ProjectOptionsOverrides;
 use ty_project::metadata::value::{RangedValue, RelativePathBuf};
@@ -123,8 +123,11 @@ impl ClientOptions {
     }
 
     #[must_use]
-    pub fn with_experimental_auto_import(mut self, enabled: bool) -> Self {
-        self.global.experimental.get_or_insert_default().auto_import = Some(enabled);
+    pub fn with_auto_import(mut self, enabled: bool) -> Self {
+        self.workspace
+            .completions
+            .get_or_insert_default()
+            .auto_import = Some(enabled);
         self
     }
 
@@ -155,7 +158,6 @@ impl GlobalOptions {
             .experimental
             .map(|experimental| ExperimentalSettings {
                 rename: experimental.rename.unwrap_or(false),
-                auto_import: experimental.auto_import.unwrap_or(false),
             })
             .unwrap_or_default();
 
@@ -178,6 +180,9 @@ pub(crate) struct WorkspaceOptions {
     /// Options to configure inlay hints.
     inlay_hints: Option<InlayHintOptions>,
 
+    /// Options to configure completions.
+    completions: Option<CompletionOptions>,
+
     /// Information about the currently active Python environment in the VS Code Python extension.
     ///
     /// This is relevant only for VS Code and is populated by the ty VS Code extension.
@@ -235,6 +240,10 @@ impl WorkspaceOptions {
                 .inlay_hints
                 .map(InlayHintOptions::into_settings)
                 .unwrap_or_default(),
+            completions: self
+                .completions
+                .map(CompletionOptions::into_settings)
+                .unwrap_or_default(),
             overrides,
         }
     }
@@ -256,6 +265,26 @@ impl InlayHintOptions {
     }
 }
 
+#[derive(Clone, Combine, Debug, Serialize, Deserialize, Default)]
+#[serde(rename_all = "camelCase")]
+struct CompletionOptions {
+    auto_import: Option<bool>,
+}
+
+impl CompletionOptions {
+    // N.B. It's important for the defaults here to
+    // match the defaults for `CompletionSettings`.
+    // This is because `WorkspaceSettings::default()`
+    // uses `CompletionSettings::default()`. But
+    // `WorkspaceOptions::default().into_settings()` will use this
+    // definition.
+    fn into_settings(self) -> CompletionSettings {
+        CompletionSettings {
+            auto_import: self.auto_import.unwrap_or(true),
+        }
+    }
+}
+
 /// Diagnostic mode for the language server.
 #[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
@@ -300,12 +329,6 @@ impl Combine for DiagnosticMode {
 pub(crate) struct Experimental {
     /// Whether to enable the experimental symbol rename feature.
     pub(crate) rename: Option<bool>,
-    /// Whether to enable the experimental "auto-import" feature.
-    ///
-    /// At time of writing (2025-08-29), this feature is still
-    /// under active development. It may not work right or may be
-    /// incomplete.
-    pub(crate) auto_import: Option<bool>,
 }
 
 #[derive(Clone, Debug, Serialize, Deserialize, Default)]
diff --git a/crates/ty_server/src/session/settings.rs b/crates/ty_server/src/session/settings.rs
index 7b3e95f3ed..5e73df0102 100644
--- a/crates/ty_server/src/session/settings.rs
+++ b/crates/ty_server/src/session/settings.rs
@@ -1,6 +1,6 @@
 use super::options::DiagnosticMode;
 
-use ty_ide::InlayHintSettings;
+use ty_ide::{CompletionSettings, InlayHintSettings};
 use ty_project::metadata::options::ProjectOptionsOverrides;
 
 /// Resolved client settings that are shared across all workspaces.
@@ -14,10 +14,6 @@ impl GlobalSettings {
     pub(crate) fn is_rename_enabled(&self) -> bool {
         self.experimental.rename
     }
-
-    pub(crate) fn is_auto_import_enabled(&self) -> bool {
-        self.experimental.auto_import
-    }
 }
 
 impl GlobalSettings {
@@ -29,7 +25,6 @@ impl GlobalSettings {
 #[derive(Clone, Default, Debug, PartialEq)]
 pub(crate) struct ExperimentalSettings {
     pub(super) rename: bool,
-    pub(super) auto_import: bool,
 }
 
 /// Resolved client settings for a specific workspace.
@@ -40,6 +35,7 @@ pub(crate) struct ExperimentalSettings { pub(crate) struct WorkspaceSettings { pub(super) disable_language_services: bool, pub(super) inlay_hints: InlayHintSettings, + pub(super) completions: CompletionSettings, pub(super) overrides: Option, } @@ -55,4 +51,8 @@ impl WorkspaceSettings { pub(crate) fn inlay_hints(&self) -> &InlayHintSettings { &self.inlay_hints } + + pub(crate) fn completions(&self) -> &CompletionSettings { + &self.completions + } } diff --git a/crates/ty_server/tests/e2e/code_actions.rs b/crates/ty_server/tests/e2e/code_actions.rs index d60d9ad302..8d8c1daebe 100644 --- a/crates/ty_server/tests/e2e/code_actions.rs +++ b/crates/ty_server/tests/e2e/code_actions.rs @@ -132,10 +132,7 @@ x: Literal[1] = 1 "; let ty_toml = SystemPath::new("ty.toml"); - let ty_toml_content = "\ -[rules] -unused-ignore-comment = \"warn\" -"; + let ty_toml_content = ""; let mut server = TestServerBuilder::new()? .with_workspace(workspace_root, None)? diff --git a/crates/ty_server/tests/e2e/completions.rs b/crates/ty_server/tests/e2e/completions.rs new file mode 100644 index 0000000000..80576100b6 --- /dev/null +++ b/crates/ty_server/tests/e2e/completions.rs @@ -0,0 +1,82 @@ +use anyhow::Result; +use lsp_types::{Position, notification::PublishDiagnostics}; +use ruff_db::system::SystemPath; +use ty_server::ClientOptions; + +use crate::TestServerBuilder; + +/// Tests that auto-import is enabled by default. +#[test] +fn default_auto_import() -> Result<()> { + let workspace_root = SystemPath::new("src"); + let foo = SystemPath::new("src/foo.py"); + let foo_content = "\ +walktr +"; + + let mut server = TestServerBuilder::new()? + .with_initialization_options(ClientOptions::default()) + .with_workspace(workspace_root, None)? + .with_file(foo, foo_content)? + .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let _ = server.await_notification::(); + + let hints = server.completion_request(&server.file_uri(foo), Position::new(0, 6)); + + insta::assert_json_snapshot!(hints, @r#" + [ + { + "label": "walktree (import inspect)", + "kind": 3, + "sortText": "0", + "insertText": "walktree", + "additionalTextEdits": [ + { + "range": { + "start": { + "line": 0, + "character": 0 + }, + "end": { + "line": 0, + "character": 0 + } + }, + "newText": "from inspect import walktree\n" + } + ] + } + ] + "#); + + Ok(()) +} + +/// Tests that disabling auto-import works. +#[test] +fn disable_auto_import() -> Result<()> { + let workspace_root = SystemPath::new("src"); + let foo = SystemPath::new("src/foo.py"); + let foo_content = "\ +walktr +"; + + let mut server = TestServerBuilder::new()? + .with_initialization_options(ClientOptions::default().with_auto_import(false)) + .with_workspace(workspace_root, None)? + .with_file(foo, foo_content)? 
+ .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let _ = server.await_notification::(); + + let hints = server.completion_request(&server.file_uri(foo), Position::new(0, 6)); + + insta::assert_json_snapshot!(hints, @"[]"); + + Ok(()) +} diff --git a/crates/ty_server/tests/e2e/initialize.rs b/crates/ty_server/tests/e2e/initialize.rs index 1526e78022..8611afdccd 100644 --- a/crates/ty_server/tests/e2e/initialize.rs +++ b/crates/ty_server/tests/e2e/initialize.rs @@ -402,7 +402,7 @@ fn unknown_initialization_options() -> Result<()> { insta::assert_json_snapshot!(show_message_params, @r#" { "type": 2, - "message": "Received unknown options during initialization: 'bar'. Refer to the logs for more details" + "message": "Received unknown options during initialization: {\n /"bar/": null\n}" } "#); @@ -427,7 +427,7 @@ fn unknown_options_in_workspace_configuration() -> Result<()> { insta::assert_json_snapshot!(show_message_params, @r#" { "type": 2, - "message": "Received unknown options for workspace `file:///foo`: 'bar'. Refer to the logs for more details." + "message": "Received unknown options for workspace `file:///foo`: {\n /"bar/": null\n}" } "#); diff --git a/crates/ty_server/tests/e2e/main.rs b/crates/ty_server/tests/e2e/main.rs index 7e380562fa..579e730129 100644 --- a/crates/ty_server/tests/e2e/main.rs +++ b/crates/ty_server/tests/e2e/main.rs @@ -29,6 +29,7 @@ mod code_actions; mod commands; +mod completions; mod initialize; mod inlay_hints; mod notebook; @@ -51,11 +52,12 @@ use lsp_types::notification::{ Initialized, Notification, }; use lsp_types::request::{ - DocumentDiagnosticRequest, HoverRequest, Initialize, InlayHintRequest, Request, Shutdown, - WorkspaceConfiguration, WorkspaceDiagnosticRequest, + Completion, DocumentDiagnosticRequest, HoverRequest, Initialize, InlayHintRequest, Request, + Shutdown, WorkspaceConfiguration, WorkspaceDiagnosticRequest, }; use lsp_types::{ - ClientCapabilities, ConfigurationParams, DiagnosticClientCapabilities, + ClientCapabilities, CompletionItem, CompletionParams, CompletionResponse, + CompletionTriggerKind, ConfigurationParams, DiagnosticClientCapabilities, DidChangeTextDocumentParams, DidChangeWatchedFilesClientCapabilities, DidChangeWatchedFilesParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, DocumentDiagnosticParams, DocumentDiagnosticReportResult, FileEvent, Hover, HoverParams, @@ -785,7 +787,6 @@ impl TestServer { } /// Send a `textDocument/didClose` notification - #[expect(dead_code)] pub(crate) fn close_text_document(&mut self, path: impl AsRef) { let params = DidCloseTextDocumentParams { text_document: TextDocumentIdentifier { @@ -872,6 +873,31 @@ impl TestServer { let id = self.send_request::(params); self.await_response::(&id) } + + /// Sends a `textDocument/completion` request for the document at the given URL and position. 
+    pub(crate) fn completion_request(
+        &mut self,
+        uri: &Url,
+        position: Position,
+    ) -> Vec<CompletionItem> {
+        let completions_id = self.send_request::<Completion>(CompletionParams {
+            text_document_position: TextDocumentPositionParams {
+                text_document: TextDocumentIdentifier { uri: uri.clone() },
+                position,
+            },
+            work_done_progress_params: lsp_types::WorkDoneProgressParams::default(),
+            partial_result_params: lsp_types::PartialResultParams::default(),
+            context: Some(lsp_types::CompletionContext {
+                trigger_kind: CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS,
+                trigger_character: None,
+            }),
+        });
+        match self.await_response::<Completion>(&completions_id) {
+            Some(CompletionResponse::Array(array)) => array,
+            Some(CompletionResponse::List(lsp_types::CompletionList { items, .. })) => items,
+            None => vec![],
+        }
+    }
 }
 
 impl fmt::Debug for TestServer {
@@ -1103,6 +1129,16 @@ impl TestServerBuilder {
         self
     }
 
+    pub(crate) fn enable_diagnostic_related_information(mut self, enabled: bool) -> Self {
+        self.client_capabilities
+            .text_document
+            .get_or_insert_default()
+            .publish_diagnostics
+            .get_or_insert_default()
+            .related_information = Some(enabled);
+        self
+    }
+
     /// Set custom client capabilities (overrides any previously set capabilities)
     #[expect(dead_code)]
     pub(crate) fn with_client_capabilities(mut self, capabilities: ClientCapabilities) -> Self {
diff --git a/crates/ty_server/tests/e2e/notebook.rs b/crates/ty_server/tests/e2e/notebook.rs
index b8cb10643b..c8d5c738db 100644
--- a/crates/ty_server/tests/e2e/notebook.rs
+++ b/crates/ty_server/tests/e2e/notebook.rs
@@ -1,5 +1,5 @@
 use insta::assert_json_snapshot;
-use lsp_types::{CompletionResponse, CompletionTriggerKind, NotebookCellKind, Position, Range};
+use lsp_types::{NotebookCellKind, Position, Range};
 use ruff_db::system::SystemPath;
 use ty_server::ClientOptions;
 
@@ -10,6 +10,7 @@ static FILTERS: &[(&str, &str)] = &[(r#""sortText": "[0-9 ]+""#, r#""sortText":
 #[test]
 fn publish_diagnostics_open() -> anyhow::Result<()> {
     let mut server = TestServerBuilder::new()?
+        .enable_diagnostic_related_information(true)
         .build()
         .wait_until_workspaces_are_initialized();
 
@@ -219,8 +220,7 @@ fn swap_cells() -> anyhow::Result<()> {
               "href": "https://ty.dev/rules#unresolved-reference"
             },
             "source": "ty",
-            "message": "Name `a` used when not defined",
-            "relatedInformation": []
+            "message": "Name `a` used when not defined"
           }
         ],
         "vscode-notebook-cell://src/test.ipynb#1": [],
@@ -285,7 +285,7 @@ fn auto_import() -> anyhow::Result<()> {
     let mut server = TestServerBuilder::new()?
         .with_workspace(
             SystemPath::new("src"),
-            Some(ClientOptions::default().with_experimental_auto_import(true)),
+            Some(ClientOptions::default().with_auto_import(true)),
         )?
         .build()
         .wait_until_workspaces_are_initialized();
@@ -325,7 +325,7 @@ fn auto_import_same_cell() -> anyhow::Result<()> {
     let mut server = TestServerBuilder::new()?
         .with_workspace(
             SystemPath::new("src"),
-            Some(ClientOptions::default().with_experimental_auto_import(true)),
+            Some(ClientOptions::default().with_auto_import(true)),
         )?
         .build()
         .wait_until_workspaces_are_initialized();
@@ -360,7 +360,7 @@ fn auto_import_from_future() -> anyhow::Result<()> {
     let mut server = TestServerBuilder::new()?
         .with_workspace(
             SystemPath::new("src"),
-            Some(ClientOptions::default().with_experimental_auto_import(true)),
+            Some(ClientOptions::default().with_auto_import(true)),
         )?
         .build()
         .wait_until_workspaces_are_initialized();
@@ -397,7 +397,7 @@ fn auto_import_docstring() -> anyhow::Result<()> {
     let mut server = TestServerBuilder::new()?
.with_workspace( SystemPath::new("src"), - Some(ClientOptions::default().with_experimental_auto_import(true)), + Some(ClientOptions::default().with_auto_import(true)), )? .build() .wait_until_workspaces_are_initialized(); @@ -521,31 +521,10 @@ fn literal_completions( cell: &lsp_types::Url, position: Position, ) -> Vec { - let completions_id = - server.send_request::(lsp_types::CompletionParams { - text_document_position: lsp_types::TextDocumentPositionParams { - text_document: lsp_types::TextDocumentIdentifier { uri: cell.clone() }, - position, - }, - work_done_progress_params: lsp_types::WorkDoneProgressParams::default(), - partial_result_params: lsp_types::PartialResultParams::default(), - context: Some(lsp_types::CompletionContext { - trigger_kind: CompletionTriggerKind::TRIGGER_FOR_INCOMPLETE_COMPLETIONS, - trigger_character: None, - }), - }); - + let mut items = server.completion_request(cell, position); // There are a ton of imports we don't care about in here... // The import bit is that an edit is always restricted to the current cell. That means, // we can't add `Literal` to the `from typing import TYPE_CHECKING` import in cell 1 - let completions = server.await_response::(&completions_id); - let mut items = match completions { - Some(CompletionResponse::Array(array)) => array, - Some(CompletionResponse::List(lsp_types::CompletionList { items, .. })) => items, - None => return vec![], - }; - items.retain(|item| item.label.starts_with("Litera")); - items } diff --git a/crates/ty_server/tests/e2e/publish_diagnostics.rs b/crates/ty_server/tests/e2e/publish_diagnostics.rs index b7f1eaf2d9..bbe7094325 100644 --- a/crates/ty_server/tests/e2e/publish_diagnostics.rs +++ b/crates/ty_server/tests/e2e/publish_diagnostics.rs @@ -1,7 +1,10 @@ use std::time::Duration; use anyhow::Result; -use lsp_types::{FileChangeType, FileEvent, notification::PublishDiagnostics}; +use lsp_types::{ + DidOpenTextDocumentParams, FileChangeType, FileEvent, TextDocumentItem, + notification::{DidOpenTextDocument, PublishDiagnostics}, +}; use ruff_db::system::SystemPath; use crate::TestServerBuilder; @@ -30,6 +33,57 @@ def foo() -> str: Ok(()) } +#[test] +fn message_without_related_information_support() -> Result<()> { + let workspace_root = SystemPath::new("src"); + let foo = SystemPath::new("src/foo.py"); + let foo_content = r#" +from typing import assert_type + +assert_type("test", list[str]) +"#; + + let mut server = TestServerBuilder::new()? + .with_workspace(workspace_root, None)? + .with_file(foo, foo_content)? + .enable_pull_diagnostics(false) + .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let diagnostics = server.await_notification::(); + + insta::assert_debug_snapshot!(diagnostics); + + Ok(()) +} + +#[test] +fn message_with_related_information_support() -> Result<()> { + let workspace_root = SystemPath::new("src"); + let foo = SystemPath::new("src/foo.py"); + let foo_content = r#" +from typing import assert_type + +assert_type("test", list[str]) +"#; + + let mut server = TestServerBuilder::new()? + .with_workspace(workspace_root, None)? + .with_file(foo, foo_content)? 
+ .enable_diagnostic_related_information(true) + .enable_pull_diagnostics(false) + .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let diagnostics = server.await_notification::(); + + insta::assert_debug_snapshot!(diagnostics); + + Ok(()) +} + #[test] fn on_did_change_watched_files() -> Result<()> { let workspace_root = SystemPath::new("src"); @@ -109,3 +163,66 @@ def foo() -> str: Ok(()) } + +#[test] +fn on_did_open_file_without_extension_but_python_language() -> Result<()> { + let foo = SystemPath::new("src/foo"); + let foo_content = "\ +def foo() -> str: + return 42 +"; + + let mut server = TestServerBuilder::new()? + .with_workspace(SystemPath::new("src"), None)? + .with_file(foo, foo_content)? + .enable_pull_diagnostics(false) + .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let diagnostics = server.await_notification::(); + + insta::assert_debug_snapshot!(diagnostics); + + Ok(()) +} + +#[test] +fn changing_language_of_file_without_extension() -> Result<()> { + let foo = SystemPath::new("src/foo"); + let foo_content = "\ +def foo() -> str: + return 42 +"; + + let mut server = TestServerBuilder::new()? + .with_workspace(SystemPath::new("src"), None)? + .with_file(foo, foo_content)? + .enable_pull_diagnostics(false) + .build() + .wait_until_workspaces_are_initialized(); + + server.open_text_document(foo, foo_content, 1); + let diagnostics = server.await_notification::(); + + insta::assert_debug_snapshot!(diagnostics); + + server.close_text_document(foo); + + let params = DidOpenTextDocumentParams { + text_document: TextDocumentItem { + uri: server.file_uri(foo), + language_id: "text".to_string(), + version: 1, + text: foo_content.to_string(), + }, + }; + server.send_notification::(params); + let _close_diagnostics = server.await_notification::(); + + let diagnostics = server.await_notification::(); + + insta::assert_debug_snapshot!(diagnostics); + + Ok(()) +} diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action.snap index ae0da5c3ad..af59d0f038 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action.snap @@ -25,7 +25,6 @@ expression: code_actions }, "source": "ty", "message": "Unused `ty: ignore` directive", - "relatedInformation": [], "tags": [ 1 ] @@ -74,7 +73,6 @@ expression: code_actions }, "source": "ty", "message": "Unused `ty: ignore` directive", - "relatedInformation": [], "tags": [ 1 ] diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_attribute_access_on_unimported.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_attribute_access_on_unimported.snap index 3026696d8e..78da8d45f7 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_attribute_access_on_unimported.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_attribute_access_on_unimported.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `typing` used when not defined", - "relatedInformation": [] + "message": "Name `typing` used when not defined" } ], "edit": { @@ -70,8 +69,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `typing` used when 
not defined", - "relatedInformation": [] + "message": "Name `typing` used when not defined" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_existing_import_undefined_decorator.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_existing_import_undefined_decorator.snap index fd022ed8b2..f5e5a4f565 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_existing_import_undefined_decorator.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_existing_import_undefined_decorator.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `deprecated` used when not defined", - "relatedInformation": [] + "message": "Name `deprecated` used when not defined" } ], "edit": { @@ -70,8 +69,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `deprecated` used when not defined", - "relatedInformation": [] + "message": "Name `deprecated` used when not defined" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_invalid_string_annotations.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_invalid_string_annotations.snap index 07ae5cb675..6e5e2f3edc 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_invalid_string_annotations.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_invalid_string_annotations.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `foobar` used when not defined", - "relatedInformation": [] + "message": "Name `foobar` used when not defined" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_possible_missing_submodule_attribute.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_possible_missing_submodule_attribute.snap index fe723d2fcc..5ab3b0dc23 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_possible_missing_submodule_attribute.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_possible_missing_submodule_attribute.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#possibly-missing-attribute" }, "source": "ty", - "message": "Submodule `parser` may not be available as an attribute on module `html`", - "relatedInformation": [] + "message": "Submodule `parser` may not be available as an attribute on module `html`" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_decorator.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_decorator.snap index 44c5c5cd22..6c42ea73da 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_decorator.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_decorator.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `deprecated` used when not defined", - "relatedInformation": [] + "message": "Name `deprecated` used when not defined" } ], "edit": { @@ -70,8 +69,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `deprecated` 
used when not defined", - "relatedInformation": [] + "message": "Name `deprecated` used when not defined" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_reference_multi.snap b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_reference_multi.snap index a57ac11745..c1ed495ca8 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_reference_multi.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__code_actions__code_action_undefined_reference_multi.snap @@ -24,8 +24,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `Literal` used when not defined", - "relatedInformation": [] + "message": "Name `Literal` used when not defined" } ], "edit": { @@ -70,8 +69,7 @@ expression: code_actions "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `Literal` used when not defined", - "relatedInformation": [] + "message": "Name `Literal` used when not defined" } ], "edit": { diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap index 0daa6c768a..762b91f71c 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__commands__debug_command.snap @@ -10,7 +10,6 @@ Global settings: GlobalSettings { diagnostic_mode: OpenFilesOnly, experimental: ExperimentalSettings { rename: false, - auto_import: false, }, } Open text documents: 0 @@ -22,6 +21,9 @@ Settings: WorkspaceSettings { variable_types: true, call_argument_names: true, }, + completions: CompletionSettings { + auto_import: true, + }, overrides: None, } diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__notebook__diagnostic_end_of_file.snap b/crates/ty_server/tests/e2e/snapshots/e2e__notebook__diagnostic_end_of_file.snap index 3d11a0804c..24b529f89a 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__notebook__diagnostic_end_of_file.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__notebook__diagnostic_end_of_file.snap @@ -22,8 +22,7 @@ expression: diagnostics "href": "https://ty.dev/rules#invalid-return-type" }, "source": "ty", - "message": "Function can implicitly return `None`, which is not assignable to return type `str`", - "relatedInformation": [] + "message": "Function can implicitly return `None`, which is not assignable to return type `str`" } ], "vscode-notebook-cell://test.ipynb#2": [ @@ -41,8 +40,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "Expected `,`, found name", - "relatedInformation": [] + "message": "Expected `,`, found name" }, { "range": { @@ -61,8 +59,7 @@ expression: diagnostics "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `word` used when not defined", - "relatedInformation": [] + "message": "Name `word` used when not defined" }, { "range": { @@ -78,8 +75,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "Expected `,`, found string", - "relatedInformation": [] + "message": "Expected `,`, found string" }, { "range": { @@ -98,41 +94,7 @@ expression: diagnostics "href": "https://ty.dev/rules#invalid-argument-type" }, "source": "ty", - "message": "Argument to function `with_style` is incorrect: Expected `Style`, found `Literal[/", /"]`", - "relatedInformation": [ - { - "location": { 
- "uri": "vscode-notebook-cell://test.ipynb#1", - "range": { - "start": { - "line": 0, - "character": 4 - }, - "end": { - "line": 0, - "character": 14 - } - } - }, - "message": "Function defined here" - }, - { - "location": { - "uri": "vscode-notebook-cell://test.ipynb#1", - "range": { - "start": { - "line": 0, - "character": 32 - }, - "end": { - "line": 0, - "character": 44 - } - } - }, - "message": "Parameter declared here" - } - ] + "message": "Argument to function `with_style` is incorrect: Expected `Style`, found `Literal[/", /"]`" }, { "range": { @@ -148,8 +110,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "Expected `,`, found name", - "relatedInformation": [] + "message": "Expected `,`, found name" }, { "range": { @@ -168,8 +129,7 @@ expression: diagnostics "href": "https://ty.dev/rules#unresolved-reference" }, "source": "ty", - "message": "Name `underline` used when not defined", - "relatedInformation": [] + "message": "Name `underline` used when not defined" }, { "range": { @@ -188,25 +148,7 @@ expression: diagnostics "href": "https://ty.dev/rules#too-many-positional-arguments" }, "source": "ty", - "message": "Too many positional arguments to function `with_style`: expected 3, got 6", - "relatedInformation": [ - { - "location": { - "uri": "vscode-notebook-cell://test.ipynb#1", - "range": { - "start": { - "line": 0, - "character": 4 - }, - "end": { - "line": 0, - "character": 52 - } - } - }, - "message": "Function signature here" - } - ] + "message": "Too many positional arguments to function `with_style`: expected 3, got 6" }, { "range": { @@ -222,8 +164,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "missing closing quote in string literal", - "relatedInformation": [] + "message": "missing closing quote in string literal" }, { "range": { @@ -239,8 +180,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "Expected `,`, found name", - "relatedInformation": [] + "message": "Expected `,`, found name" }, { "range": { @@ -256,8 +196,7 @@ expression: diagnostics "severity": 1, "code": "invalid-syntax", "source": "ty", - "message": "unexpected EOF while parsing", - "relatedInformation": [] + "message": "unexpected EOF while parsing" } ] } diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension-2.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension-2.snap new file mode 100644 index 0000000000..91a4a10b81 --- /dev/null +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension-2.snap @@ -0,0 +1,21 @@ +--- +source: crates/ty_server/tests/e2e/publish_diagnostics.rs +expression: diagnostics +--- +PublishDiagnosticsParams { + uri: Url { + scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo", + query: None, + fragment: None, + }, + diagnostics: [], + version: Some( + 1, + ), +} diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension.snap new file mode 100644 index 0000000000..d6a9af02de --- /dev/null +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__changing_language_of_file_without_extension.snap @@ -0,0 
+1,70 @@ +--- +source: crates/ty_server/tests/e2e/publish_diagnostics.rs +expression: diagnostics +--- +PublishDiagnosticsParams { + uri: Url { + scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo", + query: None, + fragment: None, + }, + diagnostics: [ + Diagnostic { + range: Range { + start: Position { + line: 1, + character: 11, + }, + end: Position { + line: 1, + character: 13, + }, + }, + severity: Some( + Error, + ), + code: Some( + String( + "invalid-return-type", + ), + ), + code_description: Some( + CodeDescription { + href: Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "ty.dev", + ), + ), + port: None, + path: "/rules", + query: None, + fragment: Some( + "invalid-return-type", + ), + }, + }, + ), + source: Some( + "ty", + ), + message: "Return type does not match returned value: expected `str`, found `Literal[42]`", + related_information: None, + tags: None, + data: None, + }, + ], + version: Some( + 1, + ), +} diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_with_related_information_support.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_with_related_information_support.snap new file mode 100644 index 0000000000..7c18c8f93d --- /dev/null +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_with_related_information_support.snap @@ -0,0 +1,99 @@ +--- +source: crates/ty_server/tests/e2e/publish_diagnostics.rs +expression: diagnostics +--- +PublishDiagnosticsParams { + uri: Url { + scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo.py", + query: None, + fragment: None, + }, + diagnostics: [ + Diagnostic { + range: Range { + start: Position { + line: 3, + character: 0, + }, + end: Position { + line: 3, + character: 30, + }, + }, + severity: Some( + Error, + ), + code: Some( + String( + "type-assertion-failure", + ), + ), + code_description: Some( + CodeDescription { + href: Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "ty.dev", + ), + ), + port: None, + path: "/rules", + query: None, + fragment: Some( + "type-assertion-failure", + ), + }, + }, + ), + source: Some( + "ty", + ), + message: "Argument does not have asserted type `list[str]`", + related_information: Some( + [ + DiagnosticRelatedInformation { + location: Location { + uri: Url { + scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo.py", + query: None, + fragment: None, + }, + range: Range { + start: Position { + line: 3, + character: 12, + }, + end: Position { + line: 3, + character: 18, + }, + }, + }, + message: "Inferred type is `Literal[/"test/"]`", + }, + ], + ), + tags: None, + data: None, + }, + ], + version: Some( + 1, + ), +} diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_without_related_information_support.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_without_related_information_support.snap new file mode 100644 index 0000000000..d2056ebf20 --- /dev/null +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__message_without_related_information_support.snap @@ -0,0 +1,70 @@ +--- +source: crates/ty_server/tests/e2e/publish_diagnostics.rs +expression: diagnostics +--- +PublishDiagnosticsParams { + uri: Url { 
+ scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo.py", + query: None, + fragment: None, + }, + diagnostics: [ + Diagnostic { + range: Range { + start: Position { + line: 3, + character: 0, + }, + end: Position { + line: 3, + character: 30, + }, + }, + severity: Some( + Error, + ), + code: Some( + String( + "type-assertion-failure", + ), + ), + code_description: Some( + CodeDescription { + href: Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "ty.dev", + ), + ), + port: None, + path: "/rules", + query: None, + fragment: Some( + "type-assertion-failure", + ), + }, + }, + ), + source: Some( + "ty", + ), + message: "Type `list[str]` does not match asserted type `Literal[/"test/"]`", + related_information: None, + tags: None, + data: None, + }, + ], + version: Some( + 1, + ), +} diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open.snap index be48dde9dc..63f94e2ae7 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open.snap @@ -59,36 +59,7 @@ PublishDiagnosticsParams { "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/foo.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open_file_without_extension_but_python_language.snap b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open_file_without_extension_but_python_language.snap new file mode 100644 index 0000000000..d6a9af02de --- /dev/null +++ b/crates/ty_server/tests/e2e/snapshots/e2e__publish_diagnostics__on_did_open_file_without_extension_but_python_language.snap @@ -0,0 +1,70 @@ +--- +source: crates/ty_server/tests/e2e/publish_diagnostics.rs +expression: diagnostics +--- +PublishDiagnosticsParams { + uri: Url { + scheme: "file", + cannot_be_a_base: false, + username: "", + password: None, + host: None, + port: None, + path: "/src/foo", + query: None, + fragment: None, + }, + diagnostics: [ + Diagnostic { + range: Range { + start: Position { + line: 1, + character: 11, + }, + end: Position { + line: 1, + character: 13, + }, + }, + severity: Some( + Error, + ), + code: Some( + String( + "invalid-return-type", + ), + ), + code_description: Some( + CodeDescription { + href: Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "ty.dev", + ), + ), + port: None, + path: "/rules", + query: None, + fragment: Some( + "invalid-return-type", + ), + }, + }, + ), + source: Some( + "ty", + ), + message: "Return type does not match returned value: expected `str`, found `Literal[42]`", + related_information: None, + tags: None, + data: None, + }, + ], + version: Some( + 1, + ), +} diff --git 
a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__on_did_open.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__on_did_open.snap index 12ade6fda2..86cf96fef5 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__on_did_open.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__on_did_open.snap @@ -55,36 +55,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/foo.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_after_changes.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_after_changes.snap index 38f03c13e2..02b3f7f831 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_after_changes.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_after_changes.snap @@ -70,36 +70,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `int`, found `Literal[/"hello/"]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/changed_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `int` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, @@ -194,36 +165,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/modified_same_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 3, - character: 13, - }, - end: Position { - line: 3, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, @@ -296,36 +238,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/new_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git 
a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_initial_state.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_initial_state.snap index 2b3bc9b579..ddd3944928 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_initial_state.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_initial_state.snap @@ -68,36 +68,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/changed_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, @@ -168,36 +139,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/fixed_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, @@ -268,36 +210,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/modified_same_error.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, @@ -370,36 +283,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/unchanged.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 13, - }, - end: Position { - line: 0, - character: 16, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_long_polling_change_response.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_long_polling_change_response.snap index e2f12e261c..e1e5223e0d 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_long_polling_change_response.snap +++ 
b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_long_polling_change_response.snap @@ -70,36 +70,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/test.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 15, - }, - end: Position { - line: 0, - character: 18, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_streaming_with_caching.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_streaming_with_caching.snap index 90fb910f5e..d6832c5425 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_streaming_with_caching.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_streaming_with_caching.snap @@ -68,9 +68,7 @@ expression: all_items "ty", ), message: "Name `true` used when not defined", - related_information: Some( - [], - ), + related_information: None, tags: None, data: None, }, @@ -143,9 +141,7 @@ expression: all_items "ty", ), message: "Name `true` used when not defined", - related_information: Some( - [], - ), + related_information: None, tags: None, data: None, }, @@ -218,9 +214,7 @@ expression: all_items "ty", ), message: "Name `true` used when not defined", - related_information: Some( - [], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_suspend_change_suspend_first_response.snap b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_suspend_change_suspend_first_response.snap index 6f38e6bc19..2052485560 100644 --- a/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_suspend_change_suspend_first_response.snap +++ b/crates/ty_server/tests/e2e/snapshots/e2e__pull_diagnostics__workspace_diagnostic_suspend_change_suspend_first_response.snap @@ -70,36 +70,7 @@ Report( "ty", ), message: "Return type does not match returned value: expected `str`, found `Literal[42]`", - related_information: Some( - [ - DiagnosticRelatedInformation { - location: Location { - uri: Url { - scheme: "file", - cannot_be_a_base: false, - username: "", - password: None, - host: None, - port: None, - path: "/src/test.py", - query: None, - fragment: None, - }, - range: Range { - start: Position { - line: 0, - character: 15, - }, - end: Position { - line: 0, - character: 18, - }, - }, - }, - message: "Expected `str` because of return type", - }, - ], - ), + related_information: None, tags: None, data: None, }, diff --git a/crates/ty_test/Cargo.toml b/crates/ty_test/Cargo.toml index f300b614a0..a7c18b9d00 100644 --- a/crates/ty_test/Cargo.toml +++ b/crates/ty_test/Cargo.toml @@ -25,6 +25,7 @@ ty_vendored = { workspace = true } anyhow = { workspace = true } camino = { workspace = true } +dunce = { workspace = true } colored = { workspace = true } insta = { workspace = true, features = ["filters"] } memchr = { workspace = true } diff --git a/crates/ty_test/README.md b/crates/ty_test/README.md index 
ecf4614d94..b31a45e877 100644 --- a/crates/ty_test/README.md +++ b/crates/ty_test/README.md @@ -316,6 +316,44 @@ To enable logging in an mdtest, set `log = true` at the top level of the TOML bl See [`MarkdownTestConfig`](https://github.com/astral-sh/ruff/blob/main/crates/ty_test/src/config.rs) for the full list of supported configuration options. +### Testing with external dependencies + +Tests can specify external Python dependencies using a `[project]` section in the TOML configuration. +This allows testing code that uses third-party libraries like `pydantic`, `numpy`, etc. + +It is recommended to specify exact versions of packages to ensure reproducibility. The specified +Python version and platform are required for tests with external dependencies, as they are used +during package resolution. + +````markdown +```toml +[environment] +python-version = "3.13" +python-platform = "linux" + +[project] +dependencies = ["pydantic==2.12.2"] +``` + +```py +import pydantic + +# use pydantic in the test +``` +```` + +When a test has dependencies: + +1. The test framework creates a `pyproject.toml` in a temporary directory. +1. Runs `uv sync` to install the dependencies. +1. Copies the installed packages from the virtual environment's `site-packages` directory into the test's + in-memory filesystem. +1. Configures the type checker to use these packages. + +**Note**: This feature requires `uv` to be installed and available in your `PATH`. The dependencies +are installed fresh for each test that specifies them, so tests with many dependencies may be slower +to run. + ### Specifying a custom typeshed Some tests will need to override the default typeshed with custom files. The `[environment]` diff --git a/crates/ty_test/src/config.rs b/crates/ty_test/src/config.rs index 6fe3a17fd0..6154208d69 100644 --- a/crates/ty_test/src/config.rs +++ b/crates/ty_test/src/config.rs @@ -4,8 +4,12 @@ //! //! ```toml //! log = true # or log = "ty=WARN" +//! //! [environment] //! python-version = "3.10" +//! +//! [project] +//! dependencies = ["pydantic==2.12.2"] //! ``` use anyhow::Context; @@ -25,6 +29,9 @@ pub(crate) struct MarkdownTestConfig { /// /// Defaults to the case-sensitive [`ruff_db::system::InMemorySystem`]. pub(crate) system: Option<SystemKind>, + + /// Project configuration for installing external dependencies. + pub(crate) project: Option<Project>, } impl MarkdownTestConfig { @@ -51,6 +58,10 @@ impl MarkdownTestConfig { pub(crate) fn python(&self) -> Option<&SystemPath> { self.environment.as_ref()?.python.as_deref() } + + pub(crate) fn dependencies(&self) -> Option<&[String]> { + self.project.as_ref()?.dependencies.as_deref() + } } #[derive(Deserialize, Debug, Default, Clone)] #[serde(rename_all = "kebab-case", deny_unknown_fields)] @@ -116,3 +127,16 @@ pub(crate) enum SystemKind { /// This system should only be used when testing system or OS specific behavior. Os, } + +/// Project configuration for tests that need external dependencies. +#[derive(Deserialize, Debug, Default, Clone)] +#[serde(rename_all = "kebab-case", deny_unknown_fields)] +pub(crate) struct Project { + /// List of Python package dependencies in `pyproject.toml` format. + /// + /// These will be installed using `uv sync` into a temporary virtual environment. + /// The site-packages directory will then be copied into the test's filesystem.
+ /// + /// Example: `dependencies = ["pydantic==2.12.2"]` + pub(crate) dependencies: Option<Vec<String>>, +} diff --git a/crates/ty_test/src/external_dependencies.rs b/crates/ty_test/src/external_dependencies.rs new file mode 100644 index 0000000000..38a14d78c9 --- /dev/null +++ b/crates/ty_test/src/external_dependencies.rs @@ -0,0 +1,186 @@ +use crate::db::Db; + +use anyhow::{Context, Result, anyhow, bail}; +use ruff_db::system::{DbWithWritableSystem as _, OsSystem, SystemPath}; +use ruff_python_ast::PythonVersion; +use ty_python_semantic::{PythonEnvironment, PythonPlatform, SysPrefixPathOrigin}; + +/// Setup a virtual environment in the in-memory filesystem of `db` with +/// the specified dependencies installed. +pub(crate) fn setup_venv( + db: &mut Db, + dependencies: &[String], + python_version: PythonVersion, + python_platform: &PythonPlatform, + dest_venv_path: &SystemPath, +) -> Result<()> { + // Create a temporary directory for the project + let temp_dir = tempfile::Builder::new() + .prefix("mdtest-venv-") + .tempdir() + .context("Failed to create temporary directory for mdtest virtual environment")?; + + // Canonicalize here to fix problems with `.strip_prefix()` later on Windows + let temp_dir_path = dunce::canonicalize(temp_dir.path()) + .context("Failed to canonicalize temporary directory path")?; + + let temp_path = SystemPath::from_std_path(&temp_dir_path) + .ok_or_else(|| { + anyhow!( + "Temporary directory path is not valid UTF-8: {}", + temp_dir_path.display() + ) + })? + .to_path_buf(); + + // Generate a minimal pyproject.toml + let pyproject_toml = format!( + r#"[project] +name = "mdtest-deps" +version = "0.1.0" +requires-python = "~={python_version}.0" +dependencies = [ +{deps} +] +"#, + python_version = python_version, + deps = dependencies + .iter() + .map(|dep| format!(" \"{dep}\",")) + .collect::<Vec<_>>() + .join("\n") + ); + + std::fs::write( + temp_path.join("pyproject.toml").as_std_path(), + pyproject_toml, + ) + .context("Failed to write pyproject.toml")?; + + // Convert PythonPlatform to uv's platform format + let uv_platform = match python_platform { + PythonPlatform::Identifier(id) => match id.as_str() { + "win32" => "windows", + "darwin" => "macos", + "linux" => "linux", + other => other, + }, + PythonPlatform::All => { + bail!("For an mdtest with external dependencies, a Python platform must be specified"); + } + }; + + // Run `uv sync` to install dependencies + let uv_sync_output = std::process::Command::new("uv") + .args(["sync", "--python-platform", uv_platform]) + .current_dir(temp_path.as_std_path()) + .output() + .context("Failed to run `uv sync`. Is `uv` installed?")?; + + if !uv_sync_output.status.success() { + let stderr = String::from_utf8_lossy(&uv_sync_output.stderr); + bail!( + "`uv sync` failed with exit code {:?}:\n{}", + uv_sync_output.status.code(), + stderr + ); + } + + let venv_path = temp_path.join(".venv"); + + copy_site_packages_to_db(db, &venv_path, dest_venv_path, python_version) +} + +/// Copy the site-packages directory from a real virtual environment to the in-memory filesystem of `db`. +/// +/// This recursively copies all files from the venv's site-packages directory into the +/// in-memory filesystem at the specified destination path.
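+///
+/// For example (illustrative paths, assuming a Linux venv layout and Python 3.13), a
+/// site-packages directory resolved at `<temp>/.venv/lib/python3.13/site-packages` is
+/// mirrored (Python source and stub files only) to
+/// `<dest_venv_path>/lib/python3.13/site-packages` in the in-memory filesystem.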
+fn copy_site_packages_to_db( + db: &mut Db, + venv_path: &SystemPath, + dest_venv_path: &SystemPath, + _python_version: PythonVersion, +) -> Result<()> { + // Discover the site-packages directory in the virtual environment + let system = OsSystem::new(venv_path); + let env = PythonEnvironment::new(venv_path, SysPrefixPathOrigin::LocalVenv, &system) + .context("Failed to create Python environment for temporary virtual environment")?; + + let site_packages_paths = env + .site_packages_paths(&system) + .context(format!("Failed to discover site-packages in '{venv_path}'"))?; + + let site_packages_path = site_packages_paths + .into_iter() + .next() + .ok_or_else(|| anyhow!("No site-packages directory found in '{venv_path}'"))?; + + // Create the destination directory structure + let relative_site_packages = site_packages_path.strip_prefix(venv_path).map_err(|_| { + anyhow!("site-packages path '{site_packages_path}' is not under venv path '{venv_path}'") + })?; + let dest_site_packages = dest_venv_path.join(relative_site_packages); + db.create_directory_all(&dest_site_packages) + .context("Failed to create site-packages directory in database")?; + + // Recursively copy all files from site-packages + copy_directory_recursive(db, &site_packages_path, &dest_site_packages)?; + + Ok(()) +} + +fn copy_directory_recursive(db: &mut Db, src: &SystemPath, dest: &SystemPath) -> Result<()> { + use std::fs; + + for entry in fs::read_dir(src.as_std_path()) + .with_context(|| format!("Failed to read directory {src}"))? + { + let entry = entry.with_context(|| format!("Failed to read directory entry in {src}"))?; + let entry_path = entry.path(); + let file_type = entry + .file_type() + .with_context(|| format!("Failed to get file type for {}", entry_path.display()))?; + + let src_path = SystemPath::from_std_path(&entry_path) + .ok_or_else(|| anyhow!("Path {} is not valid UTF-8", entry_path.display()))?; + + let file_name = entry.file_name(); + let file_name_str = file_name.to_str().ok_or_else(|| { + anyhow!( + "File name {} is not valid UTF-8", + file_name.to_string_lossy() + ) + })?; + + let dest_path = dest.join(file_name_str); + + if file_type.is_dir() { + // Skip __pycache__ directories and other unnecessary directories + if file_name_str == "__pycache__" || file_name_str.ends_with(".dist-info") { + continue; + } + + db.create_directory_all(&dest_path) + .with_context(|| format!("Failed to create directory {dest_path}"))?; + + copy_directory_recursive(db, src_path, &dest_path)?; + } else if file_type.is_file() { + let is_python_source = entry_path.extension().is_some_and(|ext| { + ext.eq_ignore_ascii_case("py") || ext.eq_ignore_ascii_case("pyi") + }); + + if !is_python_source { + // Skip all non-Python files (binaries, data files, etc.) 
+ continue; + } + + let contents = fs::read_to_string(src_path.as_std_path()) + .with_context(|| format!("Failed to read file {src_path}"))?; + + db.write_file(&dest_path, contents) + .with_context(|| format!("Failed to write file {dest_path}"))?; + } + } + + Ok(()) +} diff --git a/crates/ty_test/src/lib.rs b/crates/ty_test/src/lib.rs index feb38bdf66..ad49261739 100644 --- a/crates/ty_test/src/lib.rs +++ b/crates/ty_test/src/lib.rs @@ -28,6 +28,7 @@ mod assertion; mod config; mod db; mod diagnostic; +mod external_dependencies; mod matcher; mod parser; @@ -70,16 +71,21 @@ pub fn run( Log::Filter(filter) => setup_logging_with_filter(filter), }); - let failures = run_test(&mut db, relative_fixture_path, snapshot_path, &test); - let inconsistencies = run_module_resolution_consistency_test(&db); - let this_test_failed = failures.is_err() || inconsistencies.is_err(); + let result = run_test(&mut db, relative_fixture_path, snapshot_path, &test); + let inconsistencies = if result.as_ref().is_ok_and(|t| t.has_been_skipped()) { + Ok(()) + } else { + run_module_resolution_consistency_test(&db) + }; + + let this_test_failed = result.is_err() || inconsistencies.is_err(); any_failures = any_failures || this_test_failed; if this_test_failed && output_format.is_cli() { println!("\n{}\n", test.name().bold().underline()); } - if let Err(failures) = failures { + if let Err(failures) = result { let md_index = LineIndex::from_source_text(&source); for test_failures in failures { @@ -212,12 +218,24 @@ impl OutputFormat { } } +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum TestOutcome { + Success, + Skipped, +} + +impl TestOutcome { + const fn has_been_skipped(self) -> bool { + matches!(self, TestOutcome::Skipped) + } +} + fn run_test( db: &mut db::Db, relative_fixture_path: &Utf8Path, snapshot_path: &Utf8Path, test: &parser::MarkdownTest, -) -> Result<(), Failures> { +) -> Result<TestOutcome, Failures> { // Initialize the system and remove all files and directories to reset the system to a clean state.
match test.configuration().system.unwrap_or_default() { SystemKind::InMemory => { @@ -248,6 +266,27 @@ fn run_test( let custom_typeshed_path = test.configuration().typeshed(); let python_version = test.configuration().python_version().unwrap_or_default(); + // Setup virtual environment with dependencies if specified + let venv_for_external_dependencies = SystemPathBuf::from("/.venv"); + if let Some(dependencies) = test.configuration().dependencies() { + if !std::env::var("MDTEST_EXTERNAL").is_ok_and(|v| v == "1") { + return Ok(TestOutcome::Skipped); + } + + let python_platform = test.configuration().python_platform().expect( + "Tests with external dependencies must specify `python-platform` in the configuration", + ); + + external_dependencies::setup_venv( + db, + dependencies, + python_version, + &python_platform, + &venv_for_external_dependencies, + ) + .expect("Failed to setup in-memory virtual environment with dependencies"); + } + let mut typeshed_files = vec![]; let mut has_custom_versions_file = false; @@ -350,7 +389,19 @@ fn run_test( let configuration = test.configuration(); - let site_packages_paths = if let Some(python) = configuration.python() { + let site_packages_paths = if configuration.dependencies().is_some() { + // If dependencies were specified, use the venv we just set up + let environment = PythonEnvironment::new( + &venv_for_external_dependencies, + SysPrefixPathOrigin::PythonCliFlag, + db.system(), + ) + .expect("Python environment to point to a valid path"); + environment + .site_packages_paths(db.system()) + .expect("Python environment to be valid") + .into_vec() + } else if let Some(python) = configuration.python() { let environment = PythonEnvironment::new(python, SysPrefixPathOrigin::PythonCliFlag, db.system()) .expect("Python environment to point to a valid path"); @@ -551,7 +602,7 @@ fn run_test( } if failures.is_empty() { - Ok(()) + Ok(TestOutcome::Success) } else { Err(failures) } diff --git a/crates/ty_test/src/matcher.rs b/crates/ty_test/src/matcher.rs index ce8214e72c..5fe219278c 100644 --- a/crates/ty_test/src/matcher.rs +++ b/crates/ty_test/src/matcher.rs @@ -427,10 +427,16 @@ mod tests { let mut diag = if self.id == DiagnosticId::RevealedType { Diagnostic::new(self.id, Severity::Error, "Revealed type") } else { - Diagnostic::new(self.id, Severity::Error, "") + Diagnostic::new(self.id, Severity::Error, self.message) }; let span = Span::from(file).with_range(self.range); - diag.annotate(Annotation::primary(span).message(self.message)); + let mut annotation = Annotation::primary(span); + + if self.id == DiagnosticId::RevealedType { + annotation = annotation.message(self.message); + } + + diag.annotate(annotation); diag } } diff --git a/docs/configuration.md b/docs/configuration.md index 3420611e18..a88f71488f 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -352,6 +352,7 @@ For example, without `force-exclude` enabled, `ruff check /path/to/excluded/file ### Default inclusions By default, Ruff will discover files matching `*.py`, `*.pyi`, `*.ipynb`, or `pyproject.toml`. +In [preview](preview.md) mode, Ruff will also discover `*.pyw` by default. To lint or format files with additional file extensions, use the [`extend-include`](settings.md#extend-include) setting. You can also change the default selection using the [`include`](settings.md#include) setting. 
diff --git a/docs/integrations.md b/docs/integrations.md index d92c785c76..d459ea6959 100644 --- a/docs/integrations.md +++ b/docs/integrations.md @@ -80,7 +80,7 @@ You can add the following configuration to `.gitlab-ci.yml` to run a `ruff forma stage: build interruptible: true image: - name: ghcr.io/astral-sh/ruff:0.14.8-alpine + name: ghcr.io/astral-sh/ruff:0.14.9-alpine before_script: - cd $CI_PROJECT_DIR - ruff --version @@ -106,7 +106,7 @@ Ruff can be used as a [pre-commit](https://pre-commit.com) hook via [`ruff-pre-c ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.8 + rev: v0.14.9 hooks: # Run the linter. - id: ruff-check @@ -119,7 +119,7 @@ To enable lint fixes, add the `--fix` argument to the lint hook: ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.8 + rev: v0.14.9 hooks: # Run the linter. - id: ruff-check @@ -133,7 +133,7 @@ To avoid running on Jupyter Notebooks, remove `jupyter` from the list of allowed ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.8 + rev: v0.14.9 hooks: # Run the linter. - id: ruff-check diff --git a/docs/linter.md b/docs/linter.md index 1f3e4f22b5..6644d54f34 100644 --- a/docs/linter.md +++ b/docs/linter.md @@ -279,10 +279,22 @@ Conversely, the following configuration would only enable fixes for `F401`: Ruff supports several mechanisms for suppressing lint errors, be they false positives or permissible violations. -To omit a lint rule entirely, add it to the "ignore" list via the [`lint.ignore`](settings.md#lint_ignore) +### Configuration + +To omit a lint rule everywhere, add it to the "ignore" list via the [`lint.ignore`](settings.md#lint_ignore) setting, either on the command-line or in your `pyproject.toml` or `ruff.toml` file. -To suppress a violation inline, Ruff uses a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html). +To omit a lint rule within specific files based on file path prefixes or patterns, +see the [`lint.per-file-ignores`](settings.md#lint_per-file-ignores) setting. + +### Comments + +Ruff supports multiple forms of suppression comments, including inline and file-level `noqa` +comments, and range suppressions. + +#### Line-level + +Ruff supports a `noqa` system similar to [Flake8](https://flake8.pycqa.org/en/3.1.1/user/ignoring-errors.html). To ignore an individual violation, add `# noqa: {code}` to the end of the line, like so: ```python @@ -314,6 +326,84 @@ import os # noqa: I001 import abc ``` +The full inline comment specification is as follows: + +- An inline blanket `noqa` comment is given by a case-insensitive match for + `#noqa` with optional whitespace after the `#` symbol, followed by either: the + end of the comment, the beginning of a new comment (`#`), or whitespace + followed by any character other than `:`. +- An inline rule suppression is given by first finding a case-insensitive match + for `#noqa` with optional whitespace after the `#` symbol, optional whitespace + after `noqa`, and followed by the symbol `:`. After this we are expected to + have a list of rule codes which is given by sequences of uppercase ASCII + characters followed by ASCII digits, separated by whitespace or commas. The + list ends at the last valid code. We will attempt to interpret rules with a + missing delimiter (e.g. `F401F841`), though a warning will be emitted in this + case. 
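+
+For instance, each of the following lines carries an inline suppression comment that
+matches this specification (the examples are illustrative):
+
+```python
+import os  # NOQA
+import sys  # noqa:F401
+import json, re  # noqa: E401, F401
+```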
+ +#### Block-level + +*Range suppressions are currently only available in [preview mode](preview.md#preview).* + +To ignore one or more violations within a range or block of code, a "disable" comment +followed by a matching "enable" comment can be used, like so: + +```python +# ruff: disable[E501] +VALUE_1 = "Lorem ipsum dolor sit amet ..." +VALUE_2 = "Lorem ipsum dolor sit amet ..." +VALUE_3 = "Lorem ipsum dolor sit amet ..." +# ruff: enable[E501] +``` + +To define a range, both the "disable" and "enable" comments must have matching codes, +in the same order, as well as matching indentation levels within a logical block of code: + +```python +def foo(): + # ruff: disable[E741, F841] + i = 1 + # ruff: enable[E741, F841] +``` + +If no matching "enable" comment is found, Ruff will also treat this as an "implicit" range. +The implicit range is defined from the starting "disable" comment, until reaching +a logical scope indented less than the starting comment: + +```python +def foo(): + # ruff: disable[E741, F841] + i = 1 + if True: + O = 1 + l = 1 + +# implicit end of range +foo() +``` + +It is strongly suggested to use explicit range suppressions, in order to prevent +accidental suppressions of violations, especially at global module scope. + +Range suppressions cannot be used to enable or select rules that aren't already +selected by the project configuration or runtime flags. An "enable" comment can only +be used to terminate a preceding "disable" comment with identical codes. + +Unlike `noqa` suppressions, range suppressions do not support "blanket" suppression +of all violations. At least one violation code must be listed. + +The full range suppression comment specification is as follows: + +- An own-line comment starting with case sensitive `#ruff:`, with optional whitespace + after the `#` symbol and `:` symbol, followed by either `disable` or `enable` + to start or end a range respectively, immediately followed by `[`, any codes to + be suppressed, and ending with `]`. +- Codes to be suppressed must be separated by commas, with optional whitespace + before or after each code, and may be followed by an optional trailing comma + after the last code. + +#### File-level + To ignore all violations across an entire file, add the line `# ruff: noqa` anywhere in the file, preferably towards the top, like so: @@ -328,51 +418,46 @@ file, preferably towards the top, like so: # ruff: noqa: F841 ``` -Or see the [`lint.per-file-ignores`](settings.md#lint_per-file-ignores) setting, which enables the same -functionality from within your `pyproject.toml` or `ruff.toml` file. - Global `noqa` comments must be on their own line to disambiguate from comments which ignore violations on a single line. Note that Ruff will also respect Flake8's `# flake8: noqa` directive, and will treat it as equivalent to `# ruff: noqa`. -### Full suppression comment specification +The file-level suppression comment specification is as follows: -The full specification is as follows: - -- An inline blanket `noqa` comment is given by a case-insensitive match for - `#noqa` with optional whitespace after the `#` symbol, followed by either: the - end of the comment, the beginning of a new comment (`#`), or whitespace - followed by any character other than `:`. -- An inline rule suppression is given by first finding a case-insensitive match - for `#noqa` with optional whitespace after the `#` symbol, optional whitespace - after `noqa`, and followed by the symbol `:`. 
After this we are expected to - have a list of rule codes which is given by sequences of uppercase ASCII - characters followed by ASCII digits, separated by whitespace or commas. The - list ends at the last valid code. We will attempt to interpret rules with a - missing delimiter (e.g. `F401F841`), though a warning will be emitted in this - case. - A file-level exemption comment is given by a case-sensitive match for `#ruff:` or `#flake8:`, with optional whitespace after `#` and before `:`, followed by optional whitespace and a case-insensitive match for `noqa`. After this, the - specification is as in the inline case. + specification is as in the inline `noqa` suppressions above. -### Detecting unused suppression comments +### Detecting unused suppressions Ruff implements a special rule, [`unused-noqa`](https://docs.astral.sh/ruff/rules/unused-noqa/), -under the `RUF100` code, to enforce that your `noqa` directives are "valid", in that the violations -they _say_ they ignore are actually being triggered on that line (and thus suppressed). To flag -unused `noqa` directives, run: `ruff check /path/to/file.py --extend-select RUF100`. +under the `RUF100` code, to enforce that your suppressions are "valid", in that the violations +they _say_ they ignore are actually being triggered and suppressed. To flag +unused suppression comments, run Ruff with `--extend-select RUF100`, like so: -Ruff can also _remove_ any unused `noqa` directives via its fix functionality. To remove any -unused `noqa` directives, run: `ruff check /path/to/file.py --extend-select RUF100 --fix`. +```shell-session +$ ruff check /path/to/file.py --extend-select RUF100 +``` + +Ruff can also _remove_ any unused suppression comments via its fix functionality. +To remove any unused suppressions, run Ruff with `--fix`, like so: + +```shell-session +$ ruff check /path/to/file.py --extend-select RUF100 --fix +``` ### Inserting necessary suppression comments Ruff can _automatically add_ `noqa` directives to all lines that contain violations, which is useful when migrating a new codebase to Ruff. To automatically add `noqa` directives to all -relevant lines (with the appropriate rule codes), run: `ruff check /path/to/file.py --add-noqa`. +relevant lines (with the appropriate rule codes), run Ruff with `--add-noqa`, like so: + +```shell-session +$ ruff check /path/to/file.py --add-noqa +``` ### Action comments diff --git a/docs/tutorial.md b/docs/tutorial.md index 59b64e52e1..04169bc1f4 100644 --- a/docs/tutorial.md +++ b/docs/tutorial.md @@ -369,7 +369,7 @@ This tutorial has focused on Ruff's command-line interface, but Ruff can also be ```yaml - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.14.8 + rev: v0.14.9 hooks: # Run the linter. 
- id: ruff-check diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index adbdcd7545..66041ec1d3 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -30,7 +30,7 @@ ty_python_semantic = { path = "../crates/ty_python_semantic" } ty_vendored = { path = "../crates/ty_vendored" } libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer", default-features = false } -salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "59aa1075e837f5deb0d6ffb24b68fedc0f4bc5e0", default-features = false, features = [ +salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "55e5e7d32fa3fc189276f35bb04c9438f9aedbd1", default-features = false, features = [ "compact_str", "macros", "salsa_unstable", diff --git a/pyproject.toml b/pyproject.toml index 5159682235..96f840d03b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "ruff" -version = "0.14.8" +version = "0.14.9" description = "An extremely fast Python linter and code formatter, written in Rust." authors = [{ name = "Astral Software Inc.", email = "hey@astral.sh" }] readme = "README.md" diff --git a/python/py-fuzzer/fuzz.py b/python/py-fuzzer/fuzz.py index 090f329b32..a9659b8cf0 100644 --- a/python/py-fuzzer/fuzz.py +++ b/python/py-fuzzer/fuzz.py @@ -33,7 +33,7 @@ from collections.abc import Callable from dataclasses import KW_ONLY, dataclass from functools import partial from pathlib import Path -from typing import NewType, NoReturn, assert_never +from typing import Final, NewType, NoReturn, assert_never from pysource_codegen import generate as generate_random_code from pysource_minimize import CouldNotMinimize, minimize as minimize_repro @@ -44,6 +44,12 @@ MinimizedSourceCode = NewType("MinimizedSourceCode", str) Seed = NewType("Seed", int) ExitCode = NewType("ExitCode", int) +TY_TARGET_PLATFORM: Final = "linux" + +# ty supports `--python-version=3.8`, but typeshed only supports 3.9+, +# so that's probably the oldest version we can usefully test with. 
+OLDEST_SUPPORTED_PYTHON: Final = "3.9" + def ty_contains_bug(code: str, *, ty_executable: Path) -> bool: """Return `True` if the code triggers a panic in type-checking code.""" @@ -51,7 +57,17 @@ def ty_contains_bug(code: str, *, ty_executable: Path) -> bool: input_file = Path(tempdir, "input.py") input_file.write_text(code) completed_process = subprocess.run( - [ty_executable, "check", input_file], capture_output=True, text=True + [ + ty_executable, + "check", + input_file, + "--python-version", + OLDEST_SUPPORTED_PYTHON, + "--python-platform", + TY_TARGET_PLATFORM, + ], + capture_output=True, + text=True, ) return completed_process.returncode not in {0, 1, 2} @@ -137,7 +153,10 @@ class FuzzResult: case Executable.RUFF: panic_message = f"The following code triggers a {new}parser bug:" case Executable.TY: - panic_message = f"The following code triggers a {new}ty panic:" + panic_message = ( + f"The following code triggers a {new}ty panic with " + f"`--python-version={OLDEST_SUPPORTED_PYTHON} --python-platform={TY_TARGET_PLATFORM}`:" + ) case _ as unreachable: assert_never(unreachable) diff --git a/python/py-fuzzer/pyproject.toml b/python/py-fuzzer/pyproject.toml index 52bc17f25a..de1af7cc26 100644 --- a/python/py-fuzzer/pyproject.toml +++ b/python/py-fuzzer/pyproject.toml @@ -19,7 +19,7 @@ requires = ["hatchling"] build-backend = "hatchling.build" [dependency-groups] -dev = ["mypy", "ruff"] +dev = ["mypy", "ruff", "ty"] [tool.hatch.build.targets.wheel] include = ["fuzz.py"] diff --git a/python/py-fuzzer/uv.lock b/python/py-fuzzer/uv.lock index 7716e16da9..096049a945 100644 --- a/python/py-fuzzer/uv.lock +++ b/python/py-fuzzer/uv.lock @@ -89,6 +89,7 @@ dependencies = [ dev = [ { name = "mypy" }, { name = "ruff" }, + { name = "ty" }, ] [package.metadata] @@ -104,6 +105,7 @@ requires-dist = [ dev = [ { name = "mypy" }, { name = "ruff" }, + { name = "ty" }, ] [[package]] @@ -196,6 +198,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4f/bd/de8d508070629b6d84a30d01d57e4a65c69aa7f5abe7560b8fad3b50ea59/termcolor-3.1.0-py3-none-any.whl", hash = "sha256:591dd26b5c2ce03b9e43f391264626557873ce1d379019786f99b0c2bee140aa", size = 7684, upload-time = "2025-04-30T11:37:52.382Z" }, ] +[[package]] +name = "ty" +version = "0.0.1a32" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/92/8da015685fb83734a2a83de02080e64d182509de77fa9bcf3eed12eeab4b/ty-0.0.1a32.tar.gz", hash = "sha256:12f62e8a3dd0eaeb9557d74b1c32f0616ae40eae10a4f411e1e2a73225f67ff2", size = 4689151, upload-time = "2025-12-05T21:04:26.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e6/fdc35c9ba047f16afdfedf36fb51c221e0190ccde9f70ee28e77084d6612/ty-0.0.1a32-py3-none-linux_armv6l.whl", hash = "sha256:ffe595eaf616f06f58f951766477830a55c2502d2c9f77dde8f60d9a836e0645", size = 9673128, upload-time = "2025-12-05T21:04:17.702Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/eaff31048e2f309f37478f7d715c8de9f9bab03cba4758da27b9311147af/ty-0.0.1a32-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:07f1dce88ad6028fb14665aefe4e6697012c34bd48edd37d02b7eb6a833dbf62", size = 9434094, upload-time = "2025-12-05T21:04:03.383Z" }, + { url = "https://files.pythonhosted.org/packages/67/d4/ea8ed57d11b81c459f23561fd6bfb0f54a8d4120cf72541e3bdf71d46202/ty-0.0.1a32-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8fab7ed12528c77ddd600a9638ca859156a53c20f1e381353fa87a255bd397eb", size = 8980296, upload-time = "2025-12-05T21:04:28.912Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/02/3ce98bbfbb3916678d717ee69358d38a404ca9a39391dda8874b66dd5ee7/ty-0.0.1a32-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ace395280fc21e25eff0a53cfbd68170f90a4b8ef2f85dfabe1ecbca2ced456b", size = 9263054, upload-time = "2025-12-05T21:04:05.619Z" }, + { url = "https://files.pythonhosted.org/packages/b7/be/a639638bcd1664de2d70a87da6c4fe0e3272a60b7fa3f0c108a956a456bd/ty-0.0.1a32-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2bcbeed7f5ed8e3c1c7e525fce541e7b943ac04ee7fe369a926551b5e50ea4a8", size = 9451396, upload-time = "2025-12-05T21:04:01.265Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a4/2bcf54e842a3d10dc14b369f28a3bab530c5d7ddba624e910b212bda93ee/ty-0.0.1a32-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:60ff2e4493f90f81a260205d87719bb1d3420928a1e4a2a7454af7cbdfed2047", size = 9862726, upload-time = "2025-12-05T21:04:08.806Z" }, + { url = "https://files.pythonhosted.org/packages/5f/c7/19e6719496e59f2f082f34bcac312698366cf50879fdcc3ef76298bfe6a0/ty-0.0.1a32-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:53cad50a59a0d943b06872e0b10f9f2b564805c2ea93f64c7798852bc1901954", size = 10475051, upload-time = "2025-12-05T21:04:31.059Z" }, + { url = "https://files.pythonhosted.org/packages/88/77/bdf0ddb066d2b62f141d058f8a33bb7c8628cdbb8bfa75b20e296b79fb4e/ty-0.0.1a32-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:343d43cdc1d7f649ea2baa64ac2b479da3d679239b94509f1df12f7211561ea9", size = 10232712, upload-time = "2025-12-05T21:04:19.849Z" }, + { url = "https://files.pythonhosted.org/packages/ed/07/f73260a461762a581a007015c1019d40658828ce41576f8c1db88dee574d/ty-0.0.1a32-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f45483e4a84bcf622413712164ea687ce323a9f7013b9e7977c5d623ed937ca9", size = 10237705, upload-time = "2025-12-05T21:04:35.366Z" }, + { url = "https://files.pythonhosted.org/packages/2c/57/dbb92206cf2f798d8c51ea16504e8afb90a139d0ff105c31cec9a1db29f9/ty-0.0.1a32-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d452f30d47002a6bafc36d1b6aee42c321e9ec9f7f43a04a2ee7d48c208b86c", size = 9766469, upload-time = "2025-12-05T21:04:22.236Z" }, + { url = "https://files.pythonhosted.org/packages/c3/5e/143d93bd143abcebcbaa98c8aeec78898553d62d0a5a432cd79e0cf5bd6d/ty-0.0.1a32-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:86c4e31737fe954637890cef1f3e1b479ffb20e836cac3b76050bdbe80005010", size = 9238592, upload-time = "2025-12-05T21:04:11.33Z" }, + { url = "https://files.pythonhosted.org/packages/21/b8/225230ae097ed88f3c92ad974dd77f8e4f86f2594d9cd0c729da39769878/ty-0.0.1a32-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:daf15fa03bc39a76a0fbc9c2d81d79d528f584e3fbe08d71981e3f7912db91d6", size = 9502161, upload-time = "2025-12-05T21:04:37.642Z" }, + { url = "https://files.pythonhosted.org/packages/85/13/cc89955c9637f25f3aca2dd7749c6008639ef036f0b9bea3e9d89e892ff9/ty-0.0.1a32-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6128f6bab5c6dab3d08689fed1d529dc34f50f221f89c8e16064ed0c549dad7a", size = 9603058, upload-time = "2025-12-05T21:04:39.532Z" }, + { url = "https://files.pythonhosted.org/packages/46/77/1fe2793c8065a02d1f70ca7da1b87db49ca621bcbbdb79a18ad79d5d0ab2/ty-0.0.1a32-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:55aab688be1b46776a5a458a1993cae0da7725932c45393399c479c2fa979337", size = 9879903, upload-time = "2025-12-05T21:04:13.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/47/fd58e80a3e42310b4b649340d5d97403fe796146cae8678b3a031a414b8e/ty-0.0.1a32-py3-none-win32.whl", hash = "sha256:f55ec25088a09236ad1578b656a07fa009c3a353f5923486905ba48175d142a6", size = 9077703, upload-time = "2025-12-05T21:04:15.849Z" }, + { url = "https://files.pythonhosted.org/packages/8d/96/209c417c69317339ea8e9b3277fd98364a0e97dd1ffd3585e143ec7b4e57/ty-0.0.1a32-py3-none-win_amd64.whl", hash = "sha256:ed8d5cbd4e47dfed86aaa27e243008aa4e82b6a5434f3ab95c26d3ee5874d9d7", size = 9922426, upload-time = "2025-12-05T21:04:33.289Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1c/350fd851fb91244f8c80cec218009cbee7564d76c14e2f423b47e69a5cbc/ty-0.0.1a32-py3-none-win_arm64.whl", hash = "sha256:dbb25f9b513d34cee8ce419514eaef03313f45c3f7ab4eb6e6d427ea1f6854af", size = 9453761, upload-time = "2025-12-05T21:04:24.502Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0" diff --git a/scripts/benchmarks/pyproject.toml b/scripts/benchmarks/pyproject.toml index e9bad66935..55a526eb3b 100644 --- a/scripts/benchmarks/pyproject.toml +++ b/scripts/benchmarks/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "scripts" -version = "0.14.8" +version = "0.14.9" description = "" authors = ["Charles Marsh "] diff --git a/scripts/check_ecosystem.py b/scripts/check_ecosystem.py index fe2e73d9a1..8bc2dbea55 100755 --- a/scripts/check_ecosystem.py +++ b/scripts/check_ecosystem.py @@ -47,7 +47,7 @@ class Repository(NamedTuple): show_fixes: bool = False @asynccontextmanager - async def clone(self: Self, checkout_dir: Path) -> AsyncIterator[Path]: + async def clone(self: Self, checkout_dir: Path) -> AsyncIterator[str]: """Shallow clone this repository to a temporary directory.""" if checkout_dir.exists(): logger.debug(f"Reusing {self.org}:{self.repo}") diff --git a/scripts/mypy_primer.sh b/scripts/mypy_primer.sh index 527376c12b..7af2501f03 100755 --- a/scripts/mypy_primer.sh +++ b/scripts/mypy_primer.sh @@ -10,8 +10,10 @@ PRIMER_SELECTOR="$(paste -s -d'|' "${PRIMER_SELECTOR}")" echo "new commit" git rev-list --format=%s --max-count=1 "${GITHUB_SHA}" -MERGE_BASE="$(git merge-base "${GITHUB_SHA}" "origin/${GITHUB_BASE_REF}")" -git checkout -b base_commit "${MERGE_BASE}" +if [ -z "${BASE_REVISION:-}" ]; then + BASE_REVISION="$(git merge-base "${GITHUB_SHA}" "origin/${GITHUB_BASE_REF}")" +fi +git checkout -b base_commit "${BASE_REVISION}" echo "base commit" git rev-list --format=%s --max-count=1 base_commit