Merge branch 'main' into brent/indent-lambda-params

This commit is contained in:
Brent Westbrook 2025-12-09 14:34:28 -05:00
commit 4ffbd496e3
195 changed files with 21154 additions and 9308 deletions

View File

@ -75,14 +75,6 @@
matchManagers: ["cargo"], matchManagers: ["cargo"],
enabled: false, enabled: false,
}, },
{
// `mkdocs-material` requires a manual update to keep the version in sync
// with `mkdocs-material-insider`.
// See: https://squidfunk.github.io/mkdocs-material/insiders/upgrade/
matchManagers: ["pip_requirements"],
matchPackageNames: ["mkdocs-material"],
enabled: false,
},
{
groupName: "pre-commit dependencies",
matchManagers: ["pre-commit"],

View File

@ -24,6 +24,8 @@ env:
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.14"
NEXTEST_PROFILE: ci
# Enable mdtests that require external dependencies
MDTEST_EXTERNAL: "1"
jobs:
determine_changes:
@ -779,8 +781,6 @@ jobs:
name: "mkdocs" name: "mkdocs"
runs-on: ubuntu-latest runs-on: ubuntu-latest
timeout-minutes: 10 timeout-minutes: 10
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps: steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with: with:
@ -788,11 +788,6 @@ jobs:
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain"
run: rustup show
- name: Install uv
@ -800,11 +795,7 @@ jobs:
with:
python-version: 3.13
activate-environment: true
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: uv pip install -r docs/requirements-insiders.txt
- name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: uv pip install -r docs/requirements.txt
- name: "Update README File"
run: python scripts/transform_readme.py --target mkdocs
@ -812,12 +803,8 @@ jobs:
run: python scripts/generate_mkdocs.py
- name: "Check docs formatting"
run: python scripts/check_docs_formatted.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.yml
check-formatter-instability-and-black-similarity:
name: "formatter instabilities and black similarity"

View File

@ -20,8 +20,6 @@ on:
jobs:
mkdocs:
runs-on: ubuntu-latest
env:
MKDOCS_INSIDERS_SSH_KEY_EXISTS: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY != '' }}
steps:
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
@ -59,23 +57,12 @@ jobs:
echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV" echo "branch_name=update-docs-$branch_display_name-$timestamp" >> "$GITHUB_ENV"
echo "timestamp=$timestamp" >> "$GITHUB_ENV" echo "timestamp=$timestamp" >> "$GITHUB_ENV"
- name: "Add SSH key"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
uses: webfactory/ssh-agent@a6f90b1f127823b31d4d4a8d96047790581349bd # v0.9.1
with:
ssh-private-key: ${{ secrets.MKDOCS_INSIDERS_SSH_KEY }}
- name: "Install Rust toolchain" - name: "Install Rust toolchain"
run: rustup show run: rustup show
- uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2 - uses: Swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 # v2.8.2
- name: "Install Insiders dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: pip install -r docs/requirements-insiders.txt
- name: "Install dependencies" - name: "Install dependencies"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: pip install -r docs/requirements.txt run: pip install -r docs/requirements.txt
- name: "Copy README File" - name: "Copy README File"
@ -83,13 +70,8 @@ jobs:
python scripts/transform_readme.py --target mkdocs
python scripts/generate_mkdocs.py
- name: "Build Insiders docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
run: mkdocs build --strict -f mkdocs.insiders.yml
- name: "Build docs"
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
run: mkdocs build --strict -f mkdocs.public.yml
run: mkdocs build --strict -f mkdocs.yml
- name: "Clone docs repo"
run: git clone https://${{ secrets.ASTRAL_DOCS_PAT }}@github.com/astral-sh/docs.git astral-docs

View File

@ -331,13 +331,6 @@ you addressed them.
## MkDocs
> [!NOTE]
>
> The documentation uses Material for MkDocs Insiders, which is closed-source software.
> This means only members of the Astral organization can preview the documentation exactly as it
> will appear in production.
> Outside contributors can still preview the documentation, but there will be some differences. Consult [the Material for MkDocs documentation](https://squidfunk.github.io/mkdocs-material/insiders/benefits/#features) for which features are exclusively available in the insiders version.
To preview any changes to the documentation locally:
1. Install the [Rust toolchain](https://www.rust-lang.org/tools/install).
@ -351,11 +344,7 @@ To preview any changes to the documentation locally:
1. Run the development server with:
```shell
# For contributors.
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.public.yml
# For members of the Astral org, which has access to MkDocs Insiders via sponsorship.
uvx --with-requirements docs/requirements-insiders.txt -- mkdocs serve -f mkdocs.insiders.yml
uvx --with-requirements docs/requirements.txt -- mkdocs serve -f mkdocs.yml
```
The documentation should then be available locally at

Cargo.lock (generated)
View File

@ -4557,6 +4557,7 @@ dependencies = [
"anyhow", "anyhow",
"camino", "camino",
"colored 3.0.0", "colored 3.0.0",
"dunce",
"insta", "insta",
"memchr", "memchr",
"path-slash", "path-slash",

View File

@ -272,6 +272,12 @@ large_stack_arrays = "allow"
lto = "fat" lto = "fat"
codegen-units = 16 codegen-units = 16
# Profile to build a minimally sized binary for ruff/ty
[profile.minimal-size]
inherits = "release"
opt-level = "z"
codegen-units = 1
# Some crates don't change as much but benefit more from # Some crates don't change as much but benefit more from
# more expensive optimization passes, so we selectively # more expensive optimization passes, so we selectively
# decrease codegen-units in some cases. # decrease codegen-units in some cases.
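For context, a named profile like this is opt-in: it would typically be selected explicitly at build time, e.g. `cargo build --profile minimal-size`. The exact invocation used by the release tooling isn't shown in this diff.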

View File

@ -1440,6 +1440,78 @@ def function():
Ok(())
}
#[test]
fn ignore_noqa() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
import os # noqa: F401
# ruff: disable[F401]
import sys
"#,
)?;
// without --ignore-noqa
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py"),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 1 error.
[*] 1 fixable with the `--fix` option.
----- stderr -----
");
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--preview"]),
@r"
success: true
exit_code: 0
----- stdout -----
All checks passed!
----- stderr -----
");
// with --ignore-noqa --preview
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.args(["--ignore-noqa", "--preview"]),
@r"
success: false
exit_code: 1
----- stdout -----
noqa.py:2:8: F401 [*] `os` imported but unused
noqa.py:5:8: F401 [*] `sys` imported but unused
Found 2 errors.
[*] 2 fixable with the `--fix` option.
----- stderr -----
");
Ok(())
}
#[test]
fn add_noqa() -> Result<()> {
let fixture = CliTest::new()?;
@ -1632,6 +1704,100 @@ def unused(x): # noqa: ANN001, ARG001, D103
Ok(())
}
#[test]
fn add_noqa_existing_file_level_noqa() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
# ruff: noqa F401
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: noqa F401
import os
");
Ok(())
}
#[test]
fn add_noqa_existing_range_suppression() -> Result<()> {
let fixture = CliTest::new()?;
fixture.write_file(
"ruff.toml",
r#"
[lint]
select = ["F401"]
"#,
)?;
fixture.write_file(
"noqa.py",
r#"
# ruff: disable[F401]
import os
"#,
)?;
assert_cmd_snapshot!(fixture
.check_command()
.args(["--config", "ruff.toml"])
.arg("noqa.py")
.arg("--preview")
.args(["--add-noqa"])
.arg("-")
.pass_stdin(r#"
"#), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
");
let test_code =
fs::read_to_string(fixture.root().join("noqa.py")).expect("should read test file");
insta::assert_snapshot!(test_code, @r"
# ruff: disable[F401]
import os
");
Ok(())
}
#[test]
fn add_noqa_multiline_comment() -> Result<()> {
let fixture = CliTest::new()?;

View File

@ -166,28 +166,8 @@ impl Diagnostic {
/// Returns the primary message for this diagnostic.
///
/// A diagnostic always has a message, but it may be empty.
///
/// NOTE: At present, this routine will return the first primary
/// annotation's message as the primary message when the main diagnostic
/// message is empty. This is meant to facilitate an incremental migration
/// in ty over to the new diagnostic data model. (The old data model
/// didn't distinguish between messages on the entire diagnostic and
/// messages attached to a particular span.)
pub fn primary_message(&self) -> &str {
if !self.inner.message.as_str().is_empty() {
return self.inner.message.as_str();
}
// FIXME: As a special case, while we're migrating ty
// to the new diagnostic data model, we'll look for a primary
// message from the primary annotation. This is because most
// ty diagnostics are created with an empty diagnostic
// message and instead attach the message to the annotation.
// Fixing this will require touching basically every diagnostic
// in ty, so we do it this way for now to match the old
// semantics. ---AG
self.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default()
self.inner.message.as_str()
}
/// Introspects this diagnostic and returns what kind of "primary" message
@ -199,18 +179,6 @@ impl Diagnostic {
/// contains *essential* information or context for understanding the
/// diagnostic.
///
/// The reason why we don't just always return both the main diagnostic
/// message and the primary annotation message is because this was written
/// in the midst of an incremental migration of ty over to the new
/// diagnostic data model. At time of writing, diagnostics were still
/// constructed in the old model where the main diagnostic message and the
/// primary annotation message were not distinguished from each other. So
/// for now, we carefully return what kind of messages this diagnostic
/// contains. In effect, if this diagnostic has a non-empty main message
/// *and* a non-empty primary annotation message, then the diagnostic is
/// 100% using the new diagnostic data model and we can format things
/// appropriately.
///
/// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what
/// you want.
@ -224,11 +192,10 @@ impl Diagnostic {
.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default();
match (main.is_empty(), annotation.is_empty()) {
(false, true) => ConciseMessage::MainDiagnostic(main),
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
(false, false) => ConciseMessage::Both { main, annotation },
(true, true) => ConciseMessage::Empty,
if annotation.is_empty() {
ConciseMessage::MainDiagnostic(main)
} else {
ConciseMessage::Both { main, annotation }
}
}
@ -693,18 +660,6 @@ impl SubDiagnostic {
/// contains *essential* information or context for understanding the
/// diagnostic.
///
/// The reason why we don't just always return both the main diagnostic
/// message and the primary annotation message is because this was written
/// in the midst of an incremental migration of ty over to the new
/// diagnostic data model. At time of writing, diagnostics were still
/// constructed in the old model where the main diagnostic message and the
/// primary annotation message were not distinguished from each other. So
/// for now, we carefully return what kind of messages this diagnostic
/// contains. In effect, if this diagnostic has a non-empty main message
/// *and* a non-empty primary annotation message, then the diagnostic is
/// 100% using the new diagnostic data model and we can format things
/// appropriately.
///
/// The type returned implements the `std::fmt::Display` trait. In most
/// cases, just converting it to a string (or printing it) will do what
/// you want.
@ -714,11 +669,10 @@ impl SubDiagnostic {
.primary_annotation()
.and_then(|ann| ann.get_message())
.unwrap_or_default();
match (main.is_empty(), annotation.is_empty()) {
(false, true) => ConciseMessage::MainDiagnostic(main),
(true, false) => ConciseMessage::PrimaryAnnotation(annotation),
(false, false) => ConciseMessage::Both { main, annotation },
(true, true) => ConciseMessage::Empty,
if annotation.is_empty() {
ConciseMessage::MainDiagnostic(main)
} else {
ConciseMessage::Both { main, annotation }
}
}
}
@ -888,6 +842,10 @@ impl Annotation {
pub fn hide_snippet(&mut self, yes: bool) {
self.hide_snippet = yes;
}
pub fn is_primary(&self) -> bool {
self.is_primary
}
}
/// Tags that can be associated with an annotation.
@ -1508,28 +1466,10 @@ pub enum DiagnosticFormat {
pub enum ConciseMessage<'a> {
/// A diagnostic contains a non-empty main message and an empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "new" data model.
MainDiagnostic(&'a str),
/// A diagnostic contains an empty main message and a non-empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "old" data model.
PrimaryAnnotation(&'a str),
/// A diagnostic contains a non-empty main message and a non-empty
/// primary annotation message.
///
/// This strongly suggests that the diagnostic is using the
/// "new" data model.
Both { main: &'a str, annotation: &'a str },
/// A diagnostic contains an empty main message and an empty
/// primary annotation message.
///
/// This indicates that the diagnostic is probably using the old
/// model.
Empty,
/// A custom concise message has been provided.
Custom(&'a str),
}
@ -1540,13 +1480,9 @@ impl std::fmt::Display for ConciseMessage<'_> {
ConciseMessage::MainDiagnostic(main) => {
write!(f, "{main}")
}
ConciseMessage::PrimaryAnnotation(annotation) => {
write!(f, "{annotation}")
}
ConciseMessage::Both { main, annotation } => {
write!(f, "{main}: {annotation}")
}
ConciseMessage::Empty => Ok(()),
ConciseMessage::Custom(message) => {
write!(f, "{message}")
}

View File

@ -28,9 +28,11 @@ yaml.load("{}", SafeLoader)
yaml.load("{}", yaml.SafeLoader) yaml.load("{}", yaml.SafeLoader)
yaml.load("{}", CSafeLoader) yaml.load("{}", CSafeLoader)
yaml.load("{}", yaml.CSafeLoader) yaml.load("{}", yaml.CSafeLoader)
yaml.load("{}", yaml.cyaml.CSafeLoader)
yaml.load("{}", NewSafeLoader) yaml.load("{}", NewSafeLoader)
yaml.load("{}", Loader=SafeLoader) yaml.load("{}", Loader=SafeLoader)
yaml.load("{}", Loader=yaml.SafeLoader) yaml.load("{}", Loader=yaml.SafeLoader)
yaml.load("{}", Loader=CSafeLoader) yaml.load("{}", Loader=CSafeLoader)
yaml.load("{}", Loader=yaml.CSafeLoader) yaml.load("{}", Loader=yaml.CSafeLoader)
yaml.load("{}", Loader=yaml.cyaml.CSafeLoader)
yaml.load("{}", Loader=NewSafeLoader) yaml.load("{}", Loader=NewSafeLoader)

View File

@ -199,6 +199,9 @@ def bytes_okay(value=bytes(1)):
def int_okay(value=int("12")): def int_okay(value=int("12")):
pass pass
# Allow immutable slice()
def slice_okay(value=slice(1,2)):
pass
# Allow immutable complex() value # Allow immutable complex() value
def complex_okay(value=complex(1,2)): def complex_okay(value=complex(1,2)):

View File

@ -218,3 +218,26 @@ def should_not_fail(payload, Args):
Args:
The other arguments.
"""
# Test cases for Unpack[TypedDict] kwargs
from typing import TypedDict
from typing_extensions import Unpack
class User(TypedDict):
id: int
name: str
def function_with_unpack_args_should_not_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs.
Args:
query: some arg
"""
def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
"""Function with Unpack kwargs but missing query arg documentation.
Args:
**kwargs: keyword arguments
"""

View File

@ -2,15 +2,40 @@ from abc import ABC, abstractmethod
from contextlib import suppress
class MyError(Exception):
...
class MySubError(MyError):
...
class MyValueError(ValueError):
...
class MyUserWarning(UserWarning):
...
# Violation test cases with builtin errors: PLW0133
# Test case 1: Useless exception statement
def func():
AssertionError("This is an assertion error") # PLW0133
MyError("This is a custom error") # PLW0133
MySubError("This is a custom error") # PLW0133
MyValueError("This is a custom value error") # PLW0133
# Test case 2: Useless exception statement in try-except block
def func():
try:
Exception("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
except Exception as err:
pass
@ -19,6 +44,9 @@ def func():
def func():
if True:
RuntimeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 4: Useless exception statement in class
@ -26,12 +54,18 @@ def func():
class Class:
def __init__(self):
TypeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 5: Useless exception statement in function
def func():
def inner():
IndexError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
inner()
@ -40,6 +74,9 @@ def func():
def func():
while True:
KeyError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 7: Useless exception statement in abstract class
@ -48,27 +85,58 @@ def func():
@abstractmethod
def method(self):
NotImplementedError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 8: Useless exception statement inside context manager
def func():
with suppress(AttributeError):
with suppress(Exception):
AttributeError("This is an exception") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
# Test case 9: Useless exception statement in parentheses
def func():
(RuntimeError("This is an exception")) # PLW0133
(MyError("This is an exception")) # PLW0133
(MySubError("This is an exception")) # PLW0133
(MyValueError("This is an exception")) # PLW0133
# Test case 10: Useless exception statement in continuation
def func():
x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
x = 1; (MyError("This is an exception")); y = 2 # PLW0133
x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
# Test case 11: Useless warning statement
def func():
UserWarning("This is an assertion error") # PLW0133
UserWarning("This is a user warning") # PLW0133
MyUserWarning("This is a custom user warning") # PLW0133
# Test case 12: Useless exception statement at module level
import builtins
builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
PythonFinalizationError("Added in Python 3.13") # PLW0133
MyError("This is an exception") # PLW0133
MySubError("This is an exception") # PLW0133
MyValueError("This is an exception") # PLW0133
UserWarning("This is a user warning") # PLW0133
MyUserWarning("This is a custom user warning") # PLW0133
# Non-violation test cases: PLW0133
@ -119,10 +187,3 @@ def func():
def func():
with suppress(AttributeError):
raise AttributeError("This is an exception") # OK
import builtins
builtins.TypeError("still an exception even though it's an Attribute")
PythonFinalizationError("Added in Python 3.13")

View File

@ -0,0 +1,88 @@
def f():
# These should both be ignored by the range suppression.
# ruff: disable[E741, F841]
I = 1
# ruff: enable[E741, F841]
def f():
# These should both be ignored by the implicit range suppression.
# Should also generate an "unmatched suppression" warning.
# ruff:disable[E741,F841]
I = 1
def f():
# Neither warning is ignored, and an "unmatched suppression"
# should be generated.
I = 1
# ruff: enable[E741, F841]
def f():
# One should be ignored by the range suppression, and
# the other logged to the user.
# ruff: disable[E741]
I = 1
# ruff: enable[E741]
def f():
# Test interleaved range suppressions. The first and last
# lines should each log a different warning, while the
# middle line should be completely silenced.
# ruff: disable[E741]
l = 0
# ruff: disable[F841]
O = 1
# ruff: enable[E741]
I = 2
# ruff: enable[F841]
def f():
# Neither of these is ignored, and warnings are
# logged to the user.
# ruff: disable[E501]
I = 1
# ruff: enable[E501]
def f():
# These should both be ignored by the range suppression,
# and an unused noqa diagnostic should be logged.
# ruff:disable[E741,F841]
I = 1 # noqa: E741,F841
# ruff:enable[E741,F841]
def f():
# TODO: Duplicate codes should be counted as duplicate, not unused
# ruff: disable[F841, F841]
foo = 0
def f():
# Overlapping range suppressions, one should be marked as used,
# and the other should trigger an unused suppression diagnostic
# ruff: disable[F841]
# ruff: disable[F841]
foo = 0
def f():
# Multiple codes but only one is used
# ruff: disable[E741, F401, F841]
foo = 0
def f():
# Multiple codes but only two are used
# ruff: disable[E741, F401, F841]
I = 0
def f():
# Multiple codes but none are used
# ruff: disable[E741, F401, F841]
print("hello")

View File

@ -12,17 +12,20 @@ use crate::fix::edits::delete_comment;
use crate::noqa::{
Code, Directive, FileExemption, FileNoqaDirectives, NoqaDirectives, NoqaMapping,
};
use crate::preview::is_range_suppressions_enabled;
use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target;
use crate::rules::pygrep_hooks;
use crate::rules::ruff;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA};
use crate::settings::LinterSettings;
use crate::suppression::Suppressions;
use crate::{Edit, Fix, Locator};
use super::ast::LintContext;
/// RUF100
#[expect(clippy::too_many_arguments)]
pub(crate) fn check_noqa(
context: &mut LintContext,
path: &Path,
@ -31,6 +34,7 @@ pub(crate) fn check_noqa(
noqa_line_for: &NoqaMapping,
analyze_directives: bool,
settings: &LinterSettings,
suppressions: &Suppressions,
) -> Vec<usize> {
// Identify any codes that are globally exempted (within the current file).
let file_noqa_directives =
@ -40,7 +44,7 @@ pub(crate) fn check_noqa(
let mut noqa_directives =
NoqaDirectives::from_commented_ranges(comment_ranges, &settings.external, path, locator);
if file_noqa_directives.is_empty() && noqa_directives.is_empty() {
if file_noqa_directives.is_empty() && noqa_directives.is_empty() && suppressions.is_empty() {
return Vec::new();
}
@ -60,11 +64,19 @@ pub(crate) fn check_noqa(
continue;
}
// Apply file-level suppressions first
if exemption.contains_secondary_code(code) {
ignored_diagnostics.push(index);
continue;
}
// Apply ranged suppressions next
if is_range_suppressions_enabled(settings) && suppressions.check_diagnostic(diagnostic) {
ignored_diagnostics.push(index);
continue;
}
// Apply end-of-line noqa suppressions last
let noqa_offsets = diagnostic
.parent()
.into_iter()
@ -107,6 +119,9 @@ pub(crate) fn check_noqa(
}
}
// Diagnostics for unused/invalid range suppressions
suppressions.check_suppressions(context, locator);
// Enforce that the noqa directive was actually used (RUF100), unless RUF100 was itself
// suppressed.
if context.is_rule_enabled(Rule::UnusedNOQA)
@ -128,8 +143,13 @@ pub(crate) fn check_noqa(
Directive::All(directive) => {
if matches.is_empty() {
let edit = delete_comment(directive.range(), locator);
let mut diagnostic = context
.report_diagnostic(UnusedNOQA { codes: None }, directive.range());
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: None,
kind: ruff::rules::UnusedNOQAKind::Noqa,
},
directive.range(),
);
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);
diagnostic.set_fix(Fix::safe_edit(edit));
}
@ -224,6 +244,7 @@ pub(crate) fn check_noqa(
.map(|code| (*code).to_string())
.collect(),
}),
kind: ruff::rules::UnusedNOQAKind::Noqa,
},
directive.range(),
);
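For orientation, the precedence encoded above is: file-level exemptions are applied first, range suppressions second (only when preview mode is enabled, via `is_range_suppressions_enabled`), and end-of-line `noqa` comments last. A minimal sketch of the three suppression forms exercised elsewhere in this commit follows; the rule codes are illustrative only.

```python
# ruff: noqa: F401
# ^ file-level exemption: suppresses F401 for the entire file (checked first)

# ruff: disable[F841]
# ^ range suppression (preview-only): suppresses F841 until the matching enable
unused_value = 1
# ruff: enable[F841]

import os  # noqa: F401
# ^ end-of-line suppression: applies only to this line (checked last)
```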

View File

@ -32,6 +32,7 @@ use crate::rules::ruff::rules::test_rules::{self, TEST_RULES, TestRule};
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, TargetVersion, flags}; use crate::settings::{LinterSettings, TargetVersion, flags};
use crate::source_kind::SourceKind; use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Locator, directives, fs}; use crate::{Locator, directives, fs};
pub(crate) mod float; pub(crate) mod float;
@ -128,6 +129,7 @@ pub fn check_path(
source_type: PySourceType, source_type: PySourceType,
parsed: &Parsed<ModModule>, parsed: &Parsed<ModModule>,
target_version: TargetVersion, target_version: TargetVersion,
suppressions: &Suppressions,
) -> Vec<Diagnostic> { ) -> Vec<Diagnostic> {
// Aggregate all diagnostics. // Aggregate all diagnostics.
let mut context = LintContext::new(path, locator.contents(), settings); let mut context = LintContext::new(path, locator.contents(), settings);
@ -339,6 +341,7 @@ pub fn check_path(
&directives.noqa_line_for, &directives.noqa_line_for,
parsed.has_valid_syntax(), parsed.has_valid_syntax(),
settings, settings,
suppressions,
); );
if noqa.is_enabled() { if noqa.is_enabled() {
for index in ignored.iter().rev() { for index in ignored.iter().rev() {
@ -400,6 +403,9 @@ pub fn add_noqa_to_path(
&indexer, &indexer,
); );
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics, ignoring any existing `noqa` directives. // Generate diagnostics, ignoring any existing `noqa` directives.
let diagnostics = check_path( let diagnostics = check_path(
path, path,
@ -414,6 +420,7 @@ pub fn add_noqa_to_path(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
// Add any missing `# noqa` pragmas. // Add any missing `# noqa` pragmas.
@ -427,6 +434,7 @@ pub fn add_noqa_to_path(
&directives.noqa_line_for, &directives.noqa_line_for,
stylist.line_ending(), stylist.line_ending(),
reason, reason,
&suppressions,
) )
} }
@ -461,6 +469,9 @@ pub fn lint_only(
&indexer, &indexer,
); );
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics. // Generate diagnostics.
let diagnostics = check_path( let diagnostics = check_path(
path, path,
@ -475,6 +486,7 @@ pub fn lint_only(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
LinterResult { LinterResult {
@ -566,6 +578,9 @@ pub fn lint_fix<'a>(
&indexer, &indexer,
); );
// Parse range suppression comments
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
// Generate diagnostics. // Generate diagnostics.
let diagnostics = check_path( let diagnostics = check_path(
path, path,
@ -580,6 +595,7 @@ pub fn lint_fix<'a>(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
if iterations == 0 { if iterations == 0 {
@ -769,6 +785,7 @@ mod tests {
use crate::registry::Rule; use crate::registry::Rule;
use crate::settings::LinterSettings; use crate::settings::LinterSettings;
use crate::source_kind::SourceKind; use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet}; use crate::test::{TestedNotebook, assert_notebook_path, test_contents, test_snippet};
use crate::{Locator, assert_diagnostics, directives, settings}; use crate::{Locator, assert_diagnostics, directives, settings};
@ -944,6 +961,7 @@ mod tests {
&locator, &locator,
&indexer, &indexer,
); );
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let mut diagnostics = check_path( let mut diagnostics = check_path(
path, path,
None, None,
@ -957,6 +975,7 @@ mod tests {
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
diagnostics.sort_by(Diagnostic::ruff_start_ordering); diagnostics.sort_by(Diagnostic::ruff_start_ordering);
diagnostics diagnostics

View File

@ -20,12 +20,14 @@ use crate::Locator;
use crate::fs::relativize_path; use crate::fs::relativize_path;
use crate::registry::Rule; use crate::registry::Rule;
use crate::rule_redirects::get_redirect_target; use crate::rule_redirects::get_redirect_target;
use crate::suppression::Suppressions;
/// Generates an array of edits that matches the length of `messages`. /// Generates an array of edits that matches the length of `messages`.
/// Each potential edit in the array is paired, in order, with the associated diagnostic. /// Each potential edit in the array is paired, in order, with the associated diagnostic.
/// Each edit will add a `noqa` comment to the appropriate line in the source to hide /// Each edit will add a `noqa` comment to the appropriate line in the source to hide
/// the diagnostic. These edits may conflict with each other and should not be applied /// the diagnostic. These edits may conflict with each other and should not be applied
/// simultaneously. /// simultaneously.
#[expect(clippy::too_many_arguments)]
pub fn generate_noqa_edits( pub fn generate_noqa_edits(
path: &Path, path: &Path,
diagnostics: &[Diagnostic], diagnostics: &[Diagnostic],
@ -34,11 +36,19 @@ pub fn generate_noqa_edits(
external: &[String], external: &[String],
noqa_line_for: &NoqaMapping, noqa_line_for: &NoqaMapping,
line_ending: LineEnding, line_ending: LineEnding,
suppressions: &Suppressions,
) -> Vec<Option<Edit>> { ) -> Vec<Option<Edit>> {
let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path); let file_directives = FileNoqaDirectives::extract(locator, comment_ranges, external, path);
let exemption = FileExemption::from(&file_directives); let exemption = FileExemption::from(&file_directives);
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
build_noqa_edits_by_diagnostic(comments, locator, line_ending, None) build_noqa_edits_by_diagnostic(comments, locator, line_ending, None)
} }
@ -725,6 +735,7 @@ pub(crate) fn add_noqa(
noqa_line_for: &NoqaMapping, noqa_line_for: &NoqaMapping,
line_ending: LineEnding, line_ending: LineEnding,
reason: Option<&str>, reason: Option<&str>,
suppressions: &Suppressions,
) -> Result<usize> { ) -> Result<usize> {
let (count, output) = add_noqa_inner( let (count, output) = add_noqa_inner(
path, path,
@ -735,6 +746,7 @@ pub(crate) fn add_noqa(
noqa_line_for, noqa_line_for,
line_ending, line_ending,
reason, reason,
suppressions,
); );
fs::write(path, output)?; fs::write(path, output)?;
@ -751,6 +763,7 @@ fn add_noqa_inner(
noqa_line_for: &NoqaMapping, noqa_line_for: &NoqaMapping,
line_ending: LineEnding, line_ending: LineEnding,
reason: Option<&str>, reason: Option<&str>,
suppressions: &Suppressions,
) -> (usize, String) { ) -> (usize, String) {
let mut count = 0; let mut count = 0;
@ -760,7 +773,14 @@ fn add_noqa_inner(
let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator); let directives = NoqaDirectives::from_commented_ranges(comment_ranges, external, path, locator);
let comments = find_noqa_comments(diagnostics, locator, &exemption, &directives, noqa_line_for); let comments = find_noqa_comments(
diagnostics,
locator,
&exemption,
&directives,
noqa_line_for,
suppressions,
);
let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason); let edits = build_noqa_edits_by_line(comments, locator, line_ending, reason);
@ -859,6 +879,7 @@ fn find_noqa_comments<'a>(
exemption: &'a FileExemption, exemption: &'a FileExemption,
directives: &'a NoqaDirectives, directives: &'a NoqaDirectives,
noqa_line_for: &NoqaMapping, noqa_line_for: &NoqaMapping,
suppressions: &'a Suppressions,
) -> Vec<Option<NoqaComment<'a>>> { ) -> Vec<Option<NoqaComment<'a>>> {
// List of noqa comments, ordered to match up with `messages` // List of noqa comments, ordered to match up with `messages`
let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![]; let mut comments_by_line: Vec<Option<NoqaComment<'a>>> = vec![];
@ -875,6 +896,12 @@ fn find_noqa_comments<'a>(
continue; continue;
} }
// Apply ranged suppressions next
if suppressions.check_diagnostic(message) {
comments_by_line.push(None);
continue;
}
// Is the violation ignored by a `noqa` directive on the parent line? // Is the violation ignored by a `noqa` directive on the parent line?
if let Some(parent) = message.parent() { if let Some(parent) = message.parent() {
if let Some(directive_line) = if let Some(directive_line) =
@ -1253,6 +1280,7 @@ mod tests {
use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon}; use crate::rules::pycodestyle::rules::{AmbiguousVariableName, UselessSemicolon};
use crate::rules::pyflakes::rules::UnusedVariable; use crate::rules::pyflakes::rules::UnusedVariable;
use crate::rules::pyupgrade::rules::PrintfStringFormatting; use crate::rules::pyupgrade::rules::PrintfStringFormatting;
use crate::suppression::Suppressions;
use crate::{Edit, Violation}; use crate::{Edit, Violation};
use crate::{Locator, generate_noqa_edits}; use crate::{Locator, generate_noqa_edits};
@ -2848,6 +2876,7 @@ mod tests {
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
None, None,
&Suppressions::default(),
); );
assert_eq!(count, 0); assert_eq!(count, 0);
assert_eq!(output, format!("{contents}")); assert_eq!(output, format!("{contents}"));
@ -2872,6 +2901,7 @@ mod tests {
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
None, None,
&Suppressions::default(),
); );
assert_eq!(count, 1); assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: F841\n"); assert_eq!(output, "x = 1 # noqa: F841\n");
@ -2903,6 +2933,7 @@ mod tests {
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
None, None,
&Suppressions::default(),
); );
assert_eq!(count, 1); assert_eq!(count, 1);
assert_eq!(output, "x = 1 # noqa: E741, F841\n"); assert_eq!(output, "x = 1 # noqa: E741, F841\n");
@ -2934,6 +2965,7 @@ mod tests {
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
None, None,
&Suppressions::default(),
); );
assert_eq!(count, 0); assert_eq!(count, 0);
assert_eq!(output, "x = 1 # noqa"); assert_eq!(output, "x = 1 # noqa");
@ -2956,6 +2988,7 @@ print(
let messages = [PrintfStringFormatting let messages = [PrintfStringFormatting
.into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)]; .into_diagnostic(TextRange::new(12.into(), 79.into()), &source_file)];
let comment_ranges = CommentRanges::default(); let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits( let edits = generate_noqa_edits(
path, path,
&messages, &messages,
@ -2964,6 +2997,7 @@ print(
&[], &[],
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
&suppressions,
); );
assert_eq!( assert_eq!(
edits, edits,
@ -2987,6 +3021,7 @@ bar =
[UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)]; [UselessSemicolon.into_diagnostic(TextRange::new(4.into(), 5.into()), &source_file)];
let noqa_line_for = NoqaMapping::default(); let noqa_line_for = NoqaMapping::default();
let comment_ranges = CommentRanges::default(); let comment_ranges = CommentRanges::default();
let suppressions = Suppressions::default();
let edits = generate_noqa_edits( let edits = generate_noqa_edits(
path, path,
&messages, &messages,
@ -2995,6 +3030,7 @@ bar =
&[], &[],
&noqa_line_for, &noqa_line_for,
LineEnding::Lf, LineEnding::Lf,
&suppressions,
); );
assert_eq!( assert_eq!(
edits, edits,

View File

@ -9,6 +9,11 @@ use crate::settings::LinterSettings;
// Rule-specific behavior
// https://github.com/astral-sh/ruff/pull/21382
pub(crate) const fn is_custom_exception_checking_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/15541
pub(crate) const fn is_suspicious_function_reference_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
@ -286,3 +291,8 @@ pub(crate) const fn is_s310_resolve_string_literal_bindings_enabled(
) -> bool {
settings.preview.is_enabled()
}
// https://github.com/astral-sh/ruff/pull/21623
pub(crate) const fn is_range_suppressions_enabled(settings: &LinterSettings) -> bool {
settings.preview.is_enabled()
}

View File

@ -75,6 +75,7 @@ pub(crate) fn unsafe_yaml_load(checker: &Checker, call: &ast::ExprCall) {
qualified_name.segments(),
["yaml", "SafeLoader" | "CSafeLoader"]
| ["yaml", "loader", "SafeLoader" | "CSafeLoader"]
| ["yaml", "cyaml", "CSafeLoader"]
)
})
{

View File

@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:239:20 --> B006_B008.py:242:20
| |
237 | # B006 and B008 240 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008. 241 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]): 242 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
240 | pass 243 | pass
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
236 | 239 |
237 | # B006 and B008 240 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008. 241 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]): - def nested_combo(a=[float(3), dt.datetime.now()]):
239 + def nested_combo(a=None): 242 + def nested_combo(a=None):
240 | pass 243 | pass
241 | 244 |
242 | 245 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:276:27 --> B006_B008.py:279:27
| |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
| ^^ | ^^
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
273 | 276 |
274 | 277 |
275 | def mutable_annotations( 278 | def mutable_annotations(
- a: list[int] | None = [], - a: list[int] | None = [],
276 + a: list[int] | None = None, 279 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:277:35 --> B006_B008.py:280:35
| |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
| ^^ | ^^
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
274 | 277 |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {}, - b: Optional[Dict[int, int]] = {},
277 + b: Optional[Dict[int, int]] = None, 280 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:278:62 --> B006_B008.py:281:62
| |
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^ | ^^^^^
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, 281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
281 | pass 284 | pass
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:80 --> B006_B008.py:282:80
| |
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^ | ^^^^^
280 | ): 283 | ):
281 | pass 284 | pass
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None, 282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ): 283 | ):
281 | pass 284 | pass
282 | 285 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:284:52 --> B006_B008.py:287:52
| |
284 | def single_line_func_wrong(value: dict[str, str] = {}): 287 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
285 | """Docstring""" 288 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
281 | pass 284 | pass
282 | 285 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 | 286 |
287 | - def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:288:52 --> B006_B008.py:291:52
| |
288 | def single_line_func_wrong(value: dict[str, str] = {}): 291 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
289 | """Docstring""" 292 | """Docstring"""
290 | ... 293 | ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
285 | """Docstring""" 288 | """Docstring"""
286 | 289 |
287 | 290 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
288 + def single_line_func_wrong(value: dict[str, str] = None): 291 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring""" 292 | """Docstring"""
290 | ... 293 | ...
291 | 294 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:293:52 --> B006_B008.py:296:52
| |
293 | def single_line_func_wrong(value: dict[str, str] = {}): 296 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
294 | """Docstring"""; ... 297 | """Docstring"""; ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
290 | ... 293 | ...
291 | 294 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 | 295 |
296 | - def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:297:52 --> B006_B008.py:300:52
| |
297 | def single_line_func_wrong(value: dict[str, str] = {}): 300 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
298 | """Docstring"""; \ 301 | """Docstring"""; \
299 | ... 302 | ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
294 | """Docstring"""; ... 297 | """Docstring"""; ...
295 | 298 |
296 | 299 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
297 + def single_line_func_wrong(value: dict[str, str] = None): 300 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \ 301 | """Docstring"""; \
299 | ... 302 | ...
300 | 303 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:302:52 --> B006_B008.py:305:52
| |
302 | def single_line_func_wrong(value: dict[str, str] = { 305 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^ | ____________________________________________________^
303 | | # This is a comment 306 | | # This is a comment
304 | | }): 307 | | }):
| |_^ | |_^
305 | """Docstring""" 308 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
299 | ... 302 | ...
300 | 303 |
301 | 304 |
- def single_line_func_wrong(value: dict[str, str] = { - def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment - # This is a comment
- }): - }):
302 + def single_line_func_wrong(value: dict[str, str] = None): 305 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring""" 306 | """Docstring"""
304 | 307 |
305 | 308 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:308:52 --> B006_B008.py:311:52
| |
308 | def single_line_func_wrong(value: dict[str, str] = {}) \ 311 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^ | ^^
309 | : \ 312 | : \
310 | """Docstring""" 313 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:313:52 --> B006_B008.py:316:52
| |
313 | def single_line_func_wrong(value: dict[str, str] = {}): 316 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
314 | """Docstring without newline""" 317 | """Docstring without newline"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
310 | """Docstring""" 313 | """Docstring"""
311 | 314 |
312 | 315 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
313 + def single_line_func_wrong(value: dict[str, str] = None): 316 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline""" 317 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
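Editor's note: the fixes in the snapshots above all follow the same `help` suggestion, "Replace with `None`; initialize within function". A minimal sketch of that pattern, using a hypothetical `append_item` function rather than the fixture's `single_line_func_wrong`:

```python
def append_item_wrong(item: str, items: list[str] = []) -> list[str]:  # B006
    items.append(item)  # every call without `items=` mutates the same shared list
    return items


def append_item(item: str, items: list[str] | None = None) -> list[str]:
    # "Replace with `None`; initialize within function"
    if items is None:
        items = []
    items.append(item)
    return items
```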

View File

@ -53,39 +53,39 @@ B008 Do not perform function call in argument defaults; instead, perform the cal
| |
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:239:31 --> B006_B008.py:242:31
| |
237 | # B006 and B008 240 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008. 241 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]): 242 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^
240 | pass 243 | pass
| |
B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable B008 Do not perform function call `map` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:245:22 --> B006_B008.py:248:22
| |
243 | # Don't flag nested B006 since we can't guarantee that 246 | # Don't flag nested B006 since we can't guarantee that
244 | # it isn't made mutable by the outer operation. 247 | # it isn't made mutable by the outer operation.
245 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])): 248 | def no_nested_b006(a=map(lambda s: s.upper(), ["a", "b", "c"])):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
246 | pass 249 | pass
| |
B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable B008 Do not perform function call `random.randint` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:250:19 --> B006_B008.py:253:19
| |
249 | # B008-ception. 252 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): 253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
251 | pass 254 | pass
| |
B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable B008 Do not perform function call `dt.datetime.now` in argument defaults; instead, perform the call within the function, or read the default from a module-level singleton variable
--> B006_B008.py:250:37 --> B006_B008.py:253:37
| |
249 | # B008-ception. 252 | # B008-ception.
250 | def nested_b008(a=random.randint(0, dt.datetime.now().year)): 253 | def nested_b008(a=random.randint(0, dt.datetime.now().year)):
| ^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^
251 | pass 254 | pass
| |
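Editor's note: B008 targets defaults that are produced by a function call, such as the fixture's `dt.datetime.now()`; the call runs once, when the `def` statement is evaluated, not on each invocation. A hedged sketch of the rewrite the message suggests (perform the call within the function), with a hypothetical `log_event` name:

```python
import datetime as dt


def log_event_wrong(message: str, when: dt.datetime = dt.datetime.now()) -> str:  # B008
    # `dt.datetime.now()` is evaluated once, at definition time, and then reused
    return f"{when.isoformat()} {message}"


def log_event(message: str, when: dt.datetime | None = None) -> str:
    if when is None:
        when = dt.datetime.now()  # evaluated on every call instead
    return f"{when.isoformat()} {message}"
```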

View File

@ -236,227 +236,227 @@ help: Replace with `None`; initialize within function
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:239:20 --> B006_B008.py:242:20
| |
237 | # B006 and B008 240 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008. 241 | # We should handle arbitrary nesting of these B008.
239 | def nested_combo(a=[float(3), dt.datetime.now()]): 242 | def nested_combo(a=[float(3), dt.datetime.now()]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
240 | pass 243 | pass
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
236 | 239 |
237 | # B006 and B008 240 | # B006 and B008
238 | # We should handle arbitrary nesting of these B008. 241 | # We should handle arbitrary nesting of these B008.
- def nested_combo(a=[float(3), dt.datetime.now()]): - def nested_combo(a=[float(3), dt.datetime.now()]):
239 + def nested_combo(a=None): 242 + def nested_combo(a=None):
240 | pass 243 | pass
241 | 244 |
242 | 245 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:276:27 --> B006_B008.py:279:27
| |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
| ^^ | ^^
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
273 | 276 |
274 | 277 |
275 | def mutable_annotations( 278 | def mutable_annotations(
- a: list[int] | None = [], - a: list[int] | None = [],
276 + a: list[int] | None = None, 279 + a: list[int] | None = None,
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:277:35 --> B006_B008.py:280:35
| |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
| ^^ | ^^
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
274 | 277 |
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
- b: Optional[Dict[int, int]] = {}, - b: Optional[Dict[int, int]] = {},
277 + b: Optional[Dict[int, int]] = None, 280 + b: Optional[Dict[int, int]] = None,
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:278:62 --> B006_B008.py:281:62
| |
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^ | ^^^^^
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
275 | def mutable_annotations( 278 | def mutable_annotations(
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
- c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
278 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None, 281 + c: Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
280 | ): 283 | ):
281 | pass 284 | pass
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:279:80 --> B006_B008.py:282:80
| |
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 282 | d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
| ^^^^^ | ^^^^^
280 | ): 283 | ):
281 | pass 284 | pass
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
276 | a: list[int] | None = [], 279 | a: list[int] | None = [],
277 | b: Optional[Dict[int, int]] = {}, 280 | b: Optional[Dict[int, int]] = {},
278 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), 281 | c: Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
- d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(), - d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = set(),
279 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None, 282 + d: typing_extensions.Annotated[Union[Set[str], abc.Sized], "annotation"] = None,
280 | ): 283 | ):
281 | pass 284 | pass
282 | 285 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:284:52 --> B006_B008.py:287:52
| |
284 | def single_line_func_wrong(value: dict[str, str] = {}): 287 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
285 | """Docstring""" 288 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
281 | pass 284 | pass
282 | 285 |
283 |
- def single_line_func_wrong(value: dict[str, str] = {}):
284 + def single_line_func_wrong(value: dict[str, str] = None):
285 | """Docstring"""
286 | 286 |
287 | - def single_line_func_wrong(value: dict[str, str] = {}):
287 + def single_line_func_wrong(value: dict[str, str] = None):
288 | """Docstring"""
289 |
290 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:288:52 --> B006_B008.py:291:52
| |
288 | def single_line_func_wrong(value: dict[str, str] = {}): 291 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
289 | """Docstring""" 292 | """Docstring"""
290 | ... 293 | ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
285 | """Docstring""" 288 | """Docstring"""
286 | 289 |
287 | 290 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
288 + def single_line_func_wrong(value: dict[str, str] = None): 291 + def single_line_func_wrong(value: dict[str, str] = None):
289 | """Docstring""" 292 | """Docstring"""
290 | ... 293 | ...
291 | 294 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:293:52 --> B006_B008.py:296:52
| |
293 | def single_line_func_wrong(value: dict[str, str] = {}): 296 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
294 | """Docstring"""; ... 297 | """Docstring"""; ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
290 | ... 293 | ...
291 | 294 |
292 |
- def single_line_func_wrong(value: dict[str, str] = {}):
293 + def single_line_func_wrong(value: dict[str, str] = None):
294 | """Docstring"""; ...
295 | 295 |
296 | - def single_line_func_wrong(value: dict[str, str] = {}):
296 + def single_line_func_wrong(value: dict[str, str] = None):
297 | """Docstring"""; ...
298 |
299 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:297:52 --> B006_B008.py:300:52
| |
297 | def single_line_func_wrong(value: dict[str, str] = {}): 300 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
298 | """Docstring"""; \ 301 | """Docstring"""; \
299 | ... 302 | ...
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
294 | """Docstring"""; ... 297 | """Docstring"""; ...
295 | 298 |
296 | 299 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
297 + def single_line_func_wrong(value: dict[str, str] = None): 300 + def single_line_func_wrong(value: dict[str, str] = None):
298 | """Docstring"""; \ 301 | """Docstring"""; \
299 | ... 302 | ...
300 | 303 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:302:52 --> B006_B008.py:305:52
| |
302 | def single_line_func_wrong(value: dict[str, str] = { 305 | def single_line_func_wrong(value: dict[str, str] = {
| ____________________________________________________^ | ____________________________________________________^
303 | | # This is a comment 306 | | # This is a comment
304 | | }): 307 | | }):
| |_^ | |_^
305 | """Docstring""" 308 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
299 | ... 302 | ...
300 | 303 |
301 | 304 |
- def single_line_func_wrong(value: dict[str, str] = { - def single_line_func_wrong(value: dict[str, str] = {
- # This is a comment - # This is a comment
- }): - }):
302 + def single_line_func_wrong(value: dict[str, str] = None): 305 + def single_line_func_wrong(value: dict[str, str] = None):
303 | """Docstring""" 306 | """Docstring"""
304 | 307 |
305 | 308 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
B006 Do not use mutable data structures for argument defaults B006 Do not use mutable data structures for argument defaults
--> B006_B008.py:308:52 --> B006_B008.py:311:52
| |
308 | def single_line_func_wrong(value: dict[str, str] = {}) \ 311 | def single_line_func_wrong(value: dict[str, str] = {}) \
| ^^ | ^^
309 | : \ 312 | : \
310 | """Docstring""" 313 | """Docstring"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
B006 [*] Do not use mutable data structures for argument defaults B006 [*] Do not use mutable data structures for argument defaults
--> B006_B008.py:313:52 --> B006_B008.py:316:52
| |
313 | def single_line_func_wrong(value: dict[str, str] = {}): 316 | def single_line_func_wrong(value: dict[str, str] = {}):
| ^^ | ^^
314 | """Docstring without newline""" 317 | """Docstring without newline"""
| |
help: Replace with `None`; initialize within function help: Replace with `None`; initialize within function
310 | """Docstring""" 313 | """Docstring"""
311 | 314 |
312 | 315 |
- def single_line_func_wrong(value: dict[str, str] = {}): - def single_line_func_wrong(value: dict[str, str] = {}):
313 + def single_line_func_wrong(value: dict[str, str] = None): 316 + def single_line_func_wrong(value: dict[str, str] = None):
314 | """Docstring without newline""" 317 | """Docstring without newline"""
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
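Editor's note: each B006 fix above is tagged unsafe because, as the diffs show, the autofix only swaps the default for `None` and never touches the function body. A small illustration (hypothetical `register` function) of the behavior that can change:

```python
def register(name: str, registry: dict[str, int] = {}) -> dict[str, int]:  # B006
    registry[name] = len(registry)
    return registry


register("a")  # {'a': 0}
register("b")  # {'a': 0, 'b': 1} -- both calls mutate the one shared dict

# After the automatic fix the signature becomes `registry: dict[str, int] = None`,
# so the same calls raise TypeError until the body is updated with
# `if registry is None: registry = {}` -- hence "may change runtime behavior".
```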

View File

@ -4,7 +4,9 @@ use rustc_hash::FxHashSet;
use std::sync::LazyLock; use std::sync::LazyLock;
use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::Parameter;
use ruff_python_ast::docstrings::{clean_space, leading_space}; use ruff_python_ast::docstrings::{clean_space, leading_space};
use ruff_python_ast::helpers::map_subscript;
use ruff_python_ast::identifier::Identifier; use ruff_python_ast::identifier::Identifier;
use ruff_python_semantic::analyze::visibility::is_staticmethod; use ruff_python_semantic::analyze::visibility::is_staticmethod;
use ruff_python_trivia::textwrap::dedent; use ruff_python_trivia::textwrap::dedent;
@ -1184,6 +1186,9 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// This rule is enabled when using the `google` convention, and disabled when /// This rule is enabled when using the `google` convention, and disabled when
/// using the `pep257` and `numpy` conventions. /// using the `pep257` and `numpy` conventions.
/// ///
/// Parameters annotated with `typing.Unpack` are exempt from this rule.
/// This follows the Python typing specification for unpacking keyword arguments.
///
/// ## Example /// ## Example
/// ```python /// ```python
/// def calculate_speed(distance: float, time: float) -> float: /// def calculate_speed(distance: float, time: float) -> float:
@ -1233,6 +1238,7 @@ impl AlwaysFixableViolation for MissingSectionNameColon {
/// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/) /// - [PEP 257 Docstring Conventions](https://peps.python.org/pep-0257/)
/// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/) /// - [PEP 287 reStructuredText Docstring Format](https://peps.python.org/pep-0287/)
/// - [Google Python Style Guide - Docstrings](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) /// - [Google Python Style Guide - Docstrings](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings)
/// - [Python - Unpack for keyword arguments](https://typing.python.org/en/latest/spec/callables.html#unpack-kwargs)
#[derive(ViolationMetadata)] #[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "v0.0.73")] #[violation_metadata(stable_since = "v0.0.73")]
pub(crate) struct UndocumentedParam { pub(crate) struct UndocumentedParam {
@ -1808,7 +1814,9 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
missing_arg_names.insert(starred_arg_name); missing_arg_names.insert(starred_arg_name);
} }
} }
if let Some(arg) = function.parameters.kwarg.as_ref() { if let Some(arg) = function.parameters.kwarg.as_ref()
&& !has_unpack_annotation(checker, arg)
{
let arg_name = arg.name.as_str(); let arg_name = arg.name.as_str();
let starred_arg_name = format!("**{arg_name}"); let starred_arg_name = format!("**{arg_name}");
if !arg_name.starts_with('_') if !arg_name.starts_with('_')
@ -1834,6 +1842,15 @@ fn missing_args(checker: &Checker, docstring: &Docstring, docstrings_args: &FxHa
} }
} }
/// Returns `true` if the parameter is annotated with `typing.Unpack`
fn has_unpack_annotation(checker: &Checker, parameter: &Parameter) -> bool {
parameter.annotation.as_ref().is_some_and(|annotation| {
checker
.semantic()
.match_typing_expr(map_subscript(annotation), "Unpack")
})
}
// See: `GOOGLE_ARGS_REGEX` in `pydocstyle/checker.py`. // See: `GOOGLE_ARGS_REGEX` in `pydocstyle/checker.py`.
static GOOGLE_ARGS_REGEX: LazyLock<Regex> = static GOOGLE_ARGS_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap()); LazyLock::new(|| Regex::new(r"^\s*(\*?\*?\w+)\s*(\(.*?\))?\s*:(\r\n|\n)?\s*.+").unwrap());
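Editor's note: the new `has_unpack_annotation` helper implements the doc-comment exemption above: a `**kwargs` parameter whose annotation resolves to `typing.Unpack` (with or without a subscript) no longer needs its own docstring entry. A hedged Python sketch of what D417 now accepts, mirroring the fixture's `Unpack[User]` but with hypothetical field names:

```python
from typing import TypedDict, Unpack  # `Unpack` is in `typing` on Python 3.11+


class User(TypedDict):
    name: str
    age: int


def update_user(query: str, **kwargs: Unpack[User]) -> None:
    """Update a user record.

    Args:
        query: Lookup key for the record to update.
    """
    # `**kwargs` needs no `Args:` entry thanks to the `Unpack` annotation;
    # omitting the `query` entry would still trigger D417, as the new
    # `function_with_unpack_and_missing_arg_doc_should_fail` snapshots show.
```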

View File

@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """ 200 | """
201 | Send a message. 201 | Send a message.
| |
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@ -83,3 +83,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """ 200 | """
201 | Send a message. 201 | Send a message.
| |
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """ 200 | """
201 | Send a message. 201 | Send a message.
| |
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@ -101,3 +101,13 @@ D417 Missing argument description in the docstring for `should_fail`: `Args`
200 | """ 200 | """
201 | Send a message. 201 | Send a message.
| |
D417 Missing argument description in the docstring for `function_with_unpack_and_missing_arg_doc_should_fail`: `query`
--> D417.py:238:5
|
236 | """
237 |
238 | def function_with_unpack_and_missing_arg_doc_should_fail(query: str, **kwargs: Unpack[User]):
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
239 | """Function with Unpack kwargs but missing query arg documentation.
|

View File

@ -28,6 +28,7 @@ mod tests {
use crate::settings::types::PreviewMode; use crate::settings::types::PreviewMode;
use crate::settings::{LinterSettings, flags}; use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind; use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::test::{test_contents, test_path, test_snippet}; use crate::test::{test_contents, test_path, test_snippet};
use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives}; use crate::{Locator, assert_diagnostics, assert_diagnostics_diff, directives};
@ -955,6 +956,8 @@ mod tests {
&locator, &locator,
&indexer, &indexer,
); );
let suppressions =
Suppressions::from_tokens(&settings, locator.contents(), parsed.tokens());
let mut messages = check_path( let mut messages = check_path(
Path::new("<filename>"), Path::new("<filename>"),
None, None,
@ -968,6 +971,7 @@ mod tests {
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
messages.sort_by(Diagnostic::ruff_start_ordering); messages.sort_by(Diagnostic::ruff_start_ordering);
let actual = messages let actual = messages

View File

@ -16,10 +16,10 @@ mod tests {
use crate::registry::Rule; use crate::registry::Rule;
use crate::rules::{flake8_tidy_imports, pylint}; use crate::rules::{flake8_tidy_imports, pylint};
use crate::assert_diagnostics;
use crate::settings::LinterSettings; use crate::settings::LinterSettings;
use crate::settings::types::PreviewMode; use crate::settings::types::PreviewMode;
use crate::test::test_path; use crate::test::test_path;
use crate::{assert_diagnostics, assert_diagnostics_diff};
#[test_case(Rule::SingledispatchMethod, Path::new("singledispatch_method.py"))] #[test_case(Rule::SingledispatchMethod, Path::new("singledispatch_method.py"))]
#[test_case( #[test_case(
@ -253,6 +253,32 @@ mod tests {
Ok(()) Ok(())
} }
#[test_case(
Rule::UselessExceptionStatement,
Path::new("useless_exception_statement.py")
)]
fn preview_rules(rule_code: Rule, path: &Path) -> Result<()> {
let snapshot = format!(
"preview__{}_{}",
rule_code.noqa_code(),
path.to_string_lossy()
);
assert_diagnostics_diff!(
snapshot,
Path::new("pylint").join(path).as_path(),
&LinterSettings {
preview: PreviewMode::Disabled,
..LinterSettings::for_rule(rule_code)
},
&LinterSettings {
preview: PreviewMode::Enabled,
..LinterSettings::for_rule(rule_code)
}
);
Ok(())
}
#[test] #[test]
fn continue_in_finally() -> Result<()> { fn continue_in_finally() -> Result<()> {
let diagnostics = test_path( let diagnostics = test_path(

View File

@ -1,10 +1,11 @@
use ruff_macros::{ViolationMetadata, derive_message_formats}; use ruff_macros::{ViolationMetadata, derive_message_formats};
use ruff_python_ast::{self as ast, Expr}; use ruff_python_ast::{self as ast, Expr};
use ruff_python_semantic::SemanticModel; use ruff_python_semantic::{SemanticModel, analyze};
use ruff_python_stdlib::builtins; use ruff_python_stdlib::builtins;
use ruff_text_size::Ranged; use ruff_text_size::Ranged;
use crate::checkers::ast::Checker; use crate::checkers::ast::Checker;
use crate::preview::is_custom_exception_checking_enabled;
use crate::{Edit, Fix, FixAvailability, Violation}; use crate::{Edit, Fix, FixAvailability, Violation};
use ruff_python_ast::PythonVersion; use ruff_python_ast::PythonVersion;
@ -20,6 +21,9 @@ use ruff_python_ast::PythonVersion;
/// This rule only detects built-in exceptions, like `ValueError`, and does /// This rule only detects built-in exceptions, like `ValueError`, and does
/// not catch user-defined exceptions. /// not catch user-defined exceptions.
/// ///
/// In [preview], this rule will also detect user-defined exceptions, but only
/// the ones defined in the file being checked.
///
/// ## Example /// ## Example
/// ```python /// ```python
/// ValueError("...") /// ValueError("...")
@ -32,7 +36,8 @@ use ruff_python_ast::PythonVersion;
/// ///
/// ## Fix safety /// ## Fix safety
/// This rule's fix is marked as unsafe, as converting a useless exception /// This rule's fix is marked as unsafe, as converting a useless exception
/// statement to a `raise` statement will change the program's behavior. ///
/// [preview]: https://docs.astral.sh/ruff/preview/
#[derive(ViolationMetadata)] #[derive(ViolationMetadata)]
#[violation_metadata(stable_since = "0.5.0")] #[violation_metadata(stable_since = "0.5.0")]
pub(crate) struct UselessExceptionStatement; pub(crate) struct UselessExceptionStatement;
@ -56,7 +61,10 @@ pub(crate) fn useless_exception_statement(checker: &Checker, expr: &ast::StmtExp
return; return;
}; };
if is_builtin_exception(func, checker.semantic(), checker.target_version()) { if is_builtin_exception(func, checker.semantic(), checker.target_version())
|| (is_custom_exception_checking_enabled(checker.settings())
&& is_custom_exception(func, checker.semantic(), checker.target_version()))
{
let mut diagnostic = checker.report_diagnostic(UselessExceptionStatement, expr.range()); let mut diagnostic = checker.report_diagnostic(UselessExceptionStatement, expr.range());
diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion( diagnostic.set_fix(Fix::unsafe_edit(Edit::insertion(
"raise ".to_string(), "raise ".to_string(),
@ -78,3 +86,34 @@ fn is_builtin_exception(
if builtins::is_exception(name, target_version.minor)) if builtins::is_exception(name, target_version.minor))
}) })
} }
/// Returns `true` if the given expression is a custom exception.
fn is_custom_exception(
expr: &Expr,
semantic: &SemanticModel,
target_version: PythonVersion,
) -> bool {
let Some(qualified_name) = semantic.resolve_qualified_name(expr) else {
return false;
};
let Some(symbol) = qualified_name.segments().last() else {
return false;
};
let Some(binding_id) = semantic.lookup_symbol(symbol) else {
return false;
};
let binding = semantic.binding(binding_id);
let Some(source) = binding.source else {
return false;
};
let statement = semantic.statement(source);
if let ast::Stmt::ClassDef(class_def) = statement {
return analyze::class::any_qualified_base_class(class_def, semantic, &|qualified_name| {
if let ["" | "builtins", name] = qualified_name.segments() {
return builtins::is_exception(name, target_version.minor);
}
false
});
}
false
}
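Editor's note: taken together with the doc comment, the preview behavior works like this: `is_custom_exception` resolves the called name to a class defined in the file being checked and reports it if any base class is a built-in exception. A hedged sketch of what PLW0133 flags once preview is enabled, reusing the fixture's `MyError` name:

```python
class MyError(Exception):
    """Defined in the same file, so the preview check can see its `Exception` base."""


def func() -> None:
    ValueError("built-in exceptions are flagged on stable")  # PLW0133
    MyError("locally defined subclasses are flagged only in preview")  # PLW0133
    raise MyError("the suggested (unsafe) fix prepends `raise`")  # OK
```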

View File

@ -2,250 +2,294 @@
source: crates/ruff_linter/src/rules/pylint/mod.rs source: crates/ruff_linter/src/rules/pylint/mod.rs
--- ---
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:7:5 --> useless_exception_statement.py:26:5
| |
5 | # Test case 1: Useless exception statement 24 | # Test case 1: Useless exception statement
6 | def func():
7 | AssertionError("This is an assertion error") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
4 |
5 | # Test case 1: Useless exception statement
6 | def func():
- AssertionError("This is an assertion error") # PLW0133
7 + raise AssertionError("This is an assertion error") # PLW0133
8 |
9 |
10 | # Test case 2: Useless exception statement in try-except block
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:13:9
|
11 | def func():
12 | try:
13 | Exception("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
14 | except Exception as err:
15 | pass
|
help: Add `raise` keyword
10 | # Test case 2: Useless exception statement in try-except block
11 | def func():
12 | try:
- Exception("This is an exception") # PLW0133
13 + raise Exception("This is an exception") # PLW0133
14 | except Exception as err:
15 | pass
16 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:21:9
|
19 | def func():
20 | if True:
21 | RuntimeError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
18 | # Test case 3: Useless exception statement in if statement
19 | def func():
20 | if True:
- RuntimeError("This is an exception") # PLW0133
21 + raise RuntimeError("This is an exception") # PLW0133
22 |
23 |
24 | # Test case 4: Useless exception statement in class
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:28:13
|
26 | class Class:
27 | def __init__(self):
28 | TypeError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
25 | def func(): 25 | def func():
26 | class Class: 26 | AssertionError("This is an assertion error") # PLW0133
27 | def __init__(self): | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
27 | MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
|
help: Add `raise` keyword
23 |
24 | # Test case 1: Useless exception statement
25 | def func():
- AssertionError("This is an assertion error") # PLW0133
26 + raise AssertionError("This is an assertion error") # PLW0133
27 | MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
29 | MyValueError("This is a custom value error") # PLW0133
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:35:9
|
33 | def func():
34 | try:
35 | Exception("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
36 | MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
|
help: Add `raise` keyword
32 | # Test case 2: Useless exception statement in try-except block
33 | def func():
34 | try:
- Exception("This is an exception") # PLW0133
35 + raise Exception("This is an exception") # PLW0133
36 | MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
38 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:46:9
|
44 | def func():
45 | if True:
46 | RuntimeError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
47 | MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
|
help: Add `raise` keyword
43 | # Test case 3: Useless exception statement in if statement
44 | def func():
45 | if True:
- RuntimeError("This is an exception") # PLW0133
46 + raise RuntimeError("This is an exception") # PLW0133
47 | MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
49 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:56:13
|
54 | class Class:
55 | def __init__(self):
56 | TypeError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
57 | MyError("This is an exception") # PLW0133
58 | MySubError("This is an exception") # PLW0133
|
help: Add `raise` keyword
53 | def func():
54 | class Class:
55 | def __init__(self):
- TypeError("This is an exception") # PLW0133 - TypeError("This is an exception") # PLW0133
28 + raise TypeError("This is an exception") # PLW0133 56 + raise TypeError("This is an exception") # PLW0133
29 | 57 | MyError("This is an exception") # PLW0133
30 | 58 | MySubError("This is an exception") # PLW0133
31 | # Test case 5: Useless exception statement in function 59 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:34:9 --> useless_exception_statement.py:65:9
| |
32 | def func(): 63 | def func():
33 | def inner(): 64 | def inner():
34 | IndexError("This is an exception") # PLW0133 65 | IndexError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
35 | 66 | MyError("This is an exception") # PLW0133
36 | inner() 67 | MySubError("This is an exception") # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
31 | # Test case 5: Useless exception statement in function 62 | # Test case 5: Useless exception statement in function
32 | def func(): 63 | def func():
33 | def inner(): 64 | def inner():
- IndexError("This is an exception") # PLW0133 - IndexError("This is an exception") # PLW0133
34 + raise IndexError("This is an exception") # PLW0133 65 + raise IndexError("This is an exception") # PLW0133
35 | 66 | MyError("This is an exception") # PLW0133
36 | inner() 67 | MySubError("This is an exception") # PLW0133
37 | 68 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:42:9 --> useless_exception_statement.py:76:9
| |
40 | def func(): 74 | def func():
41 | while True: 75 | while True:
42 | KeyError("This is an exception") # PLW0133 76 | KeyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
77 | MyError("This is an exception") # PLW0133
78 | MySubError("This is an exception") # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
39 | # Test case 6: Useless exception statement in while loop 73 | # Test case 6: Useless exception statement in while loop
40 | def func(): 74 | def func():
41 | while True: 75 | while True:
- KeyError("This is an exception") # PLW0133 - KeyError("This is an exception") # PLW0133
42 + raise KeyError("This is an exception") # PLW0133 76 + raise KeyError("This is an exception") # PLW0133
43 | 77 | MyError("This is an exception") # PLW0133
44 | 78 | MySubError("This is an exception") # PLW0133
45 | # Test case 7: Useless exception statement in abstract class 79 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:50:13 --> useless_exception_statement.py:87:13
| |
48 | @abstractmethod 85 | @abstractmethod
49 | def method(self): 86 | def method(self):
50 | NotImplementedError("This is an exception") # PLW0133 87 | NotImplementedError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
88 | MyError("This is an exception") # PLW0133
89 | MySubError("This is an exception") # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
47 | class Class(ABC): 84 | class Class(ABC):
48 | @abstractmethod 85 | @abstractmethod
49 | def method(self): 86 | def method(self):
- NotImplementedError("This is an exception") # PLW0133 - NotImplementedError("This is an exception") # PLW0133
50 + raise NotImplementedError("This is an exception") # PLW0133 87 + raise NotImplementedError("This is an exception") # PLW0133
51 | 88 | MyError("This is an exception") # PLW0133
52 | 89 | MySubError("This is an exception") # PLW0133
53 | # Test case 8: Useless exception statement inside context manager 90 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:56:9 --> useless_exception_statement.py:96:9
| |
54 | def func(): 94 | def func():
55 | with suppress(AttributeError): 95 | with suppress(Exception):
56 | AttributeError("This is an exception") # PLW0133 96 | AttributeError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
97 | MyError("This is an exception") # PLW0133
98 | MySubError("This is an exception") # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
53 | # Test case 8: Useless exception statement inside context manager 93 | # Test case 8: Useless exception statement inside context manager
54 | def func(): 94 | def func():
55 | with suppress(AttributeError): 95 | with suppress(Exception):
- AttributeError("This is an exception") # PLW0133 - AttributeError("This is an exception") # PLW0133
56 + raise AttributeError("This is an exception") # PLW0133 96 + raise AttributeError("This is an exception") # PLW0133
57 | 97 | MyError("This is an exception") # PLW0133
58 | 98 | MySubError("This is an exception") # PLW0133
59 | # Test case 9: Useless exception statement in parentheses 99 | MyValueError("This is an exception") # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:61:5 --> useless_exception_statement.py:104:5
| |
59 | # Test case 9: Useless exception statement in parentheses 102 | # Test case 9: Useless exception statement in parentheses
60 | def func(): 103 | def func():
61 | (RuntimeError("This is an exception")) # PLW0133 104 | (RuntimeError("This is an exception")) # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
105 | (MyError("This is an exception")) # PLW0133
106 | (MySubError("This is an exception")) # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
58 | 101 |
59 | # Test case 9: Useless exception statement in parentheses 102 | # Test case 9: Useless exception statement in parentheses
60 | def func(): 103 | def func():
- (RuntimeError("This is an exception")) # PLW0133 - (RuntimeError("This is an exception")) # PLW0133
61 + raise (RuntimeError("This is an exception")) # PLW0133 104 + raise (RuntimeError("This is an exception")) # PLW0133
62 | 105 | (MyError("This is an exception")) # PLW0133
63 | 106 | (MySubError("This is an exception")) # PLW0133
64 | # Test case 10: Useless exception statement in continuation 107 | (MyValueError("This is an exception")) # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:66:12 --> useless_exception_statement.py:112:12
| |
64 | # Test case 10: Useless exception statement in continuation 110 | # Test case 10: Useless exception statement in continuation
65 | def func(): 111 | def func():
66 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
63 | 109 |
64 | # Test case 10: Useless exception statement in continuation 110 | # Test case 10: Useless exception statement in continuation
65 | def func(): 111 | def func():
- x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133 - x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
66 + x = 1; raise (RuntimeError("This is an exception")); y = 2 # PLW0133 112 + x = 1; raise (RuntimeError("This is an exception")); y = 2 # PLW0133
67 | 113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
68 | 114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
69 | # Test case 11: Useless warning statement 115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:71:5 --> useless_exception_statement.py:120:5
| |
69 | # Test case 11: Useless warning statement 118 | # Test case 11: Useless warning statement
70 | def func(): 119 | def func():
71 | UserWarning("This is an assertion error") # PLW0133 120 | UserWarning("This is a user warning") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| 121 | MyUserWarning("This is a custom user warning") # PLW0133
help: Add `raise` keyword
68 |
69 | # Test case 11: Useless warning statement
70 | def func():
- UserWarning("This is an assertion error") # PLW0133
71 + raise UserWarning("This is an assertion error") # PLW0133
72 |
73 |
74 | # Non-violation test cases: PLW0133
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:126:1
|
124 | import builtins
125 |
126 | builtins.TypeError("still an exception even though it's an Attribute")
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
127 |
128 | PythonFinalizationError("Added in Python 3.13")
| |
help: Add `raise` keyword help: Add `raise` keyword
117 |
118 | # Test case 11: Useless warning statement
119 | def func():
- UserWarning("This is a user warning") # PLW0133
120 + raise UserWarning("This is a user warning") # PLW0133
121 | MyUserWarning("This is a custom user warning") # PLW0133
122 |
123 | 123 |
124 | import builtins
125 |
- builtins.TypeError("still an exception even though it's an Attribute")
126 + raise builtins.TypeError("still an exception even though it's an Attribute")
127 |
128 | PythonFinalizationError("Added in Python 3.13")
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:128:1 --> useless_exception_statement.py:127:1
| |
126 | builtins.TypeError("still an exception even though it's an Attribute") 125 | import builtins
127 | 126 |
128 | PythonFinalizationError("Added in Python 3.13") 127 | builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
128 |
129 | PythonFinalizationError("Added in Python 3.13") # PLW0133
| |
help: Add `raise` keyword help: Add `raise` keyword
125 | 124 | # Test case 12: Useless exception statement at module level
126 | builtins.TypeError("still an exception even though it's an Attribute") 125 | import builtins
127 | 126 |
- PythonFinalizationError("Added in Python 3.13") - builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
128 + raise PythonFinalizationError("Added in Python 3.13") 127 + raise builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
128 |
129 | PythonFinalizationError("Added in Python 3.13") # PLW0133
130 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:129:1
|
127 | builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
128 |
129 | PythonFinalizationError("Added in Python 3.13") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
130 |
131 | MyError("This is an exception") # PLW0133
|
help: Add `raise` keyword
126 |
127 | builtins.TypeError("still an exception even though it's an Attribute") # PLW0133
128 |
- PythonFinalizationError("Added in Python 3.13") # PLW0133
129 + raise PythonFinalizationError("Added in Python 3.13") # PLW0133
130 |
131 | MyError("This is an exception") # PLW0133
132 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:137:1
|
135 | MyValueError("This is an exception") # PLW0133
136 |
137 | UserWarning("This is a user warning") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
138 |
139 | MyUserWarning("This is a custom user warning") # PLW0133
|
help: Add `raise` keyword
134 |
135 | MyValueError("This is an exception") # PLW0133
136 |
- UserWarning("This is a user warning") # PLW0133
137 + raise UserWarning("This is a user warning") # PLW0133
138 |
139 | MyUserWarning("This is a custom user warning") # PLW0133
140 |
note: This is an unsafe fix and may change runtime behavior note: This is an unsafe fix and may change runtime behavior

View File

@ -0,0 +1,751 @@
---
source: crates/ruff_linter/src/rules/pylint/mod.rs
---
--- Linter settings ---
-linter.preview = disabled
+linter.preview = enabled
--- Summary ---
Removed: 0
Added: 35
--- Added ---
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:27:5
|
25 | def func():
26 | AssertionError("This is an assertion error") # PLW0133
27 | MyError("This is a custom error") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
28 | MySubError("This is a custom error") # PLW0133
29 | MyValueError("This is a custom value error") # PLW0133
|
help: Add `raise` keyword
24 | # Test case 1: Useless exception statement
25 | def func():
26 | AssertionError("This is an assertion error") # PLW0133
- MyError("This is a custom error") # PLW0133
27 + raise MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
29 | MyValueError("This is a custom value error") # PLW0133
30 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:28:5
|
26 | AssertionError("This is an assertion error") # PLW0133
27 | MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
29 | MyValueError("This is a custom value error") # PLW0133
|
help: Add `raise` keyword
25 | def func():
26 | AssertionError("This is an assertion error") # PLW0133
27 | MyError("This is a custom error") # PLW0133
- MySubError("This is a custom error") # PLW0133
28 + raise MySubError("This is a custom error") # PLW0133
29 | MyValueError("This is a custom value error") # PLW0133
30 |
31 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:29:5
|
27 | MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
29 | MyValueError("This is a custom value error") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
26 | AssertionError("This is an assertion error") # PLW0133
27 | MyError("This is a custom error") # PLW0133
28 | MySubError("This is a custom error") # PLW0133
- MyValueError("This is a custom value error") # PLW0133
29 + raise MyValueError("This is a custom value error") # PLW0133
30 |
31 |
32 | # Test case 2: Useless exception statement in try-except block
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:36:9
|
34 | try:
35 | Exception("This is an exception") # PLW0133
36 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
37 | MySubError("This is an exception") # PLW0133
38 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
33 | def func():
34 | try:
35 | Exception("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
36 + raise MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
38 | MyValueError("This is an exception") # PLW0133
39 | except Exception as err:
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:37:9
|
35 | Exception("This is an exception") # PLW0133
36 | MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
38 | MyValueError("This is an exception") # PLW0133
39 | except Exception as err:
|
help: Add `raise` keyword
34 | try:
35 | Exception("This is an exception") # PLW0133
36 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
37 + raise MySubError("This is an exception") # PLW0133
38 | MyValueError("This is an exception") # PLW0133
39 | except Exception as err:
40 | pass
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:38:9
|
36 | MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
38 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
39 | except Exception as err:
40 | pass
|
help: Add `raise` keyword
35 | Exception("This is an exception") # PLW0133
36 | MyError("This is an exception") # PLW0133
37 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
38 + raise MyValueError("This is an exception") # PLW0133
39 | except Exception as err:
40 | pass
41 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:47:9
|
45 | if True:
46 | RuntimeError("This is an exception") # PLW0133
47 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
48 | MySubError("This is an exception") # PLW0133
49 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
44 | def func():
45 | if True:
46 | RuntimeError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
47 + raise MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
49 | MyValueError("This is an exception") # PLW0133
50 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:48:9
|
46 | RuntimeError("This is an exception") # PLW0133
47 | MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
49 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
45 | if True:
46 | RuntimeError("This is an exception") # PLW0133
47 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
48 + raise MySubError("This is an exception") # PLW0133
49 | MyValueError("This is an exception") # PLW0133
50 |
51 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:49:9
|
47 | MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
49 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
46 | RuntimeError("This is an exception") # PLW0133
47 | MyError("This is an exception") # PLW0133
48 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
49 + raise MyValueError("This is an exception") # PLW0133
50 |
51 |
52 | # Test case 4: Useless exception statement in class
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:57:13
|
55 | def __init__(self):
56 | TypeError("This is an exception") # PLW0133
57 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
58 | MySubError("This is an exception") # PLW0133
59 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
54 | class Class:
55 | def __init__(self):
56 | TypeError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
57 + raise MyError("This is an exception") # PLW0133
58 | MySubError("This is an exception") # PLW0133
59 | MyValueError("This is an exception") # PLW0133
60 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:58:13
|
56 | TypeError("This is an exception") # PLW0133
57 | MyError("This is an exception") # PLW0133
58 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
59 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
55 | def __init__(self):
56 | TypeError("This is an exception") # PLW0133
57 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
58 + raise MySubError("This is an exception") # PLW0133
59 | MyValueError("This is an exception") # PLW0133
60 |
61 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:59:13
|
57 | MyError("This is an exception") # PLW0133
58 | MySubError("This is an exception") # PLW0133
59 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
56 | TypeError("This is an exception") # PLW0133
57 | MyError("This is an exception") # PLW0133
58 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
59 + raise MyValueError("This is an exception") # PLW0133
60 |
61 |
62 | # Test case 5: Useless exception statement in function
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:66:9
|
64 | def inner():
65 | IndexError("This is an exception") # PLW0133
66 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
67 | MySubError("This is an exception") # PLW0133
68 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
63 | def func():
64 | def inner():
65 | IndexError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
66 + raise MyError("This is an exception") # PLW0133
67 | MySubError("This is an exception") # PLW0133
68 | MyValueError("This is an exception") # PLW0133
69 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:67:9
|
65 | IndexError("This is an exception") # PLW0133
66 | MyError("This is an exception") # PLW0133
67 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
68 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
64 | def inner():
65 | IndexError("This is an exception") # PLW0133
66 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
67 + raise MySubError("This is an exception") # PLW0133
68 | MyValueError("This is an exception") # PLW0133
69 |
70 | inner()
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:68:9
|
66 | MyError("This is an exception") # PLW0133
67 | MySubError("This is an exception") # PLW0133
68 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
69 |
70 | inner()
|
help: Add `raise` keyword
65 | IndexError("This is an exception") # PLW0133
66 | MyError("This is an exception") # PLW0133
67 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
68 + raise MyValueError("This is an exception") # PLW0133
69 |
70 | inner()
71 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:77:9
|
75 | while True:
76 | KeyError("This is an exception") # PLW0133
77 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
78 | MySubError("This is an exception") # PLW0133
79 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
74 | def func():
75 | while True:
76 | KeyError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
77 + raise MyError("This is an exception") # PLW0133
78 | MySubError("This is an exception") # PLW0133
79 | MyValueError("This is an exception") # PLW0133
80 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:78:9
|
76 | KeyError("This is an exception") # PLW0133
77 | MyError("This is an exception") # PLW0133
78 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
79 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
75 | while True:
76 | KeyError("This is an exception") # PLW0133
77 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
78 + raise MySubError("This is an exception") # PLW0133
79 | MyValueError("This is an exception") # PLW0133
80 |
81 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:79:9
|
77 | MyError("This is an exception") # PLW0133
78 | MySubError("This is an exception") # PLW0133
79 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
76 | KeyError("This is an exception") # PLW0133
77 | MyError("This is an exception") # PLW0133
78 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
79 + raise MyValueError("This is an exception") # PLW0133
80 |
81 |
82 | # Test case 7: Useless exception statement in abstract class
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:88:13
|
86 | def method(self):
87 | NotImplementedError("This is an exception") # PLW0133
88 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
89 | MySubError("This is an exception") # PLW0133
90 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
85 | @abstractmethod
86 | def method(self):
87 | NotImplementedError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
88 + raise MyError("This is an exception") # PLW0133
89 | MySubError("This is an exception") # PLW0133
90 | MyValueError("This is an exception") # PLW0133
91 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:89:13
|
87 | NotImplementedError("This is an exception") # PLW0133
88 | MyError("This is an exception") # PLW0133
89 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
90 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
86 | def method(self):
87 | NotImplementedError("This is an exception") # PLW0133
88 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
89 + raise MySubError("This is an exception") # PLW0133
90 | MyValueError("This is an exception") # PLW0133
91 |
92 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:90:13
|
88 | MyError("This is an exception") # PLW0133
89 | MySubError("This is an exception") # PLW0133
90 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
87 | NotImplementedError("This is an exception") # PLW0133
88 | MyError("This is an exception") # PLW0133
89 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
90 + raise MyValueError("This is an exception") # PLW0133
91 |
92 |
93 | # Test case 8: Useless exception statement inside context manager
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:97:9
|
95 | with suppress(Exception):
96 | AttributeError("This is an exception") # PLW0133
97 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
98 | MySubError("This is an exception") # PLW0133
99 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
94 | def func():
95 | with suppress(Exception):
96 | AttributeError("This is an exception") # PLW0133
- MyError("This is an exception") # PLW0133
97 + raise MyError("This is an exception") # PLW0133
98 | MySubError("This is an exception") # PLW0133
99 | MyValueError("This is an exception") # PLW0133
100 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:98:9
|
96 | AttributeError("This is an exception") # PLW0133
97 | MyError("This is an exception") # PLW0133
98 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
99 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
95 | with suppress(Exception):
96 | AttributeError("This is an exception") # PLW0133
97 | MyError("This is an exception") # PLW0133
- MySubError("This is an exception") # PLW0133
98 + raise MySubError("This is an exception") # PLW0133
99 | MyValueError("This is an exception") # PLW0133
100 |
101 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:99:9
|
97 | MyError("This is an exception") # PLW0133
98 | MySubError("This is an exception") # PLW0133
99 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
96 | AttributeError("This is an exception") # PLW0133
97 | MyError("This is an exception") # PLW0133
98 | MySubError("This is an exception") # PLW0133
- MyValueError("This is an exception") # PLW0133
99 + raise MyValueError("This is an exception") # PLW0133
100 |
101 |
102 | # Test case 9: Useless exception statement in parentheses
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:105:5
|
103 | def func():
104 | (RuntimeError("This is an exception")) # PLW0133
105 | (MyError("This is an exception")) # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
106 | (MySubError("This is an exception")) # PLW0133
107 | (MyValueError("This is an exception")) # PLW0133
|
help: Add `raise` keyword
102 | # Test case 9: Useless exception statement in parentheses
103 | def func():
104 | (RuntimeError("This is an exception")) # PLW0133
- (MyError("This is an exception")) # PLW0133
105 + raise (MyError("This is an exception")) # PLW0133
106 | (MySubError("This is an exception")) # PLW0133
107 | (MyValueError("This is an exception")) # PLW0133
108 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:106:5
|
104 | (RuntimeError("This is an exception")) # PLW0133
105 | (MyError("This is an exception")) # PLW0133
106 | (MySubError("This is an exception")) # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
107 | (MyValueError("This is an exception")) # PLW0133
|
help: Add `raise` keyword
103 | def func():
104 | (RuntimeError("This is an exception")) # PLW0133
105 | (MyError("This is an exception")) # PLW0133
- (MySubError("This is an exception")) # PLW0133
106 + raise (MySubError("This is an exception")) # PLW0133
107 | (MyValueError("This is an exception")) # PLW0133
108 |
109 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:107:5
|
105 | (MyError("This is an exception")) # PLW0133
106 | (MySubError("This is an exception")) # PLW0133
107 | (MyValueError("This is an exception")) # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
104 | (RuntimeError("This is an exception")) # PLW0133
105 | (MyError("This is an exception")) # PLW0133
106 | (MySubError("This is an exception")) # PLW0133
- (MyValueError("This is an exception")) # PLW0133
107 + raise (MyValueError("This is an exception")) # PLW0133
108 |
109 |
110 | # Test case 10: Useless exception statement in continuation
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:113:12
|
111 | def func():
112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
|
help: Add `raise` keyword
110 | # Test case 10: Useless exception statement in continuation
111 | def func():
112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
- x = 1; (MyError("This is an exception")); y = 2 # PLW0133
113 + x = 1; raise (MyError("This is an exception")); y = 2 # PLW0133
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
116 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:114:12
|
112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
|
help: Add `raise` keyword
111 | def func():
112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
- x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
114 + x = 1; raise (MySubError("This is an exception")); y = 2 # PLW0133
115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
116 |
117 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:115:12
|
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
115 | x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
112 | x = 1; (RuntimeError("This is an exception")); y = 2 # PLW0133
113 | x = 1; (MyError("This is an exception")); y = 2 # PLW0133
114 | x = 1; (MySubError("This is an exception")); y = 2 # PLW0133
- x = 1; (MyValueError("This is an exception")); y = 2 # PLW0133
115 + x = 1; raise (MyValueError("This is an exception")); y = 2 # PLW0133
116 |
117 |
118 | # Test case 11: Useless warning statement
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:121:5
|
119 | def func():
120 | UserWarning("This is a user warning") # PLW0133
121 | MyUserWarning("This is a custom user warning") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
118 | # Test case 11: Useless warning statement
119 | def func():
120 | UserWarning("This is a user warning") # PLW0133
- MyUserWarning("This is a custom user warning") # PLW0133
121 + raise MyUserWarning("This is a custom user warning") # PLW0133
122 |
123 |
124 | # Test case 12: Useless exception statement at module level
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:131:1
|
129 | PythonFinalizationError("Added in Python 3.13") # PLW0133
130 |
131 | MyError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
132 |
133 | MySubError("This is an exception") # PLW0133
|
help: Add `raise` keyword
128 |
129 | PythonFinalizationError("Added in Python 3.13") # PLW0133
130 |
- MyError("This is an exception") # PLW0133
131 + raise MyError("This is an exception") # PLW0133
132 |
133 | MySubError("This is an exception") # PLW0133
134 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:133:1
|
131 | MyError("This is an exception") # PLW0133
132 |
133 | MySubError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
134 |
135 | MyValueError("This is an exception") # PLW0133
|
help: Add `raise` keyword
130 |
131 | MyError("This is an exception") # PLW0133
132 |
- MySubError("This is an exception") # PLW0133
133 + raise MySubError("This is an exception") # PLW0133
134 |
135 | MyValueError("This is an exception") # PLW0133
136 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:135:1
|
133 | MySubError("This is an exception") # PLW0133
134 |
135 | MyValueError("This is an exception") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
136 |
137 | UserWarning("This is a user warning") # PLW0133
|
help: Add `raise` keyword
132 |
133 | MySubError("This is an exception") # PLW0133
134 |
- MyValueError("This is an exception") # PLW0133
135 + raise MyValueError("This is an exception") # PLW0133
136 |
137 | UserWarning("This is a user warning") # PLW0133
138 |
note: This is an unsafe fix and may change runtime behavior
PLW0133 [*] Missing `raise` statement on exception
--> useless_exception_statement.py:139:1
|
137 | UserWarning("This is a user warning") # PLW0133
138 |
139 | MyUserWarning("This is a custom user warning") # PLW0133
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Add `raise` keyword
136 |
137 | UserWarning("This is a user warning") # PLW0133
138 |
- MyUserWarning("This is a custom user warning") # PLW0133
139 + raise MyUserWarning("This is a custom user warning") # PLW0133
140 |
141 |
142 | # Non-violation test cases: PLW0133
note: This is an unsafe fix and may change runtime behavior
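The PLW0133 cases above all share the same shape: an exception object is built as a bare expression statement and immediately discarded, and the suggested fix prepends `raise`. A minimal sketch of the pattern (the class name mirrors the fixture; the function is illustrative only):

```python
class MyError(Exception):
    """Custom exception, as in the fixture."""


def validate(value: int) -> None:
    if value < 0:
        MyError("negative value")  # PLW0133: the exception is created, then discarded
    if value > 100:
        raise MyError("value too large")  # OK: the exception is actually raised
```

The fix is marked unsafe because prepending `raise` changes control flow: code after the fixed statement no longer runs.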
View File
@ -305,6 +305,25 @@ mod tests {
Ok(()) Ok(())
} }
#[test]
fn range_suppressions() -> Result<()> {
assert_diagnostics_diff!(
Path::new("ruff/suppressions.py"),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
]),
&settings::LinterSettings::for_rules(vec![
Rule::UnusedVariable,
Rule::AmbiguousVariableName,
Rule::UnusedNOQA,
])
.with_preview_mode(),
);
Ok(())
}
#[test] #[test]
fn ruf100_0() -> Result<()> { fn ruf100_0() -> Result<()> {
let diagnostics = test_path( let diagnostics = test_path(
View File
@ -4,7 +4,7 @@ use ruff_macros::{ViolationMetadata, derive_message_formats};
use crate::AlwaysFixableViolation; use crate::AlwaysFixableViolation;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq, Default)]
pub(crate) struct UnusedCodes { pub(crate) struct UnusedCodes {
pub disabled: Vec<String>, pub disabled: Vec<String>,
pub duplicated: Vec<String>, pub duplicated: Vec<String>,
@ -12,6 +12,21 @@ pub(crate) struct UnusedCodes {
pub unmatched: Vec<String>, pub unmatched: Vec<String>,
} }
#[derive(Debug, PartialEq, Eq)]
pub(crate) enum UnusedNOQAKind {
Noqa,
Suppression,
}
impl UnusedNOQAKind {
fn as_str(&self) -> &str {
match self {
UnusedNOQAKind::Noqa => "`noqa` directive",
UnusedNOQAKind::Suppression => "suppression",
}
}
}
/// ## What it does /// ## What it does
/// Checks for `noqa` directives that are no longer applicable. /// Checks for `noqa` directives that are no longer applicable.
/// ///
@ -46,6 +61,7 @@ pub(crate) struct UnusedCodes {
#[violation_metadata(stable_since = "v0.0.155")] #[violation_metadata(stable_since = "v0.0.155")]
pub(crate) struct UnusedNOQA { pub(crate) struct UnusedNOQA {
pub codes: Option<UnusedCodes>, pub codes: Option<UnusedCodes>,
pub kind: UnusedNOQAKind,
} }
impl AlwaysFixableViolation for UnusedNOQA { impl AlwaysFixableViolation for UnusedNOQA {
@ -95,16 +111,20 @@ impl AlwaysFixableViolation for UnusedNOQA {
)); ));
} }
if codes_by_reason.is_empty() { if codes_by_reason.is_empty() {
"Unused `noqa` directive".to_string() format!("Unused {}", self.kind.as_str())
} else { } else {
format!("Unused `noqa` directive ({})", codes_by_reason.join("; ")) format!(
"Unused {} ({})",
self.kind.as_str(),
codes_by_reason.join("; ")
)
} }
} }
None => "Unused blanket `noqa` directive".to_string(), None => format!("Unused blanket {}", self.kind.as_str()),
} }
} }
fn fix_title(&self) -> String { fn fix_title(&self) -> String {
"Remove unused `noqa` directive".to_string() format!("Remove unused {}", self.kind.as_str())
} }
} }
View File
@ -0,0 +1,451 @@
---
source: crates/ruff_linter/src/rules/ruff/mod.rs
---
--- Linter settings ---
-linter.preview = disabled
+linter.preview = enabled
--- Summary ---
Removed: 14
Added: 11
--- Removed ---
E741 Ambiguous variable name: `I`
--> suppressions.py:4:5
|
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
4 | I = 1
| ^
5 | # ruff: enable[E741, F841]
|
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:4:5
|
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
4 | I = 1
| ^
5 | # ruff: enable[E741, F841]
|
help: Remove assignment to unused variable `I`
1 | def f():
2 | # These should both be ignored by the range suppression.
3 | # ruff: disable[E741, F841]
- I = 1
4 + pass
5 | # ruff: enable[E741, F841]
6 |
7 |
note: This is an unsafe fix and may change runtime behavior
E741 Ambiguous variable name: `I`
--> suppressions.py:12:5
|
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
12 | I = 1
| ^
|
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:12:5
|
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
12 | I = 1
| ^
|
help: Remove assignment to unused variable `I`
9 | # These should both be ignored by the implicit range suppression.
10 | # Should also generate an "unmatched suppression" warning.
11 | # ruff:disable[E741,F841]
- I = 1
12 + pass
13 |
14 |
15 | def f():
note: This is an unsafe fix and may change runtime behavior
E741 Ambiguous variable name: `I`
--> suppressions.py:26:5
|
24 | # the other logged to the user.
25 | # ruff: disable[E741]
26 | I = 1
| ^
27 | # ruff: enable[E741]
|
E741 Ambiguous variable name: `l`
--> suppressions.py:35:5
|
33 | # middle line should be completely silenced.
34 | # ruff: disable[E741]
35 | l = 0
| ^
36 | # ruff: disable[F841]
37 | O = 1
|
E741 Ambiguous variable name: `O`
--> suppressions.py:37:5
|
35 | l = 0
36 | # ruff: disable[F841]
37 | O = 1
| ^
38 | # ruff: enable[E741]
39 | I = 2
|
F841 [*] Local variable `O` is assigned to but never used
--> suppressions.py:37:5
|
35 | l = 0
36 | # ruff: disable[F841]
37 | O = 1
| ^
38 | # ruff: enable[E741]
39 | I = 2
|
help: Remove assignment to unused variable `O`
34 | # ruff: disable[E741]
35 | l = 0
36 | # ruff: disable[F841]
- O = 1
37 | # ruff: enable[E741]
38 | I = 2
39 | # ruff: enable[F841]
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:39:5
|
37 | O = 1
38 | # ruff: enable[E741]
39 | I = 2
| ^
40 | # ruff: enable[F841]
|
help: Remove assignment to unused variable `I`
36 | # ruff: disable[F841]
37 | O = 1
38 | # ruff: enable[E741]
- I = 2
39 | # ruff: enable[F841]
40 |
41 |
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `foo` is assigned to but never used
--> suppressions.py:62:5
|
60 | # TODO: Duplicate codes should be counted as duplicate, not unused
61 | # ruff: disable[F841, F841]
62 | foo = 0
| ^^^
|
help: Remove assignment to unused variable `foo`
59 | def f():
60 | # TODO: Duplicate codes should be counted as duplicate, not unused
61 | # ruff: disable[F841, F841]
- foo = 0
62 + pass
63 |
64 |
65 | def f():
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `foo` is assigned to but never used
--> suppressions.py:70:5
|
68 | # ruff: disable[F841]
69 | # ruff: disable[F841]
70 | foo = 0
| ^^^
|
help: Remove assignment to unused variable `foo`
67 | # and the other should trigger an unused suppression diagnostic
68 | # ruff: disable[F841]
69 | # ruff: disable[F841]
- foo = 0
70 + pass
71 |
72 |
73 | def f():
note: This is an unsafe fix and may change runtime behavior
F841 [*] Local variable `foo` is assigned to but never used
--> suppressions.py:76:5
|
74 | # Multiple codes but only one is used
75 | # ruff: disable[E741, F401, F841]
76 | foo = 0
| ^^^
|
help: Remove assignment to unused variable `foo`
73 | def f():
74 | # Multiple codes but only one is used
75 | # ruff: disable[E741, F401, F841]
- foo = 0
76 + pass
77 |
78 |
79 | def f():
note: This is an unsafe fix and may change runtime behavior
E741 Ambiguous variable name: `I`
--> suppressions.py:82:5
|
80 | # Multiple codes but only two are used
81 | # ruff: disable[E741, F401, F841]
82 | I = 0
| ^
|
F841 [*] Local variable `I` is assigned to but never used
--> suppressions.py:82:5
|
80 | # Multiple codes but only two are used
81 | # ruff: disable[E741, F401, F841]
82 | I = 0
| ^
|
help: Remove assignment to unused variable `I`
79 | def f():
80 | # Multiple codes but only two are used
81 | # ruff: disable[E741, F401, F841]
- I = 0
82 + pass
83 |
84 |
85 | def f():
note: This is an unsafe fix and may change runtime behavior
--- Added ---
RUF100 [*] Unused suppression (non-enabled: `E501`)
--> suppressions.py:46:5
|
44 | # Neither of these are ignored and warnings are
45 | # logged to user
46 | # ruff: disable[E501]
| ^^^^^^^^^^^^^^^^^^^^^
47 | I = 1
48 | # ruff: enable[E501]
|
help: Remove unused suppression
43 | def f():
44 | # Neither of these are ignored and warnings are
45 | # logged to user
- # ruff: disable[E501]
46 | I = 1
47 | # ruff: enable[E501]
48 |
RUF100 [*] Unused suppression (non-enabled: `E501`)
--> suppressions.py:48:5
|
46 | # ruff: disable[E501]
47 | I = 1
48 | # ruff: enable[E501]
| ^^^^^^^^^^^^^^^^^^^^
|
help: Remove unused suppression
45 | # logged to user
46 | # ruff: disable[E501]
47 | I = 1
- # ruff: enable[E501]
48 |
49 |
50 | def f():
RUF100 [*] Unused `noqa` directive (unused: `E741`, `F841`)
--> suppressions.py:55:12
|
53 | # and an unused noqa diagnostic should be logged.
54 | # ruff:disable[E741,F841]
55 | I = 1 # noqa: E741,F841
| ^^^^^^^^^^^^^^^^^
56 | # ruff:enable[E741,F841]
|
help: Remove unused `noqa` directive
52 | # These should both be ignored by the range suppression,
53 | # and an unused noqa diagnostic should be logged.
54 | # ruff:disable[E741,F841]
- I = 1 # noqa: E741,F841
55 + I = 1
56 | # ruff:enable[E741,F841]
57 |
58 |
RUF100 [*] Unused suppression (unused: `F841`)
--> suppressions.py:61:21
|
59 | def f():
60 | # TODO: Duplicate codes should be counted as duplicate, not unused
61 | # ruff: disable[F841, F841]
| ^^^^
62 | foo = 0
|
help: Remove unused suppression
58 |
59 | def f():
60 | # TODO: Duplicate codes should be counted as duplicate, not unused
- # ruff: disable[F841, F841]
61 + # ruff: disable[F841]
62 | foo = 0
63 |
64 |
RUF100 [*] Unused suppression (unused: `F841`)
--> suppressions.py:69:5
|
67 | # and the other should trigger an unused suppression diagnostic
68 | # ruff: disable[F841]
69 | # ruff: disable[F841]
| ^^^^^^^^^^^^^^^^^^^^^
70 | foo = 0
|
help: Remove unused suppression
66 | # Overlapping range suppressions, one should be marked as used,
67 | # and the other should trigger an unused suppression diagnostic
68 | # ruff: disable[F841]
- # ruff: disable[F841]
69 | foo = 0
70 |
71 |
RUF100 [*] Unused suppression (unused: `E741`)
--> suppressions.py:75:21
|
73 | def f():
74 | # Multiple codes but only one is used
75 | # ruff: disable[E741, F401, F841]
| ^^^^
76 | foo = 0
|
help: Remove unused suppression
72 |
73 | def f():
74 | # Multiple codes but only one is used
- # ruff: disable[E741, F401, F841]
75 + # ruff: disable[F401, F841]
76 | foo = 0
77 |
78 |
RUF100 [*] Unused suppression (non-enabled: `F401`)
--> suppressions.py:75:27
|
73 | def f():
74 | # Multiple codes but only one is used
75 | # ruff: disable[E741, F401, F841]
| ^^^^
76 | foo = 0
|
help: Remove unused suppression
72 |
73 | def f():
74 | # Multiple codes but only one is used
- # ruff: disable[E741, F401, F841]
75 + # ruff: disable[E741, F841]
76 | foo = 0
77 |
78 |
RUF100 [*] Unused suppression (non-enabled: `F401`)
--> suppressions.py:81:27
|
79 | def f():
80 | # Multiple codes but only two are used
81 | # ruff: disable[E741, F401, F841]
| ^^^^
82 | I = 0
|
help: Remove unused suppression
78 |
79 | def f():
80 | # Multiple codes but only two are used
- # ruff: disable[E741, F401, F841]
81 + # ruff: disable[E741, F841]
82 | I = 0
83 |
84 |
RUF100 [*] Unused suppression (unused: `E741`)
--> suppressions.py:87:21
|
85 | def f():
86 | # Multiple codes but none are used
87 | # ruff: disable[E741, F401, F841]
| ^^^^
88 | print("hello")
|
help: Remove unused suppression
84 |
85 | def f():
86 | # Multiple codes but none are used
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[F401, F841]
88 | print("hello")
RUF100 [*] Unused suppression (non-enabled: `F401`)
--> suppressions.py:87:27
|
85 | def f():
86 | # Multiple codes but none are used
87 | # ruff: disable[E741, F401, F841]
| ^^^^
88 | print("hello")
|
help: Remove unused suppression
84 |
85 | def f():
86 | # Multiple codes but none are used
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[E741, F841]
88 | print("hello")
RUF100 [*] Unused suppression (unused: `F841`)
--> suppressions.py:87:33
|
85 | def f():
86 | # Multiple codes but none are used
87 | # ruff: disable[E741, F401, F841]
| ^^^^
88 | print("hello")
|
help: Remove unused suppression
84 |
85 | def f():
86 | # Multiple codes but none are used
- # ruff: disable[E741, F401, F841]
87 + # ruff: disable[E741, F401]
88 | print("hello")
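The snapshot above exercises the new preview-only range suppressions (`# ruff: disable[...]` / `# ruff: enable[...]`) and the RUF100 variant that reports suppressions which never matched a diagnostic. A minimal sketch of the comment syntax, based on the fixture; the exact RUF100 wording (`unused` vs. `non-enabled`) depends on which rules are enabled, as the Added section shows:

```python
def f():
    # Both diagnostics below are silenced by the range suppression.
    # ruff: disable[E741, F841]
    I = 1  # would otherwise trigger E741 (ambiguous name) and F841 (unused variable)
    # ruff: enable[E741, F841]


def g():
    # E741 and F841 fire and are suppressed; F401 never fires inside the range,
    # so the suppression-flavored RUF100 reports it, and the fix removes only
    # that code from the comment (or deletes the comment if it was the only code).
    # ruff: disable[E741, F401, F841]
    l = 0
    # ruff: enable[E741, F401, F841]
```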
View File
@ -465,6 +465,12 @@ impl LinterSettings {
self self
} }
#[must_use]
pub fn with_preview_mode(mut self) -> Self {
self.preview = PreviewMode::Enabled;
self
}
/// Resolve the [`TargetVersion`] to use for linting. /// Resolve the [`TargetVersion`] to use for linting.
/// ///
/// This method respects the per-file version overrides in /// This method respects the per-file version overrides in

View File

@ -1,7 +1,10 @@
use compact_str::CompactString; use compact_str::CompactString;
use core::fmt; use core::fmt;
use ruff_db::diagnostic::Diagnostic;
use ruff_diagnostics::{Edit, Fix};
use ruff_python_ast::token::{TokenKind, Tokens}; use ruff_python_ast::token::{TokenKind, Tokens};
use ruff_python_ast::whitespace::indentation; use ruff_python_ast::whitespace::indentation;
use std::cell::Cell;
use std::{error::Error, fmt::Formatter}; use std::{error::Error, fmt::Formatter};
use thiserror::Error; use thiserror::Error;
@ -9,7 +12,14 @@ use ruff_python_trivia::Cursor;
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice}; use ruff_text_size::{Ranged, TextLen, TextRange, TextSize, TextSlice};
use smallvec::{SmallVec, smallvec}; use smallvec::{SmallVec, smallvec};
#[allow(unused)] use crate::Locator;
use crate::checkers::ast::LintContext;
use crate::codes::Rule;
use crate::fix::edits::delete_comment;
use crate::preview::is_range_suppressions_enabled;
use crate::rules::ruff::rules::{UnusedCodes, UnusedNOQA, UnusedNOQAKind};
use crate::settings::LinterSettings;
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
enum SuppressionAction { enum SuppressionAction {
Disable, Disable,
@ -31,7 +41,6 @@ pub(crate) struct SuppressionComment {
reason: TextRange, reason: TextRange,
} }
#[allow(unused)]
impl SuppressionComment { impl SuppressionComment {
/// Return the suppressed codes as strings /// Return the suppressed codes as strings
fn codes_as_str<'src>(&self, source: &'src str) -> impl Iterator<Item = &'src str> { fn codes_as_str<'src>(&self, source: &'src str) -> impl Iterator<Item = &'src str> {
@ -48,7 +57,6 @@ pub(crate) struct PendingSuppressionComment<'a> {
comment: SuppressionComment, comment: SuppressionComment,
} }
#[allow(unused)]
impl PendingSuppressionComment<'_> { impl PendingSuppressionComment<'_> {
/// Whether the comment "matches" another comment, based on indentation and suppressed codes /// Whether the comment "matches" another comment, based on indentation and suppressed codes
/// Expects a "forward search" for matches, ie, will only match if the current comment is a /// Expects a "forward search" for matches, ie, will only match if the current comment is a
@ -64,8 +72,7 @@ impl PendingSuppressionComment<'_> {
} }
} }
#[allow(unused)] #[derive(Debug)]
#[derive(Clone, Debug)]
pub(crate) struct Suppression { pub(crate) struct Suppression {
/// The lint code being suppressed /// The lint code being suppressed
code: CompactString, code: CompactString,
@ -75,9 +82,11 @@ pub(crate) struct Suppression {
/// Any comments associated with the suppression /// Any comments associated with the suppression
comments: SmallVec<[SuppressionComment; 2]>, comments: SmallVec<[SuppressionComment; 2]>,
/// Whether this suppression actually suppressed a diagnostic
used: Cell<bool>,
} }
#[allow(unused)]
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub(crate) enum InvalidSuppressionKind { pub(crate) enum InvalidSuppressionKind {
/// Trailing suppression not supported /// Trailing suppression not supported
@ -98,8 +107,8 @@ pub(crate) struct InvalidSuppression {
} }
#[allow(unused)] #[allow(unused)]
#[derive(Debug)] #[derive(Debug, Default)]
pub(crate) struct Suppressions { pub struct Suppressions {
/// Valid suppression ranges with associated comments /// Valid suppression ranges with associated comments
valid: Vec<Suppression>, valid: Vec<Suppression>,
@ -110,11 +119,121 @@ pub(crate) struct Suppressions {
errors: Vec<ParseError>, errors: Vec<ParseError>,
} }
#[allow(unused)]
impl Suppressions { impl Suppressions {
pub(crate) fn from_tokens(source: &str, tokens: &Tokens) -> Suppressions { pub fn from_tokens(settings: &LinterSettings, source: &str, tokens: &Tokens) -> Suppressions {
if is_range_suppressions_enabled(settings) {
let builder = SuppressionsBuilder::new(source); let builder = SuppressionsBuilder::new(source);
builder.load_from_tokens(tokens) builder.load_from_tokens(tokens)
} else {
Suppressions::default()
}
}
pub(crate) fn is_empty(&self) -> bool {
self.valid.is_empty()
}
/// Check if a diagnostic is suppressed by any known range suppressions
pub(crate) fn check_diagnostic(&self, diagnostic: &Diagnostic) -> bool {
if self.valid.is_empty() {
return false;
}
let Some(code) = diagnostic.secondary_code() else {
return false;
};
let Some(span) = diagnostic.primary_span() else {
return false;
};
let Some(range) = span.range() else {
return false;
};
for suppression in &self.valid {
if *code == suppression.code.as_str() && suppression.range.contains_range(range) {
suppression.used.set(true);
return true;
}
}
false
}
pub(crate) fn check_suppressions(&self, context: &LintContext, locator: &Locator) {
if !context.any_rule_enabled(&[Rule::UnusedNOQA, Rule::InvalidRuleCode]) {
return;
}
let unused = self
.valid
.iter()
.filter(|suppression| !suppression.used.get());
for suppression in unused {
let Ok(rule) = Rule::from_code(&suppression.code) else {
continue; // TODO: invalid code
};
for comment in &suppression.comments {
let mut range = comment.range;
let edit = if comment.codes.len() == 1 {
delete_comment(comment.range, locator)
} else {
let code_index = comment
.codes
.iter()
.position(|range| locator.slice(range) == suppression.code)
.unwrap();
range = comment.codes[code_index];
let code_range = if code_index < (comment.codes.len() - 1) {
TextRange::new(
comment.codes[code_index].start(),
comment.codes[code_index + 1].start(),
)
} else {
TextRange::new(
comment.codes[code_index - 1].end(),
comment.codes[code_index].end(),
)
};
Edit::range_deletion(code_range)
};
let codes = if context.is_rule_enabled(rule) {
UnusedCodes {
unmatched: vec![suppression.code.to_string()],
..Default::default()
}
} else {
UnusedCodes {
disabled: vec![suppression.code.to_string()],
..Default::default()
}
};
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: Some(codes),
kind: UnusedNOQAKind::Suppression,
},
range,
);
diagnostic.set_fix(Fix::safe_edit(edit));
}
}
for error in self
.errors
.iter()
.filter(|error| error.kind == ParseErrorKind::MissingCodes)
{
let mut diagnostic = context.report_diagnostic(
UnusedNOQA {
codes: Some(UnusedCodes::default()),
kind: UnusedNOQAKind::Suppression,
},
error.range,
);
diagnostic.set_fix(Fix::safe_edit(delete_comment(error.range, locator)));
}
} }
} }
@ -240,6 +359,7 @@ impl<'a> SuppressionsBuilder<'a> {
code: code.into(), code: code.into(),
range: combined_range, range: combined_range,
comments: smallvec![comment.comment.clone(), other.comment.clone()], comments: smallvec![comment.comment.clone(), other.comment.clone()],
used: false.into(),
}); });
} }
@ -256,6 +376,7 @@ impl<'a> SuppressionsBuilder<'a> {
code: code.into(), code: code.into(),
range: implicit_range, range: implicit_range,
comments: smallvec![comment.comment.clone()], comments: smallvec![comment.comment.clone()],
used: false.into(),
}); });
} }
self.pending.remove(comment_index); self.pending.remove(comment_index);
@ -457,9 +578,12 @@ mod tests {
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::{TextRange, TextSize};
use similar::DiffableStr; use similar::DiffableStr;
use crate::suppression::{ use crate::{
settings::LinterSettings,
suppression::{
InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment, InvalidSuppression, ParseError, Suppression, SuppressionAction, SuppressionComment,
SuppressionParser, Suppressions, SuppressionParser, Suppressions,
},
}; };
#[test] #[test]
@ -1376,7 +1500,11 @@ def bar():
/// Parse all suppressions and errors in a module for testing /// Parse all suppressions and errors in a module for testing
fn debug(source: &'_ str) -> DebugSuppressions<'_> { fn debug(source: &'_ str) -> DebugSuppressions<'_> {
let parsed = parse(source, ParseOptions::from(Mode::Module)).unwrap(); let parsed = parse(source, ParseOptions::from(Mode::Module)).unwrap();
let suppressions = Suppressions::from_tokens(source, parsed.tokens()); let suppressions = Suppressions::from_tokens(
&LinterSettings::default().with_preview_mode(),
source,
parsed.tokens(),
);
DebugSuppressions { DebugSuppressions {
source, source,
suppressions, suppressions,
View File
@ -32,6 +32,7 @@ use crate::packaging::detect_package_root;
use crate::settings::types::UnsafeFixes; use crate::settings::types::UnsafeFixes;
use crate::settings::{LinterSettings, flags}; use crate::settings::{LinterSettings, flags};
use crate::source_kind::SourceKind; use crate::source_kind::SourceKind;
use crate::suppression::Suppressions;
use crate::{Applicability, FixAvailability}; use crate::{Applicability, FixAvailability};
use crate::{Locator, directives}; use crate::{Locator, directives};
@ -234,6 +235,7 @@ pub(crate) fn test_contents<'a>(
&locator, &locator,
&indexer, &indexer,
); );
let suppressions = Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let messages = check_path( let messages = check_path(
path, path,
path.parent() path.parent()
@ -249,6 +251,7 @@ pub(crate) fn test_contents<'a>(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
let source_has_errors = parsed.has_invalid_syntax(); let source_has_errors = parsed.has_invalid_syntax();
@ -299,6 +302,8 @@ pub(crate) fn test_contents<'a>(
&indexer, &indexer,
); );
let suppressions =
Suppressions::from_tokens(settings, locator.contents(), parsed.tokens());
let fixed_messages = check_path( let fixed_messages = check_path(
path, path,
None, None,
@ -312,6 +317,7 @@ pub(crate) fn test_contents<'a>(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
if parsed.has_invalid_syntax() && !source_has_errors { if parsed.has_invalid_syntax() && !source_has_errors {
View File
@ -154,9 +154,7 @@ impl Tokens {
// the tokens which is valid as well. // the tokens which is valid as well.
assert!( assert!(
offset >= last.end(), offset >= last.end(),
"Offset {:?} is inside a token range {:?}", "Offset {offset:?} is inside token `{last:?}`",
offset,
last.range()
); );
} }
before before
@ -181,9 +179,7 @@ impl Tokens {
// the tokens which is valid as well. // the tokens which is valid as well.
assert!( assert!(
offset <= first.start(), offset <= first.start(),
"Offset {:?} is inside a token range {:?}", "Offset {offset:?} is inside token `{first:?}`",
offset,
first.range()
); );
} }
@ -391,7 +387,7 @@ mod tests {
} }
#[test] #[test]
#[should_panic(expected = "Offset 5 is inside a token range 4..7")] #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")]
fn tokens_after_offset_inside_token() { fn tokens_after_offset_inside_token() {
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
tokens.after(TextSize::new(5)); tokens.after(TextSize::new(5));
@ -453,7 +449,7 @@ mod tests {
} }
#[test] #[test]
#[should_panic(expected = "Offset 5 is inside a token range 4..7")] #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")]
fn tokens_before_offset_inside_token() { fn tokens_before_offset_inside_token() {
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
tokens.before(TextSize::new(5)); tokens.before(TextSize::new(5));
@ -505,14 +501,14 @@ mod tests {
} }
#[test] #[test]
#[should_panic(expected = "Offset 5 is inside a token range 4..7")] #[should_panic(expected = "Offset 5 is inside token `Name 4..7`")]
fn tokens_in_range_start_offset_inside_token() { fn tokens_in_range_start_offset_inside_token() {
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
tokens.in_range(TextRange::new(5.into(), 10.into())); tokens.in_range(TextRange::new(5.into(), 10.into()));
} }
#[test] #[test]
#[should_panic(expected = "Offset 6 is inside a token range 4..7")] #[should_panic(expected = "Offset 6 is inside token `Name 4..7`")]
fn tokens_in_range_end_offset_inside_token() { fn tokens_in_range_end_offset_inside_token() {
let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter()); let tokens = new_tokens(TEST_CASE_WITH_GAP.into_iter());
tokens.in_range(TextRange::new(0.into(), 6.into())); tokens.in_range(TextRange::new(0.into(), 6.into()));
View File
@ -703,3 +703,25 @@ transform = lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).betwe
# comment 4 # comment 4
1 1
) )
(
lambda
* # comment 2
x:
x
)
(
lambda # comment 1
* # comment 2
x:
x
)
(
lambda # comment 1
y,
* # comment 2
x:
x
)
View File
@ -1868,7 +1868,7 @@ fn handle_lambda_comment<'a>(
_preview: PreviewMode, _preview: PreviewMode,
) -> CommentPlacement<'a> { ) -> CommentPlacement<'a> {
if let Some(parameters) = lambda.parameters.as_deref() { if let Some(parameters) = lambda.parameters.as_deref() {
// Comments between the `lambda` and the parameters are dangling on the lambda: // End-of-line comments between the `lambda` and the parameters are dangling on the lambda:
// ```python // ```python
// ( // (
// lambda # comment // lambda # comment
@ -1876,8 +1876,24 @@ fn handle_lambda_comment<'a>(
// y // y
// ) // )
// ``` // ```
//
// But own-line comments are leading on the first parameter, if it exists:
// ```python
// (
// lambda
// # comment
// x:
// y
// )
// ```
if comment.start() < parameters.start() { if comment.start() < parameters.start() {
return CommentPlacement::dangling(comment.enclosing_node(), comment); return if let Some(first) = parameters.iter().next()
&& comment.line_position().is_own_line()
{
CommentPlacement::leading(first.as_parameter(), comment)
} else {
CommentPlacement::dangling(comment.enclosing_node(), comment)
};
} }
// Comments between the parameters and the body are dangling on the lambda: // Comments between the parameters and the body are dangling on the lambda:

View File
let (dangling_before_parameters, dangling_after_parameters) = dangling let (dangling_before_parameters, dangling_after_parameters) = dangling
.split_at(dangling.partition_point(|comment| comment.end() < parameters.start())); .split_at(dangling.partition_point(|comment| comment.end() < parameters.start()));
let (end_of_line_lambda_keyword_comments, leading_parameter_comments) = if preview { if dangling_before_parameters.is_empty() {
dangling_before_parameters.split_at( // If the first parameter has a leading comment, insert a hard line break. This
dangling_before_parameters // comment is associated as a leading comment on the first parameter:
.iter() //
.position(|comment| comment.line_position().is_own_line()) // ```py
.unwrap_or(dangling_before_parameters.len()), // (
) // lambda
} else { // * # comment
([].as_slice(), dangling_before_parameters) // x:
}; // x
// )
// To prevent an instability in cases like: // ```
//
// so a hard line break is needed to avoid formatting it like:
//
// ```py
// (
// lambda # comment
// *x: x
// )
// ```
//
// which is unstable because it's missing the second space before the comment.
//
// Inserting the line break causes it to format like:
//
// ```py
// (
// lambda
// # comment
// *x :x
// )
// ```
//
// which is also consistent with the formatting in the presence of an actual
// dangling comment on the lambda:
// //
// ```py // ```py
// ( // (
// lambda # comment 1 // lambda # comment 1
// * # comment 2 // * # comment 2
// x: # comment 3 // x:
// x // x
// ) // )
// ``` // ```
// //
// `# comment 1` and `# comment 2` also become dangling comments on the lambda, so // formats to:
// in preview, we include these in `dangling_after_parameters`, as long as the
// parameter list doesn't include any additional comments.
//
// This ends up formatted as:
//
// ```py
// (
// lambda *x: ( # comment 1 # comment 2 # comment 3
// x
// )
// )
// ```
//
// instead of the stable formatting:
// //
// ```py // ```py
// ( // (
// lambda # comment 1 // lambda # comment 1
// *x: # comment 2 // # comment 2
// # comment 3 // *x: x
// x
// ) // )
// ``` // ```
if parameters
trailing_comments(end_of_line_lambda_keyword_comments).fmt(f)?; .iter()
.next()
if leading_parameter_comments.is_empty() && !comments.has_leading(parameters) { .is_some_and(|parameter| comments.has_leading(parameter.as_parameter()))
write!(f, [space()])?; {
hard_line_break().fmt(f)?;
} else { } else {
write!( write!(f, [space()])?;
f, }
[ } else {
hard_line_break(), write!(f, [dangling_comments(dangling_before_parameters)])?;
leading_comments(leading_parameter_comments)
]
)?;
} }
// Try to keep the parameters on a single line, unless there are intervening comments. // Try to keep the parameters on a single line, unless there are intervening comments.
View File
@ -709,6 +709,28 @@ transform = lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).betwe
# comment 4 # comment 4
1 1
) )
(
lambda
* # comment 2
x:
x
)
(
lambda # comment 1
* # comment 2
x:
x
)
(
lambda # comment 1
y,
* # comment 2
x:
x
)
``` ```
## Output ## Output
@ -844,7 +866,8 @@ lambda a, /, c: a
( (
lambda lambda
# comment # comment
*x, **y: x *x,
**y: x
) )
( (
@ -857,8 +880,7 @@ lambda a, /, c: a
) )
( (
lambda lambda # comment 1
# comment 1
*x: # comment 2 *x: # comment 2
# comment 3 # comment 3
x x
@ -906,8 +928,7 @@ lambda: ( # comment
) )
( (
lambda lambda # 1
# 1
# 2 # 2
x: # 3 x: # 3
# 4 # 4
@ -917,8 +938,7 @@ lambda: ( # comment
) )
( (
lambda lambda # 1
# 1
# 2 # 2
x, # 3 x, # 3
# 4 # 4
@ -1356,35 +1376,32 @@ x = (
( (
lambda lambda
# comment # comment
*args,
**kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda # comment
*args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs)
+ 1 + 1
) )
( (
lambda lambda # comment 1
# comment
*args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs)
+ 1
)
(
lambda
# comment 1
# comment 2 # comment 2
*args,
**kwargs: # comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda # comment 1
*args, **kwargs: # comment 3 *args, **kwargs: # comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
) )
( (
lambda lambda # 1
# comment 1
*args, **kwargs: # comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda
# 1
# 2 # 2
left, # 3 left, # 3
# 4 # 4
@ -1461,6 +1478,23 @@ transform = (
# comment 4 # comment 4
1 1
) )
(
lambda *x: # comment 2
x
)
(
lambda # comment 1
*x: # comment 2
x
)
(
lambda # comment 1
y, *x: # comment 2
x
)
``` ```
@ -1556,7 +1590,7 @@ transform = (
) )
( (
@@ -135,18 +119,17 @@ @@ -136,17 +120,18 @@
( (
lambda lambda
# comment 1 # comment 1
@ -1572,18 +1606,17 @@ transform = (
) )
( (
- lambda lambda # comment 1
- # comment 1
- *x: # comment 2 - *x: # comment 2
- # comment 3 - # comment 3
- x - x
+ lambda *x: ( # comment 1 # comment 2 # comment 3 + *x: ( # comment 2 # comment 3
+ x + x
+ ) + )
) )
lambda *x: x lambda *x: x
@@ -162,54 +145,58 @@ @@ -162,30 +147,34 @@
) )
( (
@ -1630,11 +1663,9 @@ transform = (
x x
) )
) )
@@ -193,11 +182,12 @@
( (
- lambda lambda # 1
- # 1
+ lambda # 1
# 2 # 2
- x: # 3 - x: # 3
- # 4 - # 4
@ -1650,9 +1681,7 @@ transform = (
) )
( (
- lambda @@ -205,9 +195,10 @@
- # 1
+ lambda # 1
# 2 # 2
x, # 3 x, # 3
# 4 # 4
@ -1666,7 +1695,7 @@ transform = (
) )
( (
@@ -221,71 +208,79 @@ @@ -219,71 +210,79 @@
# Leading # Leading
lambda x: ( lambda x: (
@ -1805,7 +1834,7 @@ transform = (
# Regression tests for https://github.com/astral-sh/ruff/issues/8179 # Regression tests for https://github.com/astral-sh/ruff/issues/8179
@@ -294,9 +289,9 @@ @@ -292,9 +291,9 @@
c, c,
d, d,
e, e,
@ -1818,7 +1847,7 @@ transform = (
) )
@@ -305,15 +300,9 @@ @@ -303,15 +302,9 @@
c, c,
d, d,
e, e,
@ -1837,7 +1866,7 @@ transform = (
g=10, g=10,
) )
@@ -323,9 +312,9 @@ @@ -321,9 +314,9 @@
c, c,
d, d,
e, e,
@ -1850,7 +1879,7 @@ transform = (
) )
@@ -341,9 +330,9 @@ @@ -339,9 +332,9 @@
class C: class C:
function_dict: Dict[Text, Callable[[CRFToken], Any]] = { function_dict: Dict[Text, Callable[[CRFToken], Any]] = {
@ -1863,7 +1892,7 @@ transform = (
} }
@@ -355,42 +344,40 @@ @@ -353,42 +346,40 @@
def foo(): def foo():
if True: if True:
if True: if True:
@ -1922,7 +1951,7 @@ transform = (
CREATE TABLE {table} AS CREATE TABLE {table} AS
SELECT ROW_NUMBER() OVER () AS id, {var} SELECT ROW_NUMBER() OVER () AS id, {var}
FROM ( FROM (
@@ -405,18 +392,19 @@ @@ -403,18 +394,19 @@
long_assignment_target.with_attribute.and_a_slice[with_an_index] = ( long_assignment_target.with_attribute.and_a_slice[with_an_index] = (
# 1 # 1
# 2 # 2
@ -1949,7 +1978,7 @@ transform = (
) )
very_long_variable_name_x, very_long_variable_name_y = ( very_long_variable_name_x, very_long_variable_name_y = (
@@ -424,8 +412,8 @@ @@ -422,8 +414,8 @@
lambda b: b * another_very_long_expression_here, lambda b: b * another_very_long_expression_here,
) )
@ -1960,7 +1989,7 @@ transform = (
x, more_args, additional_parameters x, more_args, additional_parameters
) )
) )
@@ -461,12 +449,12 @@ @@ -459,12 +451,12 @@
[ [
# Not fluent # Not fluent
param( param(
@ -1975,7 +2004,7 @@ transform = (
), ),
param( param(
lambda left, right: ( lambda left, right: (
@@ -475,9 +463,9 @@ @@ -473,9 +465,9 @@
), ),
param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date)), param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date)),
param( param(
@ -1988,7 +2017,7 @@ transform = (
), ),
# This is too long on one line in the lambda body and gets wrapped # This is too long on one line in the lambda body and gets wrapped
# inside the body. # inside the body.
@@ -511,16 +499,18 @@ @@ -509,16 +501,18 @@
] ]
# adds parentheses around the body # adds parentheses around the body
@ -2010,7 +2039,7 @@ transform = (
lambda x, y, z: ( lambda x, y, z: (
x + y + z x + y + z
@@ -531,7 +521,7 @@ @@ -529,7 +523,7 @@
x + y + z # trailing eol body x + y + z # trailing eol body
) )
@ -2019,7 +2048,7 @@ transform = (
lambda x, y, z: ( lambda x, y, z: (
# leading body # leading body
@@ -543,21 +533,23 @@ @@ -541,21 +535,23 @@
) )
( (
@ -2053,7 +2082,7 @@ transform = (
# dangling header comment # dangling header comment
source_bucket source_bucket
if name == source_bucket_name if name == source_bucket_name
@@ -565,8 +557,7 @@ @@ -563,8 +559,7 @@
) )
( (
@ -2063,7 +2092,7 @@ transform = (
source_bucket source_bucket
if name == source_bucket_name if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name) else storage.Bucket(mock_service, destination_bucket_name)
@@ -574,61 +565,70 @@ @@ -572,61 +567,70 @@
) )
( (
@ -2166,10 +2195,10 @@ transform = (
) )
( (
@@ -641,51 +641,50 @@ @@ -645,22 +649,25 @@
( (
lambda lambda # comment
# comment
- *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) - *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs)
- + 1 - + 1
+ *args, **kwargs: ( + *args, **kwargs: (
@ -2178,20 +2207,18 @@ transform = (
) )
( (
- lambda lambda # comment 1
- # comment # comment 2
- *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) *args,
- + 1 - **kwargs: # comment 3
+ lambda *args, **kwargs: ( # comment - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
+ **kwargs: ( # comment 3
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1 + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
+ ) + )
) )
( (
- lambda lambda # comment 1
- # comment 1
+ lambda # comment 1
# comment 2
- *args, **kwargs: # comment 3 - *args, **kwargs: # comment 3
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1 - aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
+ *args, **kwargs: ( # comment 3 + *args, **kwargs: ( # comment 3
@ -2200,19 +2227,7 @@ transform = (
) )
( (
- lambda @@ -668,19 +675,20 @@
- # comment 1
- *args, **kwargs: # comment 3
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
+ lambda *args, **kwargs: ( # comment 1 # comment 3
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
+ )
)
(
- lambda
- # 1
+ lambda # 1
# 2 # 2
left, # 3 left, # 3
# 4 # 4
@ -2243,7 +2258,7 @@ transform = (
) )
) )
) )
@@ -703,46 +702,50 @@ @@ -698,63 +706,70 @@
foo( foo(
lambda from_ts, # but still wrap the body if it gets too long lambda from_ts, # but still wrap the body if it gets too long
to_ts, to_ts,
@ -2316,6 +2331,32 @@ transform = (
+ # comment 3 + # comment 3
+ # comment 4 + # comment 4
+ 1 + 1
+ )
)
(
- lambda *x: # comment 2
- x
+ lambda *x: ( # comment 2
+ x
+ )
)
(
lambda # comment 1
- *x: # comment 2
- x
+ *x: ( # comment 2
+ x
+ )
)
(
lambda # comment 1
- y, *x: # comment 2
- x
+ y, *x: ( # comment 2
+ x
+ ) + )
) )
``` ```
View File
@ -326,7 +326,15 @@ pub fn is_immutable_return_type(qualified_name: &[&str]) -> bool {
| ["re", "compile"] | ["re", "compile"]
| [ | [
"", "",
"bool" | "bytes" | "complex" | "float" | "frozenset" | "int" | "str" | "tuple" "bool"
| "bytes"
| "complex"
| "float"
| "frozenset"
| "int"
| "str"
| "tuple"
| "slice"
] ]
) )
} }
View File
@ -20,6 +20,7 @@ use ruff_linter::{
packaging::detect_package_root, packaging::detect_package_root,
settings::flags, settings::flags,
source_kind::SourceKind, source_kind::SourceKind,
suppression::Suppressions,
}; };
use ruff_notebook::Notebook; use ruff_notebook::Notebook;
use ruff_python_codegen::Stylist; use ruff_python_codegen::Stylist;
@ -118,6 +119,10 @@ pub(crate) fn check(
// Extract the `# noqa` and `# isort: skip` directives from the source. // Extract the `# noqa` and `# isort: skip` directives from the source.
let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer); let directives = extract_directives(parsed.tokens(), Flags::all(), &locator, &indexer);
// Parse range suppression comments
let suppressions =
Suppressions::from_tokens(&settings.linter, locator.contents(), parsed.tokens());
// Generate checks. // Generate checks.
let diagnostics = check_path( let diagnostics = check_path(
&document_path, &document_path,
@ -132,6 +137,7 @@ pub(crate) fn check(
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
let noqa_edits = generate_noqa_edits( let noqa_edits = generate_noqa_edits(
@ -142,6 +148,7 @@ pub(crate) fn check(
&settings.linter.external, &settings.linter.external,
&directives.noqa_line_for, &directives.noqa_line_for,
stylist.line_ending(), stylist.line_ending(),
&suppressions,
); );
let mut diagnostics_map = DiagnosticsMap::default(); let mut diagnostics_map = DiagnosticsMap::default();

View File

@ -33,26 +33,29 @@ impl LineIndex {
line_starts.push(TextSize::default()); line_starts.push(TextSize::default());
let bytes = text.as_bytes(); let bytes = text.as_bytes();
let mut utf8 = false;
assert!(u32::try_from(bytes.len()).is_ok()); assert!(u32::try_from(bytes.len()).is_ok());
for (i, byte) in bytes.iter().enumerate() { for i in memchr::memchr2_iter(b'\n', b'\r', bytes) {
utf8 |= !byte.is_ascii(); // Skip `\r` in `\r\n` sequences (only count the `\n`).
if bytes[i] == b'\r' && bytes.get(i + 1) == Some(&b'\n') {
match byte { continue;
// Only track one line break for `\r\n`. }
b'\r' if bytes.get(i + 1) == Some(&b'\n') => continue,
b'\n' | b'\r' => {
// SAFETY: Assertion above guarantees `i <= u32::MAX` // SAFETY: Assertion above guarantees `i <= u32::MAX`
#[expect(clippy::cast_possible_truncation)] #[expect(clippy::cast_possible_truncation)]
line_starts.push(TextSize::from(i as u32) + TextSize::from(1)); line_starts.push(TextSize::from(i as u32) + TextSize::from(1));
} }
_ => {}
} // Determine whether the source text is ASCII.
//
// Empirically, this simple loop is auto-vectorized by LLVM and benchmarks faster than both
// `str::is_ascii()` and hand-written SIMD.
let mut has_non_ascii = false;
for byte in bytes {
has_non_ascii |= !byte.is_ascii();
} }
let kind = if utf8 { let kind = if has_non_ascii {
IndexKind::Utf8 IndexKind::Utf8
} else { } else {
IndexKind::Ascii IndexKind::Ascii
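The replacement above computes line starts with `memchr::memchr2_iter` and detects non-ASCII bytes in a separate pass. A minimal standalone sketch of the same technique (assuming the `memchr` crate as a dependency; this is not ruff's actual `LineIndex` implementation):

```rust
/// Sketch: collect line-start offsets via `memchr2_iter`, then scan for
/// non-ASCII bytes separately (the simple loop tends to auto-vectorize).
fn line_starts(text: &str) -> (Vec<usize>, bool) {
    let bytes = text.as_bytes();
    let mut starts = vec![0];
    for i in memchr::memchr2_iter(b'\n', b'\r', bytes) {
        // Treat `\r\n` as a single line break: skip the `\r`, count the `\n`.
        if bytes[i] == b'\r' && bytes.get(i + 1) == Some(&b'\n') {
            continue;
        }
        starts.push(i + 1);
    }
    let mut has_non_ascii = false;
    for byte in bytes {
        has_non_ascii |= !byte.is_ascii();
    }
    (starts, has_non_ascii)
}

fn main() {
    let (starts, non_ascii) = line_starts("ab\r\ncd\né\n");
    assert_eq!(starts, vec![0, 4, 7, 10]);
    assert!(non_ascii);
}
```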

View File

@ -2,6 +2,7 @@ use std::path::Path;
use js_sys::Error; use js_sys::Error;
use ruff_linter::settings::types::PythonVersion; use ruff_linter::settings::types::PythonVersion;
use ruff_linter::suppression::Suppressions;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use wasm_bindgen::prelude::*; use wasm_bindgen::prelude::*;
@ -212,6 +213,9 @@ impl Workspace {
&indexer, &indexer,
); );
let suppressions =
Suppressions::from_tokens(&self.settings.linter, locator.contents(), parsed.tokens());
// Generate checks. // Generate checks.
let diagnostics = check_path( let diagnostics = check_path(
Path::new("<filename>"), Path::new("<filename>"),
@ -226,6 +230,7 @@ impl Workspace {
source_type, source_type,
&parsed, &parsed,
target_version, target_version,
&suppressions,
); );
let source_code = locator.to_source_code(); let source_code = locator.to_source_code();

View File

@ -43,7 +43,7 @@ fn config_override_python_version() -> anyhow::Result<()> {
| |
2 | [tool.ty.environment] 2 | [tool.ty.environment]
3 | python-version = "3.11" 3 | python-version = "3.11"
| ^^^^^^ Python 3.11 assumed due to this configuration setting | ^^^^^^ Python version configuration
| |
info: rule `unresolved-attribute` is enabled by default info: rule `unresolved-attribute` is enabled by default
@ -143,7 +143,7 @@ fn config_file_annotation_showing_where_python_version_set_typing_error() -> any
), ),
])?; ])?;
assert_cmd_snapshot!(case.command(), @r###" assert_cmd_snapshot!(case.command(), @r#"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
@ -159,14 +159,14 @@ fn config_file_annotation_showing_where_python_version_set_typing_error() -> any
| |
2 | [tool.ty.environment] 2 | [tool.ty.environment]
3 | python-version = "3.8" 3 | python-version = "3.8"
| ^^^^^ Python 3.8 assumed due to this configuration setting | ^^^^^ Python version configuration
| |
info: rule `unresolved-reference` is enabled by default info: rule `unresolved-reference` is enabled by default
Found 1 diagnostic Found 1 diagnostic
----- stderr ----- ----- stderr -----
"###); "#);
assert_cmd_snapshot!(case.command().arg("--python-version=3.9"), @r###" assert_cmd_snapshot!(case.command().arg("--python-version=3.9"), @r###"
success: false success: false
@ -772,7 +772,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu
("test.py", "aiter"), ("test.py", "aiter"),
])?; ])?;
assert_cmd_snapshot!(case.command(), @r###" assert_cmd_snapshot!(case.command(), @r"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
@ -787,7 +787,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu
--> venv/pyvenv.cfg:2:11 --> venv/pyvenv.cfg:2:11
| |
2 | version = 3.8 2 | version = 3.8
| ^^^ Python version inferred from virtual environment metadata file | ^^^ Virtual environment metadata
3 | home = foo/bar/bin 3 | home = foo/bar/bin
| |
info: No Python version was specified on the command line or in a configuration file info: No Python version was specified on the command line or in a configuration file
@ -796,7 +796,7 @@ fn pyvenv_cfg_file_annotation_showing_where_python_version_set() -> anyhow::Resu
Found 1 diagnostic Found 1 diagnostic
----- stderr ----- ----- stderr -----
"###); ");
Ok(()) Ok(())
} }
@ -831,7 +831,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> {
("test.py", "aiter"), ("test.py", "aiter"),
])?; ])?;
assert_cmd_snapshot!(case.command(), @r###" assert_cmd_snapshot!(case.command(), @r"
success: false success: false
exit_code: 1 exit_code: 1
----- stdout ----- ----- stdout -----
@ -846,7 +846,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> {
--> venv/pyvenv.cfg:4:23 --> venv/pyvenv.cfg:4:23
| |
4 | version = 3.8 4 | version = 3.8
| ^^^ Python version inferred from virtual environment metadata file | ^^^ Virtual environment metadata
| |
info: No Python version was specified on the command line or in a configuration file info: No Python version was specified on the command line or in a configuration file
info: rule `unresolved-reference` is enabled by default info: rule `unresolved-reference` is enabled by default
@ -854,7 +854,7 @@ fn pyvenv_cfg_file_annotation_no_trailing_newline() -> anyhow::Result<()> {
Found 1 diagnostic Found 1 diagnostic
----- stderr ----- ----- stderr -----
"###); ");
Ok(()) Ok(())
} }
@ -898,7 +898,7 @@ fn config_file_annotation_showing_where_python_version_set_syntax_error() -> any
| |
2 | [project] 2 | [project]
3 | requires-python = ">=3.8" 3 | requires-python = ">=3.8"
| ^^^^^^^ Python 3.8 assumed due to this configuration setting | ^^^^^^^ Python version configuration
| |
Found 1 diagnostic Found 1 diagnostic
@ -1206,7 +1206,7 @@ fn defaults_to_a_new_python_version() -> anyhow::Result<()> {
| |
2 | [environment] 2 | [environment]
3 | python-version = "3.10" 3 | python-version = "3.10"
| ^^^^^^ Python 3.10 assumed due to this configuration setting | ^^^^^^ Python version configuration
4 | python-platform = "linux" 4 | python-platform = "linux"
| |
info: rule `unresolved-attribute` is enabled by default info: rule `unresolved-attribute` is enabled by default
@ -1225,7 +1225,7 @@ fn defaults_to_a_new_python_version() -> anyhow::Result<()> {
| |
2 | [environment] 2 | [environment]
3 | python-version = "3.10" 3 | python-version = "3.10"
| ^^^^^^ Python 3.10 assumed due to this configuration setting | ^^^^^^ Python version configuration
4 | python-platform = "linux" 4 | python-platform = "linux"
| |
info: rule `unresolved-import` is enabled by default info: rule `unresolved-import` is enabled by default

View File

@ -1,4 +1,7 @@
name,file,index,rank name,file,index,rank
auto-import-includes-modules,main.py,0,1
auto-import-includes-modules,main.py,1,7
auto-import-includes-modules,main.py,2,1
auto-import-skips-current-module,main.py,0,1 auto-import-skips-current-module,main.py,0,1
fstring-completions,main.py,0,1 fstring-completions,main.py,0,1
higher-level-symbols-preferred,main.py,0, higher-level-symbols-preferred,main.py,0,
@ -11,9 +14,9 @@ import-deprioritizes-type_check_only,main.py,2,1
import-deprioritizes-type_check_only,main.py,3,2 import-deprioritizes-type_check_only,main.py,3,2
import-deprioritizes-type_check_only,main.py,4,3 import-deprioritizes-type_check_only,main.py,4,3
import-keyword-completion,main.py,0,1 import-keyword-completion,main.py,0,1
internal-typeshed-hidden,main.py,0,4 internal-typeshed-hidden,main.py,0,2
none-completion,main.py,0,2 none-completion,main.py,0,2
numpy-array,main.py,0, numpy-array,main.py,0,159
numpy-array,main.py,1,1 numpy-array,main.py,1,1
object-attr-instance-methods,main.py,0,1 object-attr-instance-methods,main.py,0,1
object-attr-instance-methods,main.py,1,1 object-attr-instance-methods,main.py,1,1
@ -23,6 +26,6 @@ scope-existing-over-new-import,main.py,0,1
scope-prioritize-closer,main.py,0,2 scope-prioritize-closer,main.py,0,2
scope-simple-long-identifier,main.py,0,1 scope-simple-long-identifier,main.py,0,1
tstring-completions,main.py,0,1 tstring-completions,main.py,0,1
ty-extensions-lower-stdlib,main.py,0,8 ty-extensions-lower-stdlib,main.py,0,9
type-var-typing-over-ast,main.py,0,3 type-var-typing-over-ast,main.py,0,3
type-var-typing-over-ast,main.py,1,275 type-var-typing-over-ast,main.py,1,251

View File

@ -506,9 +506,21 @@ struct CompletionAnswer {
impl CompletionAnswer { impl CompletionAnswer {
/// Returns true when this answer matches the completion given. /// Returns true when this answer matches the completion given.
fn matches(&self, completion: &Completion) -> bool { fn matches(&self, completion: &Completion) -> bool {
if let Some(ref qualified) = completion.qualified {
if qualified.as_str() == self.qualified() {
return true;
}
}
self.symbol == completion.name.as_str() self.symbol == completion.name.as_str()
&& self.module.as_deref() == completion.module_name.map(ModuleName::as_str) && self.module.as_deref() == completion.module_name.map(ModuleName::as_str)
} }
fn qualified(&self) -> String {
self.module
.as_ref()
.map(|module| format!("{module}.{}", self.symbol))
.unwrap_or_else(|| self.symbol.clone())
}
} }
/// Copy the Python project from `src_dir` to `dst_dir`. /// Copy the Python project from `src_dir` to `dst_dir`.
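A simplified, self-contained mirror of the matching rule added above (the `Answer` and `Candidate` structs here are hypothetical stand-ins, not the benchmark harness's real types): an answer matches either on the completion's fully qualified name, or on the bare symbol plus module.

```rust
struct Answer {
    symbol: String,
    module: Option<String>,
}

struct Candidate {
    name: String,
    module_name: Option<String>,
    qualified: Option<String>,
}

impl Answer {
    fn qualified(&self) -> String {
        match &self.module {
            Some(module) => format!("{module}.{}", self.symbol),
            None => self.symbol.clone(),
        }
    }

    /// Match on the qualified name first, then fall back to symbol + module.
    fn matches(&self, candidate: &Candidate) -> bool {
        if candidate.qualified.as_deref() == Some(self.qualified().as_str()) {
            return true;
        }
        self.symbol == candidate.name && self.module == candidate.module_name
    }
}

fn main() {
    let answer = Answer {
        symbol: "abc".to_string(),
        module: Some("collections".to_string()),
    };
    let candidate = Candidate {
        name: "abc".to_string(),
        module_name: None,
        qualified: Some("collections.abc".to_string()),
    };
    assert!(answer.matches(&candidate));
}
```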

View File

@ -0,0 +1,2 @@
[settings]
auto-import = true

View File

@ -0,0 +1,3 @@
multiprocess<CURSOR: multiprocessing>
collect<CURSOR: collections>
collabc<CURSOR: collections.abc>

View File

@ -0,0 +1,5 @@
[project]
name = "test"
version = "0.1.0"
requires-python = ">=3.13"
dependencies = []

View File

@ -0,0 +1,8 @@
version = 1
revision = 3
requires-python = ">=3.13"
[[package]]
name = "test"
version = "0.1.0"
source = { virtual = "." }

View File

@ -2,7 +2,10 @@ use ruff_db::files::File;
use ty_project::Db; use ty_project::Db;
use ty_python_semantic::{Module, ModuleName, all_modules, resolve_real_shadowable_module}; use ty_python_semantic::{Module, ModuleName, all_modules, resolve_real_shadowable_module};
use crate::symbols::{QueryPattern, SymbolInfo, symbols_for_file_global_only}; use crate::{
SymbolKind,
symbols::{QueryPattern, SymbolInfo, symbols_for_file_global_only},
};
/// Get all symbols matching the query string. /// Get all symbols matching the query string.
/// ///
@ -36,18 +39,39 @@ pub fn all_symbols<'db>(
let Some(file) = module.file(&*db) else { let Some(file) = module.file(&*db) else {
continue; continue;
}; };
// By convention, modules starting with an underscore
// are generally considered unexported. However, we
// should consider first party modules fair game.
//
// Note that we apply this recursively. e.g.,
// `numpy._core.multiarray` is considered private
// because it's a child of `_core`.
if module.name(&*db).components().any(|c| c.starts_with('_'))
&& module
.search_path(&*db)
.is_none_or(|sp| !sp.is_first_party())
{
continue;
}
// TODO: also make it available in `TYPE_CHECKING` blocks // TODO: also make it available in `TYPE_CHECKING` blocks
// (we'd need https://github.com/astral-sh/ty/issues/1553 to do this well) // (we'd need https://github.com/astral-sh/ty/issues/1553 to do this well)
if !is_typing_extensions_available && module.name(&*db) == &typing_extensions { if !is_typing_extensions_available && module.name(&*db) == &typing_extensions {
continue; continue;
} }
s.spawn(move |_| { s.spawn(move |_| {
if query.is_match_symbol_name(module.name(&*db)) {
results.lock().unwrap().push(AllSymbolInfo {
symbol: None,
module,
file,
});
}
for (_, symbol) in symbols_for_file_global_only(&*db, file).search(query) { for (_, symbol) in symbols_for_file_global_only(&*db, file).search(query) {
// It seems like we could do better here than // It seems like we could do better here than
// locking `results` for every single symbol, // locking `results` for every single symbol,
// but this works pretty well as it is. // but this works pretty well as it is.
results.lock().unwrap().push(AllSymbolInfo { results.lock().unwrap().push(AllSymbolInfo {
symbol: symbol.to_owned(), symbol: Some(symbol.to_owned()),
module, module,
file, file,
}); });
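The underscore convention described in the comment above, as a standalone sketch (a hypothetical helper; ty's real check also consults the module's search path to decide whether it is first-party):

```rust
/// Hypothetical helper: a dotted module name is treated as private when any
/// component starts with `_`, unless the module is first-party.
fn is_private_module(dotted_name: &str, is_first_party: bool) -> bool {
    !is_first_party && dotted_name.split('.').any(|component| component.starts_with('_'))
}

fn main() {
    // `numpy._core.multiarray` is private because it is a child of `_core`.
    assert!(is_private_module("numpy._core.multiarray", false));
    assert!(!is_private_module("numpy.linalg", false));
    // First-party modules are fair game even with a leading underscore.
    assert!(!is_private_module("_internal.helpers", true));
}
```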
@ -59,8 +83,16 @@ pub fn all_symbols<'db>(
let mut results = results.into_inner().unwrap(); let mut results = results.into_inner().unwrap();
results.sort_by(|s1, s2| { results.sort_by(|s1, s2| {
let key1 = (&s1.symbol.name, s1.file.path(db).as_str()); let key1 = (
let key2 = (&s2.symbol.name, s2.file.path(db).as_str()); s1.name_in_file()
.unwrap_or_else(|| s1.module().name(db).as_str()),
s1.file.path(db).as_str(),
);
let key2 = (
s2.name_in_file()
.unwrap_or_else(|| s2.module().name(db).as_str()),
s2.file.path(db).as_str(),
);
key1.cmp(&key2) key1.cmp(&key2)
}); });
results results
@ -71,14 +103,53 @@ pub fn all_symbols<'db>(
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct AllSymbolInfo<'db> { pub struct AllSymbolInfo<'db> {
/// The symbol information. /// The symbol information.
pub symbol: SymbolInfo<'static>, ///
/// When absent, this implies the symbol is the module itself.
symbol: Option<SymbolInfo<'static>>,
/// The module containing the symbol. /// The module containing the symbol.
pub module: Module<'db>, module: Module<'db>,
/// The file containing the symbol. /// The file containing the symbol.
/// ///
/// This `File` is guaranteed to be the same /// This `File` is guaranteed to be the same
/// as the `File` underlying `module`. /// as the `File` underlying `module`.
pub file: File, file: File,
}
impl<'db> AllSymbolInfo<'db> {
/// Returns the name of this symbol as it exists in a file.
///
/// When absent, there is no concrete symbol defined in a
/// module; instead, this entry represents importing the module itself.
/// In this case, if the caller needs a symbol name, they
/// should use `AllSymbolInfo::module().name()`.
pub fn name_in_file(&self) -> Option<&str> {
self.symbol.as_ref().map(|symbol| &*symbol.name)
}
/// Returns the "kind" of this symbol.
///
/// The kind of a symbol in the context of auto-import is
/// determined on a best effort basis. It may be imprecise
/// in some cases, e.g., reporting a module as a variable.
pub fn kind(&self) -> SymbolKind {
self.symbol
.as_ref()
.map(|symbol| symbol.kind)
.unwrap_or(SymbolKind::Module)
}
/// Returns the module this symbol is exported from.
pub fn module(&self) -> Module<'db> {
self.module
}
/// Returns the `File` corresponding to the module.
///
/// This is always equivalent to
/// `AllSymbolInfo::module().file().unwrap()`.
pub fn file(&self) -> File {
self.file
}
} }
#[cfg(test)] #[cfg(test)]
@ -162,25 +233,31 @@ ABCDEFGHIJKLMNOP = 'https://api.example.com'
return "No symbols found".to_string(); return "No symbols found".to_string();
} }
self.render_diagnostics(symbols.into_iter().map(AllSymbolDiagnostic::new)) self.render_diagnostics(symbols.into_iter().map(|symbol_info| AllSymbolDiagnostic {
db: &self.db,
symbol_info,
}))
} }
} }
struct AllSymbolDiagnostic<'db> { struct AllSymbolDiagnostic<'db> {
db: &'db dyn Db,
symbol_info: AllSymbolInfo<'db>, symbol_info: AllSymbolInfo<'db>,
} }
impl<'db> AllSymbolDiagnostic<'db> {
fn new(symbol_info: AllSymbolInfo<'db>) -> Self {
Self { symbol_info }
}
}
impl IntoDiagnostic for AllSymbolDiagnostic<'_> { impl IntoDiagnostic for AllSymbolDiagnostic<'_> {
fn into_diagnostic(self) -> Diagnostic { fn into_diagnostic(self) -> Diagnostic {
let symbol_kind_str = self.symbol_info.symbol.kind.to_string(); let symbol_kind_str = self.symbol_info.kind().to_string();
let info_text = format!("{} {}", symbol_kind_str, self.symbol_info.symbol.name); let info_text = format!(
"{} {}",
symbol_kind_str,
self.symbol_info.name_in_file().unwrap_or_else(|| self
.symbol_info
.module()
.name(self.db)
.as_str())
);
let sub = SubDiagnostic::new(SubDiagnosticSeverity::Info, info_text); let sub = SubDiagnostic::new(SubDiagnosticSeverity::Info, info_text);
@ -189,9 +266,12 @@ ABCDEFGHIJKLMNOP = 'https://api.example.com'
Severity::Info, Severity::Info,
"AllSymbolInfo".to_string(), "AllSymbolInfo".to_string(),
); );
main.annotate(Annotation::primary(
Span::from(self.symbol_info.file).with_range(self.symbol_info.symbol.name_range), let mut span = Span::from(self.symbol_info.file());
)); if let Some(ref symbol) = self.symbol_info.symbol {
span = span.with_range(symbol.name_range);
}
main.annotate(Annotation::primary(span));
main.sub(sub); main.sub(sub);
main main

View File

@ -5,7 +5,8 @@ use ruff_diagnostics::Edit;
use ruff_text_size::TextRange; use ruff_text_size::TextRange;
use ty_project::Db; use ty_project::Db;
use ty_python_semantic::create_suppression_fix; use ty_python_semantic::create_suppression_fix;
use ty_python_semantic::types::UNRESOLVED_REFERENCE; use ty_python_semantic::lint::LintId;
use ty_python_semantic::types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE};
/// A `QuickFix` Code Action /// A `QuickFix` Code Action
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -28,12 +29,17 @@ pub fn code_actions(
let mut actions = Vec::new(); let mut actions = Vec::new();
if lint_id.name() == UNRESOLVED_REFERENCE.name() // Suggest imports for unresolved references (often ideal)
// TODO: suggest qualifying with an already imported symbol
let is_unresolved_reference =
lint_id == LintId::of(&UNRESOLVED_REFERENCE) || lint_id == LintId::of(&UNDEFINED_REVEAL);
if is_unresolved_reference
&& let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range) && let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range)
{ {
actions.extend(import_quick_fix); actions.extend(import_quick_fix);
} }
// Suggest just suppressing the lint (always a valid option, but never ideal)
actions.push(QuickFix { actions.push(QuickFix {
title: format!("Ignore '{}' for this line", lint_id.name()), title: format!("Ignore '{}' for this line", lint_id.name()),
edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(), edits: create_suppression_fix(db, file, lint_id, diagnostic_range).into_edits(),
@ -80,7 +86,10 @@ mod tests {
use ruff_diagnostics::Fix; use ruff_diagnostics::Fix;
use ruff_text_size::{TextRange, TextSize}; use ruff_text_size::{TextRange, TextSize};
use ty_project::ProjectMetadata; use ty_project::ProjectMetadata;
use ty_python_semantic::{lint::LintMetadata, types::UNRESOLVED_REFERENCE}; use ty_python_semantic::{
lint::LintMetadata,
types::{UNDEFINED_REVEAL, UNRESOLVED_REFERENCE},
};
#[test] #[test]
fn add_ignore() { fn add_ignore() {
@ -435,6 +444,40 @@ mod tests {
"#); "#);
} }
#[test]
fn undefined_reveal_type() {
let test = CodeActionTest::with_source(
r#"
<START>reveal_type<END>(1)
"#,
);
assert_snapshot!(test.code_actions(&UNDEFINED_REVEAL), @r"
info[code-action]: import typing.reveal_type
--> main.py:2:13
|
2 | reveal_type(1)
| ^^^^^^^^^^^
|
help: This is a preferred code action
1 + from typing import reveal_type
2 |
3 | reveal_type(1)
4 |
info[code-action]: Ignore 'undefined-reveal' for this line
--> main.py:2:13
|
2 | reveal_type(1)
| ^^^^^^^^^^^
|
1 |
- reveal_type(1)
2 + reveal_type(1) # ty:ignore[undefined-reveal]
3 |
");
}
pub(super) struct CodeActionTest { pub(super) struct CodeActionTest {
pub(super) db: ty_project::TestDb, pub(super) db: ty_project::TestDb,
pub(super) file: File, pub(super) file: File,

File diff suppressed because it is too large

View File

@ -236,4 +236,52 @@ def test():
assert_snapshot!(test.document_highlights(), @"No highlights found"); assert_snapshot!(test.document_highlights(), @"No highlights found");
} }
// TODO: Should only highlight the last use and the last declaration
#[test]
fn redeclarations() {
let test = CursorTest::builder()
.source(
"main.py",
r#"
a: str = "test"
a: int = 10
print(a<CURSOR>)
"#,
)
.build();
assert_snapshot!(test.document_highlights(), @r#"
info[document_highlights]: Highlight 1 (Write)
--> main.py:2:1
|
2 | a: str = "test"
| ^
3 |
4 | a: int = 10
|
info[document_highlights]: Highlight 2 (Write)
--> main.py:4:1
|
2 | a: str = "test"
3 |
4 | a: int = 10
| ^
5 |
6 | print(a)
|
info[document_highlights]: Highlight 3 (Read)
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
|
"#);
}
} }

View File

@ -1906,4 +1906,259 @@ func<CURSOR>_alias()
| |
"); ");
} }
#[test]
fn references_submodule_import_from_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>pkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// TODO(submodule-imports): this should light up both instances of `subpkg`
assert_snapshot!(test.references(), @r"
info[references]: Reference 1
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = subpkg
| ^^^^^^
|
");
}
#[test]
fn references_submodule_import_from_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg.submod import val
x = subpkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// TODO(submodule-imports): this should light up both instances of `subpkg`
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_submodule_import_from_wrong_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>mod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// No references is actually correct (or it should only see itself)
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_submodule_import_from_wrong_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.sub<CURSOR>mod import val
x = submod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// No references is actually correct (or it should only see itself)
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_submodule_import_from_confusing_shadowed_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg import subpkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// No references is actually correct (or it should only see itself)
assert_snapshot!(test.references(), @"No references found");
}
#[test]
fn references_submodule_import_from_confusing_real_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import sub<CURSOR>pkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
assert_snapshot!(test.references(), @r"
info[references]: Reference 1
--> mypackage/__init__.py:2:21
|
2 | from .subpkg import subpkg
| ^^^^^^
3 |
4 | x = subpkg
|
info[references]: Reference 2
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^^^^
|
info[references]: Reference 3
--> mypackage/subpkg/__init__.py:2:1
|
2 | subpkg: int = 10
| ^^^^^^
|
");
}
#[test]
fn references_submodule_import_from_confusing_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import subpkg
x = sub<CURSOR>pkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// TODO: this should also highlight the RHS subpkg in the import
assert_snapshot!(test.references(), @r"
info[references]: Reference 1
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^^^^
|
");
}
// TODO: Should only return references to the last declaration
#[test]
fn declarations() {
let test = CursorTest::builder()
.source(
"main.py",
r#"
a: str = "test"
a: int = 10
print(a<CURSOR>)
"#,
)
.build();
assert_snapshot!(test.references(), @r#"
info[references]: Reference 1
--> main.py:2:1
|
2 | a: str = "test"
| ^
3 |
4 | a: int = 10
|
info[references]: Reference 2
--> main.py:4:1
|
2 | a: str = "test"
3 |
4 | a: int = 10
| ^
5 |
6 | print(a)
|
info[references]: Reference 3
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
|
"#);
}
} }

View File

@ -73,19 +73,29 @@ pub(crate) enum GotoTarget<'a> {
/// ``` /// ```
ImportModuleAlias { ImportModuleAlias {
alias: &'a ast::Alias, alias: &'a ast::Alias,
asname: &'a ast::Identifier,
},
/// In an import statement, the name under which the symbol is exported
/// in the imported file.
///
/// ```py
/// from foo import bar as baz
/// ^^^
/// ```
ImportExportedName {
alias: &'a ast::Alias,
import_from: &'a ast::StmtImportFrom,
}, },
/// Import alias in from import statement /// Import alias in from import statement
/// ```py /// ```py
/// from foo import bar as baz /// from foo import bar as baz
/// ^^^ /// ^^^
/// from foo import bar as baz
/// ^^^
/// ``` /// ```
ImportSymbolAlias { ImportSymbolAlias {
alias: &'a ast::Alias, alias: &'a ast::Alias,
range: TextRange, asname: &'a ast::Identifier,
import_from: &'a ast::StmtImportFrom,
}, },
/// Go to on the exception handler variable /// Go to on the exception handler variable
@ -290,8 +300,9 @@ impl GotoTarget<'_> {
GotoTarget::FunctionDef(function) => function.inferred_type(model), GotoTarget::FunctionDef(function) => function.inferred_type(model),
GotoTarget::ClassDef(class) => class.inferred_type(model), GotoTarget::ClassDef(class) => class.inferred_type(model),
GotoTarget::Parameter(parameter) => parameter.inferred_type(model), GotoTarget::Parameter(parameter) => parameter.inferred_type(model),
GotoTarget::ImportSymbolAlias { alias, .. } => alias.inferred_type(model), GotoTarget::ImportSymbolAlias { alias, .. }
GotoTarget::ImportModuleAlias { alias } => alias.inferred_type(model), | GotoTarget::ImportModuleAlias { alias, .. }
| GotoTarget::ImportExportedName { alias, .. } => alias.inferred_type(model),
GotoTarget::ExceptVariable(except) => except.inferred_type(model), GotoTarget::ExceptVariable(except) => except.inferred_type(model),
GotoTarget::KeywordArgument { keyword, .. } => keyword.value.inferred_type(model), GotoTarget::KeywordArgument { keyword, .. } => keyword.value.inferred_type(model),
// When asking the type of a callable, usually you want the callable itself? // When asking the type of a callable, usually you want the callable itself?
@ -378,7 +389,9 @@ impl GotoTarget<'_> {
alias_resolution: ImportAliasResolution, alias_resolution: ImportAliasResolution,
) -> Option<Definitions<'db>> { ) -> Option<Definitions<'db>> {
let definitions = match self { let definitions = match self {
GotoTarget::Expression(expression) => definitions_for_expression(model, *expression), GotoTarget::Expression(expression) => {
definitions_for_expression(model, *expression, alias_resolution)
}
// For already-defined symbols, they are their own definitions // For already-defined symbols, they are their own definitions
GotoTarget::FunctionDef(function) => Some(vec![ResolvedDefinition::Definition( GotoTarget::FunctionDef(function) => Some(vec![ResolvedDefinition::Definition(
function.definition(model), function.definition(model),
@ -393,14 +406,14 @@ impl GotoTarget<'_> {
)]), )]),
// For import aliases (offset within 'y' or 'z' in "from x import y as z") // For import aliases (offset within 'y' or 'z' in "from x import y as z")
GotoTarget::ImportSymbolAlias { GotoTarget::ImportSymbolAlias { asname, .. } => Some(definitions_for_name(
alias, import_from, .. model,
} => { asname.as_str(),
if let Some(asname) = alias.asname.as_ref() AnyNodeRef::from(*asname),
&& alias_resolution == ImportAliasResolution::PreserveAliases alias_resolution,
{ )),
Some(definitions_for_name(model, asname.as_str(), asname.into()))
} else { GotoTarget::ImportExportedName { alias, import_from } => {
let symbol_name = alias.name.as_str(); let symbol_name = alias.name.as_str();
Some(definitions_for_imported_symbol( Some(definitions_for_imported_symbol(
model, model,
@ -409,7 +422,6 @@ impl GotoTarget<'_> {
alias_resolution, alias_resolution,
)) ))
} }
}
GotoTarget::ImportModuleComponent { GotoTarget::ImportModuleComponent {
module_name, module_name,
@ -423,15 +435,12 @@ impl GotoTarget<'_> {
} }
// Handle import aliases (offset within 'z' in "import x.y as z") // Handle import aliases (offset within 'z' in "import x.y as z")
GotoTarget::ImportModuleAlias { alias } => { GotoTarget::ImportModuleAlias { asname, .. } => Some(definitions_for_name(
if let Some(asname) = alias.asname.as_ref() model,
&& alias_resolution == ImportAliasResolution::PreserveAliases asname.as_str(),
{ AnyNodeRef::from(*asname),
Some(definitions_for_name(model, asname.as_str(), asname.into())) alias_resolution,
} else { )),
definitions_for_module(model, Some(alias.name.as_str()), 0)
}
}
// Handle keyword arguments in call expressions // Handle keyword arguments in call expressions
GotoTarget::KeywordArgument { GotoTarget::KeywordArgument {
@ -454,12 +463,22 @@ impl GotoTarget<'_> {
// because they're not expressions // because they're not expressions
GotoTarget::PatternMatchRest(pattern_mapping) => { GotoTarget::PatternMatchRest(pattern_mapping) => {
pattern_mapping.rest.as_ref().map(|name| { pattern_mapping.rest.as_ref().map(|name| {
definitions_for_name(model, name.as_str(), AnyNodeRef::Identifier(name)) definitions_for_name(
model,
name.as_str(),
AnyNodeRef::Identifier(name),
alias_resolution,
)
}) })
} }
GotoTarget::PatternMatchAsName(pattern_as) => pattern_as.name.as_ref().map(|name| { GotoTarget::PatternMatchAsName(pattern_as) => pattern_as.name.as_ref().map(|name| {
definitions_for_name(model, name.as_str(), AnyNodeRef::Identifier(name)) definitions_for_name(
model,
name.as_str(),
AnyNodeRef::Identifier(name),
alias_resolution,
)
}), }),
GotoTarget::PatternKeywordArgument(pattern_keyword) => { GotoTarget::PatternKeywordArgument(pattern_keyword) => {
@ -468,12 +487,18 @@ impl GotoTarget<'_> {
model, model,
name.as_str(), name.as_str(),
AnyNodeRef::Identifier(name), AnyNodeRef::Identifier(name),
alias_resolution,
)) ))
} }
GotoTarget::PatternMatchStarName(pattern_star) => { GotoTarget::PatternMatchStarName(pattern_star) => {
pattern_star.name.as_ref().map(|name| { pattern_star.name.as_ref().map(|name| {
definitions_for_name(model, name.as_str(), AnyNodeRef::Identifier(name)) definitions_for_name(
model,
name.as_str(),
AnyNodeRef::Identifier(name),
alias_resolution,
)
}) })
} }
@ -481,9 +506,18 @@ impl GotoTarget<'_> {
// //
// Prefer the function impl over the callable so that its docstrings win if defined. // Prefer the function impl over the callable so that its docstrings win if defined.
GotoTarget::Call { callable, call } => { GotoTarget::Call { callable, call } => {
let mut definitions = definitions_for_callable(model, call); let mut definitions = Vec::new();
// We prefer the specific overload for hover, go-to-def etc. However,
// `definitions_for_callable` always resolves import aliases. That's why we
// skip it in cases import alias resolution is turned of (rename, highlight references).
if alias_resolution == ImportAliasResolution::ResolveAliases {
definitions.extend(definitions_for_callable(model, call));
}
let expr_definitions = let expr_definitions =
definitions_for_expression(model, *callable).unwrap_or_default(); definitions_for_expression(model, *callable, alias_resolution)
.unwrap_or_default();
definitions.extend(expr_definitions); definitions.extend(expr_definitions);
if definitions.is_empty() { if definitions.is_empty() {
@ -517,7 +551,7 @@ impl GotoTarget<'_> {
let subexpr = covering_node(subast.syntax().into(), *subrange) let subexpr = covering_node(subast.syntax().into(), *subrange)
.node() .node()
.as_expr_ref()?; .as_expr_ref()?;
definitions_for_expression(&submodel, subexpr) definitions_for_expression(&submodel, subexpr, alias_resolution)
} }
// nonlocal and global are essentially loads, but again they're statements, // nonlocal and global are essentially loads, but again they're statements,
@ -527,6 +561,7 @@ impl GotoTarget<'_> {
model, model,
identifier.as_str(), identifier.as_str(),
AnyNodeRef::Identifier(identifier), AnyNodeRef::Identifier(identifier),
alias_resolution,
)) ))
} }
@ -537,6 +572,7 @@ impl GotoTarget<'_> {
model, model,
name.as_str(), name.as_str(),
AnyNodeRef::Identifier(name), AnyNodeRef::Identifier(name),
alias_resolution,
)) ))
} }
@ -546,6 +582,7 @@ impl GotoTarget<'_> {
model, model,
name.as_str(), name.as_str(),
AnyNodeRef::Identifier(name), AnyNodeRef::Identifier(name),
alias_resolution,
)) ))
} }
@ -555,6 +592,7 @@ impl GotoTarget<'_> {
model, model,
name.as_str(), name.as_str(),
AnyNodeRef::Identifier(name), AnyNodeRef::Identifier(name),
alias_resolution,
)) ))
} }
}; };
@ -580,13 +618,10 @@ impl GotoTarget<'_> {
GotoTarget::FunctionDef(function) => Some(Cow::Borrowed(function.name.as_str())), GotoTarget::FunctionDef(function) => Some(Cow::Borrowed(function.name.as_str())),
GotoTarget::ClassDef(class) => Some(Cow::Borrowed(class.name.as_str())), GotoTarget::ClassDef(class) => Some(Cow::Borrowed(class.name.as_str())),
GotoTarget::Parameter(parameter) => Some(Cow::Borrowed(parameter.name.as_str())), GotoTarget::Parameter(parameter) => Some(Cow::Borrowed(parameter.name.as_str())),
GotoTarget::ImportSymbolAlias { alias, .. } => { GotoTarget::ImportSymbolAlias { asname, .. } => Some(Cow::Borrowed(asname.as_str())),
if let Some(asname) = &alias.asname { GotoTarget::ImportExportedName { alias, .. } => {
Some(Cow::Borrowed(asname.as_str()))
} else {
Some(Cow::Borrowed(alias.name.as_str())) Some(Cow::Borrowed(alias.name.as_str()))
} }
}
GotoTarget::ImportModuleComponent { GotoTarget::ImportModuleComponent {
module_name, module_name,
component_index, component_index,
@ -599,13 +634,7 @@ impl GotoTarget<'_> {
Some(Cow::Borrowed(module_name)) Some(Cow::Borrowed(module_name))
} }
} }
GotoTarget::ImportModuleAlias { alias } => { GotoTarget::ImportModuleAlias { asname, .. } => Some(Cow::Borrowed(asname.as_str())),
if let Some(asname) = &alias.asname {
Some(Cow::Borrowed(asname.as_str()))
} else {
Some(Cow::Borrowed(alias.name.as_str()))
}
}
GotoTarget::ExceptVariable(except) => { GotoTarget::ExceptVariable(except) => {
Some(Cow::Borrowed(except.name.as_ref()?.as_str())) Some(Cow::Borrowed(except.name.as_ref()?.as_str()))
} }
@ -667,7 +696,7 @@ impl GotoTarget<'_> {
// Is the offset within the alias name (asname) part? // Is the offset within the alias name (asname) part?
if let Some(asname) = &alias.asname { if let Some(asname) = &alias.asname {
if asname.range.contains_inclusive(offset) { if asname.range.contains_inclusive(offset) {
return Some(GotoTarget::ImportModuleAlias { alias }); return Some(GotoTarget::ImportModuleAlias { alias, asname });
} }
} }
@ -699,21 +728,13 @@ impl GotoTarget<'_> {
// Is the offset within the alias name (asname) part? // Is the offset within the alias name (asname) part?
if let Some(asname) = &alias.asname { if let Some(asname) = &alias.asname {
if asname.range.contains_inclusive(offset) { if asname.range.contains_inclusive(offset) {
return Some(GotoTarget::ImportSymbolAlias { return Some(GotoTarget::ImportSymbolAlias { alias, asname });
alias,
range: asname.range,
import_from,
});
} }
} }
// Is the offset in the original name part? // Is the offset in the original name part?
if alias.name.range.contains_inclusive(offset) { if alias.name.range.contains_inclusive(offset) {
return Some(GotoTarget::ImportSymbolAlias { return Some(GotoTarget::ImportExportedName { alias, import_from });
alias,
range: alias.name.range,
import_from,
});
} }
None None
@ -893,12 +914,13 @@ impl Ranged for GotoTarget<'_> {
GotoTarget::FunctionDef(function) => function.name.range, GotoTarget::FunctionDef(function) => function.name.range,
GotoTarget::ClassDef(class) => class.name.range, GotoTarget::ClassDef(class) => class.name.range,
GotoTarget::Parameter(parameter) => parameter.name.range, GotoTarget::Parameter(parameter) => parameter.name.range,
GotoTarget::ImportSymbolAlias { range, .. } => *range, GotoTarget::ImportSymbolAlias { asname, .. } => asname.range,
Self::ImportExportedName { alias, .. } => alias.name.range,
GotoTarget::ImportModuleComponent { GotoTarget::ImportModuleComponent {
component_range, .. component_range, ..
} => *component_range, } => *component_range,
GotoTarget::StringAnnotationSubexpr { subrange, .. } => *subrange, GotoTarget::StringAnnotationSubexpr { subrange, .. } => *subrange,
GotoTarget::ImportModuleAlias { alias } => alias.asname.as_ref().unwrap().range, GotoTarget::ImportModuleAlias { asname, .. } => asname.range,
GotoTarget::ExceptVariable(except) => except.name.as_ref().unwrap().range, GotoTarget::ExceptVariable(except) => except.name.as_ref().unwrap().range,
GotoTarget::KeywordArgument { keyword, .. } => keyword.arg.as_ref().unwrap().range, GotoTarget::KeywordArgument { keyword, .. } => keyword.arg.as_ref().unwrap().range,
GotoTarget::PatternMatchRest(rest) => rest.rest.as_ref().unwrap().range, GotoTarget::PatternMatchRest(rest) => rest.rest.as_ref().unwrap().range,
@ -955,12 +977,14 @@ fn convert_resolved_definitions_to_targets<'db>(
fn definitions_for_expression<'db>( fn definitions_for_expression<'db>(
model: &SemanticModel<'db>, model: &SemanticModel<'db>,
expression: ruff_python_ast::ExprRef<'_>, expression: ruff_python_ast::ExprRef<'_>,
alias_resolution: ImportAliasResolution,
) -> Option<Vec<ResolvedDefinition<'db>>> { ) -> Option<Vec<ResolvedDefinition<'db>>> {
match expression { match expression {
ast::ExprRef::Name(name) => Some(definitions_for_name( ast::ExprRef::Name(name) => Some(definitions_for_name(
model, model,
name.id.as_str(), name.id.as_str(),
expression.into(), expression.into(),
alias_resolution,
)), )),
ast::ExprRef::Attribute(attribute) => Some(ty_python_semantic::definitions_for_attribute( ast::ExprRef::Attribute(attribute) => Some(ty_python_semantic::definitions_for_attribute(
model, attribute, model, attribute,

View File

@ -2602,6 +2602,378 @@ def ab(a: int, *, c: int): ...
"); ");
} }
#[test]
fn goto_declaration_submodule_import_from_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>pkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// TODO(submodule-imports): this should only highlight `subpkg` in the import statement
// This happens because DefinitionKind::ImportFromSubmodule claims the entire ImportFrom node,
// which is correct but unhelpful. Unfortunately even if it only claimed the LHS identifier it
// would highlight `subpkg.submod` which is strictly better but still isn't what we want.
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/__init__.py:2:1
|
2 | from .subpkg.submod import val
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
3 |
4 | x = subpkg
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = subpkg
| ^^^^^^
|
");
}
#[test]
fn goto_declaration_submodule_import_from_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg.submod import val
x = subpkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// TODO(submodule-imports): I don't *think* this is what we want..?
// It's a bit confusing because this symbol is essentially the LHS *and* RHS of
// `subpkg = mypackage.subpkg`. As in, it's both defining a local `subpkg` and
// loading the module `mypackage.subpkg`, so, it's understandable to get confused!
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/subpkg/__init__.py:1:1
|
|
info: Source
--> mypackage/__init__.py:2:7
|
2 | from .subpkg.submod import val
| ^^^^^^
3 |
4 | x = subpkg
|
");
}
#[test]
fn goto_declaration_submodule_import_from_wrong_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>mod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// No result is correct!
assert_snapshot!(test.goto_declaration(), @"No goto target found");
}
#[test]
fn goto_declaration_submodule_import_from_wrong_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.sub<CURSOR>mod import val
x = submod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// Going to the submod module is correct!
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/subpkg/submod.py:1:1
|
1 |
| ^
2 | val: int = 0
|
info: Source
--> mypackage/__init__.py:2:14
|
2 | from .subpkg.submod import val
| ^^^^^^
3 |
4 | x = submod
|
");
}
#[test]
fn goto_declaration_submodule_import_from_confusing_shadowed_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg import subpkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// Going to the subpkg module is correct!
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/subpkg/__init__.py:1:1
|
1 |
| ^
2 | subpkg: int = 10
|
info: Source
--> mypackage/__init__.py:2:7
|
2 | from .subpkg import subpkg
| ^^^^^^
3 |
4 | x = subpkg
|
");
}
#[test]
fn goto_declaration_submodule_import_from_confusing_real_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import sub<CURSOR>pkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// Going to the subpkg `int` is correct!
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/subpkg/__init__.py:2:1
|
2 | subpkg: int = 10
| ^^^^^^
|
info: Source
--> mypackage/__init__.py:2:21
|
2 | from .subpkg import subpkg
| ^^^^^^
3 |
4 | x = subpkg
|
");
}
#[test]
fn goto_declaration_submodule_import_from_confusing_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import subpkg
x = sub<CURSOR>pkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// TODO(submodule-imports): Ok this one is FASCINATING and it's kinda right but confusing!
//
// So there's 3 relevant definitions here:
//
// * `subpkg: int = 10` in the other file is in fact the original definition
//
// * the LHS `subpkg` in the import is an instance of `subpkg = ...`
// because it's a `DefinitionKind::ImportFromSubmodule`.
// This is the span that covers the entire import.
//
// * the RHS `subpkg` in the import is a second instance of `subpkg = ...`
// that *immediately* overwrites the `ImportFromSubmodule`'s definition
// This span seemingly doesn't appear at all!? Is it getting hidden by the LHS span?
assert_snapshot!(test.goto_declaration(), @r"
info[goto-declaration]: Declaration
--> mypackage/__init__.py:2:1
|
2 | from .subpkg import subpkg
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
3 |
4 | x = subpkg
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^^^^
|
info[goto-declaration]: Declaration
--> mypackage/subpkg/__init__.py:2:1
|
2 | subpkg: int = 10
| ^^^^^^
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^^^^
|
");
}
// TODO: Should only return `a: int`
#[test]
fn redeclarations() {
let test = CursorTest::builder()
.source(
"main.py",
r#"
a: str = "test"
a: int = 10
print(a<CURSOR>)
a: bool = True
"#,
)
.build();
assert_snapshot!(test.goto_declaration(), @r#"
info[goto-declaration]: Declaration
--> main.py:2:1
|
2 | a: str = "test"
| ^
3 |
4 | a: int = 10
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
info[goto-declaration]: Declaration
--> main.py:4:1
|
2 | a: str = "test"
3 |
4 | a: int = 10
| ^
5 |
6 | print(a)
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
info[goto-declaration]: Declaration
--> main.py:8:1
|
6 | print(a)
7 |
8 | a: bool = True
| ^
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
"#);
}
impl CursorTest { impl CursorTest {
fn goto_declaration(&self) -> String { fn goto_declaration(&self) -> String {
let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset) let Some(targets) = goto_declaration(&self.db, self.cursor.file, self.cursor.offset)

View File

@ -1714,6 +1714,86 @@ Traceb<CURSOR>ackType
assert_snapshot!(test.goto_definition(), @"No goto target found"); assert_snapshot!(test.goto_definition(), @"No goto target found");
} }
// TODO: Should only list `a: int`
#[test]
fn redeclarations() {
let test = CursorTest::builder()
.source(
"main.py",
r#"
a: str = "test"
a: int = 10
print(a<CURSOR>)
a: bool = True
"#,
)
.build();
assert_snapshot!(test.goto_definition(), @r#"
info[goto-definition]: Definition
--> main.py:2:1
|
2 | a: str = "test"
| ^
3 |
4 | a: int = 10
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
info[goto-definition]: Definition
--> main.py:4:1
|
2 | a: str = "test"
3 |
4 | a: int = 10
| ^
5 |
6 | print(a)
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
info[goto-definition]: Definition
--> main.py:8:1
|
6 | print(a)
7 |
8 | a: bool = True
| ^
|
info: Source
--> main.py:6:7
|
4 | a: int = 10
5 |
6 | print(a)
| ^
7 |
8 | a: bool = True
|
"#);
}
impl CursorTest { impl CursorTest {
fn goto_definition(&self) -> String { fn goto_definition(&self) -> String {
let Some(targets) = goto_definition(&self.db, self.cursor.file, self.cursor.offset) let Some(targets) = goto_definition(&self.db, self.cursor.file, self.cursor.offset)

View File

@ -1672,6 +1672,283 @@ def function():
"#); "#);
} }
#[test]
fn goto_type_submodule_import_from_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>pkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The module is the correct type definition
assert_snapshot!(test.goto_type_definition(), @r"
info[goto-type-definition]: Type definition
--> mypackage/subpkg/__init__.py:1:1
|
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = subpkg
| ^^^^^^
|
");
}
#[test]
fn goto_type_submodule_import_from_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg.submod import val
x = subpkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The module is the correct type definition
assert_snapshot!(test.goto_type_definition(), @r"
info[goto-type-definition]: Type definition
--> mypackage/subpkg/__init__.py:1:1
|
|
info: Source
--> mypackage/__init__.py:2:7
|
2 | from .subpkg.submod import val
| ^^^^^^
3 |
4 | x = subpkg
|
");
}
#[test]
fn goto_type_submodule_import_from_wrong_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>mod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// Unknown is correct, `submod` is not in scope
assert_snapshot!(test.goto_type_definition(), @r"
info[goto-type-definition]: Type definition
--> stdlib/ty_extensions.pyi:20:1
|
19 | # Types
20 | Unknown = object()
| ^^^^^^^
21 | AlwaysTruthy = object()
22 | AlwaysFalsy = object()
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = submod
| ^^^^^^
|
");
}
#[test]
fn goto_type_submodule_import_from_wrong_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.sub<CURSOR>mod import val
x = submod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The module is correct
assert_snapshot!(test.goto_type_definition(), @r"
info[goto-type-definition]: Type definition
--> mypackage/subpkg/submod.py:1:1
|
1 | /
2 | | val: int = 0
| |_____________^
|
info: Source
--> mypackage/__init__.py:2:14
|
2 | from .subpkg.submod import val
| ^^^^^^
3 |
4 | x = submod
|
");
}
#[test]
fn goto_type_submodule_import_from_confusing_shadowed_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg import subpkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// The module is correct
assert_snapshot!(test.goto_type_definition(), @r"
info[goto-type-definition]: Type definition
--> mypackage/subpkg/__init__.py:1:1
|
1 | /
2 | | subpkg: int = 10
| |_________________^
|
info: Source
--> mypackage/__init__.py:2:7
|
2 | from .subpkg import subpkg
| ^^^^^^
3 |
4 | x = subpkg
|
");
}
#[test]
fn goto_type_submodule_import_from_confusing_real_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import sub<CURSOR>pkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// `int` is correct
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> stdlib/builtins.pyi:348:7
|
347 | @disjoint_base
348 | class int:
| ^^^
349 | """int([x]) -> integer
350 | int(x, base=10) -> integer
|
info: Source
--> mypackage/__init__.py:2:21
|
2 | from .subpkg import subpkg
| ^^^^^^
3 |
4 | x = subpkg
|
"#);
}
#[test]
fn goto_type_submodule_import_from_confusing_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import subpkg
x = sub<CURSOR>pkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// `int` is correct
assert_snapshot!(test.goto_type_definition(), @r#"
info[goto-type-definition]: Type definition
--> stdlib/builtins.pyi:348:7
|
347 | @disjoint_base
348 | class int:
| ^^^
349 | """int([x]) -> integer
350 | int(x, base=10) -> integer
|
info: Source
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^^^^
|
"#);
}
impl CursorTest { impl CursorTest {
fn goto_type_definition(&self) -> String { fn goto_type_definition(&self) -> String {
let Some(targets) = let Some(targets) =

View File

@ -2143,15 +2143,13 @@ def function():
"#, "#,
); );
// TODO: This should just be `**AB@Alias2 (<variance>)`
// https://github.com/astral-sh/ty/issues/1581
assert_snapshot!(test.hover(), @r" assert_snapshot!(test.hover(), @r"
( (**AB@Alias2) -> tuple[AB@Alias2]
...
) -> tuple[typing.ParamSpec]
--------------------------------------------- ---------------------------------------------
```python ```python
( (**AB@Alias2) -> tuple[AB@Alias2]
...
) -> tuple[typing.ParamSpec]
``` ```
--------------------------------------------- ---------------------------------------------
info[hover]: Hovered content is info[hover]: Hovered content is
@ -2292,12 +2290,12 @@ def function():
"#, "#,
); );
// TODO: This should be `P@Alias (<variance>)` // TODO: Should this be contravariant instead?
assert_snapshot!(test.hover(), @r" assert_snapshot!(test.hover(), @r"
typing.ParamSpec P@Alias (bivariant)
--------------------------------------------- ---------------------------------------------
```python ```python
typing.ParamSpec P@Alias (bivariant)
``` ```
--------------------------------------------- ---------------------------------------------
info[hover]: Hovered content is info[hover]: Hovered content is
@ -3321,6 +3319,297 @@ def function():
"); ");
} }
#[test]
fn hover_submodule_import_from_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>pkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The module is correct
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = subpkg
| ^^^-^^
| | |
| | Cursor offset
| source
|
");
}
#[test]
fn hover_submodule_import_from_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg.submod import val
x = subpkg
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The module is correct
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:2:7
|
2 | from .subpkg.submod import val
| ^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | x = subpkg
|
");
}
#[test]
fn hover_submodule_import_from_wrong_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.submod import val
x = sub<CURSOR>mod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// Unknown is correct
assert_snapshot!(test.hover(), @r"
Unknown
---------------------------------------------
```python
Unknown
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:4:5
|
2 | from .subpkg.submod import val
3 |
4 | x = submod
| ^^^-^^
| | |
| | Cursor offset
| source
|
");
}
#[test]
fn hover_submodule_import_from_wrong_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg.sub<CURSOR>mod import val
x = submod
"#,
)
.source("mypackage/subpkg/__init__.py", r#""#)
.source(
"mypackage/subpkg/submod.py",
r#"
val: int = 0
"#,
)
.build();
// The submodule is correct
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg.submod'>
---------------------------------------------
```python
<module 'mypackage.subpkg.submod'>
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:2:14
|
2 | from .subpkg.submod import val
| ^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | x = submod
|
");
}
#[test]
fn hover_submodule_import_from_confusing_shadowed_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .sub<CURSOR>pkg import subpkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// The module is correct
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:2:7
|
2 | from .subpkg import subpkg
| ^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | x = subpkg
|
");
}
#[test]
fn hover_submodule_import_from_confusing_real_def() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import sub<CURSOR>pkg
x = subpkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// int is correct
assert_snapshot!(test.hover(), @r"
int
---------------------------------------------
```python
int
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:2:21
|
2 | from .subpkg import subpkg
| ^^^-^^
| | |
| | Cursor offset
| source
3 |
4 | x = subpkg
|
");
}
#[test]
fn hover_submodule_import_from_confusing_use() {
let test = CursorTest::builder()
.source(
"mypackage/__init__.py",
r#"
from .subpkg import subpkg
x = sub<CURSOR>pkg
"#,
)
.source(
"mypackage/subpkg/__init__.py",
r#"
subpkg: int = 10
"#,
)
.build();
// int is correct
assert_snapshot!(test.hover(), @r"
int
---------------------------------------------
```python
int
```
---------------------------------------------
info[hover]: Hovered content is
--> mypackage/__init__.py:4:5
|
2 | from .subpkg import subpkg
3 |
4 | x = subpkg
| ^^^-^^
| | |
| | Cursor offset
| source
|
");
}
impl CursorTest { impl CursorTest {
fn hover(&self) -> String { fn hover(&self) -> String {
use std::fmt::Write; use std::fmt::Write;

View File

@ -145,7 +145,7 @@ impl<'a> Importer<'a> {
members: &MembersInScope, members: &MembersInScope,
) -> ImportAction { ) -> ImportAction {
let request = request.avoid_conflicts(self.db, self.file, members); let request = request.avoid_conflicts(self.db, self.file, members);
let mut symbol_text: Box<str> = request.member.into(); let mut symbol_text: Box<str> = request.member.unwrap_or(request.module).into();
let Some(response) = self.find(&request, members.at) else { let Some(response) = self.find(&request, members.at) else {
let insertion = if let Some(future) = self.find_last_future_import(members.at) { let insertion = if let Some(future) = self.find_last_future_import(members.at) {
Insertion::end_of_statement(future.stmt, self.source, self.stylist) Insertion::end_of_statement(future.stmt, self.source, self.stylist)
@ -157,14 +157,27 @@ impl<'a> Importer<'a> {
Insertion::start_of_file(self.parsed.suite(), self.source, self.stylist, range) Insertion::start_of_file(self.parsed.suite(), self.source, self.stylist, range)
}; };
let import = insertion.into_edit(&request.to_string()); let import = insertion.into_edit(&request.to_string());
if matches!(request.style, ImportStyle::Import) { if let Some(member) = request.member
symbol_text = format!("{}.{}", request.module, request.member).into(); && matches!(request.style, ImportStyle::Import)
{
symbol_text = format!("{}.{}", request.module, member).into();
} }
return ImportAction { return ImportAction {
import: Some(import), import: Some(import),
symbol_text, symbol_text,
}; };
}; };
// When we just have a request to import a module (and not
// any members from that module), then the only way we can be
// here is if we found a pre-existing import that definitively
// satisfies the request. So we're done.
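// (For example, a request for `collections` when the file already
// contains an `import collections`; see the `import_module_exists`
// test below.)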
let Some(member) = request.member else {
return ImportAction {
import: None,
symbol_text,
};
};
match response.kind { match response.kind {
ImportResponseKind::Unqualified { ast, alias } => { ImportResponseKind::Unqualified { ast, alias } => {
let member = alias.asname.as_ref().unwrap_or(&alias.name).as_str(); let member = alias.asname.as_ref().unwrap_or(&alias.name).as_str();
@ -189,13 +202,10 @@ impl<'a> Importer<'a> {
let import = if let Some(insertion) = let import = if let Some(insertion) =
Insertion::existing_import(response.import.stmt, self.tokens) Insertion::existing_import(response.import.stmt, self.tokens)
{ {
insertion.into_edit(request.member) insertion.into_edit(member)
} else { } else {
Insertion::end_of_statement(response.import.stmt, self.source, self.stylist) Insertion::end_of_statement(response.import.stmt, self.source, self.stylist)
.into_edit(&format!( .into_edit(&format!("from {} import {member}", request.module))
"from {} import {}",
request.module, request.member
))
}; };
ImportAction { ImportAction {
import: Some(import), import: Some(import),
@ -481,6 +491,17 @@ impl<'ast> AstImportKind<'ast> {
Some(ImportResponseKind::Qualified { ast, alias }) Some(ImportResponseKind::Qualified { ast, alias })
} }
AstImportKind::ImportFrom(ast) => { AstImportKind::ImportFrom(ast) => {
// If the request is for a module itself, then we
// assume that it can never be satisfied by a
// `from ... import ...` statement. For example, a
// request for `collections.abc` needs an
// `import collections.abc`. Now, there could be a
// `from collections import abc`, and we could
// plausibly consider that a match and return a
// symbol text of `abc`. But it's not clear if that's
// the right choice or not.
let member = request.member?;
if request.force_style && !matches!(request.style, ImportStyle::ImportFrom) { if request.force_style && !matches!(request.style, ImportStyle::ImportFrom) {
return None; return None;
} }
@ -492,9 +513,7 @@ impl<'ast> AstImportKind<'ast> {
let kind = ast let kind = ast
.names .names
.iter() .iter()
.find(|alias| { .find(|alias| alias.name.as_str() == "*" || alias.name.as_str() == member)
alias.name.as_str() == "*" || alias.name.as_str() == request.member
})
.map(|alias| ImportResponseKind::Unqualified { ast, alias }) .map(|alias| ImportResponseKind::Unqualified { ast, alias })
.unwrap_or_else(|| ImportResponseKind::Partial(ast)); .unwrap_or_else(|| ImportResponseKind::Partial(ast));
Some(kind) Some(kind)
@ -510,7 +529,10 @@ pub(crate) struct ImportRequest<'a> {
/// `foo`, in `from foo import bar`). /// `foo`, in `from foo import bar`).
module: &'a str, module: &'a str,
/// The member to import (e.g., `bar`, in `from foo import bar`). /// The member to import (e.g., `bar`, in `from foo import bar`).
member: &'a str, ///
/// When `member` is absent, then this request reflects an import
/// of the module itself. i.e., `import module`.
member: Option<&'a str>,
/// The preferred style to use when importing the symbol (e.g., /// The preferred style to use when importing the symbol (e.g.,
/// `import foo` or `from foo import bar`). /// `import foo` or `from foo import bar`).
/// ///
@ -532,7 +554,7 @@ impl<'a> ImportRequest<'a> {
pub(crate) fn import(module: &'a str, member: &'a str) -> Self { pub(crate) fn import(module: &'a str, member: &'a str) -> Self {
Self { Self {
module, module,
member, member: Some(member),
style: ImportStyle::Import, style: ImportStyle::Import,
force_style: false, force_style: false,
} }
@ -545,12 +567,26 @@ impl<'a> ImportRequest<'a> {
pub(crate) fn import_from(module: &'a str, member: &'a str) -> Self { pub(crate) fn import_from(module: &'a str, member: &'a str) -> Self {
Self { Self {
module, module,
member, member: Some(member),
style: ImportStyle::ImportFrom, style: ImportStyle::ImportFrom,
force_style: false, force_style: false,
} }
} }
/// Create a new [`ImportRequest`] for bringing the given module
/// into scope.
///
/// This is for just importing the module itself, always via an
/// `import` statement.
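///
/// For example, `ImportRequest::module("collections")` is satisfied by an
/// existing `import collections` (or by inserting one), and the resulting
/// symbol text at the use site is `collections` (see the `import_module_*`
/// tests below).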
pub(crate) fn module(module: &'a str) -> Self {
Self {
module,
member: None,
style: ImportStyle::Import,
force_style: false,
}
}
/// Causes this request to become a command. This will force the /// Causes this request to become a command. This will force the
/// requested import style, even if another style would be more /// requested import style, even if another style would be more
/// appropriate generally. /// appropriate generally.
@ -565,7 +601,13 @@ impl<'a> ImportRequest<'a> {
/// of an import conflict are minimized (although not always reduced /// of an import conflict are minimized (although not always reduced
/// to zero). /// to zero).
fn avoid_conflicts(self, db: &dyn Db, importing_file: File, members: &MembersInScope) -> Self { fn avoid_conflicts(self, db: &dyn Db, importing_file: File, members: &MembersInScope) -> Self {
match (members.map.get(self.module), members.map.get(self.member)) { let Some(member) = self.member else {
return Self {
style: ImportStyle::Import,
..self
};
};
match (members.map.get(self.module), members.map.get(member)) {
// Neither symbol exists, so we can just proceed as // Neither symbol exists, so we can just proceed as
// normal. // normal.
(None, None) => self, (None, None) => self,
@ -630,7 +672,10 @@ impl std::fmt::Display for ImportRequest<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self.style { match self.style {
ImportStyle::Import => write!(f, "import {}", self.module), ImportStyle::Import => write!(f, "import {}", self.module),
ImportStyle::ImportFrom => write!(f, "from {} import {}", self.module, self.member), ImportStyle::ImportFrom => match self.member {
None => write!(f, "import {}", self.module),
Some(member) => write!(f, "from {} import {member}", self.module),
},
} }
} }
} }
@ -843,6 +888,10 @@ mod tests {
self.add(ImportRequest::import_from(module, member)) self.add(ImportRequest::import_from(module, member))
} }
fn module(&self, module: &str) -> String {
self.add(ImportRequest::module(module))
}
fn add(&self, request: ImportRequest<'_>) -> String { fn add(&self, request: ImportRequest<'_>) -> String {
let node = covering_node( let node = covering_node(
self.cursor.parsed.syntax().into(), self.cursor.parsed.syntax().into(),
@ -2156,4 +2205,73 @@ except ImportError:
(bar.MAGIC) (bar.MAGIC)
"); ");
} }
#[test]
fn import_module_blank() {
let test = cursor_test(
"\
<CURSOR>
",
);
assert_snapshot!(
test.module("collections"), @r"
import collections
collections
");
}
#[test]
fn import_module_exists() {
let test = cursor_test(
"\
import collections
<CURSOR>
",
);
assert_snapshot!(
test.module("collections"), @r"
import collections
collections
");
}
#[test]
fn import_module_from_exists() {
let test = cursor_test(
"\
from collections import defaultdict
<CURSOR>
",
);
assert_snapshot!(
test.module("collections"), @r"
import collections
from collections import defaultdict
collections
");
}
// This test is working as intended. That is,
// `abc` is already in scope, so requesting an
// import for `collections.abc` could feasibly
// reuse the import and rewrite the symbol text
// to just `abc`. But for now it seems better
// to respect what has been written and add the
// `import collections.abc`. This behavior could
// plausibly be changed.
#[test]
fn import_module_from_via_member_exists() {
let test = cursor_test(
"\
from collections import abc
<CURSOR>
",
);
assert_snapshot!(
test.module("collections.abc"), @r"
import collections.abc
from collections import abc
collections.abc
");
}
} }

View File

@ -19,11 +19,22 @@ pub struct InlayHint {
} }
impl InlayHint { impl InlayHint {
fn variable_type(expr: &Expr, ty: Type, db: &dyn Db, allow_edits: bool) -> Self { fn variable_type(
expr: &Expr,
rhs: &Expr,
ty: Type,
db: &dyn Db,
allow_edits: bool,
) -> Option<Self> {
let position = expr.range().end(); let position = expr.range().end();
// Render the type to a string, and get subspans for all the types that make it up // Render the type to a string, and get subspans for all the types that make it up
let details = ty.display(db).to_string_parts(); let details = ty.display(db).to_string_parts();
// Filter out repetitive hints like `x: T = T()`
if call_matches_name(rhs, &details.label) {
return None;
}
// Ok so the idea here is that we potentially have a random soup of spans here, // Ok so the idea here is that we potentially have a random soup of spans here,
// and each byte of the string can have at most one target associated with it. // and each byte of the string can have at most one target associated with it.
// Thankfully, they were generally pushed in print order, with the inner smaller types // Thankfully, they were generally pushed in print order, with the inner smaller types
@ -73,12 +84,12 @@ impl InlayHint {
vec![] vec![]
}; };
Self { Some(Self {
position, position,
kind: InlayHintKind::Type, kind: InlayHintKind::Type,
label: InlayHintLabel { parts: label_parts }, label: InlayHintLabel { parts: label_parts },
text_edits, text_edits,
} })
} }
fn call_argument_name( fn call_argument_name(
@ -250,7 +261,7 @@ struct InlayHintVisitor<'a, 'db> {
db: &'db dyn Db, db: &'db dyn Db,
model: SemanticModel<'db>, model: SemanticModel<'db>,
hints: Vec<InlayHint>, hints: Vec<InlayHint>,
in_assignment: bool, assignment_rhs: Option<&'a Expr>,
range: TextRange, range: TextRange,
settings: &'a InlayHintSettings, settings: &'a InlayHintSettings,
in_no_edits_allowed: bool, in_no_edits_allowed: bool,
@ -262,22 +273,22 @@ impl<'a, 'db> InlayHintVisitor<'a, 'db> {
db, db,
model: SemanticModel::new(db, file), model: SemanticModel::new(db, file),
hints: Vec::new(), hints: Vec::new(),
in_assignment: false, assignment_rhs: None,
range, range,
settings, settings,
in_no_edits_allowed: false, in_no_edits_allowed: false,
} }
} }
fn add_type_hint(&mut self, expr: &Expr, ty: Type<'db>, allow_edits: bool) { fn add_type_hint(&mut self, expr: &Expr, rhs: &Expr, ty: Type<'db>, allow_edits: bool) {
if !self.settings.variable_types { if !self.settings.variable_types {
return; return;
} }
let inlay_hint = InlayHint::variable_type(expr, ty, self.db, allow_edits); if let Some(inlay_hint) = InlayHint::variable_type(expr, rhs, ty, self.db, allow_edits) {
self.hints.push(inlay_hint); self.hints.push(inlay_hint);
} }
}
fn add_call_argument_name( fn add_call_argument_name(
&mut self, &mut self,
@ -299,8 +310,8 @@ impl<'a, 'db> InlayHintVisitor<'a, 'db> {
} }
} }
impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> { impl<'a> SourceOrderVisitor<'a> for InlayHintVisitor<'a, '_> {
fn enter_node(&mut self, node: AnyNodeRef<'_>) -> TraversalSignal { fn enter_node(&mut self, node: AnyNodeRef<'a>) -> TraversalSignal {
if self.range.intersect(node.range()).is_some() { if self.range.intersect(node.range()).is_some() {
TraversalSignal::Traverse TraversalSignal::Traverse
} else { } else {
@ -308,7 +319,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> {
} }
} }
fn visit_stmt(&mut self, stmt: &Stmt) { fn visit_stmt(&mut self, stmt: &'a Stmt) {
let node = AnyNodeRef::from(stmt); let node = AnyNodeRef::from(stmt);
if !self.enter_node(node).is_traverse() { if !self.enter_node(node).is_traverse() {
@ -317,7 +328,9 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> {
match stmt { match stmt {
Stmt::Assign(assign) => { Stmt::Assign(assign) => {
self.in_assignment = !type_hint_is_excessive_for_expr(&assign.value); if !type_hint_is_excessive_for_expr(&assign.value) {
self.assignment_rhs = Some(&*assign.value);
}
if !annotations_are_valid_syntax(assign) { if !annotations_are_valid_syntax(assign) {
self.in_no_edits_allowed = true; self.in_no_edits_allowed = true;
} }
@ -325,7 +338,7 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> {
self.visit_expr(target); self.visit_expr(target);
} }
self.in_no_edits_allowed = false; self.in_no_edits_allowed = false;
self.in_assignment = false; self.assignment_rhs = None;
self.visit_expr(&assign.value); self.visit_expr(&assign.value);
@ -344,22 +357,22 @@ impl SourceOrderVisitor<'_> for InlayHintVisitor<'_, '_> {
source_order::walk_stmt(self, stmt); source_order::walk_stmt(self, stmt);
} }
fn visit_expr(&mut self, expr: &'_ Expr) { fn visit_expr(&mut self, expr: &'a Expr) {
match expr { match expr {
Expr::Name(name) => { Expr::Name(name) => {
if self.in_assignment { if let Some(rhs) = self.assignment_rhs {
if name.ctx.is_store() { if name.ctx.is_store() {
let ty = expr.inferred_type(&self.model); let ty = expr.inferred_type(&self.model);
self.add_type_hint(expr, ty, !self.in_no_edits_allowed); self.add_type_hint(expr, rhs, ty, !self.in_no_edits_allowed);
} }
} }
source_order::walk_expr(self, expr); source_order::walk_expr(self, expr);
} }
Expr::Attribute(attribute) => { Expr::Attribute(attribute) => {
if self.in_assignment { if let Some(rhs) = self.assignment_rhs {
if attribute.ctx.is_store() { if attribute.ctx.is_store() {
let ty = expr.inferred_type(&self.model); let ty = expr.inferred_type(&self.model);
self.add_type_hint(expr, ty, !self.in_no_edits_allowed); self.add_type_hint(expr, rhs, ty, !self.in_no_edits_allowed);
} }
} }
source_order::walk_expr(self, expr); source_order::walk_expr(self, expr);
@ -416,6 +429,26 @@ fn arg_matches_name(arg_or_keyword: &ArgOrKeyword, name: &str) -> bool {
} }
} }
/// Given a function call, check if the expression is the "same name"
/// as the function being called.
///
/// This allows us to filter out repetitive inlay hints like `x: T = T(...)`,
/// while still allowing non-trivial ones like `x: T[U] = T()`.
fn call_matches_name(expr: &Expr, name: &str) -> bool {
// Only care about function calls
let Expr::Call(call) = expr else {
return false;
};
match &*call.func {
// `x: T = T()` is a match
Expr::Name(expr_name) => expr_name.id.as_str() == name,
// `x: T = a.T()` is a match
Expr::Attribute(expr_attribute) => expr_attribute.attr.as_str() == name,
_ => false,
}
}
/// Given an expression that's the RHS of an assignment, would it be excessive to /// Given an expression that's the RHS of an assignment, would it be excessive to
/// emit an inlay type hint for the variable assigned to it? /// emit an inlay type hint for the variable assigned to it?
/// ///
@ -1829,35 +1862,16 @@ mod tests {
", ",
); );
assert_snapshot!(test.inlay_hints(), @r#" assert_snapshot!(test.inlay_hints(), @r"
class A: class A:
def __init__(self, y): def __init__(self, y):
self.x[: int] = int(1) self.x = int(1)
self.y[: Unknown] = y self.y[: Unknown] = y
a[: A] = A([y=]2) a = A([y=]2)
a.y[: int] = int(3) a.y = int(3)
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:348:7
|
347 | @disjoint_base
348 | class int:
| ^^^
349 | """int([x]) -> integer
350 | int(x, base=10) -> integer
|
info: Source
--> main2.py:4:18
|
2 | class A:
3 | def __init__(self, y):
4 | self.x[: int] = int(1)
| ^^^
5 | self.y[: Unknown] = y
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> stdlib/ty_extensions.pyi:20:1 --> stdlib/ty_extensions.pyi:20:1
| |
@ -1871,29 +1885,11 @@ mod tests {
--> main2.py:5:18 --> main2.py:5:18
| |
3 | def __init__(self, y): 3 | def __init__(self, y):
4 | self.x[: int] = int(1) 4 | self.x = int(1)
5 | self.y[: Unknown] = y 5 | self.y[: Unknown] = y
| ^^^^^^^ | ^^^^^^^
6 | 6 |
7 | a[: A] = A([y=]2) 7 | a = A([y=]2)
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:7
|
2 | class A:
| ^
3 | def __init__(self, y):
4 | self.x = int(1)
|
info: Source
--> main2.py:7:5
|
5 | self.y[: Unknown] = y
6 |
7 | a[: A] = A([y=]2)
| ^
8 | a.y[: int] = int(3)
| |
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
@ -1906,30 +1902,13 @@ mod tests {
5 | self.y = y 5 | self.y = y
| |
info: Source info: Source
--> main2.py:7:13 --> main2.py:7:8
| |
5 | self.y[: Unknown] = y 5 | self.y[: Unknown] = y
6 | 6 |
7 | a[: A] = A([y=]2) 7 | a = A([y=]2)
| ^ | ^
8 | a.y[: int] = int(3) 8 | a.y = int(3)
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:348:7
|
347 | @disjoint_base
348 | class int:
| ^^^
349 | """int([x]) -> integer
350 | int(x, base=10) -> integer
|
info: Source
--> main2.py:8:7
|
7 | a[: A] = A([y=]2)
8 | a.y[: int] = int(3)
| ^^^
| |
--------------------------------------------- ---------------------------------------------
@ -1938,12 +1917,12 @@ mod tests {
class A: class A:
def __init__(self, y): def __init__(self, y):
self.x: int = int(1) self.x = int(1)
self.y: Unknown = y self.y: Unknown = y
a: A = A(2) a = A(2)
a.y: int = int(3) a.y = int(3)
"#); ");
} }
#[test] #[test]
@ -2937,31 +2916,12 @@ mod tests {
def __init__(self): def __init__(self):
self.x: int = 1 self.x: int = 1
x[: MyClass] = MyClass() x = MyClass()
y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
a[: MyClass], b[: MyClass] = MyClass(), MyClass() a[: MyClass], b[: MyClass] = MyClass(), MyClass()
c[: MyClass], d[: MyClass] = (MyClass(), MyClass()) c[: MyClass], d[: MyClass] = (MyClass(), MyClass())
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:7
|
2 | class MyClass:
| ^^^^^^^
3 | def __init__(self):
4 | self.x: int = 1
|
info: Source
--> main2.py:6:5
|
4 | self.x: int = 1
5 |
6 | x[: MyClass] = MyClass()
| ^^^^^^^
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:2695:7 --> stdlib/builtins.pyi:2695:7
| |
@ -2973,7 +2933,7 @@ mod tests {
info: Source info: Source
--> main2.py:7:5 --> main2.py:7:5
| |
6 | x[: MyClass] = MyClass() 6 | x = MyClass()
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
| ^^^^^ | ^^^^^
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
@ -2991,7 +2951,7 @@ mod tests {
info: Source info: Source
--> main2.py:7:11 --> main2.py:7:11
| |
6 | x[: MyClass] = MyClass() 6 | x = MyClass()
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
| ^^^^^^^ | ^^^^^^^
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
@ -3009,7 +2969,7 @@ mod tests {
info: Source info: Source
--> main2.py:7:20 --> main2.py:7:20
| |
6 | x[: MyClass] = MyClass() 6 | x = MyClass()
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
| ^^^^^^^ | ^^^^^^^
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
@ -3027,7 +2987,7 @@ mod tests {
info: Source info: Source
--> main2.py:8:5 --> main2.py:8:5
| |
6 | x[: MyClass] = MyClass() 6 | x = MyClass()
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
| ^^^^^^^ | ^^^^^^^
@ -3045,7 +3005,7 @@ mod tests {
info: Source info: Source
--> main2.py:8:19 --> main2.py:8:19
| |
6 | x[: MyClass] = MyClass() 6 | x = MyClass()
7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass()) 7 | y[: tuple[MyClass, MyClass]] = (MyClass(), MyClass())
8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass() 8 | a[: MyClass], b[: MyClass] = MyClass(), MyClass()
| ^^^^^^^ | ^^^^^^^
@ -3094,7 +3054,7 @@ mod tests {
def __init__(self): def __init__(self):
self.x: int = 1 self.x: int = 1
x: MyClass = MyClass() x = MyClass()
y: tuple[MyClass, MyClass] = (MyClass(), MyClass()) y: tuple[MyClass, MyClass] = (MyClass(), MyClass())
a, b = MyClass(), MyClass() a, b = MyClass(), MyClass()
c, d = (MyClass(), MyClass()) c, d = (MyClass(), MyClass())
@ -4097,31 +4057,11 @@ mod tests {
def __init__(self): def __init__(self):
self.x: int = 1 self.x: int = 1
self.y: int = 2 self.y: int = 2
val[: MyClass] = MyClass() val = MyClass()
foo(val.x) foo(val.x)
foo([x=]val.y) foo([x=]val.y)
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:7
|
2 | def foo(x: int): pass
3 | class MyClass:
| ^^^^^^^
4 | def __init__(self):
5 | self.x: int = 1
|
info: Source
--> main2.py:7:7
|
5 | self.x: int = 1
6 | self.y: int = 2
7 | val[: MyClass] = MyClass()
| ^^^^^^^
8 |
9 | foo(val.x)
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:9 --> main.py:2:9
| |
@ -4137,20 +4077,6 @@ mod tests {
10 | foo([x=]val.y) 10 | foo([x=]val.y)
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
def foo(x: int): pass
class MyClass:
def __init__(self):
self.x: int = 1
self.y: int = 2
val: MyClass = MyClass()
foo(val.x)
foo(val.y)
"); ");
} }
@ -4176,31 +4102,11 @@ mod tests {
def __init__(self): def __init__(self):
self.x: int = 1 self.x: int = 1
self.y: int = 2 self.y: int = 2
x[: MyClass] = MyClass() x = MyClass()
foo(x.x) foo(x.x)
foo([x=]x.y) foo([x=]x.y)
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:7
|
2 | def foo(x: int): pass
3 | class MyClass:
| ^^^^^^^
4 | def __init__(self):
5 | self.x: int = 1
|
info: Source
--> main2.py:7:5
|
5 | self.x: int = 1
6 | self.y: int = 2
7 | x[: MyClass] = MyClass()
| ^^^^^^^
8 |
9 | foo(x.x)
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:9 --> main.py:2:9
| |
@ -4216,20 +4122,6 @@ mod tests {
10 | foo([x=]x.y) 10 | foo([x=]x.y)
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
def foo(x: int): pass
class MyClass:
def __init__(self):
self.x: int = 1
self.y: int = 2
x: MyClass = MyClass()
foo(x.x)
foo(x.y)
"); ");
} }
@ -4258,31 +4150,11 @@ mod tests {
return 1 return 1
def y() -> int: def y() -> int:
return 2 return 2
val[: MyClass] = MyClass() val = MyClass()
foo(val.x()) foo(val.x())
foo([x=]val.y()) foo([x=]val.y())
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:7
|
2 | def foo(x: int): pass
3 | class MyClass:
| ^^^^^^^
4 | def __init__(self):
5 | def x() -> int:
|
info: Source
--> main2.py:9:7
|
7 | def y() -> int:
8 | return 2
9 | val[: MyClass] = MyClass()
| ^^^^^^^
10 |
11 | foo(val.x())
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:9 --> main.py:2:9
| |
@ -4298,22 +4170,6 @@ mod tests {
12 | foo([x=]val.y()) 12 | foo([x=]val.y())
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
def foo(x: int): pass
class MyClass:
def __init__(self):
def x() -> int:
return 1
def y() -> int:
return 2
val: MyClass = MyClass()
foo(val.x())
foo(val.y())
"); ");
} }
@ -4346,31 +4202,11 @@ mod tests {
return 1 return 1
def y() -> List[int]: def y() -> List[int]:
return 2 return 2
val[: MyClass] = MyClass() val = MyClass()
foo(val.x()[0]) foo(val.x()[0])
foo([x=]val.y()[1]) foo([x=]val.y()[1])
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> main.py:5:7
|
4 | def foo(x: int): pass
5 | class MyClass:
| ^^^^^^^
6 | def __init__(self):
7 | def x() -> List[int]:
|
info: Source
--> main2.py:11:7
|
9 | def y() -> List[int]:
10 | return 2
11 | val[: MyClass] = MyClass()
| ^^^^^^^
12 |
13 | foo(val.x()[0])
|
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:4:9 --> main.py:4:9
| |
@ -4388,24 +4224,6 @@ mod tests {
14 | foo([x=]val.y()[1]) 14 | foo([x=]val.y()[1])
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
from typing import List
def foo(x: int): pass
class MyClass:
def __init__(self):
def x() -> List[int]:
return 1
def y() -> List[int]:
return 2
val: MyClass = MyClass()
foo(val.x()[0])
foo(val.y()[1])
"); ");
} }
@ -4697,7 +4515,7 @@ mod tests {
class Foo: class Foo:
def __init__(self, x: int): pass def __init__(self, x: int): pass
Foo([x=]1) Foo([x=]1)
f[: Foo] = Foo([x=]1) f = Foo([x=]1)
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:24 --> main.py:3:24
@ -4715,24 +4533,7 @@ mod tests {
3 | def __init__(self, x: int): pass 3 | def __init__(self, x: int): pass
4 | Foo([x=]1) 4 | Foo([x=]1)
| ^ | ^
5 | f[: Foo] = Foo([x=]1) 5 | f = Foo([x=]1)
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:7
|
2 | class Foo:
| ^^^
3 | def __init__(self, x: int): pass
4 | Foo(1)
|
info: Source
--> main2.py:5:5
|
3 | def __init__(self, x: int): pass
4 | Foo([x=]1)
5 | f[: Foo] = Foo([x=]1)
| ^^^
| |
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
@ -4745,22 +4546,13 @@ mod tests {
5 | f = Foo(1) 5 | f = Foo(1)
| |
info: Source info: Source
--> main2.py:5:17 --> main2.py:5:10
| |
3 | def __init__(self, x: int): pass 3 | def __init__(self, x: int): pass
4 | Foo([x=]1) 4 | Foo([x=]1)
5 | f[: Foo] = Foo([x=]1) 5 | f = Foo([x=]1)
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
class Foo:
def __init__(self, x: int): pass
Foo(1)
f: Foo = Foo(1)
"); ");
} }
@ -4778,7 +4570,7 @@ mod tests {
class Foo: class Foo:
def __new__(cls, x: int): pass def __new__(cls, x: int): pass
Foo([x=]1) Foo([x=]1)
f[: Foo] = Foo([x=]1) f = Foo([x=]1)
--------------------------------------------- ---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:22 --> main.py:3:22
@ -4796,24 +4588,7 @@ mod tests {
3 | def __new__(cls, x: int): pass 3 | def __new__(cls, x: int): pass
4 | Foo([x=]1) 4 | Foo([x=]1)
| ^ | ^
5 | f[: Foo] = Foo([x=]1) 5 | f = Foo([x=]1)
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:7
|
2 | class Foo:
| ^^^
3 | def __new__(cls, x: int): pass
4 | Foo(1)
|
info: Source
--> main2.py:5:5
|
3 | def __new__(cls, x: int): pass
4 | Foo([x=]1)
5 | f[: Foo] = Foo([x=]1)
| ^^^
| |
info[inlay-hint-location]: Inlay Hint Target info[inlay-hint-location]: Inlay Hint Target
@ -4826,22 +4601,13 @@ mod tests {
5 | f = Foo(1) 5 | f = Foo(1)
| |
info: Source info: Source
--> main2.py:5:17 --> main2.py:5:10
| |
3 | def __new__(cls, x: int): pass 3 | def __new__(cls, x: int): pass
4 | Foo([x=]1) 4 | Foo([x=]1)
5 | f[: Foo] = Foo([x=]1) 5 | f = Foo([x=]1)
| ^ | ^
| |
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
class Foo:
def __new__(cls, x: int): pass
Foo(1)
f: Foo = Foo(1)
"); ");
} }

View File

@ -37,6 +37,38 @@ pub enum ReferencesMode {
DocumentHighlights, DocumentHighlights,
} }
impl ReferencesMode {
pub(super) fn to_import_alias_resolution(self) -> ImportAliasResolution {
match self {
// Resolve import aliases for find references:
// ```py
// from warnings import deprecated as my_deprecated
//
// @my_deprecated
// def foo
// ```
//
// When finding references on `my_deprecated`, we want to find all usages of `deprecated` across the entire
// project.
Self::References | Self::ReferencesSkipDeclaration => {
ImportAliasResolution::ResolveAliases
}
// For rename, don't resolve import aliases.
//
// ```py
// from warnings import deprecated as my_deprecated
//
// @my_deprecated
// def foo
// ```
// When renaming `my_deprecated`, only rename the alias, but not the original definition in `warnings`.
Self::Rename | Self::RenameMultiFile | Self::DocumentHighlights => {
ImportAliasResolution::PreserveAliases
}
}
}
}
/// Find all references to a symbol at the given position. /// Find all references to a symbol at the given position.
/// Search for references across all files in the project. /// Search for references across all files in the project.
pub(crate) fn references( pub(crate) fn references(
@ -45,12 +77,9 @@ pub(crate) fn references(
goto_target: &GotoTarget, goto_target: &GotoTarget,
mode: ReferencesMode, mode: ReferencesMode,
) -> Option<Vec<ReferenceTarget>> { ) -> Option<Vec<ReferenceTarget>> {
// Get the definitions for the symbol at the cursor position
// When finding references, do not resolve any local aliases.
let model = SemanticModel::new(db, file); let model = SemanticModel::new(db, file);
let target_definitions = goto_target let target_definitions = goto_target
.get_definition_targets(&model, ImportAliasResolution::PreserveAliases)? .get_definition_targets(&model, mode.to_import_alias_resolution())?
.declaration_targets(db)?; .declaration_targets(db)?;
// Extract the target text from the goto target for fast comparison // Extract the target text from the goto target for fast comparison
@ -318,7 +347,7 @@ impl LocalReferencesFinder<'_> {
{ {
// Get the definitions for this goto target // Get the definitions for this goto target
if let Some(current_definitions) = goto_target if let Some(current_definitions) = goto_target
.get_definition_targets(self.model, ImportAliasResolution::PreserveAliases) .get_definition_targets(self.model, self.mode.to_import_alias_resolution())
.and_then(|definitions| definitions.declaration_targets(self.model.db())) .and_then(|definitions| definitions.declaration_targets(self.model.db()))
{ {
// Check if any of the current definitions match our target definitions // Check if any of the current definitions match our target definitions

File diff suppressed because it is too large Load Diff

View File

@ -259,7 +259,11 @@ impl<'db> SemanticTokenVisitor<'db> {
fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) { fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) {
// First try to classify the token based on its definition kind. // First try to classify the token based on its definition kind.
let definition = definition_for_name(self.model, name); let definition = definition_for_name(
self.model,
name,
ty_python_semantic::ImportAliasResolution::ResolveAliases,
);
if let Some(definition) = definition { if let Some(definition) = definition {
let name_str = name.id.as_str(); let name_str = name.id.as_str();

View File

@ -20,6 +20,7 @@ use ty_python_semantic::semantic_index::definition::Definition;
use ty_python_semantic::types::ide_support::{ use ty_python_semantic::types::ide_support::{
CallSignatureDetails, call_signature_details, find_active_signature_from_details, CallSignatureDetails, call_signature_details, find_active_signature_from_details,
}; };
use ty_python_semantic::types::{ParameterKind, Type};
// TODO: We may want to add special-case handling for calls to constructors // TODO: We may want to add special-case handling for calls to constructors
// so the class docstring is used in place of (or in addition to) any docstring
@ -27,25 +28,29 @@ use ty_python_semantic::types::ide_support::{
/// Information about a function parameter /// Information about a function parameter
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct ParameterDetails { pub struct ParameterDetails<'db> {
/// The parameter name (e.g., "param1") /// The parameter name (e.g., "param1")
pub name: String, pub name: String,
/// The parameter label in the signature (e.g., "param1: str") /// The parameter label in the signature (e.g., "param1: str")
pub label: String, pub label: String,
/// The annotated type of the parameter, if any
pub ty: Option<Type<'db>>,
/// Documentation specific to the parameter, typically extracted from the /// Documentation specific to the parameter, typically extracted from the
/// function's docstring /// function's docstring
pub documentation: Option<String>, pub documentation: Option<String>,
/// True if the parameter is positional-only.
pub is_positional_only: bool,
} }
/// Information about a function signature /// Information about a function signature
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct SignatureDetails { pub struct SignatureDetails<'db> {
/// Text representation of the full signature (including input parameters and return type). /// Text representation of the full signature (including input parameters and return type).
pub label: String, pub label: String,
/// Documentation for the signature, typically from the function's docstring. /// Documentation for the signature, typically from the function's docstring.
pub documentation: Option<Docstring>, pub documentation: Option<Docstring>,
/// Information about each of the parameters in left-to-right order. /// Information about each of the parameters in left-to-right order.
pub parameters: Vec<ParameterDetails>, pub parameters: Vec<ParameterDetails<'db>>,
/// Index of the parameter that corresponds to the argument where the /// Index of the parameter that corresponds to the argument where the
/// user's cursor is currently positioned. /// user's cursor is currently positioned.
pub active_parameter: Option<usize>, pub active_parameter: Option<usize>,
@ -53,18 +58,18 @@ pub struct SignatureDetails {
/// Signature help information for function calls /// Signature help information for function calls
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct SignatureHelpInfo { pub struct SignatureHelpInfo<'db> {
/// Information about each of the signatures for the function call. We /// Information about each of the signatures for the function call. We
/// need to handle multiple because of unions, overloads, and composite /// need to handle multiple because of unions, overloads, and composite
/// calls like constructors (which invoke both __new__ and __init__). /// calls like constructors (which invoke both __new__ and __init__).
pub signatures: Vec<SignatureDetails>, pub signatures: Vec<SignatureDetails<'db>>,
/// Index of the "active signature" which is the first signature where /// Index of the "active signature" which is the first signature where
/// all arguments that are currently present in the code map to parameters. /// all arguments that are currently present in the code map to parameters.
pub active_signature: Option<usize>, pub active_signature: Option<usize>,
} }
/// Signature help information for function calls at the given position /// Signature help information for function calls at the given position
pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo> { pub fn signature_help(db: &dyn Db, file: File, offset: TextSize) -> Option<SignatureHelpInfo<'_>> {
let parsed = parsed_module(db, file).load(db); let parsed = parsed_module(db, file).load(db);
// Get the call expression at the given position. // Get the call expression at the given position.
@ -166,11 +171,11 @@ fn get_argument_index(call_expr: &ast::ExprCall, offset: TextSize) -> usize {
} }
/// Create signature details from `CallSignatureDetails`. /// Create signature details from `CallSignatureDetails`.
fn create_signature_details_from_call_signature_details( fn create_signature_details_from_call_signature_details<'db>(
db: &dyn crate::Db, db: &dyn crate::Db,
details: &CallSignatureDetails, details: &CallSignatureDetails<'db>,
current_arg_index: usize, current_arg_index: usize,
) -> SignatureDetails { ) -> SignatureDetails<'db> {
let signature_label = details.label.clone(); let signature_label = details.label.clone();
let documentation = get_callable_documentation(db, details.definition); let documentation = get_callable_documentation(db, details.definition);
@ -200,6 +205,8 @@ fn create_signature_details_from_call_signature_details(
&signature_label, &signature_label,
documentation.as_ref(), documentation.as_ref(),
&details.parameter_names, &details.parameter_names,
&details.parameter_kinds,
&details.parameter_types,
); );
SignatureDetails { SignatureDetails {
label: signature_label, label: signature_label,
@ -218,12 +225,14 @@ fn get_callable_documentation(
} }
/// Create `ParameterDetails` objects from parameter label offsets. /// Create `ParameterDetails` objects from parameter label offsets.
fn create_parameters_from_offsets( fn create_parameters_from_offsets<'db>(
parameter_offsets: &[TextRange], parameter_offsets: &[TextRange],
signature_label: &str, signature_label: &str,
docstring: Option<&Docstring>, docstring: Option<&Docstring>,
parameter_names: &[String], parameter_names: &[String],
) -> Vec<ParameterDetails> { parameter_kinds: &[ParameterKind],
parameter_types: &[Option<Type<'db>>],
) -> Vec<ParameterDetails<'db>> {
// Extract parameter documentation from the function's docstring if available. // Extract parameter documentation from the function's docstring if available.
let param_docs = if let Some(docstring) = docstring { let param_docs = if let Some(docstring) = docstring {
docstring.parameter_documentation() docstring.parameter_documentation()
@ -245,11 +254,18 @@ fn create_parameters_from_offsets(
// Get the parameter name for documentation lookup. // Get the parameter name for documentation lookup.
let param_name = parameter_names.get(i).map(String::as_str).unwrap_or(""); let param_name = parameter_names.get(i).map(String::as_str).unwrap_or("");
let is_positional_only = matches!(
parameter_kinds.get(i),
Some(ParameterKind::PositionalOnly { .. })
);
let ty = parameter_types.get(i).copied().flatten();
ParameterDetails { ParameterDetails {
name: param_name.to_string(), name: param_name.to_string(),
label, label,
ty,
documentation: param_docs.get(param_name).cloned(), documentation: param_docs.get(param_name).cloned(),
is_positional_only,
} }
}) })
.collect() .collect()
@ -1173,7 +1189,7 @@ def ab(a: int, *, c: int):
} }
impl CursorTest { impl CursorTest {
fn signature_help(&self) -> Option<SignatureHelpInfo> { fn signature_help(&self) -> Option<SignatureHelpInfo<'_>> {
crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset) crate::signature_help::signature_help(&self.db, self.cursor.file, self.cursor.offset)
} }

File diff suppressed because it is too large Load Diff

View File

@ -37,14 +37,16 @@ class MDTestRunner:
mdtest_executable: Path | None mdtest_executable: Path | None
console: Console console: Console
filters: list[str] filters: list[str]
enable_external: bool
def __init__(self, filters: list[str] | None = None) -> None: def __init__(self, filters: list[str] | None, enable_external: bool) -> None:
self.mdtest_executable = None self.mdtest_executable = None
self.console = Console() self.console = Console()
self.filters = [ self.filters = [
f.removesuffix(".md").replace("/", "_").replace("-", "_") f.removesuffix(".md").replace("/", "_").replace("-", "_")
for f in (filters or []) for f in (filters or [])
] ]
self.enable_external = enable_external
def _run_cargo_test(self, *, message_format: Literal["human", "json"]) -> str: def _run_cargo_test(self, *, message_format: Literal["human", "json"]) -> str:
return subprocess.check_output( return subprocess.check_output(
@ -120,6 +122,7 @@ class MDTestRunner:
CLICOLOR_FORCE="1", CLICOLOR_FORCE="1",
INSTA_FORCE_PASS="1", INSTA_FORCE_PASS="1",
INSTA_OUTPUT="none", INSTA_OUTPUT="none",
MDTEST_EXTERNAL="1" if self.enable_external else "0",
), ),
capture_output=capture_output, capture_output=capture_output,
text=True, text=True,
@ -266,11 +269,19 @@ def main() -> None:
nargs="*", nargs="*",
help="Partial paths or mangled names, e.g., 'loops/for.md' or 'loops_for'", help="Partial paths or mangled names, e.g., 'loops/for.md' or 'loops_for'",
) )
parser.add_argument(
"--enable-external",
"-e",
action="store_true",
help="Enable tests with external dependencies",
)
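# Example invocation (illustrative; the exact script path may differ):
#
#   python mdtest.py --enable-external attrs
#
# Passing the flag sets MDTEST_EXTERNAL=1 for the spawned test process so
# that the markdown tests with external dependencies are included.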
args = parser.parse_args() args = parser.parse_args()
try: try:
runner = MDTestRunner(filters=args.filters) runner = MDTestRunner(
filters=args.filters, enable_external=args.enable_external
)
runner.watch() runner.watch()
except KeyboardInterrupt: except KeyboardInterrupt:
print() print()

View File

@ -0,0 +1,4 @@
from __future__ import annotations
class MyClass:
type: type = str

View File

@ -0,0 +1,6 @@
# This is a regression test for `store_expression_type`.
# ref: https://github.com/astral-sh/ty/issues/1688
x: int
type x[T] = x[T, U]

View File

@ -0,0 +1,6 @@
class C[T: (A, B)]:
def f(foo: T):
try:
pass
except foo:
pass

View File

@ -307,12 +307,10 @@ Using a `ParamSpec` in a `Callable` annotation:
from typing_extensions import Callable from typing_extensions import Callable
def _[**P1](c: Callable[P1, int]): def _[**P1](c: Callable[P1, int]):
# TODO: Should reveal `ParamSpecArgs` and `ParamSpecKwargs` reveal_type(P1.args) # revealed: P1@_.args
reveal_type(P1.args) # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs) reveal_type(P1.kwargs) # revealed: P1@_.kwargs
reveal_type(P1.kwargs) # revealed: @Todo(ParamSpecArgs / ParamSpecKwargs)
# TODO: Signature should be (**P1) -> int reveal_type(c) # revealed: (**P1@_) -> int
reveal_type(c) # revealed: (...) -> int
``` ```
And, using the legacy syntax: And, using the legacy syntax:
@ -322,9 +320,8 @@ from typing_extensions import ParamSpec
P2 = ParamSpec("P2") P2 = ParamSpec("P2")
# TODO: argument list should not be `...` (requires `ParamSpec` support)
def _(c: Callable[P2, int]): def _(c: Callable[P2, int]):
reveal_type(c) # revealed: (...) -> int reveal_type(c) # revealed: (**P2@_) -> int
``` ```
## Using `typing.Unpack` ## Using `typing.Unpack`

View File

@ -18,9 +18,8 @@ def f(*args: Unpack[Ts]) -> tuple[Unpack[Ts]]:
def g() -> TypeGuard[int]: ... def g() -> TypeGuard[int]: ...
def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.kwargs) -> R_co: def i(callback: Callable[Concatenate[int, P], R_co], *args: P.args, **kwargs: P.kwargs) -> R_co:
# TODO: Should reveal a type representing `P.args` and `P.kwargs` reveal_type(args) # revealed: P@i.args
reveal_type(args) # revealed: tuple[@Todo(ParamSpecArgs / ParamSpecKwargs), ...] reveal_type(kwargs) # revealed: P@i.kwargs
reveal_type(kwargs) # revealed: dict[str, @Todo(ParamSpecArgs / ParamSpecKwargs)]
return callback(42, *args, **kwargs) return callback(42, *args, **kwargs)
class Foo: class Foo:
@ -65,8 +64,9 @@ def _(
reveal_type(c) # revealed: Unknown reveal_type(c) # revealed: Unknown
reveal_type(d) # revealed: Unknown reveal_type(d) # revealed: Unknown
# error: [invalid-type-form] "Variable of type `ParamSpec` is not allowed in a type expression"
def foo(a_: e) -> None: def foo(a_: e) -> None:
reveal_type(a_) # revealed: @Todo(Support for `typing.ParamSpec`) reveal_type(a_) # revealed: Unknown
``` ```
## Inheritance ## Inheritance

View File

@ -925,7 +925,7 @@ def _(t: tuple[int, str] | tuple[int, str, int]) -> None:
f(*t) # error: [no-matching-overload] f(*t) # error: [no-matching-overload]
``` ```
## Filtering based on variaidic arguments ## Filtering based on variadic arguments
This is step 4 of the overload call evaluation algorithm which specifies that: This is step 4 of the overload call evaluation algorithm which specifies that:
@ -1469,6 +1469,79 @@ def _(arg: list[Any]):
reveal_type(f4(*arg)) # revealed: Unknown reveal_type(f4(*arg)) # revealed: Unknown
``` ```
### Variadic argument with generics
`overloaded.pyi`:
```pyi
from typing import Any, TypeVar, overload
T1 = TypeVar("T1")
T2 = TypeVar("T2")
T3 = TypeVar("T3")
@overload
def f1(x: T1, /) -> tuple[T1]: ...
@overload
def f1(x1: T1, x2: T2, /) -> tuple[T1, T2]: ...
@overload
def f1(x1: T1, x2: T2, x3: T3, /) -> tuple[T1, T2, T3]: ...
@overload
def f1(*args: Any) -> tuple[Any, ...]: ...
@overload
def f2(x1: T1) -> tuple[T1]: ...
@overload
def f2(x1: T1, x2: T2) -> tuple[T1, T2]: ...
@overload
def f2(*args: Any, **kwargs: Any) -> tuple[Any, ...]: ...
@overload
def f3(x: T1) -> tuple[T1]: ...
@overload
def f3(x1: T1, x2: T2) -> tuple[T1, T2]: ...
@overload
def f3(*args: Any) -> tuple[Any, ...]: ...
@overload
def f3(**kwargs: Any) -> dict[str, Any]: ...
```
```py
from overloaded import f1, f2, f3
from typing import Any
# These calls only match the last overload
reveal_type(f1()) # revealed: tuple[Any, ...]
reveal_type(f1(1, 2, 3, 4)) # revealed: tuple[Any, ...]
# These calls match multiple overloads, but step 5 filters out all the remaining overloads
# except the most specific one in terms of the number of arguments.
reveal_type(f1(1)) # revealed: tuple[Literal[1]]
reveal_type(f1(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
reveal_type(f1(1, 2, 3)) # revealed: tuple[Literal[1], Literal[2], Literal[3]]
def _(args1: list[int], args2: list[Any]):
reveal_type(f1(*args1)) # revealed: tuple[Any, ...]
reveal_type(f1(*args2)) # revealed: tuple[Any, ...]
reveal_type(f2()) # revealed: tuple[Any, ...]
reveal_type(f2(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
# TODO: Should be `tuple[Literal[1], Literal[2]]`
reveal_type(f2(x1=1, x2=2)) # revealed: Unknown
# TODO: Should be `tuple[Literal[2], Literal[1]]`
reveal_type(f2(x2=1, x1=2)) # revealed: Unknown
reveal_type(f2(1, 2, z=3)) # revealed: tuple[Any, ...]
reveal_type(f3(1, 2)) # revealed: tuple[Literal[1], Literal[2]]
reveal_type(f3(1, 2, 3)) # revealed: tuple[Any, ...]
# TODO: Should be `tuple[Literal[1], Literal[2]]`
reveal_type(f3(x1=1, x2=2)) # revealed: Unknown
reveal_type(f3(z=1)) # revealed: dict[str, Any]
# error: [no-matching-overload]
reveal_type(f3(1, 2, x=3)) # revealed: Unknown
```
### Non-participating fully-static parameter ### Non-participating fully-static parameter
Ref: <https://github.com/astral-sh/ty/issues/552#issuecomment-2969052173> Ref: <https://github.com/astral-sh/ty/issues/552#issuecomment-2969052173>

View File

@ -227,17 +227,56 @@ def _(literals_2: Literal[0, 1], b: bool, flag: bool):
literals_16 = 4 * literals_4 + literals_4 # Literal[0, 1, .., 15] literals_16 = 4 * literals_4 + literals_4 # Literal[0, 1, .., 15]
literals_64 = 4 * literals_16 + literals_4 # Literal[0, 1, .., 63] literals_64 = 4 * literals_16 + literals_4 # Literal[0, 1, .., 63]
literals_128 = 2 * literals_64 + literals_2 # Literal[0, 1, .., 127] literals_128 = 2 * literals_64 + literals_2 # Literal[0, 1, .., 127]
literals_256 = 2 * literals_128 + literals_2 # Literal[0, 1, .., 255]
# Going beyond the MAX_UNION_LITERALS limit (currently 200): # Going beyond the MAX_NON_RECURSIVE_UNION_LITERALS limit (currently 256):
literals_256 = 16 * literals_16 + literals_16 reveal_type(literals_256 if flag else 256) # revealed: int
reveal_type(literals_256) # revealed: int
# Going beyond the limit when another type is already part of the union # Going beyond the limit when another type is already part of the union
bool_and_literals_128 = b if flag else literals_128 # bool | Literal[0, 1, ..., 127] bool_and_literals_128 = b if flag else literals_128 # bool | Literal[0, 1, ..., 127]
literals_128_shifted = literals_128 + 128 # Literal[128, 129, ..., 255] literals_128_shifted = literals_128 + 128 # Literal[128, 129, ..., 255]
literals_256_shifted = literals_256 + 256 # Literal[256, 257, ..., 511]
# Now union the two: # Now union the two:
reveal_type(bool_and_literals_128 if flag else literals_128_shifted) # revealed: int two = bool_and_literals_128 if flag else literals_128_shifted
# revealed: bool | Literal[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255]
reveal_type(two)
reveal_type(two if flag else literals_256_shifted) # revealed: int
```
Recursively defined literal union types are widened earlier than non-recursively defined types for
faster convergence.
```py
class RecursiveAttr:
def __init__(self):
self.i = 0
def update(self):
self.i = self.i + 1
reveal_type(RecursiveAttr().i) # revealed: Unknown | int
# Here are some recursive but saturating examples. Because it's difficult to statically determine whether literal unions saturate or diverge,
# we widen them early, even though they may actually be convergent.
class RecursiveAttr2:
def __init__(self):
self.i = 0
def update(self):
self.i = (self.i + 1) % 9
reveal_type(RecursiveAttr2().i) # revealed: Unknown | Literal[0, 1, 2, 3, 4, 5, 6, 7, 8]
class RecursiveAttr3:
def __init__(self):
self.i = 0
def update(self):
self.i = (self.i + 1) % 10
# Going beyond the MAX_RECURSIVE_UNION_LITERALS limit:
reveal_type(RecursiveAttr3().i) # revealed: Unknown | int
``` ```
## Simplifying gradually-equivalent types ## Simplifying gradually-equivalent types

View File

@ -7,10 +7,11 @@
```py ```py
from typing_extensions import assert_type from typing_extensions import assert_type
def _(x: int): def _(x: int, y: bool):
assert_type(x, int) # fine assert_type(x, int) # fine
assert_type(x, str) # error: [type-assertion-failure] assert_type(x, str) # error: [type-assertion-failure]
assert_type(assert_type(x, int), int) assert_type(assert_type(x, int), int)
assert_type(y, int) # error: [type-assertion-failure]
``` ```
## Narrowing ## Narrowing

View File

@ -0,0 +1,4 @@
# mdtests with external dependencies
This directory contains mdtests that make use of external packages. See the mdtest `README.md` for
more information.
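Each test file in this directory declares the packages it needs in its `toml` header, for example
(mirroring the tests added alongside this README):
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["attrs==25.4.0"]
```
These tests are opt-in: the mdtest runner's `--enable-external` flag (which sets the
`MDTEST_EXTERNAL` environment variable) enables them.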

View File

@ -0,0 +1,78 @@
# attrs
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["attrs==25.4.0"]
```
## Basic class (`attr`)
```py
import attr
@attr.s
class User:
id: int = attr.ib()
name: str = attr.ib()
user = User(id=1, name="John Doe")
reveal_type(user.id) # revealed: int
reveal_type(user.name) # revealed: str
```
## Basic class (`define`)
```py
from attrs import define, field
@define
class User:
id: int = field()
internal_name: str = field(alias="name")
user = User(id=1, name="John Doe")
reveal_type(user.id) # revealed: int
reveal_type(user.internal_name) # revealed: str
```
## Usage of `field` parameters
```py
from attrs import define, field
@define
class Product:
id: int = field(init=False)
name: str = field()
price_cent: int = field(kw_only=True)
reveal_type(Product.__init__) # revealed: (self: Product, name: str, *, price_cent: int) -> None
```
## Dedicated support for the `default` decorator?
We currently do not support this:
```py
from attrs import define, field
@define
class Person:
id: int = field()
name: str = field()
# error: [call-non-callable] "Object of type `_MISSING_TYPE` is not callable"
@id.default
def _default_id(self) -> int:
raise NotImplementedError
# error: [missing-argument] "No argument provided for required parameter `id`"
person = Person(name="Alice")
reveal_type(person.id) # revealed: int
reveal_type(person.name) # revealed: str
```

View File

@ -0,0 +1,23 @@
# numpy
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["numpy==2.3.0"]
```
## Basic usage
```py
import numpy as np
xs = np.array([1, 2, 3])
reveal_type(xs) # revealed: ndarray[tuple[Any, ...], dtype[Any]]
xs = np.array([1.0, 2.0, 3.0], dtype=np.float64)
# TODO: should be `ndarray[tuple[Any, ...], dtype[float64]]`
reveal_type(xs) # revealed: ndarray[tuple[Any, ...], dtype[Unknown]]
```

View File

@ -0,0 +1,48 @@
# Pydantic
```toml
[environment]
python-version = "3.12"
python-platform = "linux"
[project]
dependencies = ["pydantic==2.12.2"]
```
## Basic model
```py
from pydantic import BaseModel
class User(BaseModel):
id: int
name: str
reveal_type(User.__init__) # revealed: (self: User, *, id: int, name: str) -> None
user = User(id=1, name="John Doe")
reveal_type(user.id) # revealed: int
reveal_type(user.name) # revealed: str
# error: [missing-argument] "No argument provided for required parameter `name`"
invalid_user = User(id=2)
```
## Usage of `Field`
```py
from pydantic import BaseModel, Field
class Product(BaseModel):
id: int = Field(init=False)
name: str = Field(..., kw_only=False, min_length=1)
internal_price_cent: int = Field(..., gt=0, alias="price_cent")
reveal_type(Product.__init__) # revealed: (self: Product, name: str = Any, *, price_cent: int = Any) -> None
product = Product("Laptop", price_cent=999_00)
reveal_type(product.id) # revealed: int
reveal_type(product.name) # revealed: str
reveal_type(product.internal_price_cent) # revealed: int
```
View File
@ -0,0 +1,27 @@
# pytest
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["pytest==9.0.1"]
```
## `pytest.fail`
Make sure that we recognize `pytest.fail` calls as terminal:
```py
import pytest
def some_runtime_condition() -> bool:
return True
def test_something():
if not some_runtime_condition():
pytest.fail("Runtime condition failed")
no_error_here_this_is_unreachable
```
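This works because `pytest.fail` is annotated as returning `NoReturn` in pytest's type hints, so a
call whose declared return type is `NoReturn` ends the enclosing branch. Below is a minimal,
self-contained sketch of the same pattern using a hypothetical helper (not one of the committed
tests):
```py
from typing import NoReturn
def bail(message: str) -> NoReturn:
    raise AssertionError(message)
def check(flag: bool) -> int:
    if not flag:
        # This call never returns, so the rest of this branch is unreachable,
        # mirroring the `pytest.fail` example above.
        bail("flag was not set")
    return 1
```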
View File
@ -0,0 +1,195 @@
# SQLAlchemy
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["SQLAlchemy==2.0.44"]
```
## ORM Model
This test makes sure that ty understands SQLAlchemy's `dataclass_transform` setup:
```py
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(DeclarativeBase):
pass
class User(Base):
__tablename__ = "user"
id: Mapped[int] = mapped_column(primary_key=True, init=False)
internal_name: Mapped[str] = mapped_column(alias="name")
user = User(name="John Doe")
reveal_type(user.id) # revealed: int
reveal_type(user.internal_name) # revealed: str
```
Unfortunately, SQLAlchemy overrides `__init__` and explicitly accepts all combinations of keyword
arguments. This is why we currently cannot flag invalid constructor calls:
```py
reveal_type(User.__init__) # revealed: def __init__(self, **kw: Any) -> Unknown
# TODO: this should ideally be an error
invalid_user = User(invalid_arg=42)
```
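As an aside (an illustrative sketch, not one of the committed tests), SQLAlchemy 2.0 also provides
the `MappedAsDataclass` mixin, which generates a dataclass-style `__init__` and therefore gives type
checkers a concrete constructor signature to check; the class names below are made up:
```py
from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column
class DataclassBase(MappedAsDataclass, DeclarativeBase):
    pass
class Account(DataclassBase):
    __tablename__ = "account"
    # `init=False` keeps the generated `__init__` from requiring the primary key.
    id: Mapped[int] = mapped_column(primary_key=True, init=False)
    name: Mapped[str] = mapped_column()
# The generated `__init__` accepts only the declared fields.
account = Account(name="Jane Doe")
```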
## Basic query example
First, set up a `Session`:
```py
from sqlalchemy import select, Integer, Text, Boolean
from sqlalchemy.orm import Session
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm import Mapped, mapped_column
from sqlalchemy import create_engine
engine = create_engine("sqlite:///example.db")
session = Session(engine)
```
And define a simple model:
```py
class Base(DeclarativeBase):
pass
class User(Base):
__tablename__ = "users"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
name: Mapped[str] = mapped_column(Text)
is_admin: Mapped[bool] = mapped_column(Boolean, default=False)
```
Finally, we can execute queries:
```py
stmt = select(User)
reveal_type(stmt) # revealed: Select[tuple[User]]
users = session.scalars(stmt).all()
reveal_type(users) # revealed: Sequence[User]
for row in session.execute(stmt):
reveal_type(row) # revealed: Row[tuple[User]]
stmt = select(User).where(User.name == "Alice")
alice1 = session.scalars(stmt).first()
reveal_type(alice1) # revealed: User | None
alice2 = session.scalar(stmt)
reveal_type(alice2) # revealed: User | None
result = session.execute(stmt)
row = result.one_or_none()
assert row is not None
(alice3,) = row._tuple()
reveal_type(alice3) # revealed: User
```
This also works with more complex queries:
```py
stmt = select(User).where(User.is_admin == True).order_by(User.name).limit(10)
admin_users = session.scalars(stmt).all()
reveal_type(admin_users) # revealed: Sequence[User]
```
We can also specify particular columns to select:
```py
stmt = select(User.id, User.name)
reveal_type(stmt) # revealed: Select[tuple[int, str]]
ids_and_names = session.execute(stmt).all()
reveal_type(ids_and_names) # revealed: Sequence[Row[tuple[int, str]]]
for row in session.execute(stmt):
reveal_type(row) # revealed: Row[tuple[int, str]]
for user_id, name in session.execute(stmt).tuples():
reveal_type(user_id) # revealed: int
reveal_type(name) # revealed: str
result = session.execute(stmt)
row = result.one_or_none()
assert row is not None
(user_id, name) = row._tuple()
reveal_type(user_id) # revealed: int
reveal_type(name) # revealed: str
stmt = select(User.id).where(User.name == "Alice")
reveal_type(stmt) # revealed: Select[tuple[int]]
alice_id = session.scalars(stmt).first()
reveal_type(alice_id) # revealed: int | None
alice_id = session.scalar(stmt)
reveal_type(alice_id) # revealed: int | None
```
Using the legacy `query` API also works:
```py
users_legacy = session.query(User).all()
reveal_type(users_legacy) # revealed: list[User]
query = session.query(User)
reveal_type(query) # revealed: Query[User]
reveal_type(query.all()) # revealed: list[User]
for row in query:
reveal_type(row) # revealed: User
```
And similarly when specifying particular columns:
```py
query = session.query(User.id, User.name)
reveal_type(query) # revealed: RowReturningQuery[tuple[int, str]]
reveal_type(query.all()) # revealed: list[Row[tuple[int, str]]]
for row in query:
reveal_type(row) # revealed: Row[tuple[int, str]]
```
## Async API
The async API is supported as well:
```py
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, Integer, Text
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
class Base(DeclarativeBase):
pass
class User(Base):
__tablename__ = "users"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
name: Mapped[str] = mapped_column(Text)
async def test_async(session: AsyncSession):
stmt = select(User).where(User.name == "Alice")
alice = await session.scalar(stmt)
reveal_type(alice) # revealed: User | None
stmt = select(User.id, User.name)
result = await session.execute(stmt)
for user_id, name in result.tuples():
reveal_type(user_id) # revealed: int
reveal_type(name) # revealed: str
```
View File
@ -0,0 +1,30 @@
# SQLModel
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["sqlmodel==0.0.27"]
```
## Basic model
```py
from sqlmodel import SQLModel
class User(SQLModel):
id: int
name: str
user = User(id=1, name="John Doe")
reveal_type(user.id) # revealed: int
reveal_type(user.name) # revealed: str
# TODO: this should not mention `__pydantic_self__` and should have proper parameters defined by the fields
reveal_type(User.__init__) # revealed: def __init__(__pydantic_self__, **data: Any) -> None
# TODO: this should be an error
User()
```
View File
@ -0,0 +1,27 @@
# Strawberry GraphQL
```toml
[environment]
python-version = "3.13"
python-platform = "linux"
[project]
dependencies = ["strawberry-graphql==0.283.3"]
```
## Basic model
```py
import strawberry
@strawberry.type
class User:
id: int
role: str = strawberry.field(default="user")
reveal_type(User.__init__) # revealed: (self: User, *, id: int, role: str = Any) -> None
user = User(id=1)
reveal_type(user.id) # revealed: int
reveal_type(user.role) # revealed: str
```
View File
@ -301,6 +301,7 @@ consistent with each other.
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
@ -308,6 +309,11 @@ class C(Generic[T]):
def __new__(cls, x: T) -> "C[T]": def __new__(cls, x: T) -> "C[T]":
return object.__new__(cls) return object.__new__(cls)
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -318,12 +324,18 @@ wrong_innards: C[int] = C("five")
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
class C(Generic[T]): class C(Generic[T]):
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -334,6 +346,7 @@ wrong_innards: C[int] = C("five")
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
@ -343,6 +356,11 @@ class C(Generic[T]):
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -353,6 +371,7 @@ wrong_innards: C[int] = C("five")
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
@ -362,6 +381,11 @@ class C(Generic[T]):
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -373,6 +397,11 @@ class D(Generic[T]):
def __init__(self, *args, **kwargs) -> None: ... def __init__(self, *args, **kwargs) -> None: ...
# revealed: ty_extensions.GenericContext[T@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(1)) # revealed: D[int] reveal_type(D(1)) # revealed: D[int]
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`" # error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
@ -386,6 +415,7 @@ to specialize the class.
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U") U = TypeVar("U")
@ -398,6 +428,11 @@ class C(Generic[T, U]):
class D(C[V, int]): class D(C[V, int]):
def __init__(self, x: V) -> None: ... def __init__(self, x: V) -> None: ...
# revealed: ty_extensions.GenericContext[V@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[V@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(1)) # revealed: D[int] reveal_type(D(1)) # revealed: D[int]
``` ```
@ -405,6 +440,7 @@ reveal_type(D(1)) # revealed: D[int]
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U") U = TypeVar("U")
@ -415,6 +451,11 @@ class C(Generic[T, U]):
class D(C[T, U]): class D(C[T, U]):
pass pass
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(C(1, "str")) # revealed: C[int, str] reveal_type(C(1, "str")) # revealed: C[int, str]
reveal_type(D(1, "str")) # revealed: D[int, str] reveal_type(D(1, "str")) # revealed: D[int, str]
``` ```
@ -425,6 +466,7 @@ This is a specific example of the above, since it was reported specifically by a
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U") U = TypeVar("U")
@ -432,6 +474,11 @@ U = TypeVar("U")
class D(dict[T, U]): class D(dict[T, U]):
pass pass
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(key=1)) # revealed: D[str, int] reveal_type(D(key=1)) # revealed: D[str, int]
``` ```
@ -443,12 +490,18 @@ context. But from the user's point of view, this is another example of the above
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U") U = TypeVar("U")
class C(tuple[T, U]): ... class C(tuple[T, U]): ...
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C((1, 2))) # revealed: C[int, int] reveal_type(C((1, 2))) # revealed: C[int, int]
``` ```
@ -480,6 +533,7 @@ def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: t
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
S = TypeVar("S") S = TypeVar("S")
T = TypeVar("T") T = TypeVar("T")
@ -487,6 +541,11 @@ T = TypeVar("T")
class C(Generic[T]): class C(Generic[T]):
def __init__(self, x: T, y: S) -> None: ... def __init__(self, x: T, y: S) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, S@__init__]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1, 1)) # revealed: C[int] reveal_type(C(1, 1)) # revealed: C[int]
reveal_type(C(1, "string")) # revealed: C[int] reveal_type(C(1, "string")) # revealed: C[int]
reveal_type(C(1, True)) # revealed: C[int] reveal_type(C(1, True)) # revealed: C[int]
@ -499,6 +558,7 @@ wrong_innards: C[int] = C("five", 1)
```py ```py
from typing_extensions import overload, Generic, TypeVar from typing_extensions import overload, Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U") U = TypeVar("U")
@ -514,6 +574,11 @@ class C(Generic[T]):
def __init__(self, x: int) -> None: ... def __init__(self, x: int) -> None: ...
def __init__(self, x: str | bytes | int) -> None: ... def __init__(self, x: str | bytes | int) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C("string")) # revealed: C[str] reveal_type(C("string")) # revealed: C[str]
reveal_type(C(b"bytes")) # revealed: C[bytes] reveal_type(C(b"bytes")) # revealed: C[bytes]
reveal_type(C(12)) # revealed: C[Unknown] reveal_type(C(12)) # revealed: C[Unknown]
@ -541,6 +606,11 @@ class D(Generic[T, U]):
def __init__(self, t: T, u: U) -> None: ... def __init__(self, t: T, u: U) -> None: ...
def __init__(self, *args) -> None: ... def __init__(self, *args) -> None: ...
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D("string")) # revealed: D[str, str] reveal_type(D("string")) # revealed: D[str, str]
reveal_type(D(1)) # revealed: D[str, int] reveal_type(D(1)) # revealed: D[str, int]
reveal_type(D(1, "string")) # revealed: D[int, str] reveal_type(D(1, "string")) # revealed: D[int, str]
@ -551,6 +621,7 @@ reveal_type(D(1, "string")) # revealed: D[int, str]
```py ```py
from dataclasses import dataclass from dataclasses import dataclass
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
@ -558,6 +629,11 @@ T = TypeVar("T")
class A(Generic[T]): class A(Generic[T]):
x: T x: T
# revealed: ty_extensions.GenericContext[T@A]
reveal_type(generic_context(A))
# revealed: ty_extensions.GenericContext[T@A]
reveal_type(generic_context(into_callable(A)))
reveal_type(A(x=1)) # revealed: A[int] reveal_type(A(x=1)) # revealed: A[int]
``` ```
@ -565,17 +641,28 @@ reveal_type(A(x=1)) # revealed: A[int]
```py ```py
from typing_extensions import Generic, TypeVar from typing_extensions import Generic, TypeVar
from ty_extensions import generic_context, into_callable
T = TypeVar("T") T = TypeVar("T")
U = TypeVar("U", default=T) U = TypeVar("U", default=T)
class C(Generic[T, U]): ... class C(Generic[T, U]): ...
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C()) # revealed: C[Unknown, Unknown] reveal_type(C()) # revealed: C[Unknown, Unknown]
class D(Generic[T, U]): class D(Generic[T, U]):
def __init__(self) -> None: ... def __init__(self) -> None: ...
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D()) # revealed: D[Unknown, Unknown] reveal_type(D()) # revealed: D[Unknown, Unknown]
``` ```
View File
@ -347,6 +347,138 @@ reveal_type(tuple_param("a", ("a", 1))) # revealed: tuple[Literal["a"], Literal
reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]] reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]]
``` ```
When a union parameter contains generic classes like `P[T] | Q[T]`, we can infer the typevar from
the actual argument even for non-final classes.
```py
from typing import TypeVar, Generic
T = TypeVar("T")
class P(Generic[T]):
x: T
class Q(Generic[T]):
x: T
def extract_t(x: P[T] | Q[T]) -> T:
raise NotImplementedError
reveal_type(extract_t(P[int]())) # revealed: int
reveal_type(extract_t(Q[str]())) # revealed: str
```
Passing anything else results in an error:
```py
# error: [invalid-argument-type]
reveal_type(extract_t([1, 2])) # revealed: Unknown
```
This also works when different union elements have different typevars:
```py
S = TypeVar("S")
def extract_both(x: P[T] | Q[S]) -> tuple[T, S]:
raise NotImplementedError
reveal_type(extract_both(P[int]())) # revealed: tuple[int, Unknown]
reveal_type(extract_both(Q[str]())) # revealed: tuple[Unknown, str]
```
Inference also works when passing subclasses of the generic classes in the union.
```py
class SubP(P[T]):
pass
class SubQ(Q[T]):
pass
reveal_type(extract_t(SubP[int]())) # revealed: int
reveal_type(extract_t(SubQ[str]())) # revealed: str
reveal_type(extract_both(SubP[int]())) # revealed: tuple[int, Unknown]
reveal_type(extract_both(SubQ[str]())) # revealed: tuple[Unknown, str]
```
When a type is a subclass of both `P` and `Q` with different specializations, we cannot infer a
single type for `T` in `extract_t`, because `P` and `Q` are invariant. However, we can still infer
both types in a call to `extract_both`:
```py
class PandQ(P[int], Q[str]):
pass
# TODO: Ideally, we would return `Unknown` here.
# error: [invalid-argument-type]
reveal_type(extract_t(PandQ())) # revealed: int | str
reveal_type(extract_both(PandQ())) # revealed: tuple[int, str]
```
When non-generic types are part of the union, we can still infer typevars for the remaining generic
types:
```py
def extract_optional_t(x: None | P[T]) -> T:
raise NotImplementedError
reveal_type(extract_optional_t(None)) # revealed: Unknown
reveal_type(extract_optional_t(P[int]())) # revealed: int
```
Passing anything else results in an error:
```py
# error: [invalid-argument-type]
reveal_type(extract_optional_t(Q[str]())) # revealed: Unknown
```
If the union contains both a parent and a child of a generic class, we ideally pick the union
element that is more precise:
```py
class Base(Generic[T]):
x: T
class Sub(Base[T]): ...
def f(t: Base[T] | Sub[T | None]) -> T:
raise NotImplementedError
reveal_type(f(Base[int]())) # revealed: int
# TODO: Should ideally be `str`
reveal_type(f(Sub[str | None]())) # revealed: str | None
```
If we have a case like the following, where only one of the union elements matches due to the
typevar bound, we do not emit a specialization error:
```py
from typing import TypeVar
I_int = TypeVar("I_int", bound=int)
S_str = TypeVar("S_str", bound=str)
class P(Generic[T]):
value: T
def f(t: P[I_int] | P[S_str]) -> tuple[I_int, S_str]:
raise NotImplementedError
reveal_type(f(P[int]())) # revealed: tuple[int, Unknown]
reveal_type(f(P[str]())) # revealed: tuple[Unknown, str]
```
However, if we pass something that does not match _any_ union element, we do emit an error:
```py
# error: [invalid-argument-type]
reveal_type(f(P[bytes]())) # revealed: tuple[Unknown, Unknown]
```
## Inferring nested generic function calls ## Inferring nested generic function calls
We can infer type assignments in nested calls to multiple generic functions. If they use the same We can infer type assignments in nested calls to multiple generic functions. If they use the same
View File
@ -102,6 +102,38 @@ Other values are invalid.
P4 = ParamSpec("P4", default=int) P4 = ParamSpec("P4", default=int)
``` ```
### `default` parameter in `typing_extensions.ParamSpec`
```toml
[environment]
python-version = "3.12"
```
The `default` parameter to `ParamSpec` is available from `typing_extensions` in Python 3.12 and
earlier.
```py
from typing import ParamSpec
from typing_extensions import ParamSpec as ExtParamSpec
# This shouldn't emit a diagnostic
P1 = ExtParamSpec("P1", default=[int, str])
# But, this should
# error: [invalid-paramspec] "The `default` parameter of `typing.ParamSpec` was added in Python 3.13"
P2 = ParamSpec("P2", default=[int, str])
```
And, it allows the same set of values as `typing.ParamSpec`.
```py
P3 = ExtParamSpec("P3", default=...)
P4 = ExtParamSpec("P4", default=P3)
# error: [invalid-paramspec]
P5 = ExtParamSpec("P5", default=int)
```
### Forward references in stub files ### Forward references in stub files
Stubs natively support forward references, so patterns that would raise `NameError` at runtime are Stubs natively support forward references, so patterns that would raise `NameError` at runtime are
@ -115,3 +147,297 @@ P = ParamSpec("P", default=[A, B])
class A: ... class A: ...
class B: ... class B: ...
``` ```
## Validating `ParamSpec` usage
In type annotations, `ParamSpec` is only valid as the first element to `Callable`, the final element
to `Concatenate`, or as a type parameter to `Protocol` or `Generic`.
```py
from typing import ParamSpec, Callable, Concatenate, Protocol, Generic
P = ParamSpec("P")
class ValidProtocol(Protocol[P]):
def method(self, c: Callable[P, int]) -> None: ...
class ValidGeneric(Generic[P]):
def method(self, c: Callable[P, int]) -> None: ...
def valid(
a1: Callable[P, int],
a2: Callable[Concatenate[int, P], int],
) -> None: ...
def invalid(
# TODO: error
a1: P,
# TODO: error
a2: list[P],
# TODO: error
a3: Callable[[P], int],
# TODO: error
a4: Callable[..., P],
# TODO: error
a5: Callable[Concatenate[P, ...], int],
) -> None: ...
```
## Validating `P.args` and `P.kwargs` usage
The components of `ParamSpec`, i.e., `P.args` and `P.kwargs`, are only valid when used as the
annotated types of `*args` and `**kwargs`, respectively.
```py
from typing import Generic, Callable, ParamSpec
P = ParamSpec("P")
def foo1(c: Callable[P, int]) -> None:
def nested1(*args: P.args, **kwargs: P.kwargs) -> None: ...
def nested2(
# error: [invalid-type-form] "`P.kwargs` is valid only in `**kwargs` annotation: Did you mean `P.args`?"
*args: P.kwargs,
# error: [invalid-type-form] "`P.args` is valid only in `*args` annotation: Did you mean `P.kwargs`?"
**kwargs: P.args,
) -> None: ...
# TODO: error
def nested3(*args: P.args) -> None: ...
# TODO: error
def nested4(**kwargs: P.kwargs) -> None: ...
# TODO: error
def nested5(*args: P.args, x: int, **kwargs: P.kwargs) -> None: ...
# TODO: error
def bar1(*args: P.args, **kwargs: P.kwargs) -> None:
pass
class Foo1:
# TODO: error
def method(self, *args: P.args, **kwargs: P.kwargs) -> None: ...
```
And, they need to be used together.
```py
def foo2(c: Callable[P, int]) -> None:
# TODO: error
def nested1(*args: P.args) -> None: ...
# TODO: error
def nested2(**kwargs: P.kwargs) -> None: ...
class Foo2:
# TODO: error
args: P.args
# TODO: error
kwargs: P.kwargs
```
The names of these parameters do not need to be `args` or `kwargs`; it's the annotated type of the
respective variadic parameter that matters.
```py
class Foo3(Generic[P]):
def method1(self, *paramspec_args: P.args, **paramspec_kwargs: P.kwargs) -> None: ...
def method2(
self,
# error: [invalid-type-form] "`P.kwargs` is valid only in `**kwargs` annotation: Did you mean `P.args`?"
*paramspec_args: P.kwargs,
# error: [invalid-type-form] "`P.args` is valid only in `*args` annotation: Did you mean `P.kwargs`?"
**paramspec_kwargs: P.args,
) -> None: ...
```
## Specializing generic classes explicitly
```py
from typing import Any, Generic, ParamSpec, Callable, TypeVar
P1 = ParamSpec("P1")
P2 = ParamSpec("P2")
T1 = TypeVar("T1")
class OnlyParamSpec(Generic[P1]):
attr: Callable[P1, None]
class TwoParamSpec(Generic[P1, P2]):
attr1: Callable[P1, None]
attr2: Callable[P2, None]
class TypeVarAndParamSpec(Generic[T1, P1]):
attr: Callable[P1, T1]
```
Explicit specialization of a generic class involving `ParamSpec` is done by providing either a list
of types, `...`, or another in-scope `ParamSpec`.
```py
reveal_type(OnlyParamSpec[[]]().attr) # revealed: () -> None
reveal_type(OnlyParamSpec[[int, str]]().attr) # revealed: (int, str, /) -> None
reveal_type(OnlyParamSpec[...]().attr) # revealed: (...) -> None
def func(c: Callable[P2, None]):
reveal_type(OnlyParamSpec[P2]().attr) # revealed: (**P2@func) -> None
# TODO: error: paramspec is unbound
reveal_type(OnlyParamSpec[P2]().attr) # revealed: (...) -> None
# error: [invalid-type-arguments] "No type argument provided for required type variable `P1` of class `OnlyParamSpec`"
reveal_type(OnlyParamSpec[()]().attr) # revealed: (...) -> None
```
An explicit tuple expression (unlike an implicit one that omits the parentheses) is also accepted
when the `ParamSpec` is the only type variable. This isn't recommended and is mainly a fallout of it
having the same AST as the version without the parentheses. Both mypy and Pyright also allow this.
```py
reveal_type(OnlyParamSpec[(int, str)]().attr) # revealed: (int, str, /) -> None
```
<!-- blacken-docs:off -->
```py
# error: [invalid-syntax]
reveal_type(OnlyParamSpec[]().attr) # revealed: (...) -> None
```
<!-- blacken-docs:on -->
The square brackets can be omitted when `ParamSpec` is the only type variable.
```py
reveal_type(OnlyParamSpec[int, str]().attr) # revealed: (int, str, /) -> None
reveal_type(OnlyParamSpec[int,]().attr) # revealed: (int, /) -> None
# Even when there is only one element
reveal_type(OnlyParamSpec[Any]().attr) # revealed: (Any, /) -> None
reveal_type(OnlyParamSpec[object]().attr) # revealed: (object, /) -> None
reveal_type(OnlyParamSpec[int]().attr) # revealed: (int, /) -> None
```
But, they cannot be omitted when there are multiple type variables.
```py
reveal_type(TypeVarAndParamSpec[int, []]().attr) # revealed: () -> int
reveal_type(TypeVarAndParamSpec[int, [int, str]]().attr) # revealed: (int, str, /) -> int
reveal_type(TypeVarAndParamSpec[int, [str]]().attr) # revealed: (str, /) -> int
reveal_type(TypeVarAndParamSpec[int, ...]().attr) # revealed: (...) -> int
# TODO: We could still specialize for `T1` as the type is valid which would reveal `(...) -> int`
# TODO: error: paramspec is unbound
reveal_type(TypeVarAndParamSpec[int, P2]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, int]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, ()]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, (int, str)]().attr) # revealed: (...) -> Unknown
```
Nor can they be omitted when there is more than one `ParamSpec`.
```py
p = TwoParamSpec[[int, str], [int]]()
reveal_type(p.attr1) # revealed: (int, str, /) -> None
reveal_type(p.attr2) # revealed: (int, /) -> None
# error: [invalid-type-arguments]
# error: [invalid-type-arguments]
TwoParamSpec[int, str]
```
Specializing a `ParamSpec` type variable using `typing.Any` isn't explicitly allowed by the spec,
but both mypy and Pyright allow it, and there are usages of this in the wild, e.g.
`staticmethod[Any, Any]`.
```py
reveal_type(TypeVarAndParamSpec[int, Any]().attr) # revealed: (...) -> int
```
## Specialization when defaults are involved
```toml
[environment]
python-version = "3.13"
```
```py
from typing import Any, Generic, ParamSpec, Callable, TypeVar
P = ParamSpec("P")
PList = ParamSpec("PList", default=[int, str])
PEllipsis = ParamSpec("PEllipsis", default=...)
PAnother = ParamSpec("PAnother", default=P)
PAnotherWithDefault = ParamSpec("PAnotherWithDefault", default=PList)
```
```py
class ParamSpecWithDefault1(Generic[PList]):
attr: Callable[PList, None]
reveal_type(ParamSpecWithDefault1().attr) # revealed: (int, str, /) -> None
reveal_type(ParamSpecWithDefault1[[int]]().attr) # revealed: (int, /) -> None
```
```py
class ParamSpecWithDefault2(Generic[PEllipsis]):
attr: Callable[PEllipsis, None]
reveal_type(ParamSpecWithDefault2().attr) # revealed: (...) -> None
reveal_type(ParamSpecWithDefault2[[int, str]]().attr) # revealed: (int, str, /) -> None
```
```py
class ParamSpecWithDefault3(Generic[P, PAnother]):
attr1: Callable[P, None]
attr2: Callable[PAnother, None]
# `P` hasn't been specialized, so it defaults to the `Unknown` gradual form
p1 = ParamSpecWithDefault3()
reveal_type(p1.attr1) # revealed: (...) -> None
reveal_type(p1.attr2) # revealed: (...) -> None
p2 = ParamSpecWithDefault3[[int, str]]()
reveal_type(p2.attr1) # revealed: (int, str, /) -> None
reveal_type(p2.attr2) # revealed: (int, str, /) -> None
p3 = ParamSpecWithDefault3[[int], [str]]()
reveal_type(p3.attr1) # revealed: (int, /) -> None
reveal_type(p3.attr2) # revealed: (str, /) -> None
class ParamSpecWithDefault4(Generic[PList, PAnotherWithDefault]):
attr1: Callable[PList, None]
attr2: Callable[PAnotherWithDefault, None]
p1 = ParamSpecWithDefault4()
reveal_type(p1.attr1) # revealed: (int, str, /) -> None
reveal_type(p1.attr2) # revealed: (int, str, /) -> None
p2 = ParamSpecWithDefault4[[int]]()
reveal_type(p2.attr1) # revealed: (int, /) -> None
reveal_type(p2.attr2) # revealed: (int, /) -> None
p3 = ParamSpecWithDefault4[[int], [str]]()
reveal_type(p3.attr1) # revealed: (int, /) -> None
reveal_type(p3.attr2) # revealed: (str, /) -> None
# TODO: error
# Type variables are out of order, as the default of `PAnother` is `P`
class ParamSpecWithDefault5(Generic[PAnother, P]):
attr: Callable[PAnother, None]
# TODO: error
# `PAnother` has `P` (another `ParamSpec`) as its default, which is not in scope
class ParamSpecWithDefault6(Generic[PAnother]):
attr: Callable[PAnother, None]
```
## Semantics
The semantics of `ParamSpec` are described in
[the PEP 695 `ParamSpec` document](./../pep695/paramspec.md) to avoid duplication, except for any
behavior specific to the legacy `ParamSpec` implementation.
View File
@ -25,11 +25,11 @@ reveal_type(generic_context(SingleTypevar))
# revealed: ty_extensions.GenericContext[T@MultipleTypevars, S@MultipleTypevars] # revealed: ty_extensions.GenericContext[T@MultipleTypevars, S@MultipleTypevars]
reveal_type(generic_context(MultipleTypevars)) reveal_type(generic_context(MultipleTypevars))
# TODO: support `ParamSpec`/`TypeVarTuple` properly # TODO: support `TypeVarTuple` properly
# (these should include the `ParamSpec`s and `TypeVarTuple`s in their generic contexts) # (these should include the `TypeVarTuple`s in their generic contexts)
# revealed: ty_extensions.GenericContext[] # revealed: ty_extensions.GenericContext[P@SingleParamSpec]
reveal_type(generic_context(SingleParamSpec)) reveal_type(generic_context(SingleParamSpec))
# revealed: ty_extensions.GenericContext[T@TypeVarAndParamSpec] # revealed: ty_extensions.GenericContext[T@TypeVarAndParamSpec, P@TypeVarAndParamSpec]
reveal_type(generic_context(TypeVarAndParamSpec)) reveal_type(generic_context(TypeVarAndParamSpec))
# revealed: ty_extensions.GenericContext[] # revealed: ty_extensions.GenericContext[]
reveal_type(generic_context(SingleTypeVarTuple)) reveal_type(generic_context(SingleTypeVarTuple))
View File
@ -25,11 +25,11 @@ reveal_type(generic_context(SingleTypevar))
# revealed: ty_extensions.GenericContext[T@MultipleTypevars, S@MultipleTypevars] # revealed: ty_extensions.GenericContext[T@MultipleTypevars, S@MultipleTypevars]
reveal_type(generic_context(MultipleTypevars)) reveal_type(generic_context(MultipleTypevars))
# TODO: support `ParamSpec`/`TypeVarTuple` properly # TODO: support `TypeVarTuple` properly
# (these should include the `ParamSpec`s and `TypeVarTuple`s in their generic contexts) # (these should include the `TypeVarTuple`s in their generic contexts)
# revealed: ty_extensions.GenericContext[] # revealed: ty_extensions.GenericContext[P@SingleParamSpec]
reveal_type(generic_context(SingleParamSpec)) reveal_type(generic_context(SingleParamSpec))
# revealed: ty_extensions.GenericContext[T@TypeVarAndParamSpec] # revealed: ty_extensions.GenericContext[T@TypeVarAndParamSpec, P@TypeVarAndParamSpec]
reveal_type(generic_context(TypeVarAndParamSpec)) reveal_type(generic_context(TypeVarAndParamSpec))
# revealed: ty_extensions.GenericContext[] # revealed: ty_extensions.GenericContext[]
reveal_type(generic_context(SingleTypeVarTuple)) reveal_type(generic_context(SingleTypeVarTuple))
@ -264,12 +264,19 @@ signatures don't count towards variance).
### `__new__` only ### `__new__` only
```py ```py
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
x: T x: T
def __new__(cls, x: T) -> "C[T]": def __new__(cls, x: T) -> "C[T]":
return object.__new__(cls) return object.__new__(cls)
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -279,11 +286,18 @@ wrong_innards: C[int] = C("five")
### `__init__` only ### `__init__` only
```py ```py
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
x: T x: T
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -293,6 +307,8 @@ wrong_innards: C[int] = C("five")
### Identical `__new__` and `__init__` signatures ### Identical `__new__` and `__init__` signatures
```py ```py
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
x: T x: T
@ -301,6 +317,11 @@ class C[T]:
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -310,6 +331,8 @@ wrong_innards: C[int] = C("five")
### Compatible `__new__` and `__init__` signatures ### Compatible `__new__` and `__init__` signatures
```py ```py
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
x: T x: T
@ -318,6 +341,11 @@ class C[T]:
def __init__(self, x: T) -> None: ... def __init__(self, x: T) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1)) # revealed: C[int] reveal_type(C(1)) # revealed: C[int]
# error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`" # error: [invalid-assignment] "Object of type `C[int | str]` is not assignable to `C[int]`"
@ -331,6 +359,11 @@ class D[T]:
def __init__(self, *args, **kwargs) -> None: ... def __init__(self, *args, **kwargs) -> None: ...
# revealed: ty_extensions.GenericContext[T@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(1)) # revealed: D[int] reveal_type(D(1)) # revealed: D[int]
# error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`" # error: [invalid-assignment] "Object of type `D[int | str]` is not assignable to `D[int]`"
@ -343,6 +376,8 @@ If either method comes from a generic base class, we don't currently use its inf
to specialize the class. to specialize the class.
```py ```py
from ty_extensions import generic_context, into_callable
class C[T, U]: class C[T, U]:
def __new__(cls, *args, **kwargs) -> "C[T, U]": def __new__(cls, *args, **kwargs) -> "C[T, U]":
return object.__new__(cls) return object.__new__(cls)
@ -350,18 +385,30 @@ class C[T, U]:
class D[V](C[V, int]): class D[V](C[V, int]):
def __init__(self, x: V) -> None: ... def __init__(self, x: V) -> None: ...
# revealed: ty_extensions.GenericContext[V@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[V@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(1)) # revealed: D[Literal[1]] reveal_type(D(1)) # revealed: D[Literal[1]]
``` ```
### Generic class inherits `__init__` from generic base class ### Generic class inherits `__init__` from generic base class
```py ```py
from ty_extensions import generic_context, into_callable
class C[T, U]: class C[T, U]:
def __init__(self, t: T, u: U) -> None: ... def __init__(self, t: T, u: U) -> None: ...
class D[T, U](C[T, U]): class D[T, U](C[T, U]):
pass pass
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(C(1, "str")) # revealed: C[Literal[1], Literal["str"]] reveal_type(C(1, "str")) # revealed: C[Literal[1], Literal["str"]]
reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]] reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]]
``` ```
@ -371,9 +418,16 @@ reveal_type(D(1, "str")) # revealed: D[Literal[1], Literal["str"]]
This is a specific example of the above, since it was reported specifically by a user. This is a specific example of the above, since it was reported specifically by a user.
```py ```py
from ty_extensions import generic_context, into_callable
class D[T, U](dict[T, U]): class D[T, U](dict[T, U]):
pass pass
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D(key=1)) # revealed: D[str, int] reveal_type(D(key=1)) # revealed: D[str, int]
``` ```
@ -384,8 +438,15 @@ for `tuple`, so we use a different mechanism to make sure it has the right inher
context. But from the user's point of view, this is another example of the above.) context. But from the user's point of view, this is another example of the above.)
```py ```py
from ty_extensions import generic_context, into_callable
class C[T, U](tuple[T, U]): ... class C[T, U](tuple[T, U]): ...
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C((1, 2))) # revealed: C[Literal[1], Literal[2]] reveal_type(C((1, 2))) # revealed: C[Literal[1], Literal[2]]
``` ```
@ -409,11 +470,18 @@ def func8(t1: tuple[complex, list[int]], t2: tuple[int, *tuple[str, ...]], t3: t
### `__init__` is itself generic ### `__init__` is itself generic
```py ```py
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
x: T x: T
def __init__[S](self, x: T, y: S) -> None: ... def __init__[S](self, x: T, y: S) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, S@__init__]
reveal_type(generic_context(into_callable(C)))
reveal_type(C(1, 1)) # revealed: C[int] reveal_type(C(1, 1)) # revealed: C[int]
reveal_type(C(1, "string")) # revealed: C[int] reveal_type(C(1, "string")) # revealed: C[int]
reveal_type(C(1, True)) # revealed: C[int] reveal_type(C(1, True)) # revealed: C[int]
@ -427,6 +495,7 @@ wrong_innards: C[int] = C("five", 1)
```py ```py
from __future__ import annotations from __future__ import annotations
from typing import overload from typing import overload
from ty_extensions import generic_context, into_callable
class C[T]: class C[T]:
# we need to use the type variable or else the class is bivariant in T, and # we need to use the type variable or else the class is bivariant in T, and
@ -443,6 +512,11 @@ class C[T]:
def __init__(self, x: int) -> None: ... def __init__(self, x: int) -> None: ...
def __init__(self, x: str | bytes | int) -> None: ... def __init__(self, x: str | bytes | int) -> None: ...
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C("string")) # revealed: C[str] reveal_type(C("string")) # revealed: C[str]
reveal_type(C(b"bytes")) # revealed: C[bytes] reveal_type(C(b"bytes")) # revealed: C[bytes]
reveal_type(C(12)) # revealed: C[Unknown] reveal_type(C(12)) # revealed: C[Unknown]
@ -470,6 +544,11 @@ class D[T, U]:
def __init__(self, t: T, u: U) -> None: ... def __init__(self, t: T, u: U) -> None: ...
def __init__(self, *args) -> None: ... def __init__(self, *args) -> None: ...
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D("string")) # revealed: D[str, Literal["string"]] reveal_type(D("string")) # revealed: D[str, Literal["string"]]
reveal_type(D(1)) # revealed: D[str, Literal[1]] reveal_type(D(1)) # revealed: D[str, Literal[1]]
reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]] reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]]
@ -479,24 +558,42 @@ reveal_type(D(1, "string")) # revealed: D[Literal[1], Literal["string"]]
```py ```py
from dataclasses import dataclass from dataclasses import dataclass
from ty_extensions import generic_context, into_callable
@dataclass @dataclass
class A[T]: class A[T]:
x: T x: T
# revealed: ty_extensions.GenericContext[T@A]
reveal_type(generic_context(A))
# revealed: ty_extensions.GenericContext[T@A]
reveal_type(generic_context(into_callable(A)))
reveal_type(A(x=1)) # revealed: A[int] reveal_type(A(x=1)) # revealed: A[int]
``` ```
### Class typevar has another typevar as a default ### Class typevar has another typevar as a default
```py ```py
from ty_extensions import generic_context, into_callable
class C[T, U = T]: ... class C[T, U = T]: ...
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(C))
# revealed: ty_extensions.GenericContext[T@C, U@C]
reveal_type(generic_context(into_callable(C)))
reveal_type(C()) # revealed: C[Unknown, Unknown] reveal_type(C()) # revealed: C[Unknown, Unknown]
class D[T, U = T]: class D[T, U = T]:
def __init__(self) -> None: ... def __init__(self) -> None: ...
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(D))
# revealed: ty_extensions.GenericContext[T@D, U@D]
reveal_type(generic_context(into_callable(D)))
reveal_type(D()) # revealed: D[Unknown, Unknown] reveal_type(D()) # revealed: D[Unknown, Unknown]
``` ```
View File
@ -310,6 +310,127 @@ reveal_type(tuple_param("a", ("a", 1))) # revealed: tuple[Literal["a"], Literal
reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]] reveal_type(tuple_param(1, ("a", 1))) # revealed: tuple[Literal["a"], Literal[1]]
``` ```
When a union parameter contains generic classes like `P[T] | Q[T]`, we can infer the typevar from
the actual argument even for non-final classes.
```py
class P[T]:
x: T # invariant
class Q[T]:
x: T # invariant
def extract_t[T](x: P[T] | Q[T]) -> T:
raise NotImplementedError
reveal_type(extract_t(P[int]())) # revealed: int
reveal_type(extract_t(Q[str]())) # revealed: str
```
Passing anything else results in an error:
```py
# error: [invalid-argument-type]
reveal_type(extract_t([1, 2])) # revealed: Unknown
```
This also works when different union elements have different typevars:
```py
def extract_both[T, S](x: P[T] | Q[S]) -> tuple[T, S]:
raise NotImplementedError
reveal_type(extract_both(P[int]())) # revealed: tuple[int, Unknown]
reveal_type(extract_both(Q[str]())) # revealed: tuple[Unknown, str]
```
Inference also works when passing subclasses of the generic classes in the union.
```py
class SubP[T](P[T]):
pass
class SubQ[T](Q[T]):
pass
reveal_type(extract_t(SubP[int]())) # revealed: int
reveal_type(extract_t(SubQ[str]())) # revealed: str
reveal_type(extract_both(SubP[int]())) # revealed: tuple[int, Unknown]
reveal_type(extract_both(SubQ[str]())) # revealed: tuple[Unknown, str]
```
When a type is a subclass of both `P` and `Q` with different specializations, we cannot infer a
single type for `T` in `extract_t`, because `P` and `Q` are invariant. However, we can still infer
both types in a call to `extract_both`:
```py
class PandQ(P[int], Q[str]):
pass
# TODO: Ideally, we would return `Unknown` here.
# error: [invalid-argument-type]
reveal_type(extract_t(PandQ())) # revealed: int | str
reveal_type(extract_both(PandQ())) # revealed: tuple[int, str]
```
When non-generic types are part of the union, we can still infer typevars for the remaining generic
types:
```py
def extract_optional_t[T](x: None | P[T]) -> T:
raise NotImplementedError
reveal_type(extract_optional_t(None)) # revealed: Unknown
reveal_type(extract_optional_t(P[int]())) # revealed: int
```
Passing anything else results in an error:
```py
# error: [invalid-argument-type]
reveal_type(extract_optional_t(Q[str]())) # revealed: Unknown
```
If the union contains both a parent and a child of a generic class, we ideally pick the union
element that is more precise:
```py
class Base[T]:
x: T
class Sub[T](Base[T]): ...
def f[T](t: Base[T] | Sub[T | None]) -> T:
raise NotImplementedError
reveal_type(f(Base[int]())) # revealed: int
# TODO: Should ideally be `str`
reveal_type(f(Sub[str | None]())) # revealed: str | None
```
If we have a case like the following, where only one of the union elements matches due to the
typevar bound, we do not emit a specialization error:
```py
class P[T]:
value: T
def f[I: int, S: str](t: P[I] | P[S]) -> tuple[I, S]:
raise NotImplementedError
reveal_type(f(P[int]())) # revealed: tuple[int, Unknown]
reveal_type(f(P[str]())) # revealed: tuple[Unknown, str]
```
However, if we pass something that does not match _any_ union element, we do emit an error:
```py
# error: [invalid-argument-type]
reveal_type(f(P[bytes]())) # revealed: tuple[Unknown, Unknown]
```
## Inferring nested generic function calls ## Inferring nested generic function calls
We can infer type assignments in nested calls to multiple generic functions. If they use the same We can infer type assignments in nested calls to multiple generic functions. If they use the same
View File
@ -62,3 +62,614 @@ Other values are invalid.
def foo[**P = int]() -> None: def foo[**P = int]() -> None:
pass pass
``` ```
## Validating `ParamSpec` usage
`ParamSpec` is only valid as the first element to `Callable` or the final element to `Concatenate`.
```py
from typing import ParamSpec, Callable, Concatenate
def valid[**P](
a1: Callable[P, int],
a2: Callable[Concatenate[int, P], int],
) -> None: ...
def invalid[**P](
# TODO: error
a1: P,
# TODO: error
a2: list[P],
# TODO: error
a3: Callable[[P], int],
# TODO: error
a4: Callable[..., P],
# TODO: error
a5: Callable[Concatenate[P, ...], int],
) -> None: ...
```
## Validating `P.args` and `P.kwargs` usage
The components of `ParamSpec`, i.e., `P.args` and `P.kwargs`, are only valid when used as the
annotated types of `*args` and `**kwargs`, respectively.
```py
from typing import Callable
def foo[**P](c: Callable[P, int]) -> None:
def nested1(*args: P.args, **kwargs: P.kwargs) -> None: ...
# error: [invalid-type-form] "`P.kwargs` is valid only in `**kwargs` annotation: Did you mean `P.args`?"
# error: [invalid-type-form] "`P.args` is valid only in `*args` annotation: Did you mean `P.kwargs`?"
def nested2(*args: P.kwargs, **kwargs: P.args) -> None: ...
# TODO: error
def nested3(*args: P.args) -> None: ...
# TODO: error
def nested4(**kwargs: P.kwargs) -> None: ...
# TODO: error
def nested5(*args: P.args, x: int, **kwargs: P.kwargs) -> None: ...
```
And, they need to be used together.
```py
def foo[**P](c: Callable[P, int]) -> None:
# TODO: error
def nested1(*args: P.args) -> None: ...
# TODO: error
def nested2(**kwargs: P.kwargs) -> None: ...
class Foo[**P]:
# TODO: error
args: P.args
# TODO: error
kwargs: P.kwargs
```
The names of these parameters do not need to be `args` or `kwargs`; it's the annotated type of the
respective variadic parameter that matters.
```py
class Foo3[**P]:
def method1(self, *paramspec_args: P.args, **paramspec_kwargs: P.kwargs) -> None: ...
def method2(
self,
# error: [invalid-type-form] "`P.kwargs` is valid only in `**kwargs` annotation: Did you mean `P.args`?"
*paramspec_args: P.kwargs,
# error: [invalid-type-form] "`P.args` is valid only in `*args` annotation: Did you mean `P.kwargs`?"
**paramspec_kwargs: P.args,
) -> None: ...
```
They aren't allowed to annotate an instance attribute either:
```py
class Foo4[**P]:
def __init__(self, fn: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> None:
self.fn = fn
# TODO: error
self.args: P.args = args
# TODO: error
self.kwargs: P.kwargs = kwargs
```
## Semantics of `P.args` and `P.kwargs`
The types of `args` and `kwargs` inside the function are `P.args` and `P.kwargs`, respectively,
instead of `tuple[P.args, ...]` and `dict[str, P.kwargs]`.
### Passing `*args` and `**kwargs` to a callable
```py
from typing import Callable
def f[**P](func: Callable[P, int]) -> Callable[P, None]:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> None:
reveal_type(args) # revealed: P@f.args
reveal_type(kwargs) # revealed: P@f.kwargs
reveal_type(func(*args, **kwargs)) # revealed: int
# error: [invalid-argument-type] "Argument is incorrect: Expected `P@f.args`, found `P@f.kwargs`"
# error: [invalid-argument-type] "Argument is incorrect: Expected `P@f.kwargs`, found `P@f.args`"
reveal_type(func(*kwargs, **args)) # revealed: int
# error: [invalid-argument-type] "Argument is incorrect: Expected `P@f.args`, found `P@f.kwargs`"
reveal_type(func(args, kwargs)) # revealed: int
# Both parameters are required
# TODO: error
reveal_type(func()) # revealed: int
reveal_type(func(*args)) # revealed: int
reveal_type(func(**kwargs)) # revealed: int
return wrapper
```
### Operations on `P.args` and `P.kwargs`
The types `P.args` and `P.kwargs` behave like a `tuple` and a `dict`, respectively. Internally,
they are represented as type variables with upper bounds of `tuple[object, ...]` and
`Top[dict[str, Any]]`, respectively.
```py
from typing import Callable, Any
def f[**P](func: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> None:
reveal_type(args + ("extra",)) # revealed: tuple[object, ...]
reveal_type(args + (1, 2, 3)) # revealed: tuple[object, ...]
reveal_type(args[0]) # revealed: object
reveal_type("key" in kwargs) # revealed: bool
reveal_type(kwargs.get("key")) # revealed: object
reveal_type(kwargs["key"]) # revealed: object
```
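The effect of these upper bounds can be approximated with an ordinary bounded type variable, which
is roughly how the representation above behaves; a hypothetical sketch (not one of the committed
tests):
```py
from typing import TypeVar
TTup = TypeVar("TTup", bound=tuple[object, ...])
def first(args: TTup) -> object:
    # Only operations valid on any `tuple[object, ...]` are available,
    # which is why indexing yields `object`.
    return args[0]
```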
## Specializing generic classes explicitly
```py
from typing import Any, Callable, ParamSpec
class OnlyParamSpec[**P1]:
attr: Callable[P1, None]
class TwoParamSpec[**P1, **P2]:
attr1: Callable[P1, None]
attr2: Callable[P2, None]
class TypeVarAndParamSpec[T1, **P1]:
attr: Callable[P1, T1]
```
Explicit specialization of a generic class involving `ParamSpec` is done by providing either a list
of types, `...`, or another in-scope `ParamSpec`.
```py
reveal_type(OnlyParamSpec[[]]().attr) # revealed: () -> None
reveal_type(OnlyParamSpec[[int, str]]().attr) # revealed: (int, str, /) -> None
reveal_type(OnlyParamSpec[...]().attr) # revealed: (...) -> None
def func[**P2](c: Callable[P2, None]):
reveal_type(OnlyParamSpec[P2]().attr) # revealed: (**P2@func) -> None
P2 = ParamSpec("P2")
# TODO: error: paramspec is unbound
reveal_type(OnlyParamSpec[P2]().attr) # revealed: (...) -> None
# error: [invalid-type-arguments] "No type argument provided for required type variable `P1` of class `OnlyParamSpec`"
reveal_type(OnlyParamSpec[()]().attr) # revealed: (...) -> None
```
An explicit tuple expression (unlike an implicit one that omits the parentheses) is also accepted
when the `ParamSpec` is the only type variable. This isn't recommended and is mainly a fallout of it
having the same AST as the version without the parentheses. Both mypy and Pyright also allow this.
```py
reveal_type(OnlyParamSpec[(int, str)]().attr) # revealed: (int, str, /) -> None
```
<!-- blacken-docs:off -->
```py
# error: [invalid-syntax]
reveal_type(OnlyParamSpec[]().attr) # revealed: (...) -> None
```
<!-- blacken-docs:on -->
The square brackets can be omitted when `ParamSpec` is the only type variable.
```py
reveal_type(OnlyParamSpec[int, str]().attr) # revealed: (int, str, /) -> None
reveal_type(OnlyParamSpec[int,]().attr) # revealed: (int, /) -> None
# Even when there is only one element
reveal_type(OnlyParamSpec[Any]().attr) # revealed: (Any, /) -> None
reveal_type(OnlyParamSpec[object]().attr) # revealed: (object, /) -> None
reveal_type(OnlyParamSpec[int]().attr) # revealed: (int, /) -> None
```
But, they cannot be omitted when there are multiple type variables.
```py
reveal_type(TypeVarAndParamSpec[int, []]().attr) # revealed: () -> int
reveal_type(TypeVarAndParamSpec[int, [int, str]]().attr) # revealed: (int, str, /) -> int
reveal_type(TypeVarAndParamSpec[int, [str]]().attr) # revealed: (str, /) -> int
reveal_type(TypeVarAndParamSpec[int, ...]().attr) # revealed: (...) -> int
# TODO: error: paramspec is unbound
reveal_type(TypeVarAndParamSpec[int, P2]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, int]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, ()]().attr) # revealed: (...) -> Unknown
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be"
reveal_type(TypeVarAndParamSpec[int, (int, str)]().attr) # revealed: (...) -> Unknown
```
Nor can they be omitted when there is more than one `ParamSpec`.
```py
p = TwoParamSpec[[int, str], [int]]()
reveal_type(p.attr1) # revealed: (int, str, /) -> None
reveal_type(p.attr2) # revealed: (int, /) -> None
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be either a list of types, `ParamSpec`, `Concatenate`, or `...`"
# error: [invalid-type-arguments] "Type argument for `ParamSpec` must be either a list of types, `ParamSpec`, `Concatenate`, or `...`"
TwoParamSpec[int, str]
```
Specializing a `ParamSpec` type variable using `typing.Any` isn't explicitly allowed by the spec,
but both mypy and Pyright allow it, and there are usages of this in the wild, e.g.
`staticmethod[Any, Any]`.
```py
reveal_type(TypeVarAndParamSpec[int, Any]().attr) # revealed: (...) -> int
```
## Specialization when defaults are involved
```py
from typing import Callable, ParamSpec
class ParamSpecWithDefault1[**P1 = [int, str]]:
attr: Callable[P1, None]
reveal_type(ParamSpecWithDefault1().attr) # revealed: (int, str, /) -> None
reveal_type(ParamSpecWithDefault1[int]().attr) # revealed: (int, /) -> None
```
```py
class ParamSpecWithDefault2[**P1 = ...]:
attr: Callable[P1, None]
reveal_type(ParamSpecWithDefault2().attr) # revealed: (...) -> None
reveal_type(ParamSpecWithDefault2[int, str]().attr) # revealed: (int, str, /) -> None
```
```py
class ParamSpecWithDefault3[**P1, **P2 = P1]:
attr1: Callable[P1, None]
attr2: Callable[P2, None]
# `P1` hasn't been specialized, so it defaults to `...` gradual form
p1 = ParamSpecWithDefault3()
reveal_type(p1.attr1) # revealed: (...) -> None
reveal_type(p1.attr2) # revealed: (...) -> None
p2 = ParamSpecWithDefault3[[int, str]]()
reveal_type(p2.attr1) # revealed: (int, str, /) -> None
reveal_type(p2.attr2) # revealed: (int, str, /) -> None
p3 = ParamSpecWithDefault3[[int], [str]]()
reveal_type(p3.attr1) # revealed: (int, /) -> None
reveal_type(p3.attr2) # revealed: (str, /) -> None
class ParamSpecWithDefault4[**P1 = [int, str], **P2 = P1]:
attr1: Callable[P1, None]
attr2: Callable[P2, None]
p1 = ParamSpecWithDefault4()
reveal_type(p1.attr1) # revealed: (int, str, /) -> None
reveal_type(p1.attr2) # revealed: (int, str, /) -> None
p2 = ParamSpecWithDefault4[[int]]()
reveal_type(p2.attr1) # revealed: (int, /) -> None
reveal_type(p2.attr2) # revealed: (int, /) -> None
p3 = ParamSpecWithDefault4[[int], [str]]()
reveal_type(p3.attr1) # revealed: (int, /) -> None
reveal_type(p3.attr2) # revealed: (str, /) -> None
P2 = ParamSpec("P2")
# TODO: error: paramspec is out of scope
class ParamSpecWithDefault5[**P1 = P2]:
attr: Callable[P1, None]
```
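For a sense of where such defaults are useful, here is a small hypothetical sketch: a callback
holder whose `ParamSpec` defaults to an empty parameter list, so the common no-argument case needs
no explicit specialization (the names are illustrative):
```py
from typing import Callable

class CallbackHolder[**P = []]:
    callback: Callable[P, None]

def register_simple(holder: CallbackHolder) -> None:
    # With no type arguments, `P` falls back to its default `[]`, so the
    # callback takes no parameters.
    holder.callback()

def register_logging(holder: CallbackHolder[[str]]) -> None:
    # An explicit specialization overrides the default.
    holder.callback("message")
```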
## Semantics
Most of these test cases are adapted from the
[typing documentation on `ParamSpec` semantics](https://typing.python.org/en/latest/spec/generics.html#semantics).
### Return type change using `ParamSpec` once
```py
from typing import Callable
def converter[**P](func: Callable[P, int]) -> Callable[P, bool]:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> bool:
func(*args, **kwargs)
return True
return wrapper
def f1(x: int, y: str) -> int:
return 1
# This should preserve all the information about the parameters of `f1`
f2 = converter(f1)
reveal_type(f2) # revealed: (x: int, y: str) -> bool
reveal_type(f1(1, "a")) # revealed: int
reveal_type(f2(1, "a")) # revealed: bool
# As it preserves the parameter kinds, the following should work as well
reveal_type(f2(1, y="a")) # revealed: bool
reveal_type(f2(x=1, y="a")) # revealed: bool
reveal_type(f2(y="a", x=1)) # revealed: bool
# error: [missing-argument] "No argument provided for required parameter `y`"
f2(1)
# error: [invalid-argument-type] "Argument is incorrect: Expected `int`, found `Literal["a"]`"
f2("a", "b")
```
The `converter` function acts as a decorator here:
```py
@converter
def f3(x: int, y: str) -> int:
return 1
# TODO: This should reveal `(x: int, y: str) -> bool` but there's a cycle: https://github.com/astral-sh/ty/issues/1729
reveal_type(f3) # revealed: ((x: int, y: str) -> bool) | ((x: Divergent, y: Divergent) -> bool)
reveal_type(f3(1, "a")) # revealed: bool
reveal_type(f3(x=1, y="a")) # revealed: bool
reveal_type(f3(1, y="a")) # revealed: bool
reveal_type(f3(y="a", x=1)) # revealed: bool
# TODO: There should only be one error but the type of `f3` is a union: https://github.com/astral-sh/ty/issues/1729
# error: [missing-argument] "No argument provided for required parameter `y`"
# error: [missing-argument] "No argument provided for required parameter `y`"
f3(1)
# error: [invalid-argument-type] "Argument is incorrect: Expected `int`, found `Literal["a"]`"
f3("a", "b")
```
### Return type change using the same `ParamSpec` multiple times
```py
from typing import Callable
def multiple[**P](func1: Callable[P, int], func2: Callable[P, int]) -> Callable[P, bool]:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> bool:
func1(*args, **kwargs)
func2(*args, **kwargs)
return True
return wrapper
```
As per the spec,
> A user may include the same `ParamSpec` multiple times in the arguments of the same function, to
> indicate a dependency between multiple arguments. In these cases a type checker may choose to
> solve to a common behavioral supertype (i.e. a set of parameters for which all of the valid calls
> are valid in both of the subtypes), but is not obligated to do so.
TODO: Currently, we don't do this.
```py
def xy(x: int, y: str) -> int:
return 1
def yx(y: int, x: str) -> int:
return 2
reveal_type(multiple(xy, xy)) # revealed: (x: int, y: str) -> bool
# The common supertype is `(int, str, /)`, which converts the positional-or-keyword parameters
# into positional-only parameters because the positions of the types are the same.
# TODO: This shouldn't error
# error: [invalid-argument-type]
reveal_type(multiple(xy, yx)) # revealed: (x: int, y: str) -> bool
def keyword_only_with_default_1(*, x: int = 42) -> int:
return 1
def keyword_only_with_default_2(*, y: int = 42) -> int:
return 2
# The common supertype for two functions whose keyword-only parameters all have defaults would be
# an empty parameter list, i.e., `()`.
# TODO: This shouldn't error
# error: [invalid-argument-type]
# revealed: (*, x: int = Literal[42]) -> bool
reveal_type(multiple(keyword_only_with_default_1, keyword_only_with_default_2))
def keyword_only1(*, x: int) -> int:
return 1
def keyword_only2(*, y: int) -> int:
return 2
# On the other hand, combining two functions with required keyword-only parameters does not have a
# common supertype, so it should result in an error.
# error: [invalid-argument-type] "Argument to function `multiple` is incorrect: Expected `(*, x: int) -> int`, found `def keyword_only2(*, y: int) -> int`"
reveal_type(multiple(keyword_only1, keyword_only2)) # revealed: (*, x: int) -> bool
```
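To make the "common behavioral supertype" idea concrete, here is a small sketch (with hypothetical
helper names) showing why a positional-only signature works for both parameter orders:
```py
from typing import Callable

def ab(a: int, b: str) -> int:
    return 1

def ba(b: int, a: str) -> int:
    return 2

def expects_positional_only(f: Callable[[int, str], int]) -> None:
    # `Callable[[int, str], int]` describes the positional-only signature
    # `(int, str, /) -> int`.
    ...

# Both functions accept an `int` followed by a `str` passed positionally, so
# both are assignable to the positional-only signature, regardless of their
# parameter names.
expects_positional_only(ab)
expects_positional_only(ba)
```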
### Constructors of user-defined generic classes over `ParamSpec`
```py
from typing import Callable
class C[**P]:
f: Callable[P, int]
def __init__(self, f: Callable[P, int]) -> None:
self.f = f
def f(x: int, y: str) -> bool:
return True
c = C(f)
reveal_type(c.f) # revealed: (x: int, y: str) -> int
```
### `ParamSpec` in prepended positional parameters
> If one of these prepended positional parameters contains a free `ParamSpec`, we consider that
> variable in scope for the purposes of extracting the components of that `ParamSpec`.
```py
from typing import Callable
def foo1[**P1](func: Callable[P1, int], *args: P1.args, **kwargs: P1.kwargs) -> int:
return func(*args, **kwargs)
def foo1_with_extra_arg[**P1](func: Callable[P1, int], extra: str, *args: P1.args, **kwargs: P1.kwargs) -> int:
return func(*args, **kwargs)
def foo2[**P2](func: Callable[P2, int], *args: P2.args, **kwargs: P2.kwargs) -> None:
foo1(func, *args, **kwargs)
# error: [invalid-argument-type] "Argument to function `foo1` is incorrect: Expected `P2@foo2.args`, found `Literal[1]`"
foo1(func, 1, *args, **kwargs)
# error: [invalid-argument-type] "Argument to function `foo1_with_extra_arg` is incorrect: Expected `str`, found `P2@foo2.args`"
foo1_with_extra_arg(func, *args, **kwargs)
foo1_with_extra_arg(func, "extra", *args, **kwargs)
```
Here, the first argument to `foo1` can specialize `P1` to the parameters of the callable passed to
it, which is then used to type the `ParamSpec` components used in `*args` and `**kwargs`.
```py
def f1(x: int, y: str) -> int:
return 1
foo1(f1, 1, "a")
foo1(f1, x=1, y="a")
foo1(f1, 1, y="a")
# error: [missing-argument] "No arguments provided for required parameters `x`, `y` of function `foo1`"
foo1(f1)
# error: [missing-argument] "No argument provided for required parameter `y` of function `foo1`"
foo1(f1, 1)
# error: [invalid-argument-type] "Argument to function `foo1` is incorrect: Expected `str`, found `Literal[2]`"
foo1(f1, 1, 2)
# error: [too-many-positional-arguments] "Too many positional arguments to function `foo1`: expected 2, got 3"
foo1(f1, 1, "a", "b")
# error: [missing-argument] "No argument provided for required parameter `y` of function `foo1`"
# error: [unknown-argument] "Argument `z` does not match any known parameter of function `foo1`"
foo1(f1, x=1, z="a")
```
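The same typing pattern underlies helpers that forward their extra arguments to a callable; a
minimal, hypothetical sketch:
```py
from typing import Callable

def call_twice[**P, R](func: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R:
    # Both calls are checked against the original signature of `func`.
    func(*args, **kwargs)
    return func(*args, **kwargs)

def greet(name: str, *, excited: bool = False) -> str:
    return name + ("!" if excited else "")

# `P` specializes to `(name: str, *, excited: bool = ...)` and `R` to `str`,
# so the keyword argument is checked by name here.
call_twice(greet, "ty", excited=True)
```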
### Specializing `ParamSpec` with another `ParamSpec`
```py
class Foo[**P]:
def __init__(self, *args: P.args, **kwargs: P.kwargs) -> None:
self.args = args
self.kwargs = kwargs
def bar[**P](foo: Foo[P]) -> None:
reveal_type(foo) # revealed: Foo[P@bar]
reveal_type(foo.args) # revealed: Unknown | P@bar.args
reveal_type(foo.kwargs) # revealed: Unknown | P@bar.kwargs
```
ty checks whether the argument after `**` is a mapping type, but because instance attributes are
unioned with `Unknown`, it shouldn't error here.
```py
from typing import Callable
def baz[**P](fn: Callable[P, None], foo: Foo[P]) -> None:
fn(*foo.args, **foo.kwargs)
```
The `Unknown` can be eliminated by annotating these attributes with `Final`:
```py
from typing import Final
class FooWithFinal[**P]:
def __init__(self, *args: P.args, **kwargs: P.kwargs) -> None:
self.args: Final = args
self.kwargs: Final = kwargs
def with_final[**P](foo: FooWithFinal[P]) -> None:
reveal_type(foo) # revealed: FooWithFinal[P@with_final]
reveal_type(foo.args) # revealed: P@with_final.args
reveal_type(foo.kwargs) # revealed: P@with_final.kwargs
```
### Specializing `Self` when `ParamSpec` is involved
```py
class Foo[**P]:
def method(self, *args: P.args, **kwargs: P.kwargs) -> str:
return "hello"
foo = Foo[int, str]()
reveal_type(foo) # revealed: Foo[(int, str, /)]
reveal_type(foo.method) # revealed: bound method Foo[(int, str, /)].method(int, str, /) -> str
reveal_type(foo.method(1, "a")) # revealed: str
```
### Overloads
`overloaded.pyi`:
```pyi
from typing import overload
@overload
def int_int(x: int) -> int: ...
@overload
def int_int(x: str) -> int: ...
@overload
def int_str(x: int) -> int: ...
@overload
def int_str(x: str) -> str: ...
@overload
def str_str(x: int) -> str: ...
@overload
def str_str(x: str) -> str: ...
```
```py
from typing import Callable
from overloaded import int_int, int_str, str_str
def change_return_type[**P](f: Callable[P, int]) -> Callable[P, str]:
def nested(*args: P.args, **kwargs: P.kwargs) -> str:
return str(f(*args, **kwargs))
return nested
def with_parameters[**P](f: Callable[P, int], *args: P.args, **kwargs: P.kwargs) -> Callable[P, str]:
def nested(*args: P.args, **kwargs: P.kwargs) -> str:
return str(f(*args, **kwargs))
return nested
reveal_type(change_return_type(int_int)) # revealed: Overload[(x: int) -> str, (x: str) -> str]
# TODO: This shouldn't error and should pick the first overload because of the return type
# error: [invalid-argument-type]
reveal_type(change_return_type(int_str)) # revealed: Overload[(x: int) -> str, (x: str) -> str]
# error: [invalid-argument-type]
reveal_type(change_return_type(str_str)) # revealed: Overload[(x: int) -> str, (x: str) -> str]
# TODO: Both of these shouldn't raise an error
# error: [invalid-argument-type]
reveal_type(with_parameters(int_int, 1)) # revealed: Overload[(x: int) -> str, (x: str) -> str]
# error: [invalid-argument-type]
reveal_type(with_parameters(int_int, "a")) # revealed: Overload[(x: int) -> str, (x: str) -> str]
```
View File
@ -398,7 +398,7 @@ reveal_type(Sum) # revealed: <class 'tuple[T@Sum, U@Sum]'>
reveal_type(ListOrTuple) # revealed: <types.UnionType special form 'list[T@ListOrTuple] | tuple[T@ListOrTuple, ...]'> reveal_type(ListOrTuple) # revealed: <types.UnionType special form 'list[T@ListOrTuple] | tuple[T@ListOrTuple, ...]'>
# revealed: <types.UnionType special form 'list[T@ListOrTupleLegacy] | tuple[T@ListOrTupleLegacy, ...]'> # revealed: <types.UnionType special form 'list[T@ListOrTupleLegacy] | tuple[T@ListOrTupleLegacy, ...]'>
reveal_type(ListOrTupleLegacy) reveal_type(ListOrTupleLegacy)
reveal_type(MyCallable) # revealed: @Todo(Callable[..] specialized with ParamSpec) reveal_type(MyCallable) # revealed: <typing.Callable special form '(**P@MyCallable) -> T@MyCallable'>
reveal_type(AnnotatedType) # revealed: <special form 'typing.Annotated[T@AnnotatedType, <metadata>]'> reveal_type(AnnotatedType) # revealed: <special form 'typing.Annotated[T@AnnotatedType, <metadata>]'>
reveal_type(TransparentAlias) # revealed: typing.TypeVar reveal_type(TransparentAlias) # revealed: typing.TypeVar
reveal_type(MyOptional) # revealed: <types.UnionType special form 'T@MyOptional | None'> reveal_type(MyOptional) # revealed: <types.UnionType special form 'T@MyOptional | None'>
@ -425,8 +425,7 @@ def _(
reveal_type(int_and_bytes) # revealed: tuple[int, bytes] reveal_type(int_and_bytes) # revealed: tuple[int, bytes]
reveal_type(list_or_tuple) # revealed: list[int] | tuple[int, ...] reveal_type(list_or_tuple) # revealed: list[int] | tuple[int, ...]
reveal_type(list_or_tuple_legacy) # revealed: list[int] | tuple[int, ...] reveal_type(list_or_tuple_legacy) # revealed: list[int] | tuple[int, ...]
# TODO: This should be `(str, bytes) -> int` reveal_type(my_callable) # revealed: (str, bytes, /) -> int
reveal_type(my_callable) # revealed: @Todo(Callable[..] specialized with ParamSpec)
reveal_type(annotated_int) # revealed: int reveal_type(annotated_int) # revealed: int
reveal_type(transparent_alias) # revealed: int reveal_type(transparent_alias) # revealed: int
reveal_type(optional_int) # revealed: int | None reveal_type(optional_int) # revealed: int | None
@ -463,7 +462,7 @@ reveal_type(ListOfPairs) # revealed: <class 'list[tuple[str, str]]'>
reveal_type(ListOrTupleOfInts) # revealed: <types.UnionType special form 'list[int] | tuple[int, ...]'> reveal_type(ListOrTupleOfInts) # revealed: <types.UnionType special form 'list[int] | tuple[int, ...]'>
reveal_type(AnnotatedInt) # revealed: <special form 'typing.Annotated[int, <metadata>]'> reveal_type(AnnotatedInt) # revealed: <special form 'typing.Annotated[int, <metadata>]'>
reveal_type(SubclassOfInt) # revealed: <special form 'type[int]'> reveal_type(SubclassOfInt) # revealed: <special form 'type[int]'>
reveal_type(CallableIntToStr) # revealed: @Todo(Callable[..] specialized with ParamSpec) reveal_type(CallableIntToStr) # revealed: <typing.Callable special form '(int, /) -> str'>
def _( def _(
ints_or_none: IntsOrNone, ints_or_none: IntsOrNone,
@ -480,8 +479,7 @@ def _(
reveal_type(list_or_tuple_of_ints) # revealed: list[int] | tuple[int, ...] reveal_type(list_or_tuple_of_ints) # revealed: list[int] | tuple[int, ...]
reveal_type(annotated_int) # revealed: int reveal_type(annotated_int) # revealed: int
reveal_type(subclass_of_int) # revealed: type[int] reveal_type(subclass_of_int) # revealed: type[int]
# TODO: This should be `(int, /) -> str` reveal_type(callable_int_to_str) # revealed: (int, /) -> str
reveal_type(callable_int_to_str) # revealed: @Todo(Callable[..] specialized with ParamSpec)
``` ```
A generic implicit type alias can also be used in another generic implicit type alias: A generic implicit type alias can also be used in another generic implicit type alias:
@ -534,8 +532,7 @@ def _(
reveal_type(unknown_and_unknown) # revealed: tuple[Unknown, Unknown] reveal_type(unknown_and_unknown) # revealed: tuple[Unknown, Unknown]
reveal_type(list_or_tuple) # revealed: list[Unknown] | tuple[Unknown, ...] reveal_type(list_or_tuple) # revealed: list[Unknown] | tuple[Unknown, ...]
reveal_type(list_or_tuple_legacy) # revealed: list[Unknown] | tuple[Unknown, ...] reveal_type(list_or_tuple_legacy) # revealed: list[Unknown] | tuple[Unknown, ...]
# TODO: should be (...) -> Unknown reveal_type(my_callable) # revealed: (...) -> Unknown
reveal_type(my_callable) # revealed: @Todo(Callable[..] specialized with ParamSpec)
reveal_type(annotated_unknown) # revealed: Unknown reveal_type(annotated_unknown) # revealed: Unknown
reveal_type(optional_unknown) # revealed: Unknown | None reveal_type(optional_unknown) # revealed: Unknown | None
``` ```
View File
@ -128,3 +128,16 @@ InvalidEmptyUnion = Union[]
def _(u: InvalidEmptyUnion): def _(u: InvalidEmptyUnion):
reveal_type(u) # revealed: Unknown reveal_type(u) # revealed: Unknown
``` ```
### `typing.Annotated`
```py
from typing import Annotated
# error: [invalid-syntax] "Expected index or slice expression"
# error: [invalid-type-form] "Special form `typing.Annotated` expected at least 2 arguments (one type and at least one metadata element)"
InvalidEmptyAnnotated = Annotated[]
def _(a: InvalidEmptyAnnotated):
reveal_type(a) # revealed: Unknown
```
View File
@ -218,8 +218,8 @@ class E(A[int]):
def method(self, x: object) -> None: ... # fine def method(self, x: object) -> None: ... # fine
class F[T](A[T]): class F[T](A[T]):
# TODO: we should emit `invalid-method-override` on this:
# `str` is not necessarily a supertype of `T`! # `str` is not necessarily a supertype of `T`!
# error: [invalid-method-override]
def method(self, x: str) -> None: ... def method(self, x: str) -> None: ...
class G(A[int]): class G(A[int]):
View File
@ -3010,6 +3010,31 @@ class Bar(Protocol[S]):
z: S | Bar[S] z: S | Bar[S]
``` ```
### Recursive generic protocols with growing specializations
This snippet caused the stack overflow reported in <https://github.com/astral-sh/ty/issues/1736>
because the type parameter grows with each recursive call (`C[set[T]]` leads to `C[set[set[T]]]`,
then `C[set[set[set[T]]]]`, and so on):
```toml
[environment]
python-version = "3.12"
```
```py
from typing import Protocol
class C[T](Protocol):
a: "C[set[T]]"
def takes_c(c: C[set[int]]) -> None: ...
def f(c: C[int]) -> None:
# The key thing is that we don't stack overflow while checking this.
# The cycle detection assumes compatibility when it detects potential
# infinite recursion between protocol specializations.
takes_c(c)
```
### Recursive legacy generic protocol ### Recursive legacy generic protocol
```py ```py
@ -3184,14 +3209,9 @@ from ty_extensions import reveal_protocol_interface
reveal_protocol_interface(Foo) reveal_protocol_interface(Foo)
``` ```
## Known panics ## Protocols generic over TypeVars bound to forward references
### Protocols generic over TypeVars bound to forward references Protocols can have TypeVars with forward reference bounds that form cycles.
This test currently panics because the `ClassLiteral::explicit_bases` query fails to converge. See
issue <https://github.com/astral-sh/ty/issues/1587>.
<!-- expect-panic: execute: too many cycle iterations -->
```py ```py
from typing import Any, Protocol, TypeVar from typing import Any, Protocol, TypeVar
@ -3209,6 +3229,19 @@ class A2(Protocol[T2]):
class B1(A1[T3], Protocol[T3]): ... class B1(A1[T3], Protocol[T3]): ...
class B2(A2[T4], Protocol[T4]): ... class B2(A2[T4], Protocol[T4]): ...
# TODO should just be `B2[Any]`
reveal_type(T3.__bound__) # revealed: B2[Any] | @Todo(specialized non-generic class)
# TODO error: [invalid-type-arguments]
def f(x: B1[int]):
pass
reveal_type(T4.__bound__) # revealed: B1[Any]
# error: [invalid-type-arguments]
def g(x: B2[int]):
pass
``` ```
## TODO ## TODO
View File
@ -0,0 +1,19 @@
# `ParamSpec` regression on 3.9
```toml
[environment]
python-version = "3.9"
```
This used to panic when run on Python 3.9 because `ParamSpec` was only introduced in Python 3.10
and the diagnostic message for `invalid-exception-caught` needs to construct the
`typing.ParamSpec` type.
```py
# error: [invalid-syntax]
def foo[**P]() -> None:
try:
pass
# error: [invalid-exception-caught] "Invalid object caught in an exception handler: Object has type `typing.ParamSpec`"
except P:
pass
```
View File
@ -14,10 +14,11 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/directives/assert_type.m
``` ```
1 | from typing_extensions import assert_type 1 | from typing_extensions import assert_type
2 | 2 |
3 | def _(x: int): 3 | def _(x: int, y: bool):
4 | assert_type(x, int) # fine 4 | assert_type(x, int) # fine
5 | assert_type(x, str) # error: [type-assertion-failure] 5 | assert_type(x, str) # error: [type-assertion-failure]
6 | assert_type(assert_type(x, int), int) 6 | assert_type(assert_type(x, int), int)
7 | assert_type(y, int) # error: [type-assertion-failure]
``` ```
# Diagnostics # Diagnostics
@ -26,15 +27,32 @@ mdtest path: crates/ty_python_semantic/resources/mdtest/directives/assert_type.m
error[type-assertion-failure]: Argument does not have asserted type `str` error[type-assertion-failure]: Argument does not have asserted type `str`
--> src/mdtest_snippet.py:5:5 --> src/mdtest_snippet.py:5:5
| |
3 | def _(x: int): 3 | def _(x: int, y: bool):
4 | assert_type(x, int) # fine 4 | assert_type(x, int) # fine
5 | assert_type(x, str) # error: [type-assertion-failure] 5 | assert_type(x, str) # error: [type-assertion-failure]
| ^^^^^^^^^^^^-^^^^^^ | ^^^^^^^^^^^^-^^^^^^
| | | |
| Inferred type of argument is `int` | Inferred type is `int`
6 | assert_type(assert_type(x, int), int) 6 | assert_type(assert_type(x, int), int)
7 | assert_type(y, int) # error: [type-assertion-failure]
| |
info: `str` and `int` are not equivalent types info: `str` and `int` are not equivalent types
info: rule `type-assertion-failure` is enabled by default info: rule `type-assertion-failure` is enabled by default
``` ```
```
error[type-assertion-failure]: Argument does not have asserted type `int`
--> src/mdtest_snippet.py:7:5
|
5 | assert_type(x, str) # error: [type-assertion-failure]
6 | assert_type(assert_type(x, int), int)
7 | assert_type(y, int) # error: [type-assertion-failure]
| ^^^^^^^^^^^^-^^^^^^
| |
| Inferred type is `bool`
|
info: `bool` is a subtype of `int`, but they are not equivalent
info: rule `type-assertion-failure` is enabled by default
```