Compare commits


No commits in common. "main" and "0.14.9" have entirely different histories.
main ... 0.14.9

246 changed files with 4565 additions and 15785 deletions

View File

@ -60,7 +60,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
with:
persist-credentials: false
submodules: recursive
@ -123,7 +123,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
with:
persist-credentials: false
submodules: recursive
@ -174,7 +174,7 @@ jobs:
outputs:
val: ${{ steps.host.outputs.manifest }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
with:
persist-credentials: false
submodules: recursive
@ -250,7 +250,7 @@ jobs:
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
with:
persist-credentials: false
submodules: recursive

View File

@ -67,7 +67,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
ecosystem-analyzer \
--repository ruff \

View File

@ -52,7 +52,7 @@ jobs:
cd ..
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
ecosystem-analyzer \
--verbose \

Cargo.lock (generated)
View File

@ -254,21 +254,6 @@ dependencies = [
"syn",
]
[[package]]
name = "bit-set"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
dependencies = [
"bit-vec",
]
[[package]]
name = "bit-vec"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
[[package]]
name = "bitflags"
version = "1.3.2"
@ -959,18 +944,6 @@ dependencies = [
"parking_lot_core",
]
[[package]]
name = "datatest-stable"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a867d7322eb69cf3a68a5426387a25b45cb3b9c5ee41023ee6cea92e2afadd82"
dependencies = [
"camino",
"fancy-regex",
"libtest-mimic 0.8.1",
"walkdir",
]
[[package]]
name = "derive-where"
version = "1.6.0"
@ -1165,17 +1138,6 @@ dependencies = [
"windows-sys 0.61.0",
]
[[package]]
name = "fancy-regex"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
dependencies = [
"bit-set",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@ -1663,6 +1625,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
dependencies = [
"console 0.15.11",
"globset",
"once_cell",
"pest",
"pest_derive",
@ -1670,6 +1633,7 @@ dependencies = [
"ron",
"serde",
"similar",
"walkdir",
]
[[package]]
@ -1955,18 +1919,6 @@ dependencies = [
"threadpool",
]
[[package]]
name = "libtest-mimic"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5297962ef19edda4ce33aaa484386e0a5b3d7f2f4e037cbeee00503ef6b29d33"
dependencies = [
"anstream",
"anstyle",
"clap",
"escape8259",
]
[[package]]
name = "linux-raw-sys"
version = "0.11.0"
@ -3326,7 +3278,6 @@ dependencies = [
"anyhow",
"clap",
"countme",
"datatest-stable",
"insta",
"itertools 0.14.0",
"memchr",
@ -3396,7 +3347,6 @@ dependencies = [
"bitflags 2.10.0",
"bstr",
"compact_str",
"datatest-stable",
"get-size2",
"insta",
"itertools 0.14.0",
@ -4361,7 +4311,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fe242ee9e646acec9ab73a5c540e8543ed1b107f0ce42be831e0775d423c396"
dependencies = [
"ignore",
"libtest-mimic 0.7.3",
"libtest-mimic",
"snapbox",
]
@ -4390,7 +4340,6 @@ dependencies = [
"ruff_python_trivia",
"salsa",
"tempfile",
"tikv-jemallocator",
"toml",
"tracing",
"tracing-flame",

View File

@ -5,7 +5,7 @@ resolver = "2"
[workspace.package]
# Please update rustfmt.toml when bumping the Rust edition
edition = "2024"
rust-version = "1.90"
rust-version = "1.89"
homepage = "https://docs.astral.sh/ruff"
documentation = "https://docs.astral.sh/ruff"
repository = "https://github.com/astral-sh/ruff"
@ -81,7 +81,6 @@ compact_str = "0.9.0"
criterion = { version = "0.7.0", default-features = false }
crossbeam = { version = "0.8.4" }
dashmap = { version = "6.0.1" }
datatest-stable = { version = "0.3.3" }
dir-test = { version = "0.4.0" }
dunce = { version = "1.0.5" }
drop_bomb = { version = "0.1.5" }

View File

@ -57,11 +57,8 @@ Ruff is extremely actively developed and used in major open-source projects like
...and [many more](#whos-using-ruff).
Ruff is backed by [Astral](https://astral.sh), the creators of
[uv](https://github.com/astral-sh/uv) and [ty](https://github.com/astral-sh/ty).
Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff), or the
original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
## Testimonials

View File

@ -10,7 +10,7 @@ use anyhow::bail;
use clap::builder::Styles;
use clap::builder::styling::{AnsiColor, Effects};
use clap::builder::{TypedValueParser, ValueParserFactory};
use clap::{Parser, Subcommand};
use clap::{Parser, Subcommand, command};
use colored::Colorize;
use itertools::Itertools;
use path_absolutize::path_dedot;

View File

@ -9,7 +9,7 @@ use std::sync::mpsc::channel;
use anyhow::Result;
use clap::CommandFactory;
use colored::Colorize;
use log::error;
use log::{error, warn};
use notify::{RecursiveMode, Watcher, recommended_watcher};
use args::{GlobalConfigArgs, ServerCommand};

View File

@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
max_dep_date: "2025-06-17",
python_version: PythonVersion::PY312,
},
13100,
13030,
);
static TANJUN: Benchmark = Benchmark::new(
@ -223,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
max_dep_date: "2025-08-09",
python_version: PythonVersion::PY311,
},
1100,
950,
);
#[track_caller]

View File

@ -144,8 +144,8 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
output.push('\n');
if let Some(deprecated) = &field.deprecated {
output.push_str("!!! warning \"Deprecated\"\n");
output.push_str(" This option has been deprecated");
output.push_str("> [!WARN] \"Deprecated\"\n");
output.push_str("> This option has been deprecated");
if let Some(since) = deprecated.since {
write!(output, " in {since}").unwrap();
@ -166,9 +166,8 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
output.push('\n');
let _ = writeln!(output, "**Type**: `{}`", field.value_type);
output.push('\n');
output.push_str("**Example usage**:\n\n");
output.push_str("**Example usage** (`pyproject.toml`):\n\n");
output.push_str(&format_example(
"pyproject.toml",
&format_header(
field.scope,
field.example,
@ -180,11 +179,11 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
output.push('\n');
}
fn format_example(title: &str, header: &str, content: &str) -> String {
fn format_example(header: &str, content: &str) -> String {
if header.is_empty() {
format!("```toml title=\"{title}\"\n{content}\n```\n",)
format!("```toml\n{content}\n```\n",)
} else {
format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n",)
format!("```toml\n{header}\n{content}\n```\n",)
}
}

View File

@ -39,7 +39,7 @@ impl Edit {
/// Creates an edit that replaces the content in `range` with `content`.
pub fn range_replacement(content: String, range: TextRange) -> Self {
debug_assert!(!content.is_empty(), "Prefer `Edit::deletion`");
debug_assert!(!content.is_empty(), "Prefer `Fix::deletion`");
Self {
content: Some(Box::from(content)),
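
The change above only rewords the assertion message; either way, an empty replacement is expected to go through the deletion constructor. A rough standalone sketch of the pattern (toy `Edit` type, not the real one from this crate):

```rust
// Toy sketch: a debug-only guard in the replacement constructor nudges callers
// toward the dedicated deletion constructor when the new content is empty.
struct Edit {
    content: Option<Box<str>>,
}

impl Edit {
    fn range_replacement(content: String) -> Self {
        debug_assert!(!content.is_empty(), "Prefer `Edit::deletion`");
        Edit { content: Some(Box::from(content)) }
    }

    fn deletion() -> Self {
        Edit { content: None }
    }
}

fn main() {
    let _replacement = Edit::range_replacement("new text".to_string());
    let _deletion = Edit::deletion();
}
```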

View File

@ -337,7 +337,7 @@ macro_rules! best_fitting {
#[cfg(test)]
mod tests {
use crate::prelude::*;
use crate::{FormatState, SimpleFormatOptions, VecBuffer};
use crate::{FormatState, SimpleFormatOptions, VecBuffer, write};
struct TestFormat;
@ -385,8 +385,8 @@ mod tests {
#[test]
fn best_fitting_variants_print_as_lists() {
use crate::Formatted;
use crate::prelude::*;
use crate::{Formatted, format, format_args};
// The second variant below should be selected when printing at a width of 30
let formatted_best_fitting = format!(

View File

@ -132,6 +132,7 @@ async def c():
# Non-errors
###
# False-negative: RustPython doesn't parse the `\N{snowman}`.
"\N{snowman} {}".format(a)
"{".format(a)
@ -275,6 +276,3 @@ if __name__ == "__main__":
number = 0
string = "{}".format(number := number + 1)
print(string)
# Unicode escape
"\N{angle}AOB = {angle}°".format(angle=180)

View File

@ -138,6 +138,5 @@ with open("file.txt", encoding="utf-8") as f:
with open("file.txt", encoding="utf-8") as f:
contents = process_contents(f.read())
with open("file1.txt", encoding="utf-8") as f:
with open("file.txt", encoding="utf-8") as f:
contents: str = process_contents(f.read())

View File

@ -1,8 +0,0 @@
from pathlib import Path
with Path("file.txt").open() as f:
contents = f.read()
with Path("file.txt").open("r") as f:
contents = f.read()

View File

@ -1,26 +0,0 @@
from pathlib import Path
with Path("file.txt").open("w") as f:
f.write("test")
with Path("file.txt").open("wb") as f:
f.write(b"test")
with Path("file.txt").open(mode="w") as f:
f.write("test")
with Path("file.txt").open("w", encoding="utf8") as f:
f.write("test")
with Path("file.txt").open("w", errors="ignore") as f:
f.write("test")
with Path(foo()).open("w") as f:
f.write("test")
p = Path("file.txt")
with p.open("w") as f:
f.write("test")
with Path("foo", "bar", "baz").open("w") as f:
f.write("test")

View File

@ -1,38 +0,0 @@
a: int = 1
def f1():
global a
a: str = "foo" # error
b: int = 1
def outer():
def inner():
global b
b: str = "nested" # error
c: int = 1
def f2():
global c
c: list[str] = [] # error
d: int = 1
def f3():
global d
d: str # error
e: int = 1
def f4():
e: str = "happy" # okay
global f
f: int = 1 # okay
g: int = 1
global g # error
class C:
x: str
global x # error
class D:
global x # error
x: str

View File

@ -286,7 +286,12 @@ pub(crate) fn add_argument(argument: &str, arguments: &Arguments, tokens: &Token
/// Generic function to add a (regular) parameter to a function definition.
pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &str) -> Edit {
if let Some(last) = parameters.args.iter().rfind(|arg| arg.default.is_none()) {
if let Some(last) = parameters
.args
.iter()
.filter(|arg| arg.default.is_none())
.next_back()
{
// Case 1: at least one regular parameter, so append after the last one.
Edit::insertion(format!(", {parameter}"), last.end())
} else if !parameters.args.is_empty() {
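
The two forms in this hunk are equivalent: on a double-ended iterator, `rfind(pred)` and `filter(pred).next_back()` both yield the last element satisfying the predicate. A minimal sketch over a plain `Vec` (not the real `Parameters` type):

```rust
fn main() {
    let values = vec![1, 2, 3, 4, 5];

    // Search from the back for the last matching element...
    let via_rfind = values.iter().rfind(|&&n| n % 2 == 0);
    // ...or keep only matching elements and take the last survivor.
    let via_filter = values.iter().filter(|&&n| n % 2 == 0).next_back();

    assert_eq!(via_rfind, Some(&4));
    assert_eq!(via_rfind, via_filter);
}
```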

View File

@ -1001,7 +1001,6 @@ mod tests {
#[test_case(Path::new("write_to_debug.py"), PythonVersion::PY310)]
#[test_case(Path::new("invalid_expression.py"), PythonVersion::PY312)]
#[test_case(Path::new("global_parameter.py"), PythonVersion::PY310)]
#[test_case(Path::new("annotated_global.py"), PythonVersion::PY314)]
fn test_semantic_errors(path: &Path, python_version: PythonVersion) -> Result<()> {
let snapshot = format!(
"semantic_syntax_error_{}_{}",

View File

@ -70,7 +70,7 @@ fn is_open_call(func: &Expr, semantic: &SemanticModel) -> bool {
}
/// Returns `true` if an expression resolves to a call to `pathlib.Path.open`.
pub(crate) fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func else {
return false;
};

View File

@ -18,7 +18,7 @@ mod async_zero_sleep;
mod blocking_http_call;
mod blocking_http_call_httpx;
mod blocking_input;
pub(crate) mod blocking_open_call;
mod blocking_open_call;
mod blocking_path_methods;
mod blocking_process_invocation;
mod blocking_sleep;
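
Both hunks in this file only toggle visibility: with `pub(crate)`, `is_open_call_from_pathlib` and its module can be reused from sibling rule modules (the `refurb` helpers import it elsewhere in this diff), while a plain `fn`/`mod` keeps them private to `flake8_async`. A toy illustration of the difference (hypothetical module layout, not the real crate):

```rust
mod blocking_open_call {
    // `pub(crate)` makes the helper callable anywhere in the crate;
    // dropping it would restrict the helper to this module alone.
    pub(crate) fn is_open_call_from_pathlib(call: &str) -> bool {
        call.contains(".open(")
    }
}

mod refurb {
    pub(crate) fn uses_pathlib_open(call: &str) -> bool {
        crate::blocking_open_call::is_open_call_from_pathlib(call)
    }
}

fn main() {
    assert!(refurb::uses_pathlib_open(r#"Path("f.txt").open()"#));
}
```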

View File

@ -146,7 +146,7 @@ fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Resu
let left = (*comparison.left).clone();
// Copy the right side to the left side.
*comparison.left = comparison.comparisons[0].comparator.clone();
comparison.left = Box::new(comparison.comparisons[0].comparator.clone());
// Copy the left side to the right side.
comparison.comparisons[0].comparator = left;
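
Both lines in this hunk move the first comparator into `comparison.left`; they differ only in where the value is written. Assigning through `*comparison.left` overwrites the contents of the existing `Box`, whereas `Box::new(...)` allocates a new box and rebinds the field. A stripped-down sketch:

```rust
fn main() {
    let mut left: Box<String> = Box::new("lhs".to_string());

    // Write through the existing allocation (the `*left = ...` form)...
    *left = "comparator".to_string();
    assert_eq!(*left, "comparator");

    // ...or allocate a fresh Box and rebind (the `Box::new(...)` form).
    left = Box::new("comparator".to_string());
    assert_eq!(*left, "comparator");
}
```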

View File

@ -902,76 +902,56 @@ help: Convert to f-string
132 | # Non-errors
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:135:1
--> UP032_0.py:160:1
|
133 | ###
134 |
135 | "\N{snowman} {}".format(a)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
136 |
137 | "{".format(a)
|
help: Convert to f-string
132 | # Non-errors
133 | ###
134 |
- "\N{snowman} {}".format(a)
135 + f"\N{snowman} {a}"
136 |
137 | "{".format(a)
138 |
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:159:1
|
157 | r'"\N{snowman} {}".format(a)'
158 |
159 | / "123456789 {}".format(
160 | | 11111111111111111111111111111111111111111111111111111111111111111111111111,
161 | | )
158 | r'"\N{snowman} {}".format(a)'
159 |
160 | / "123456789 {}".format(
161 | | 11111111111111111111111111111111111111111111111111111111111111111111111111,
162 | | )
| |_^
162 |
163 | """
163 |
164 | """
|
help: Convert to f-string
156 |
157 | r'"\N{snowman} {}".format(a)'
158 |
157 |
158 | r'"\N{snowman} {}".format(a)'
159 |
- "123456789 {}".format(
- 11111111111111111111111111111111111111111111111111111111111111111111111111,
- )
159 + f"123456789 {11111111111111111111111111111111111111111111111111111111111111111111111111}"
160 |
161 | """
162 | {}
160 + f"123456789 {11111111111111111111111111111111111111111111111111111111111111111111111111}"
161 |
162 | """
163 | {}
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:163:1
--> UP032_0.py:164:1
|
161 | )
162 |
163 | / """
164 | | {}
162 | )
163 |
164 | / """
165 | | {}
166 | | {}
167 | | """.format(
168 | | 1,
169 | | 2,
170 | | 111111111111111111111111111111111111111111111111111111111111111111111111111111111111111,
171 | | )
167 | | {}
168 | | """.format(
169 | | 1,
170 | | 2,
171 | | 111111111111111111111111111111111111111111111111111111111111111111111111111111111111111,
172 | | )
| |_^
172 |
173 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
173 |
174 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
|
help: Convert to f-string
160 | 11111111111111111111111111111111111111111111111111111111111111111111111111,
161 | )
162 |
163 + f"""
164 + {1}
165 + {2}
166 + {111111111111111111111111111111111111111111111111111111111111111111111111111111111111111}
167 | """
161 | 11111111111111111111111111111111111111111111111111111111111111111111111111,
162 | )
163 |
164 + f"""
165 + {1}
166 + {2}
167 + {111111111111111111111111111111111111111111111111111111111111111111111111111111111111111}
168 | """
- {}
- {}
- {}
@ -980,408 +960,392 @@ help: Convert to f-string
- 2,
- 111111111111111111111111111111111111111111111111111111111111111111111111111111111111111,
- )
168 |
169 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
170 | """.format(
169 |
170 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
171 | """.format(
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:173:84
--> UP032_0.py:174:84
|
171 | )
172 |
173 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
172 | )
173 |
174 | aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
| ____________________________________________________________________________________^
174 | | """.format(
175 | | 111111
176 | | )
175 | | """.format(
176 | | 111111
177 | | )
| |_^
177 |
178 | "{}".format(
178 |
179 | "{}".format(
|
help: Convert to f-string
170 | 111111111111111111111111111111111111111111111111111111111111111111111111111111111111111,
171 | )
172 |
171 | 111111111111111111111111111111111111111111111111111111111111111111111111111111111111111,
172 | )
173 |
- aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = """{}
- """.format(
- 111111
- )
173 + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = f"""{111111}
174 + """
175 |
176 | "{}".format(
177 | [
174 + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = f"""{111111}
175 + """
176 |
177 | "{}".format(
178 | [
UP032 Use f-string instead of `format` call
--> UP032_0.py:201:1
--> UP032_0.py:202:1
|
199 | "{}".format(**c)
200 |
201 | / "{}".format(
202 | | 1 # comment
203 | | )
200 | "{}".format(**c)
201 |
202 | / "{}".format(
203 | | 1 # comment
204 | | )
| |_^
|
help: Convert to f-string
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:208:1
--> UP032_0.py:209:1
|
206 | # The fixed string will exceed the line length, but it's still smaller than the
207 | # existing line length, so it's fine.
208 | "<Customer: {}, {}, {}, {}, {}>".format(self.internal_ids, self.external_ids, self.properties, self.tags, self.others)
207 | # The fixed string will exceed the line length, but it's still smaller than the
208 | # existing line length, so it's fine.
209 | "<Customer: {}, {}, {}, {}, {}>".format(self.internal_ids, self.external_ids, self.properties, self.tags, self.others)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
209 |
210 | # When fixing, trim the trailing empty string.
210 |
211 | # When fixing, trim the trailing empty string.
|
help: Convert to f-string
205 |
206 | # The fixed string will exceed the line length, but it's still smaller than the
207 | # existing line length, so it's fine.
206 |
207 | # The fixed string will exceed the line length, but it's still smaller than the
208 | # existing line length, so it's fine.
- "<Customer: {}, {}, {}, {}, {}>".format(self.internal_ids, self.external_ids, self.properties, self.tags, self.others)
208 + f"<Customer: {self.internal_ids}, {self.external_ids}, {self.properties}, {self.tags}, {self.others}>"
209 |
210 | # When fixing, trim the trailing empty string.
211 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
209 + f"<Customer: {self.internal_ids}, {self.external_ids}, {self.properties}, {self.tags}, {self.others}>"
210 |
211 | # When fixing, trim the trailing empty string.
212 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:211:18
--> UP032_0.py:212:18
|
210 | # When fixing, trim the trailing empty string.
211 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
211 | # When fixing, trim the trailing empty string.
212 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
| __________________^
212 | | "".format(new_dict, d))
213 | | "".format(new_dict, d))
| |_______________________________________^
213 |
214 | # When fixing, trim the trailing empty string.
214 |
215 | # When fixing, trim the trailing empty string.
|
help: Convert to f-string
208 | "<Customer: {}, {}, {}, {}, {}>".format(self.internal_ids, self.external_ids, self.properties, self.tags, self.others)
209 |
210 | # When fixing, trim the trailing empty string.
209 | "<Customer: {}, {}, {}, {}, {}>".format(self.internal_ids, self.external_ids, self.properties, self.tags, self.others)
210 |
211 | # When fixing, trim the trailing empty string.
- raise ValueError("Conflicting configuration dicts: {!r} {!r}"
- "".format(new_dict, d))
211 + raise ValueError(f"Conflicting configuration dicts: {new_dict!r} {d!r}")
212 |
213 | # When fixing, trim the trailing empty string.
214 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:215:18
|
214 | # When fixing, trim the trailing empty string.
215 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
| __________________^
216 | | .format(new_dict, d))
| |_____________________________________^
217 |
218 | raise ValueError(
|
help: Convert to f-string
212 | "".format(new_dict, d))
212 + raise ValueError(f"Conflicting configuration dicts: {new_dict!r} {d!r}")
213 |
214 | # When fixing, trim the trailing empty string.
215 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:216:18
|
215 | # When fixing, trim the trailing empty string.
216 | raise ValueError("Conflicting configuration dicts: {!r} {!r}"
| __________________^
217 | | .format(new_dict, d))
| |_____________________________________^
218 |
219 | raise ValueError(
|
help: Convert to f-string
213 | "".format(new_dict, d))
214 |
215 | # When fixing, trim the trailing empty string.
- raise ValueError("Conflicting configuration dicts: {!r} {!r}"
- .format(new_dict, d))
215 + raise ValueError(f"Conflicting configuration dicts: {new_dict!r} {d!r}"
216 + )
217 |
218 | raise ValueError(
219 | "Conflicting configuration dicts: {!r} {!r}"
216 + raise ValueError(f"Conflicting configuration dicts: {new_dict!r} {d!r}"
217 + )
218 |
219 | raise ValueError(
220 | "Conflicting configuration dicts: {!r} {!r}"
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:219:5
--> UP032_0.py:220:5
|
218 | raise ValueError(
219 | / "Conflicting configuration dicts: {!r} {!r}"
220 | | "".format(new_dict, d)
219 | raise ValueError(
220 | / "Conflicting configuration dicts: {!r} {!r}"
221 | | "".format(new_dict, d)
| |__________________________^
221 | )
222 | )
|
help: Convert to f-string
216 | .format(new_dict, d))
217 |
218 | raise ValueError(
217 | .format(new_dict, d))
218 |
219 | raise ValueError(
- "Conflicting configuration dicts: {!r} {!r}"
- "".format(new_dict, d)
219 + f"Conflicting configuration dicts: {new_dict!r} {d!r}"
220 | )
221 |
222 | raise ValueError(
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:224:5
|
223 | raise ValueError(
224 | / "Conflicting configuration dicts: {!r} {!r}"
225 | | "".format(new_dict, d)
| |__________________________^
226 |
227 | )
|
help: Convert to f-string
220 + f"Conflicting configuration dicts: {new_dict!r} {d!r}"
221 | )
222 |
223 | raise ValueError(
- "Conflicting configuration dicts: {!r} {!r}"
- "".format(new_dict, d)
224 + f"Conflicting configuration dicts: {new_dict!r} {d!r}"
225 |
226 | )
227 |
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:230:1
--> UP032_0.py:225:5
|
229 | # The first string will be converted to an f-string and the curly braces in the second should be converted to be unescaped
230 | / (
231 | | "{}"
232 | | "{{}}"
233 | | ).format(a)
| |___________^
234 |
235 | ("{}" "{{}}").format(a)
224 | raise ValueError(
225 | / "Conflicting configuration dicts: {!r} {!r}"
226 | | "".format(new_dict, d)
| |__________________________^
227 |
228 | )
|
help: Convert to f-string
222 | )
223 |
224 | raise ValueError(
- "Conflicting configuration dicts: {!r} {!r}"
- "".format(new_dict, d)
225 + f"Conflicting configuration dicts: {new_dict!r} {d!r}"
226 |
227 | )
228 |
229 | # The first string will be converted to an f-string and the curly braces in the second should be converted to be unescaped
230 | (
231 + f"{a}"
232 | "{}"
- "{{}}"
- ).format(a)
233 + )
234 |
235 | ("{}" "{{}}").format(a)
236 |
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:235:1
--> UP032_0.py:231:1
|
233 | ).format(a)
234 |
235 | ("{}" "{{}}").format(a)
230 | # The first string will be converted to an f-string and the curly braces in the second should be converted to be unescaped
231 | / (
232 | | "{}"
233 | | "{{}}"
234 | | ).format(a)
| |___________^
235 |
236 | ("{}" "{{}}").format(a)
|
help: Convert to f-string
229 |
230 | # The first string will be converted to an f-string and the curly braces in the second should be converted to be unescaped
231 | (
232 + f"{a}"
233 | "{}"
- "{{}}"
- ).format(a)
234 + )
235 |
236 | ("{}" "{{}}").format(a)
237 |
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:236:1
|
234 | ).format(a)
235 |
236 | ("{}" "{{}}").format(a)
| ^^^^^^^^^^^^^^^^^^^^^^^
|
help: Convert to f-string
232 | "{{}}"
233 | ).format(a)
234 |
233 | "{{}}"
234 | ).format(a)
235 |
- ("{}" "{{}}").format(a)
235 + (f"{a}" "{}")
236 |
236 + (f"{a}" "{}")
237 |
238 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
238 |
239 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:239:1
--> UP032_0.py:240:1
|
238 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
239 | / (
240 | | "{}"
241 | | "{{{}}}"
242 | | ).format(a, b)
239 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
240 | / (
241 | | "{}"
242 | | "{{{}}}"
243 | | ).format(a, b)
| |______________^
243 |
244 | ("{}" "{{{}}}").format(a, b)
244 |
245 | ("{}" "{{{}}}").format(a, b)
|
help: Convert to f-string
237 |
238 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
239 | (
238 |
239 | # Both strings will be converted to an f-string and the curly braces in the second should left escaped
240 | (
- "{}"
- "{{{}}}"
- ).format(a, b)
240 + f"{a}"
241 + f"{{{b}}}"
242 + )
243 |
244 | ("{}" "{{{}}}").format(a, b)
245 |
241 + f"{a}"
242 + f"{{{b}}}"
243 + )
244 |
245 | ("{}" "{{{}}}").format(a, b)
246 |
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:244:1
--> UP032_0.py:245:1
|
242 | ).format(a, b)
243 |
244 | ("{}" "{{{}}}").format(a, b)
243 | ).format(a, b)
244 |
245 | ("{}" "{{{}}}").format(a, b)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
245 |
246 | # The dictionary should be parenthesized.
246 |
247 | # The dictionary should be parenthesized.
|
help: Convert to f-string
241 | "{{{}}}"
242 | ).format(a, b)
243 |
242 | "{{{}}}"
243 | ).format(a, b)
244 |
- ("{}" "{{{}}}").format(a, b)
244 + (f"{a}" f"{{{b}}}")
245 |
246 | # The dictionary should be parenthesized.
247 | "{}".format({0: 1}[0])
245 + (f"{a}" f"{{{b}}}")
246 |
247 | # The dictionary should be parenthesized.
248 | "{}".format({0: 1}[0])
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:247:1
--> UP032_0.py:248:1
|
246 | # The dictionary should be parenthesized.
247 | "{}".format({0: 1}[0])
247 | # The dictionary should be parenthesized.
248 | "{}".format({0: 1}[0])
| ^^^^^^^^^^^^^^^^^^^^^^
248 |
249 | # The dictionary should be parenthesized.
249 |
250 | # The dictionary should be parenthesized.
|
help: Convert to f-string
244 | ("{}" "{{{}}}").format(a, b)
245 |
246 | # The dictionary should be parenthesized.
245 | ("{}" "{{{}}}").format(a, b)
246 |
247 | # The dictionary should be parenthesized.
- "{}".format({0: 1}[0])
247 + f"{({0: 1}[0])}"
248 |
249 | # The dictionary should be parenthesized.
250 | "{}".format({0: 1}.bar)
248 + f"{({0: 1}[0])}"
249 |
250 | # The dictionary should be parenthesized.
251 | "{}".format({0: 1}.bar)
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:250:1
--> UP032_0.py:251:1
|
249 | # The dictionary should be parenthesized.
250 | "{}".format({0: 1}.bar)
250 | # The dictionary should be parenthesized.
251 | "{}".format({0: 1}.bar)
| ^^^^^^^^^^^^^^^^^^^^^^^
251 |
252 | # The dictionary should be parenthesized.
252 |
253 | # The dictionary should be parenthesized.
|
help: Convert to f-string
247 | "{}".format({0: 1}[0])
248 |
249 | # The dictionary should be parenthesized.
248 | "{}".format({0: 1}[0])
249 |
250 | # The dictionary should be parenthesized.
- "{}".format({0: 1}.bar)
250 + f"{({0: 1}.bar)}"
251 |
252 | # The dictionary should be parenthesized.
253 | "{}".format({0: 1}())
251 + f"{({0: 1}.bar)}"
252 |
253 | # The dictionary should be parenthesized.
254 | "{}".format({0: 1}())
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:253:1
--> UP032_0.py:254:1
|
252 | # The dictionary should be parenthesized.
253 | "{}".format({0: 1}())
253 | # The dictionary should be parenthesized.
254 | "{}".format({0: 1}())
| ^^^^^^^^^^^^^^^^^^^^^
254 |
255 | # The string shouldn't be converted, since it would require repeating the function call.
255 |
256 | # The string shouldn't be converted, since it would require repeating the function call.
|
help: Convert to f-string
250 | "{}".format({0: 1}.bar)
251 |
252 | # The dictionary should be parenthesized.
251 | "{}".format({0: 1}.bar)
252 |
253 | # The dictionary should be parenthesized.
- "{}".format({0: 1}())
253 + f"{({0: 1}())}"
254 |
255 | # The string shouldn't be converted, since it would require repeating the function call.
256 | "{x} {x}".format(x=foo())
254 + f"{({0: 1}())}"
255 |
256 | # The string shouldn't be converted, since it would require repeating the function call.
257 | "{x} {x}".format(x=foo())
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:260:1
--> UP032_0.py:261:1
|
259 | # The string _should_ be converted, since the function call is repeated in the arguments.
260 | "{0} {1}".format(foo(), foo())
260 | # The string _should_ be converted, since the function call is repeated in the arguments.
261 | "{0} {1}".format(foo(), foo())
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
261 |
262 | # The call should be removed, but the string itself should remain.
262 |
263 | # The call should be removed, but the string itself should remain.
|
help: Convert to f-string
257 | "{0} {0}".format(foo())
258 |
259 | # The string _should_ be converted, since the function call is repeated in the arguments.
258 | "{0} {0}".format(foo())
259 |
260 | # The string _should_ be converted, since the function call is repeated in the arguments.
- "{0} {1}".format(foo(), foo())
260 + f"{foo()} {foo()}"
261 |
262 | # The call should be removed, but the string itself should remain.
263 | ''.format(self.project)
261 + f"{foo()} {foo()}"
262 |
263 | # The call should be removed, but the string itself should remain.
264 | ''.format(self.project)
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:263:1
--> UP032_0.py:264:1
|
262 | # The call should be removed, but the string itself should remain.
263 | ''.format(self.project)
263 | # The call should be removed, but the string itself should remain.
264 | ''.format(self.project)
| ^^^^^^^^^^^^^^^^^^^^^^^
264 |
265 | # The call should be removed, but the string itself should remain.
265 |
266 | # The call should be removed, but the string itself should remain.
|
help: Convert to f-string
260 | "{0} {1}".format(foo(), foo())
261 |
262 | # The call should be removed, but the string itself should remain.
261 | "{0} {1}".format(foo(), foo())
262 |
263 | # The call should be removed, but the string itself should remain.
- ''.format(self.project)
263 + ''
264 |
265 | # The call should be removed, but the string itself should remain.
266 | "".format(self.project)
264 + ''
265 |
266 | # The call should be removed, but the string itself should remain.
267 | "".format(self.project)
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:266:1
--> UP032_0.py:267:1
|
265 | # The call should be removed, but the string itself should remain.
266 | "".format(self.project)
266 | # The call should be removed, but the string itself should remain.
267 | "".format(self.project)
| ^^^^^^^^^^^^^^^^^^^^^^^
267 |
268 | # Not a valid type annotation but this test shouldn't result in a panic.
268 |
269 | # Not a valid type annotation but this test shouldn't result in a panic.
|
help: Convert to f-string
263 | ''.format(self.project)
264 |
265 | # The call should be removed, but the string itself should remain.
264 | ''.format(self.project)
265 |
266 | # The call should be removed, but the string itself should remain.
- "".format(self.project)
266 + ""
267 |
268 | # Not a valid type annotation but this test shouldn't result in a panic.
269 | # Refer: https://github.com/astral-sh/ruff/issues/11736
267 + ""
268 |
269 | # Not a valid type annotation but this test shouldn't result in a panic.
270 | # Refer: https://github.com/astral-sh/ruff/issues/11736
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:270:5
--> UP032_0.py:271:5
|
268 | # Not a valid type annotation but this test shouldn't result in a panic.
269 | # Refer: https://github.com/astral-sh/ruff/issues/11736
270 | x: "'{} + {}'.format(x, y)"
269 | # Not a valid type annotation but this test shouldn't result in a panic.
270 | # Refer: https://github.com/astral-sh/ruff/issues/11736
271 | x: "'{} + {}'.format(x, y)"
| ^^^^^^^^^^^^^^^^^^^^^^
271 |
272 | # Regression https://github.com/astral-sh/ruff/issues/21000
272 |
273 | # Regression https://github.com/astral-sh/ruff/issues/21000
|
help: Convert to f-string
267 |
268 | # Not a valid type annotation but this test shouldn't result in a panic.
269 | # Refer: https://github.com/astral-sh/ruff/issues/11736
268 |
269 | # Not a valid type annotation but this test shouldn't result in a panic.
270 | # Refer: https://github.com/astral-sh/ruff/issues/11736
- x: "'{} + {}'.format(x, y)"
270 + x: "f'{x} + {y}'"
271 |
272 | # Regression https://github.com/astral-sh/ruff/issues/21000
273 | # Fix should parenthesize walrus
271 + x: "f'{x} + {y}'"
272 |
273 | # Regression https://github.com/astral-sh/ruff/issues/21000
274 | # Fix should parenthesize walrus
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:276:14
--> UP032_0.py:277:14
|
274 | if __name__ == "__main__":
275 | number = 0
276 | string = "{}".format(number := number + 1)
275 | if __name__ == "__main__":
276 | number = 0
277 | string = "{}".format(number := number + 1)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
277 | print(string)
278 | print(string)
|
help: Convert to f-string
273 | # Fix should parenthesize walrus
274 | if __name__ == "__main__":
275 | number = 0
274 | # Fix should parenthesize walrus
275 | if __name__ == "__main__":
276 | number = 0
- string = "{}".format(number := number + 1)
276 + string = f"{(number := number + 1)}"
277 | print(string)
278 |
279 | # Unicode escape
UP032 [*] Use f-string instead of `format` call
--> UP032_0.py:280:1
|
279 | # Unicode escape
280 | "\N{angle}AOB = {angle}°".format(angle=180)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
help: Convert to f-string
277 | print(string)
278 |
279 | # Unicode escape
- "\N{angle}AOB = {angle}°".format(angle=180)
280 + f"\N{angle}AOB = {180}°"
277 + string = f"{(number := number + 1)}"
278 | print(string)

View File

@ -3,11 +3,10 @@ use std::borrow::Cow;
use ruff_python_ast::PythonVersion;
use ruff_python_ast::{self as ast, Expr, name::Name, token::parenthesized_range};
use ruff_python_codegen::Generator;
use ruff_python_semantic::{ResolvedReference, SemanticModel};
use ruff_python_semantic::{BindingId, ResolvedReference, SemanticModel};
use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::rules::flake8_async::rules::blocking_open_call::is_open_call_from_pathlib;
use crate::{Applicability, Edit, Fix};
/// Format a code snippet to call `name.method()`.
@ -120,13 +119,14 @@ impl OpenMode {
pub(super) struct FileOpen<'a> {
/// With item where the open happens, we use it for the reporting range.
pub(super) item: &'a ast::WithItem,
/// Filename expression used as the first argument in `open`, we use it in the diagnostic message.
pub(super) filename: &'a Expr,
/// The file open mode.
pub(super) mode: OpenMode,
/// The file open keywords.
pub(super) keywords: Vec<&'a ast::Keyword>,
/// We only check `open` operations whose file handles are used exactly once.
pub(super) reference: &'a ResolvedReference,
pub(super) argument: OpenArgument<'a>,
}
impl FileOpen<'_> {
@ -137,45 +137,6 @@ impl FileOpen<'_> {
}
}
#[derive(Debug, Clone, Copy)]
pub(super) enum OpenArgument<'a> {
/// The filename argument to `open`, e.g. "foo.txt" in:
///
/// ```py
/// f = open("foo.txt")
/// ```
Builtin { filename: &'a Expr },
/// The `Path` receiver of a `pathlib.Path.open` call, e.g. the `p` in the
/// context manager in:
///
/// ```py
/// p = Path("foo.txt")
/// with p.open() as f: ...
/// ```
///
/// or `Path("foo.txt")` in
///
/// ```py
/// with Path("foo.txt").open() as f: ...
/// ```
Pathlib { path: &'a Expr },
}
impl OpenArgument<'_> {
pub(super) fn display<'src>(&self, source: &'src str) -> &'src str {
&source[self.range()]
}
}
impl Ranged for OpenArgument<'_> {
fn range(&self) -> TextRange {
match self {
OpenArgument::Builtin { filename } => filename.range(),
OpenArgument::Pathlib { path } => path.range(),
}
}
}
/// Find and return all `open` operations in the given `with` statement.
pub(super) fn find_file_opens<'a>(
with: &'a ast::StmtWith,
@ -185,65 +146,10 @@ pub(super) fn find_file_opens<'a>(
) -> Vec<FileOpen<'a>> {
with.items
.iter()
.filter_map(|item| {
find_file_open(item, with, semantic, read_mode, python_version)
.or_else(|| find_path_open(item, with, semantic, read_mode, python_version))
})
.filter_map(|item| find_file_open(item, with, semantic, read_mode, python_version))
.collect()
}
fn resolve_file_open<'a>(
item: &'a ast::WithItem,
with: &'a ast::StmtWith,
semantic: &'a SemanticModel<'a>,
read_mode: bool,
mode: OpenMode,
keywords: Vec<&'a ast::Keyword>,
argument: OpenArgument<'a>,
) -> Option<FileOpen<'a>> {
match mode {
OpenMode::ReadText | OpenMode::ReadBytes => {
if !read_mode {
return None;
}
}
OpenMode::WriteText | OpenMode::WriteBytes => {
if read_mode {
return None;
}
}
}
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
return None;
}
let var = item.optional_vars.as_deref()?.as_name_expr()?;
let scope = semantic.current_scope();
let binding = scope.get_all(var.id.as_str()).find_map(|id| {
let b = semantic.binding(id);
(b.range() == var.range()).then_some(b)
})?;
let references: Vec<&ResolvedReference> = binding
.references
.iter()
.map(|id| semantic.reference(*id))
.filter(|reference| with.range().contains_range(reference.range()))
.collect();
let [reference] = references.as_slice() else {
return None;
};
Some(FileOpen {
item,
mode,
keywords,
reference,
argument,
})
}
/// Find `open` operation in the given `with` item.
fn find_file_open<'a>(
item: &'a ast::WithItem,
@ -259,6 +165,8 @@ fn find_file_open<'a>(
..
} = item.context_expr.as_call_expr()?;
let var = item.optional_vars.as_deref()?.as_name_expr()?;
// Ignore calls with `*args` and `**kwargs`. In the exact case of `open(*filename, mode="w")`,
// it could be a match; but in all other cases, the call _could_ contain unsupported keyword
// arguments, like `buffering`.
@ -279,57 +187,58 @@ fn find_file_open<'a>(
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
let mode = kw_mode.unwrap_or(pos_mode);
resolve_file_open(
item,
with,
semantic,
read_mode,
mode,
keywords,
OpenArgument::Builtin { filename },
)
match mode {
OpenMode::ReadText | OpenMode::ReadBytes => {
if !read_mode {
return None;
}
}
OpenMode::WriteText | OpenMode::WriteBytes => {
if read_mode {
return None;
}
}
}
fn find_path_open<'a>(
item: &'a ast::WithItem,
with: &'a ast::StmtWith,
semantic: &'a SemanticModel<'a>,
read_mode: bool,
python_version: PythonVersion,
) -> Option<FileOpen<'a>> {
let ast::ExprCall {
func,
arguments: ast::Arguments { args, keywords, .. },
..
} = item.context_expr.as_call_expr()?;
if args.iter().any(Expr::is_starred_expr)
|| keywords.iter().any(|keyword| keyword.arg.is_none())
{
// Path.read_bytes and Path.write_bytes do not support any kwargs.
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
return None;
}
if !is_open_call_from_pathlib(func, semantic) {
// Now we need to find what is this variable bound to...
let scope = semantic.current_scope();
let bindings: Vec<BindingId> = scope.get_all(var.id.as_str()).collect();
let binding = bindings
.iter()
.map(|id| semantic.binding(*id))
// We might have many bindings with the same name, but we only care
// for the one we are looking at right now.
.find(|binding| binding.range() == var.range())?;
// Since many references can share the same binding, we can limit our attention span
// exclusively to the body of the current `with` statement.
let references: Vec<&ResolvedReference> = binding
.references
.iter()
.map(|id| semantic.reference(*id))
.filter(|reference| with.range().contains_range(reference.range()))
.collect();
// And even with all these restrictions, if the file handle gets used not exactly once,
// it doesn't fit the bill.
let [reference] = references.as_slice() else {
return None;
}
let attr = func.as_attribute_expr()?;
let mode = if args.is_empty() {
OpenMode::ReadText
} else {
match_open_mode(args.first()?)?
};
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
let mode = kw_mode.unwrap_or(mode);
resolve_file_open(
Some(FileOpen {
item,
with,
semantic,
read_mode,
filename,
mode,
keywords,
OpenArgument::Pathlib {
path: attr.value.as_ref(),
},
)
reference,
})
}
/// Match positional arguments. Return expression for the file name and open mode.
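
Both versions of this helper end with the same gate: the references to the bound file handle inside the `with` body are collected, and the rule bails unless there is exactly one (`let [reference] = references.as_slice() else { return None; }`). A self-contained sketch of that slice-pattern idiom, with plain integers standing in for `ResolvedReference`:

```rust
// Succeeds only when the slice holds exactly one element.
fn exactly_one(references: &[u32]) -> Option<&u32> {
    let [only] = references else { return None };
    Some(only)
}

fn main() {
    assert_eq!(exactly_one(&[7]), Some(&7));
    assert_eq!(exactly_one(&[]), None);
    assert_eq!(exactly_one(&[1, 2]), None);
}
```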

View File

@ -15,8 +15,7 @@ mod tests {
use crate::test::test_path;
use crate::{assert_diagnostics, settings};
#[test_case(Rule::ReadWholeFile, Path::new("FURB101_0.py"))]
#[test_case(Rule::ReadWholeFile, Path::new("FURB101_1.py"))]
#[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))]
#[test_case(Rule::RepeatedAppend, Path::new("FURB113.py"))]
#[test_case(Rule::IfExpInsteadOfOrOperator, Path::new("FURB110.py"))]
#[test_case(Rule::ReimplementedOperator, Path::new("FURB118.py"))]
@ -47,8 +46,7 @@ mod tests {
#[test_case(Rule::MetaClassABCMeta, Path::new("FURB180.py"))]
#[test_case(Rule::HashlibDigestHex, Path::new("FURB181.py"))]
#[test_case(Rule::ListReverseCopy, Path::new("FURB187.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103_0.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103_1.py"))]
#[test_case(Rule::WriteWholeFile, Path::new("FURB103.py"))]
#[test_case(Rule::FStringNumberFormat, Path::new("FURB116.py"))]
#[test_case(Rule::SortedMinMax, Path::new("FURB192.py"))]
#[test_case(Rule::SliceToRemovePrefixOrSuffix, Path::new("FURB188.py"))]
@ -67,7 +65,7 @@ mod tests {
#[test]
fn write_whole_file_python_39() -> Result<()> {
let diagnostics = test_path(
Path::new("refurb/FURB103_0.py"),
Path::new("refurb/FURB103.py"),
&settings::LinterSettings::for_rule(Rule::WriteWholeFile)
.with_target_version(PythonVersion::PY39),
)?;

View File

@ -10,7 +10,7 @@ use ruff_text_size::{Ranged, TextRange};
use crate::checkers::ast::Checker;
use crate::fix::snippet::SourceCodeSnippet;
use crate::importer::ImportRequest;
use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
use crate::{FixAvailability, Violation};
/// ## What it does
@ -42,41 +42,27 @@ use crate::{FixAvailability, Violation};
/// - [Python documentation: `Path.read_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.read_text)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "v0.1.2")]
pub(crate) struct ReadWholeFile<'a> {
pub(crate) struct ReadWholeFile {
filename: SourceCodeSnippet,
suggestion: SourceCodeSnippet,
argument: OpenArgument<'a>,
}
impl Violation for ReadWholeFile<'_> {
impl Violation for ReadWholeFile {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => {
format!(
"`Path.open()` followed by `read()` can be replaced by `{filename}.{suggestion}`"
)
}
OpenArgument::Builtin { .. } => {
format!("`open` and `read` should be replaced by `Path({filename}).{suggestion}`")
}
}
}
fn fix_title(&self) -> Option<String> {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
OpenArgument::Builtin { .. } => {
Some(format!("Replace with `Path({filename}).{suggestion}`"))
}
}
Some(format!(
"Replace with `Path({}).{}`",
self.filename.truncated_display(),
self.suggestion.truncated_display(),
))
}
}
@ -128,13 +114,13 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> {
.position(|open| open.is_ref(read_from))
{
let open = self.candidates.remove(open);
let filename_display = open.argument.display(self.checker.source());
let suggestion = make_suggestion(&open, self.checker.generator());
let mut diagnostic = self.checker.report_diagnostic(
ReadWholeFile {
filename: SourceCodeSnippet::from_str(filename_display),
filename: SourceCodeSnippet::from_str(
&self.checker.generator().expr(open.filename),
),
suggestion: SourceCodeSnippet::from_str(&suggestion),
argument: open.argument,
},
open.item.range(),
);
@ -202,6 +188,8 @@ fn generate_fix(
let locator = checker.locator();
let filename_code = locator.slice(open.filename.range());
let (import_edit, binding) = checker
.importer()
.get_or_import_symbol(
@ -218,15 +206,10 @@ fn generate_fix(
[Stmt::Assign(ast::StmtAssign { targets, value, .. })] if value.range() == expr.range() => {
match targets.as_slice() {
[Expr::Name(name)] => {
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
format!("{name} = {target}.{suggestion}", name = name.id)
format!(
"{name} = {binding}({filename_code}).{suggestion}",
name = name.id
)
}
_ => return None,
}
@ -240,16 +223,8 @@ fn generate_fix(
}),
] if value.range() == expr.range() => match target.as_ref() {
Expr::Name(name) => {
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
format!(
"{var}: {ann} = {target}.{suggestion}",
"{var}: {ann} = {binding}({filename_code}).{suggestion}",
var = name.id,
ann = locator.slice(annotation.range())
)
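
The last two hunks show the heart of the change: one version builds the replacement target from an `OpenArgument` enum, wrapping the filename in `Path(...)` for the `open(...)` builtin and reusing the receiver verbatim for `Path(...).open(...)`, while the other always wraps the filename. A toy sketch of the enum-based branch (hypothetical types and strings, not the real `Checker`/`Locator` API):

```rust
// Hypothetical stand-in for the rule's OpenArgument enum.
enum OpenArgument<'a> {
    Builtin { filename: &'a str },
    Pathlib { path: &'a str },
}

fn replacement_target(argument: &OpenArgument<'_>, binding: &str) -> String {
    match argument {
        // `open("f.txt")` -> wrap the filename: `Path("f.txt")`
        OpenArgument::Builtin { filename } => format!("{binding}({filename})"),
        // `Path("f.txt").open()` -> reuse the existing receiver as-is
        OpenArgument::Pathlib { path } => (*path).to_string(),
    }
}

fn main() {
    let builtin = OpenArgument::Builtin { filename: r#""f.txt""# };
    let pathlib = OpenArgument::Pathlib { path: r#"Path("f.txt")"# };
    assert_eq!(replacement_target(&builtin, "Path"), r#"Path("f.txt")"#);
    assert_eq!(replacement_target(&pathlib, "Path"), r#"Path("f.txt")"#);
}
```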

View File

@ -9,7 +9,7 @@ use ruff_text_size::Ranged;
use crate::checkers::ast::Checker;
use crate::fix::snippet::SourceCodeSnippet;
use crate::importer::ImportRequest;
use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
use crate::{FixAvailability, Locator, Violation};
/// ## What it does
@ -42,40 +42,26 @@ use crate::{FixAvailability, Locator, Violation};
/// - [Python documentation: `Path.write_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.write_text)
#[derive(ViolationMetadata)]
#[violation_metadata(preview_since = "v0.3.6")]
pub(crate) struct WriteWholeFile<'a> {
pub(crate) struct WriteWholeFile {
filename: SourceCodeSnippet,
suggestion: SourceCodeSnippet,
argument: OpenArgument<'a>,
}
impl Violation for WriteWholeFile<'_> {
impl Violation for WriteWholeFile {
const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;
#[derive_message_formats]
fn message(&self) -> String {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => {
format!(
"`Path.open()` followed by `write()` can be replaced by `{filename}.{suggestion}`"
)
}
OpenArgument::Builtin { .. } => {
format!("`open` and `write` should be replaced by `Path({filename}).{suggestion}`")
}
}
}
fn fix_title(&self) -> Option<String> {
let filename = self.filename.truncated_display();
let suggestion = self.suggestion.truncated_display();
match self.argument {
OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
OpenArgument::Builtin { .. } => {
Some(format!("Replace with `Path({filename}).{suggestion}`"))
}
}
Some(format!(
"Replace with `Path({}).{}`",
self.filename.truncated_display(),
self.suggestion.truncated_display(),
))
}
}
@ -139,15 +125,16 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> {
.position(|open| open.is_ref(write_to))
{
let open = self.candidates.remove(open);
if self.loop_counter == 0 {
let filename_display = open.argument.display(self.checker.source());
let suggestion = make_suggestion(&open, content, self.checker.locator());
let mut diagnostic = self.checker.report_diagnostic(
WriteWholeFile {
filename: SourceCodeSnippet::from_str(filename_display),
filename: SourceCodeSnippet::from_str(
&self.checker.generator().expr(open.filename),
),
suggestion: SourceCodeSnippet::from_str(&suggestion),
argument: open.argument,
},
open.item.range(),
);
@ -211,6 +198,7 @@ fn generate_fix(
}
let locator = checker.locator();
let filename_code = locator.slice(open.filename.range());
let (import_edit, binding) = checker
.importer()
@ -221,15 +209,7 @@ fn generate_fix(
)
.ok()?;
let target = match open.argument {
OpenArgument::Builtin { filename } => {
let filename_code = locator.slice(filename.range());
format!("{binding}({filename_code})")
}
OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
};
let replacement = format!("{target}.{suggestion}");
let replacement = format!("{binding}({filename_code}).{suggestion}");
let applicability = if checker.comment_ranges().intersects(with_stmt.range()) {
Applicability::Unsafe

View File

@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:12:6
--> FURB101.py:12:6
|
11 | # FURB101
12 | with open("file.txt") as f:
@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").read_text()`
16 | with open("file.txt", "rb") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
--> FURB101_0.py:16:6
--> FURB101.py:16:6
|
15 | # FURB101
16 | with open("file.txt", "rb") as f:
@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").read_bytes()`
20 | with open("file.txt", mode="rb") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
--> FURB101_0.py:20:6
--> FURB101.py:20:6
|
19 | # FURB101
20 | with open("file.txt", mode="rb") as f:
@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").read_bytes()`
24 | with open("file.txt", encoding="utf8") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
--> FURB101_0.py:24:6
--> FURB101.py:24:6
|
23 | # FURB101
24 | with open("file.txt", encoding="utf8") as f:
@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf8")`
28 | with open("file.txt", errors="ignore") as f:
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
--> FURB101_0.py:28:6
--> FURB101.py:28:6
|
27 | # FURB101
28 | with open("file.txt", errors="ignore") as f:
@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").read_text(errors="ignore")`
32 | with open("file.txt", mode="r") as f: # noqa: FURB120
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:32:6
--> FURB101.py:32:6
|
31 | # FURB101
32 | with open("file.txt", mode="r") as f: # noqa: FURB120
@ -147,7 +147,7 @@ help: Replace with `Path("file.txt").read_text()`
note: This is an unsafe fix and may change runtime behavior
FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
--> FURB101_0.py:36:6
--> FURB101.py:36:6
|
35 | # FURB101
36 | with open(foo(), "rb") as f:
@ -158,7 +158,7 @@ FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
help: Replace with `Path(foo()).read_bytes()`
FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
--> FURB101_0.py:44:6
--> FURB101.py:44:6
|
43 | # FURB101
44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@ -169,7 +169,7 @@ FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
help: Replace with `Path("a.txt").read_text()`
FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
--> FURB101_0.py:44:26
--> FURB101.py:44:26
|
43 | # FURB101
44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@ -180,7 +180,7 @@ FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
help: Replace with `Path("b.txt").read_bytes()`
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
--> FURB101_0.py:49:18
--> FURB101.py:49:18
|
48 | # FURB101
49 | with foo() as a, open("file.txt") as b, foo() as c:
@ -191,7 +191,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
help: Replace with `Path("file.txt").read_text()`
FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:130:6
--> FURB101.py:130:6
|
129 | # FURB101
130 | with open("file.txt", encoding="utf-8") as f:
@ -215,7 +215,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
134 | with open("file.txt", encoding="utf-8") as f:
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:134:6
--> FURB101.py:134:6
|
133 | # FURB101 but no fix because it would remove the assignment to `x`
134 | with open("file.txt", encoding="utf-8") as f:
@ -225,7 +225,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:138:6
--> FURB101.py:138:6
|
137 | # FURB101 but no fix because it would remove the `process_contents` call
138 | with open("file.txt", encoding="utf-8") as f:
@ -234,13 +234,13 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
|
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
FURB101 `open` and `read` should be replaced by `Path("file1.txt").read_text(encoding="utf-8")`
--> FURB101_0.py:141:6
FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
--> FURB101.py:141:6
|
139 | contents = process_contents(f.read())
140 |
141 | with open("file1.txt", encoding="utf-8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
141 | with open("file.txt", encoding="utf-8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
142 | contents: str = process_contents(f.read())
|
help: Replace with `Path("file1.txt").read_text(encoding="utf-8")`
help: Replace with `Path("file.txt").read_text(encoding="utf-8")`

View File

@ -1,39 +0,0 @@
---
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
--> FURB101_1.py:4:6
|
2 | from pathlib import Path
3 |
4 | with Path("file.txt").open() as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
5 | contents = f.read()
|
help: Replace with `Path("file.txt").read_text()`
1 |
2 | from pathlib import Path
3 |
- with Path("file.txt").open() as f:
- contents = f.read()
4 + contents = Path("file.txt").read_text()
5 |
6 | with Path("file.txt").open("r") as f:
7 | contents = f.read()
FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
--> FURB101_1.py:7:6
|
5 | contents = f.read()
6 |
7 | with Path("file.txt").open("r") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
8 | contents = f.read()
|
help: Replace with `Path("file.txt").read_text()`
4 | with Path("file.txt").open() as f:
5 | contents = f.read()
6 |
- with Path("file.txt").open("r") as f:
- contents = f.read()
7 + contents = Path("file.txt").read_text()

View File

@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
--> FURB103_0.py:12:6
--> FURB103.py:12:6
|
11 | # FURB103
12 | with open("file.txt", "w") as f:
@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
16 | with open("file.txt", "wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
--> FURB103_0.py:16:6
--> FURB103.py:16:6
|
15 | # FURB103
16 | with open("file.txt", "wb") as f:
@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
20 | with open("file.txt", mode="wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
--> FURB103_0.py:20:6
--> FURB103.py:20:6
|
19 | # FURB103
20 | with open("file.txt", mode="wb") as f:
@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
24 | with open("file.txt", "w", encoding="utf8") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
--> FURB103_0.py:24:6
--> FURB103.py:24:6
|
23 | # FURB103
24 | with open("file.txt", "w", encoding="utf8") as f:
@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
28 | with open("file.txt", "w", errors="ignore") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
--> FURB103_0.py:28:6
--> FURB103.py:28:6
|
27 | # FURB103
28 | with open("file.txt", "w", errors="ignore") as f:
@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
32 | with open("file.txt", mode="w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
--> FURB103_0.py:32:6
--> FURB103.py:32:6
|
31 | # FURB103
32 | with open("file.txt", mode="w") as f:
@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
36 | with open(foo(), "wb") as f:
FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
--> FURB103_0.py:36:6
--> FURB103.py:36:6
|
35 | # FURB103
36 | with open(foo(), "wb") as f:
@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
help: Replace with `Path(foo()).write_bytes(bar())`
FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
--> FURB103_0.py:44:6
--> FURB103.py:44:6
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
help: Replace with `Path("a.txt").write_text(x)`
FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
--> FURB103_0.py:44:31
--> FURB103.py:44:31
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
help: Replace with `Path("b.txt").write_bytes(y)`
FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
--> FURB103_0.py:49:18
--> FURB103.py:49:18
|
48 | # FURB103
49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:58:6
--> FURB103.py:58:6
|
57 | # FURB103
58 | with open("file.txt", "w", newline="\r\n") as f:
@ -214,7 +214,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
62 | import builtins
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:66:6
--> FURB103.py:66:6
|
65 | # FURB103
66 | with builtins.open("file.txt", "w", newline="\r\n") as f:
@ -237,7 +237,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
70 | from builtins import open as o
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
--> FURB103_0.py:74:6
--> FURB103.py:74:6
|
73 | # FURB103
74 | with o("file.txt", "w", newline="\r\n") as f:
@ -260,7 +260,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
78 |
FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
--> FURB103_0.py:154:6
--> FURB103.py:154:6
|
152 | data = {"price": 100}
153 |
@ -284,7 +284,7 @@ help: Replace with `Path("test.json")....`
158 | with open("tmp_path/pyproject.toml", "w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
--> FURB103_0.py:158:6
--> FURB103.py:158:6
|
157 | # See: https://github.com/astral-sh/ruff/issues/21381
158 | with open("tmp_path/pyproject.toml", "w") as f:
View File
@ -1,157 +0,0 @@
---
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
--> FURB103_1.py:3:6
|
1 | from pathlib import Path
2 |
3 | with Path("file.txt").open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
4 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test")`
1 | from pathlib import Path
2 |
- with Path("file.txt").open("w") as f:
- f.write("test")
3 + Path("file.txt").write_text("test")
4 |
5 | with Path("file.txt").open("wb") as f:
6 | f.write(b"test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_bytes(b"test")`
--> FURB103_1.py:6:6
|
4 | f.write("test")
5 |
6 | with Path("file.txt").open("wb") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
7 | f.write(b"test")
|
help: Replace with `Path("file.txt").write_bytes(b"test")`
3 | with Path("file.txt").open("w") as f:
4 | f.write("test")
5 |
- with Path("file.txt").open("wb") as f:
- f.write(b"test")
6 + Path("file.txt").write_bytes(b"test")
7 |
8 | with Path("file.txt").open(mode="w") as f:
9 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
--> FURB103_1.py:9:6
|
7 | f.write(b"test")
8 |
9 | with Path("file.txt").open(mode="w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
10 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test")`
6 | with Path("file.txt").open("wb") as f:
7 | f.write(b"test")
8 |
- with Path("file.txt").open(mode="w") as f:
- f.write("test")
9 + Path("file.txt").write_text("test")
10 |
11 | with Path("file.txt").open("w", encoding="utf8") as f:
12 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", encoding="utf8")`
--> FURB103_1.py:12:6
|
10 | f.write("test")
11 |
12 | with Path("file.txt").open("w", encoding="utf8") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
13 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test", encoding="utf8")`
9 | with Path("file.txt").open(mode="w") as f:
10 | f.write("test")
11 |
- with Path("file.txt").open("w", encoding="utf8") as f:
- f.write("test")
12 + Path("file.txt").write_text("test", encoding="utf8")
13 |
14 | with Path("file.txt").open("w", errors="ignore") as f:
15 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", errors="ignore")`
--> FURB103_1.py:15:6
|
13 | f.write("test")
14 |
15 | with Path("file.txt").open("w", errors="ignore") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
16 | f.write("test")
|
help: Replace with `Path("file.txt").write_text("test", errors="ignore")`
12 | with Path("file.txt").open("w", encoding="utf8") as f:
13 | f.write("test")
14 |
- with Path("file.txt").open("w", errors="ignore") as f:
- f.write("test")
15 + Path("file.txt").write_text("test", errors="ignore")
16 |
17 | with Path(foo()).open("w") as f:
18 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path(foo()).write_text("test")`
--> FURB103_1.py:18:6
|
16 | f.write("test")
17 |
18 | with Path(foo()).open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
19 | f.write("test")
|
help: Replace with `Path(foo()).write_text("test")`
15 | with Path("file.txt").open("w", errors="ignore") as f:
16 | f.write("test")
17 |
- with Path(foo()).open("w") as f:
- f.write("test")
18 + Path(foo()).write_text("test")
19 |
20 | p = Path("file.txt")
21 | with p.open("w") as f:
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `p.write_text("test")`
--> FURB103_1.py:22:6
|
21 | p = Path("file.txt")
22 | with p.open("w") as f:
| ^^^^^^^^^^^^^^^^
23 | f.write("test")
|
help: Replace with `p.write_text("test")`
19 | f.write("test")
20 |
21 | p = Path("file.txt")
- with p.open("w") as f:
- f.write("test")
22 + p.write_text("test")
23 |
24 | with Path("foo", "bar", "baz").open("w") as f:
25 | f.write("test")
FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("foo", "bar", "baz").write_text("test")`
--> FURB103_1.py:25:6
|
23 | f.write("test")
24 |
25 | with Path("foo", "bar", "baz").open("w") as f:
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
26 | f.write("test")
|
help: Replace with `Path("foo", "bar", "baz").write_text("test")`
22 | with p.open("w") as f:
23 | f.write("test")
24 |
- with Path("foo", "bar", "baz").open("w") as f:
- f.write("test")
25 + Path("foo", "bar", "baz").write_text("test")
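These FURB103 cases mirror the read-side rule: a `with` block that performs a single write becomes one `pathlib` call, `write_text` for text modes and `write_bytes` for binary modes. A hedged sketch of the rewrite the snapshots record:

from pathlib import Path

# Before: flagged by FURB103
with open("file.txt", "w") as f:
    f.write("test")
with Path("file.txt").open("wb") as f:
    f.write(b"test")

# After: the suggested replacements
Path("file.txt").write_text("test")
Path("file.txt").write_bytes(b"test")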
View File
@ -2,7 +2,7 @@
source: crates/ruff_linter/src/rules/refurb/mod.rs
---
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
--> FURB103_0.py:12:6
--> FURB103.py:12:6
|
11 | # FURB103
12 | with open("file.txt", "w") as f:
@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
16 | with open("file.txt", "wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
--> FURB103_0.py:16:6
--> FURB103.py:16:6
|
15 | # FURB103
16 | with open("file.txt", "wb") as f:
@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
20 | with open("file.txt", mode="wb") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
--> FURB103_0.py:20:6
--> FURB103.py:20:6
|
19 | # FURB103
20 | with open("file.txt", mode="wb") as f:
@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
24 | with open("file.txt", "w", encoding="utf8") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
--> FURB103_0.py:24:6
--> FURB103.py:24:6
|
23 | # FURB103
24 | with open("file.txt", "w", encoding="utf8") as f:
@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
28 | with open("file.txt", "w", errors="ignore") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
--> FURB103_0.py:28:6
--> FURB103.py:28:6
|
27 | # FURB103
28 | with open("file.txt", "w", errors="ignore") as f:
@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
32 | with open("file.txt", mode="w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
--> FURB103_0.py:32:6
--> FURB103.py:32:6
|
31 | # FURB103
32 | with open("file.txt", mode="w") as f:
@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
36 | with open(foo(), "wb") as f:
FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
--> FURB103_0.py:36:6
--> FURB103.py:36:6
|
35 | # FURB103
36 | with open(foo(), "wb") as f:
@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
help: Replace with `Path(foo()).write_bytes(bar())`
FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
--> FURB103_0.py:44:6
--> FURB103.py:44:6
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
help: Replace with `Path("a.txt").write_text(x)`
FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
--> FURB103_0.py:44:31
--> FURB103.py:44:31
|
43 | # FURB103
44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
help: Replace with `Path("b.txt").write_bytes(y)`
FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
--> FURB103_0.py:49:18
--> FURB103.py:49:18
|
48 | # FURB103
49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`
FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
--> FURB103_0.py:154:6
--> FURB103.py:154:6
|
152 | data = {"price": 100}
153 |
@ -214,7 +214,7 @@ help: Replace with `Path("test.json")....`
158 | with open("tmp_path/pyproject.toml", "w") as f:
FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
--> FURB103_0.py:158:6
--> FURB103.py:158:6
|
157 | # See: https://github.com/astral-sh/ruff/issues/21381
158 | with open("tmp_path/pyproject.toml", "w") as f:
View File
@ -1,74 +0,0 @@
---
source: crates/ruff_linter/src/linter.rs
---
invalid-syntax: annotated name `a` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:4:5
|
2 | def f1():
3 | global a
4 | a: str = "foo" # error
| ^
5 |
6 | b: int = 1
|
invalid-syntax: annotated name `b` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:10:9
|
8 | def inner():
9 | global b
10 | b: str = "nested" # error
| ^
11 |
12 | c: int = 1
|
invalid-syntax: annotated name `c` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:15:5
|
13 | def f2():
14 | global c
15 | c: list[str] = [] # error
| ^
16 |
17 | d: int = 1
|
invalid-syntax: annotated name `d` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:20:5
|
18 | def f3():
19 | global d
20 | d: str # error
| ^
21 |
22 | e: int = 1
|
invalid-syntax: annotated name `g` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:29:1
|
27 | f: int = 1 # okay
28 |
29 | g: int = 1
| ^
30 | global g # error
|
invalid-syntax: annotated name `x` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:33:5
|
32 | class C:
33 | x: str
| ^
34 | global x # error
|
invalid-syntax: annotated name `x` can't be global
--> resources/test/fixtures/semantic_errors/annotated_global.py:38:5
|
36 | class D:
37 | global x # error
38 | x: str
| ^
|
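The diagnostics above all flag the same semantic rule: a name declared `global` in a scope cannot also carry an annotation there. A small illustration (the `counter` name is made up for the example):

counter: int = 0

def bump():
    global counter
    counter = counter + 1  # fine: plain assignment to the global
    # counter: int = 1     # invalid-syntax: annotated name `counter` can't be global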
View File
@ -1247,7 +1247,6 @@ impl<'a> Generator<'a> {
self.p_bytes_repr(&bytes_literal.value, bytes_literal.flags);
}
}
#[expect(clippy::eq_op)]
Expr::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => {
static INF_STR: &str = "1e309";
assert_eq!(f64::MAX_10_EXP, 308);
View File
@ -43,8 +43,7 @@ tracing = { workspace = true }
[dev-dependencies]
ruff_formatter = { workspace = true }
datatest-stable = { workspace = true }
insta = { workspace = true }
insta = { workspace = true, features = ["glob"] }
regex = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
@ -55,8 +54,8 @@ similar = { workspace = true }
ignored = ["ruff_cache"]
[[test]]
name = "fixtures"
harness = false
name = "ruff_python_formatter_fixtures"
path = "tests/fixtures.rs"
test = true
required-features = ["serde"]
View File
@ -125,13 +125,6 @@ lambda a, /, c: a
*x: x
)
(
lambda
# comment
*x,
**y: x
)
(
lambda
# comment 1
@ -203,17 +196,6 @@ lambda: ( # comment
x
)
(
lambda # 1
# 2
x, # 3
# 4
y
: # 5
# 6
x
)
(
lambda
x,
@ -222,71 +204,6 @@ lambda: ( # comment
z
)
# Leading
lambda x: (
lambda y: lambda z: x
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ y
+ z # Trailing
) # Trailing
# Leading
lambda x: lambda y: lambda z: [
x,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
y,
z
] # Trailing
# Trailing
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs), e=1, f=2, g=2: d
# Regression tests for https://github.com/astral-sh/ruff/issues/8179
@ -311,441 +228,6 @@ def a():
g = 10
)
def a():
return b(
c,
d,
e,
f=lambda self, *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
*args, **kwargs
) + 1,
)
# Additional ecosystem cases from https://github.com/astral-sh/ruff/pull/21385
class C:
def foo():
mock_service.return_value.bucket.side_effect = lambda name: (
source_bucket
if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name)
)
class C:
function_dict: Dict[Text, Callable[[CRFToken], Any]] = {
CRFEntityExtractorOptions.POS2: lambda crf_token: crf_token.pos_tag[:2]
if crf_token.pos_tag is not None
else None,
}
name = re.sub(r"[^\x21\x23-\x5b\x5d-\x7e]...............", lambda m: f"\\{m.group(0)}", p["name"])
def foo():
if True:
if True:
return (
lambda x: np.exp(cs(np.log(x.to(u.MeV).value))) * u.MeV * u.cm**2 / u.g
)
class C:
_is_recognized_dtype: Callable[[DtypeObj], bool] = lambda x: lib.is_np_dtype(
x, "M"
) or isinstance(x, DatetimeTZDtype)
class C:
def foo():
if True:
transaction_count = self._query_txs_for_range(
get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
chain_id=_chain_id,
from_ts=from_ts,
to_ts=to_ts,
),
)
transaction_count = self._query_txs_for_range(
get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range[_chain_id, from_ts, to_ts],
)
def ddb():
sql = (
lambda var, table, n=N: f"""
CREATE TABLE {table} AS
SELECT ROW_NUMBER() OVER () AS id, {var}
FROM (
SELECT {var}
FROM RANGE({n}) _ ({var})
ORDER BY RANDOM()
)
"""
)
long_assignment_target.with_attribute.and_a_slice[with_an_index] = ( # 1
# 2
lambda x, y, z: # 3
# 4
x + y + z # 5
# 6
)
long_assignment_target.with_attribute.and_a_slice[with_an_index] = (
lambda x, y, z: x + y + z
)
long_assignment_target.with_attribute.and_a_slice[with_an_index] = lambda x, y, z: x + y + z
very_long_variable_name_x, very_long_variable_name_y = lambda a: a + some_very_long_expression, lambda b: b * another_very_long_expression_here
very_long_variable_name_for_result += lambda x: very_long_function_call_that_should_definitely_be_parenthesized_now(x, more_args, additional_parameters)
if 1:
if 2:
if 3:
if self.location in EVM_EVMLIKE_LOCATIONS and database is not None:
exported_dict["notes"] = EVM_ADDRESS_REGEX.sub(
repl=lambda matched_address: self._maybe_add_label_with_address(
database=database,
matched_address=matched_address,
),
string=exported_dict["notes"],
)
class C:
def f():
return dict(
filter(
lambda intent_response: self.is_retrieval_intent_response(
intent_response
),
self.responses.items(),
)
)
@pytest.mark.parametrize(
"op",
[
# Not fluent
param(
lambda left, right: (
ibis.timestamp("2017-04-01")
),
),
# These four are fluent and fit on one line inside the parenthesized
# lambda body
param(
lambda left, right: (
ibis.timestamp("2017-04-01").cast(dt.date)
),
),
param(
lambda left, right: (
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
),
),
param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date)),
param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)),
# This is too long on one line in the lambda body and gets wrapped
# inside the body.
param(
lambda left, right: (
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right).between(left, right)
),
),
],
)
def test_string_temporal_compare_between(con, op, left, right): ...
[
(
lambda eval_df, _: MetricValue(
scores=eval_df["prediction"].tolist(),
aggregate_results={"prediction_sum": sum(eval_df["prediction"])},
)
),
]
# reuses the list parentheses
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
# adds parentheses around the body
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
# removes parentheses around the body
lambda xxxxxxxxxxxxxxxxxxxx: (xxxxxxxxxxxxxxxxxxxx + 1)
mapper = lambda x: dict_with_default[np.nan if isinstance(x, float) and np.isnan(x) else x]
lambda x, y, z: (
x + y + z
)
lambda x, y, z: (
x + y + z
# trailing body
)
lambda x, y, z: (
x + y + z # trailing eol body
)
lambda x, y, z: (
x + y + z
) # trailing lambda
lambda x, y, z: (
# leading body
x + y + z
)
lambda x, y, z: ( # leading eol body
x + y + z
)
(
lambda name:
source_bucket # trailing eol comment
if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name)
)
(
lambda name:
# dangling header comment
source_bucket
if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name)
)
x = (
lambda name:
# dangling header comment
source_bucket
if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name)
)
(
lambda name: # dangling header comment
(
source_bucket
if name == source_bucket_name
else storage.Bucket(mock_service, destination_bucket_name)
)
)
(
lambda from_ts, to_ts, _chain_id=chain_id: # dangling eol header comment
db_evmtx.count_transactions_in_range(
chain_id=_chain_id,
from_ts=from_ts,
to_ts=to_ts,
)
)
(
lambda from_ts, to_ts, _chain_id=chain_id:
# dangling header comment before call
db_evmtx.count_transactions_in_range(
chain_id=_chain_id,
from_ts=from_ts,
to_ts=to_ts,
)
)
(
lambda left, right:
# comment
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
)
(
lambda left, right:
ibis.timestamp("2017-04-01") # comment
.cast(dt.date)
.between(left, right)
)
(
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy:
# comment
[xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
)
(
lambda x, y:
# comment
{
"key": x,
"another": y,
}
)
(
lambda x, y:
# comment
(
x,
y,
z
)
)
(
lambda x:
# comment
dict_with_default[np.nan if isinstance(x, float) and np.isnan(x) else x]
)
(
lambda from_ts, to_ts, _chain_id=chain_id:
db_evmtx.count_transactions_in_range[
# comment
_chain_id, from_ts, to_ts
]
)
(
lambda
# comment
*args, **kwargs:
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda # comment
*args, **kwargs:
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda # comment 1
# comment 2
*args, **kwargs: # comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda # comment 1
*args, **kwargs: # comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
)
(
lambda *args, **kwargs:
# comment 1
( # comment 2
# comment 3
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1 # comment 4
# comment 5
) # comment 6
)
(
lambda *brgs, **kwargs:
# comment 1
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa( # comment 2
# comment 3
*brgs, **kwargs) + 1 # comment 4
# comment 5
)
(
lambda *crgs, **kwargs: # comment 1
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*crgs, **kwargs) + 1
)
(
lambda *drgs, **kwargs: # comment 1
(
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*drgs, **kwargs) + 1
)
)
(
lambda * # comment 1
ergs, **
# comment 2
kwargs # comment 3
: # comment 4
(
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*ergs, **kwargs) + 1
)
)
(
lambda # 1
# 2
left, # 3
# 4
right: # 5
# 6
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
)
(
lambda x: # outer comment 1
(
lambda y: # inner comment 1
# inner comment 2
lambda z: (
# innermost comment
x + y + z
)
)
)
foo(
lambda from_ts, # comment prevents collapsing the parameters to one line
to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
chain_id=_chain_id,
from_ts=from_ts,
to_ts=to_ts,
)
)
foo(
lambda from_ts, # but still wrap the body if it gets too long
to_ts,
_chain_id=chain_id: db_evmtx.count_transactions_in_rangeeeeeeeeeeeeeeeeeeeeeeeeeeeee(
chain_id=_chain_id,
from_ts=from_ts,
to_ts=to_ts,
)
)
transform = lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).between(left, right).between(left, right) # trailing comment
(
lambda: # comment
1
)
(
lambda # comment
:
1
)
(
lambda:
# comment
1
)
(
lambda: # comment 1
# comment 2
1
)
(
lambda # comment 1
# comment 2
: # comment 3
# comment 4
1
)
(
lambda
* # comment 2
@ -789,18 +271,3 @@ transform = lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).betwe
x:
x
)
(
lambda: # dangling-end-of-line
# dangling-own-line
( # leading-body-end-of-line
x
)
)
(
lambda: # dangling-end-of-line
( # leading-body-end-of-line
x
)
)
View File
@ -1 +0,0 @@
[{"line_width":8}]
View File
@ -1,35 +0,0 @@
# Fixtures for fluent formatting of call chains
# Note that `fluent.options.json` sets line width to 8
x = a.b()
x = a.b().c()
x = a.b().c().d
x = a.b.c.d().e()
x = a.b.c().d.e().f.g()
# Consecutive calls/subscripts are grouped together
# for the purposes of fluent formatting (though, as of 2025.12.15,
# there may be a break inside of one of these
# calls/subscripts, but that is unrelated to the fluent format.)
x = a()[0]().b().c()
x = a.b()[0].c.d()[1]().e
# Parentheses affect both where the root of the call
# chain is and how many calls we require before applying
# fluent formatting (just 1, in the presence of a parenthesized
# root, as of 2025.12.15.)
x = (a).b()
x = (a()).b()
x = (a.b()).d.e()
x = (a.b().d).e()
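For orientation, the fluent layout exercised by this fixture breaks a qualifying chain after its root and puts each subsequent call on its own dot-led line. A rough sketch of the shape at a small line width (an illustration only, not the deleted `.expect` output):

# input
x = a.b().c().d

# approximate fluent output once the chain no longer fits
x = (
    a.b()
    .c()
    .d
)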
View File
@ -216,69 +216,3 @@ max_message_id = (
.baz()
)
# Note in preview we split at `pl` which some
# folks may dislike. (Similarly with common
# `np` and `pd` invocations).
#
# This is because we cannot reliably predict,
# just from syntax, whether a short identifier
# is being used as a 'namespace' or as an 'object'.
#
# As of 2025.12.15, we do not indent methods in
# fluent formatting. If we ever decide to do so,
# it may make sense to special case call chain roots
# that are shorter than the indent-width (like Prettier does).
# This would have the benefit of handling these common
# two-letter aliases for libraries.
expr = (
pl.scan_parquet("/data/pypi-parquet/*.parquet")
.filter(
[
pl.col("path").str.contains(
r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
),
~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
~pl.col("path").str.contains("/site-packages/", literal=True),
]
)
.with_columns(
month=pl.col("uploaded_on").dt.truncate("1mo"),
ext=pl.col("path")
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
.str.replace_all(pattern="^f.*$", value="Fortran")
.str.replace("rs", "Rust", literal=True)
.str.replace("go", "Go", literal=True)
.str.replace("asm", "Assembly", literal=True)
.replace({"": None}),
)
.group_by(["month", "ext"])
.agg(project_count=pl.col("project_name").n_unique())
.drop_nulls(["ext"])
.sort(["month", "project_count"], descending=True)
)
def indentation_matching_for_loop_in_preview():
if make_this:
if more_nested_because_line_length:
identical_hidden_layer_sizes = all(
current_hidden_layer_sizes == first_hidden_layer_sizes
for current_hidden_layer_sizes in self.component_config[
HIDDEN_LAYERS_SIZES
].values().attr
)
def indentation_matching_walrus_in_preview():
if make_this:
if more_nested_because_line_length:
with self.read_ctx(book_type) as cursor:
if (entry_count := len(names := cursor.execute(
'SELECT name FROM address_book WHERE address=?',
(address,),
).fetchall().some_attr)) == 0 or len(set(names)) > 1:
return
# behavior with parenthesized roots
x = (aaaaaaaaaaaaaaaaaaaaaa).bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc().dddddddddddddddddddddddd().eeeeeeeeeeee
View File
@ -1,4 +1,4 @@
use ruff_formatter::{Argument, Arguments, format_args, write};
use ruff_formatter::{Argument, Arguments, write};
use ruff_text_size::{Ranged, TextRange, TextSize};
use crate::context::{NodeLevel, WithNodeLevel};
@ -33,27 +33,20 @@ impl<'ast> Format<PyFormatContext<'ast>> for ParenthesizeIfExpands<'_, 'ast> {
{
let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f);
write!(
f,
[group(&format_with(|f| {
if_group_breaks(&token("(")).fmt(f)?;
if self.indent {
let parens_id = f.group_id("indented_parenthesize_if_expands");
group(&format_args![
if_group_breaks(&token("(")),
indent_if_group_breaks(
&format_args![soft_line_break(), &Arguments::from(&self.inner)],
parens_id
),
soft_line_break(),
if_group_breaks(&token(")"))
])
.with_id(Some(parens_id))
.fmt(&mut f)
soft_block_indent(&Arguments::from(&self.inner)).fmt(f)?;
} else {
group(&format_args![
if_group_breaks(&token("(")),
Arguments::from(&self.inner),
if_group_breaks(&token(")")),
])
.fmt(&mut f)
Arguments::from(&self.inner).fmt(f)?;
}
if_group_breaks(&token(")")).fmt(f)
}))]
)
}
}
}
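Conceptually, `ParenthesizeIfExpands` emits the parentheses only when its group breaks across lines; when everything fits, nothing is added. A hedged Python-level illustration of the two outcomes (identifiers are illustrative):

# fits on one line: no parentheses are introduced
total = first + second

# breaks: the same expression gains parentheses and an indent
total_with_a_much_longer_target_name = (
    first_rather_long_operand + second_rather_long_operand
)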
View File
@ -3,7 +3,7 @@
use std::path::{Path, PathBuf};
use anyhow::{Context, Result};
use clap::{Parser, ValueEnum};
use clap::{Parser, ValueEnum, command};
use ruff_formatter::SourceCode;
use ruff_python_ast::{PySourceType, PythonVersion};
View File
@ -10,7 +10,6 @@ use crate::expression::parentheses::{
NeedsParentheses, OptionalParentheses, Parentheses, is_expression_parenthesized,
};
use crate::prelude::*;
use crate::preview::is_fluent_layout_split_first_call_enabled;
#[derive(Default)]
pub struct FormatExprAttribute {
@ -48,26 +47,20 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
)
};
if call_chain_layout.is_fluent() {
if call_chain_layout == CallChainLayout::Fluent {
if parenthesize_value {
// Don't propagate the call chain layout.
value.format().with_options(Parentheses::Always).fmt(f)?;
} else {
match value.as_ref() {
Expr::Attribute(expr) => {
expr.format()
.with_options(call_chain_layout.transition_after_attribute())
.fmt(f)?;
expr.format().with_options(call_chain_layout).fmt(f)?;
}
Expr::Call(expr) => {
expr.format()
.with_options(call_chain_layout.transition_after_attribute())
.fmt(f)?;
expr.format().with_options(call_chain_layout).fmt(f)?;
}
Expr::Subscript(expr) => {
expr.format()
.with_options(call_chain_layout.transition_after_attribute())
.fmt(f)?;
expr.format().with_options(call_chain_layout).fmt(f)?;
}
_ => {
value.format().with_options(Parentheses::Never).fmt(f)?;
@ -112,30 +105,8 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
// Allow the `.` on its own line if this is a fluent call chain
// and the value either requires parenthesizing or is a call or subscript expression
// (it's a fluent chain but not the first element).
//
// In preview we also break _at_ the first call in the chain.
// For example:
//
// ```diff
// # stable formatting vs. preview
// x = (
// - df.merge()
// + df
// + .merge()
// .groupby()
// .agg()
// .filter()
// )
// ```
else if call_chain_layout.is_fluent() {
if parenthesize_value
|| value.is_call_expr()
|| value.is_subscript_expr()
// Remember to update the doc-comment above when
// stabilizing this behavior.
|| (is_fluent_layout_split_first_call_enabled(f.context())
&& call_chain_layout.is_first_call_like())
{
else if call_chain_layout == CallChainLayout::Fluent {
if parenthesize_value || value.is_call_expr() || value.is_subscript_expr() {
soft_line_break().fmt(f)?;
}
}
@ -177,8 +148,8 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
)
});
let is_call_chain_root =
self.call_chain_layout == CallChainLayout::Default && call_chain_layout.is_fluent();
let is_call_chain_root = self.call_chain_layout == CallChainLayout::Default
&& call_chain_layout == CallChainLayout::Fluent;
if is_call_chain_root {
write!(f, [group(&format_inner)])
} else {
@ -198,8 +169,7 @@ impl NeedsParentheses for ExprAttribute {
self.into(),
context.comments().ranges(),
context.source(),
)
.is_fluent()
) == CallChainLayout::Fluent
{
OptionalParentheses::Multiline
} else if context.comments().has_dangling(self) {
View File
@ -47,10 +47,7 @@ impl FormatNodeRule<ExprCall> for FormatExprCall {
func.format().with_options(Parentheses::Always).fmt(f)
} else {
match func.as_ref() {
Expr::Attribute(expr) => expr
.format()
.with_options(call_chain_layout.decrement_call_like_count())
.fmt(f),
Expr::Attribute(expr) => expr.format().with_options(call_chain_layout).fmt(f),
Expr::Call(expr) => expr.format().with_options(call_chain_layout).fmt(f),
Expr::Subscript(expr) => expr.format().with_options(call_chain_layout).fmt(f),
_ => func.format().with_options(Parentheses::Never).fmt(f),
@ -70,7 +67,9 @@ impl FormatNodeRule<ExprCall> for FormatExprCall {
// queryset.distinct().order_by(field.name).values_list(field_name_flat_long_long=True)
// )
// ```
if call_chain_layout.is_fluent() && self.call_chain_layout == CallChainLayout::Default {
if call_chain_layout == CallChainLayout::Fluent
&& self.call_chain_layout == CallChainLayout::Default
{
group(&fmt_func).fmt(f)
} else {
fmt_func.fmt(f)
@ -88,8 +87,7 @@ impl NeedsParentheses for ExprCall {
self.into(),
context.comments().ranges(),
context.source(),
)
.is_fluent()
) == CallChainLayout::Fluent
{
OptionalParentheses::Multiline
} else if context.comments().has_dangling(self) {
View File
@ -1,21 +1,15 @@
use ruff_formatter::{FormatRuleWithOptions, RemoveSoftLinesBuffer, format_args, write};
use ruff_python_ast::{AnyNodeRef, Expr, ExprLambda};
use ruff_formatter::write;
use ruff_python_ast::AnyNodeRef;
use ruff_python_ast::ExprLambda;
use ruff_text_size::Ranged;
use crate::builders::parenthesize_if_expands;
use crate::comments::{SourceComment, dangling_comments, leading_comments, trailing_comments};
use crate::expression::parentheses::{
NeedsParentheses, OptionalParentheses, Parentheses, is_expression_parenthesized,
};
use crate::expression::{CallChainLayout, has_own_parentheses};
use crate::comments::dangling_comments;
use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses};
use crate::other::parameters::ParametersParentheses;
use crate::prelude::*;
use crate::preview::is_parenthesize_lambda_bodies_enabled;
#[derive(Default)]
pub struct FormatExprLambda {
layout: ExprLambdaLayout,
}
pub struct FormatExprLambda;
impl FormatNodeRule<ExprLambda> for FormatExprLambda {
fn fmt_fields(&self, item: &ExprLambda, f: &mut PyFormatter) -> FormatResult<()> {
@ -26,19 +20,13 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
body,
} = item;
let body = &**body;
let parameters = parameters.as_deref();
let comments = f.context().comments().clone();
let dangling = comments.dangling(item);
let preview = is_parenthesize_lambda_bodies_enabled(f.context());
write!(f, [token("lambda")])?;
// Format any dangling comments before the parameters, but save any dangling comments after
// the parameters/after the header to be formatted with the body below.
let dangling_header_comments = if let Some(parameters) = parameters {
// In this context, a dangling comment can either be a comment between the `lambda` and the
if let Some(parameters) = parameters {
// In this context, a dangling comment can either be a comment between the `lambda` the
// parameters, or a comment between the parameters and the body.
let (dangling_before_parameters, dangling_after_parameters) = dangling
.split_at(dangling.partition_point(|comment| comment.end() < parameters.start()));
@ -98,7 +86,7 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
// *x: x
// )
// ```
if comments.has_leading(parameters) {
if comments.has_leading(&**parameters) {
hard_line_break().fmt(f)?;
} else {
write!(f, [space()])?;
@ -107,90 +95,32 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
write!(f, [dangling_comments(dangling_before_parameters)])?;
}
// Try to keep the parameters on a single line, unless there are intervening comments.
if preview && !comments.contains_comments(parameters.into()) {
let mut buffer = RemoveSoftLinesBuffer::new(f);
write!(
buffer,
[parameters
.format()
.with_options(ParametersParentheses::Never)]
)?;
} else {
write!(
f,
[parameters
.format()
.with_options(ParametersParentheses::Never)]
)?;
}
dangling_after_parameters
} else {
dangling
};
write!(f, [token(":")])?;
if dangling_header_comments.is_empty() {
if dangling_after_parameters.is_empty() {
write!(f, [space()])?;
} else if !preview {
write!(f, [dangling_comments(dangling_header_comments)])?;
} else {
write!(f, [dangling_comments(dangling_after_parameters)])?;
}
} else {
write!(f, [token(":")])?;
if !preview {
return body.format().fmt(f);
}
let fmt_body = FormatBody {
body,
dangling_header_comments,
};
match self.layout {
ExprLambdaLayout::Assignment => fits_expanded(&fmt_body).fmt(f),
ExprLambdaLayout::Default => fmt_body.fmt(f),
}
// In this context, a dangling comment is a comment between the `lambda` and the body.
if dangling.is_empty() {
write!(f, [space()])?;
} else {
write!(f, [dangling_comments(dangling)])?;
}
}
#[derive(Debug, Default, Copy, Clone)]
pub enum ExprLambdaLayout {
#[default]
Default,
/// The [`ExprLambda`] is the direct child of an assignment expression, so it needs to use
/// `fits_expanded` to prefer parenthesizing its own body before the assignment tries to
/// parenthesize the whole lambda. For example, we want this formatting:
///
/// ```py
/// long_assignment_target = lambda x, y, z: (
/// x + y + z
/// )
/// ```
///
/// instead of either of these:
///
/// ```py
/// long_assignment_target = (
/// lambda x, y, z: (
/// x + y + z
/// )
/// )
///
/// long_assignment_target = (
/// lambda x, y, z: x + y + z
/// )
/// ```
Assignment,
}
impl FormatRuleWithOptions<ExprLambda, PyFormatContext<'_>> for FormatExprLambda {
type Options = ExprLambdaLayout;
fn with_options(mut self, options: Self::Options) -> Self {
self.layout = options;
self
write!(f, [body.format()])
}
}
@ -207,267 +137,3 @@ impl NeedsParentheses for ExprLambda {
}
}
}
struct FormatBody<'a> {
body: &'a Expr,
/// Dangling comments attached to the lambda header that should be formatted with the body.
///
/// These can include both own-line and end-of-line comments. For lambdas with parameters, this
/// means comments after the parameters:
///
/// ```py
/// (
/// lambda x, y # 1
/// # 2
/// : # 3
/// # 4
/// x + y
/// )
/// ```
///
/// Or all dangling comments for lambdas without parameters:
///
/// ```py
/// (
/// lambda # 1
/// # 2
/// : # 3
/// # 4
/// 1
/// )
/// ```
///
/// In most cases these should be formatted within the parenthesized body, as in:
///
/// ```py
/// (
/// lambda: ( # 1
/// # 2
/// # 3
/// # 4
/// 1
/// )
/// )
/// ```
///
/// or without `# 2`:
///
/// ```py
/// (
/// lambda: ( # 1 # 3
/// # 4
/// 1
/// )
/// )
/// ```
dangling_header_comments: &'a [SourceComment],
}
impl Format<PyFormatContext<'_>> for FormatBody<'_> {
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
let FormatBody {
dangling_header_comments,
body,
} = self;
let body = *body;
let comments = f.context().comments().clone();
let body_comments = comments.leading_dangling_trailing(body);
if !dangling_header_comments.is_empty() {
// Split the dangling header comments into trailing comments formatted with the lambda
// header (1) and leading comments formatted with the body (2, 3, 4).
//
// ```python
// (
// lambda # 1
// # 2
// : # 3
// # 4
// y
// )
// ```
//
// Note that these are split based on their line position rather than using
// `partition_point` based on a range, for example.
let (trailing_header_comments, leading_body_comments) = dangling_header_comments
.split_at(
dangling_header_comments
.iter()
.position(|comment| comment.line_position().is_own_line())
.unwrap_or(dangling_header_comments.len()),
);
// If the body is parenthesized and has its own leading comments, preserve the
// separation between the dangling lambda comments and the body comments. For
// example, preserve this comment positioning:
//
// ```python
// (
// lambda: # 1
// # 2
// ( # 3
// x
// )
// )
// ```
//
// 1 and 2 are dangling on the lambda and emitted first, followed by a hard line
// break and the parenthesized body with its leading comments.
//
// However, when removing 2, 1 and 3 can instead be formatted on the same line:
//
// ```python
// (
// lambda: ( # 1 # 3
// x
// )
// )
// ```
let comments = f.context().comments();
if is_expression_parenthesized(body.into(), comments.ranges(), f.context().source())
&& comments.has_leading(body)
{
trailing_comments(dangling_header_comments).fmt(f)?;
// Note that `leading_body_comments` have already been formatted as part of
// `dangling_header_comments` above, but their presence still determines the spacing
// here.
if leading_body_comments.is_empty() {
space().fmt(f)?;
} else {
hard_line_break().fmt(f)?;
}
body.format().with_options(Parentheses::Always).fmt(f)
} else {
write!(
f,
[
space(),
token("("),
trailing_comments(trailing_header_comments),
block_indent(&format_args!(
leading_comments(leading_body_comments),
body.format().with_options(Parentheses::Never)
)),
token(")")
]
)
}
}
// If the body has comments, we always want to preserve the parentheses. This also
// ensures that we correctly handle parenthesized comments, and don't need to worry
// about them in the implementation below.
else if body_comments.has_leading() || body_comments.has_trailing_own_line() {
body.format().with_options(Parentheses::Always).fmt(f)
}
// Calls and subscripts require special formatting because they have their own
// parentheses, but they can also have an arbitrary amount of text before the
// opening parenthesis. We want to avoid cases where we keep a long callable on the
// same line as the lambda parameters. For example, `db_evmtx...` in:
//
// ```py
// transaction_count = self._query_txs_for_range(
// get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
// chain_id=_chain_id,
// from_ts=from_ts,
// to_ts=to_ts,
// ),
// )
// ```
//
// should cause the whole lambda body to be parenthesized instead:
//
// ```py
// transaction_count = self._query_txs_for_range(
// get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: (
// db_evmtx.count_transactions_in_range(
// chain_id=_chain_id,
// from_ts=from_ts,
// to_ts=to_ts,
// )
// ),
// )
// ```
else if matches!(body, Expr::Call(_) | Expr::Subscript(_)) {
let unparenthesized = body.format().with_options(Parentheses::Never);
if CallChainLayout::from_expression(
body.into(),
comments.ranges(),
f.context().source(),
)
.is_fluent()
{
parenthesize_if_expands(&unparenthesized).fmt(f)
} else {
let unparenthesized = unparenthesized.memoized();
if unparenthesized.inspect(f)?.will_break() {
expand_parent().fmt(f)?;
}
best_fitting![
// body all flat
unparenthesized,
// body expanded
group(&unparenthesized).should_expand(true),
// parenthesized
format_args![token("("), block_indent(&unparenthesized), token(")")]
]
.fmt(f)
}
}
// For other cases with their own parentheses, such as lists, sets, dicts, tuples,
// etc., we can just format the body directly. Their own formatting results in the
// lambda being formatted well too. For example:
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
// ```
//
// gets formatted as:
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [
// xxxxxxxxxxxxxxxxxxxx,
// yyyyyyyyyyyyyyyyyyyy,
// zzzzzzzzzzzzzzzzzzzz
// ]
// ```
else if has_own_parentheses(body, f.context()).is_some() {
body.format().fmt(f)
}
// Finally, for expressions without their own parentheses, use
// `parenthesize_if_expands` to add parentheses around the body, only if it expands
// across multiple lines. The `Parentheses::Never` here also removes unnecessary
// parentheses around lambda bodies that fit on one line. For example:
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
// ```
//
// is formatted as:
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: (
// xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
// )
// ```
//
// while
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx: (xxxxxxxxxxxxxxxxxxxx + 1)
// ```
//
// is formatted as:
//
// ```py
// lambda xxxxxxxxxxxxxxxxxxxx: xxxxxxxxxxxxxxxxxxxx + 1
// ```
else {
parenthesize_if_expands(&body.format().with_options(Parentheses::Never)).fmt(f)
}
}
}
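The removed `FormatBody` cases amount to the following Python-level before/after, lifted from the doc comments above (the long identifiers are the doc comment's own placeholders):

# a body without its own parentheses is parenthesized only when it expands
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: (
    xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
)

# a short body stays flat and sheds redundant parentheses
lambda xxxxxxxxxxxxxxxxxxxx: xxxxxxxxxxxxxxxxxxxx + 1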
View File
@ -51,10 +51,7 @@ impl FormatNodeRule<ExprSubscript> for FormatExprSubscript {
value.format().with_options(Parentheses::Always).fmt(f)
} else {
match value.as_ref() {
Expr::Attribute(expr) => expr
.format()
.with_options(call_chain_layout.decrement_call_like_count())
.fmt(f),
Expr::Attribute(expr) => expr.format().with_options(call_chain_layout).fmt(f),
Expr::Call(expr) => expr.format().with_options(call_chain_layout).fmt(f),
Expr::Subscript(expr) => expr.format().with_options(call_chain_layout).fmt(f),
_ => value.format().with_options(Parentheses::Never).fmt(f),
@ -74,8 +71,8 @@ impl FormatNodeRule<ExprSubscript> for FormatExprSubscript {
.fmt(f)
});
let is_call_chain_root =
self.call_chain_layout == CallChainLayout::Default && call_chain_layout.is_fluent();
let is_call_chain_root = self.call_chain_layout == CallChainLayout::Default
&& call_chain_layout == CallChainLayout::Fluent;
if is_call_chain_root {
write!(f, [group(&format_inner)])
} else {
@ -95,8 +92,7 @@ impl NeedsParentheses for ExprSubscript {
self.into(),
context.comments().ranges(),
context.source(),
)
.is_fluent()
) == CallChainLayout::Fluent
{
OptionalParentheses::Multiline
} else if is_expression_parenthesized(
View File
@ -876,22 +876,6 @@ impl<'a> First<'a> {
/// )
/// ).all()
/// ```
///
/// In [`preview`](crate::preview::is_fluent_layout_split_first_call_enabled), we also track the position of the leftmost call or
/// subscript on an attribute in the chain and break just before the dot.
///
/// So, for example, the right-hand summand in the above expression
/// would get formatted as:
/// ```python
/// Blog.objects
/// .filter(
/// entry__headline__contains="McCartney",
/// )
/// .limit_results[:10]
/// .filter(
/// entry__pub_date__year=2010,
/// )
/// ```
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub enum CallChainLayout {
/// The root of a call chain
@ -899,149 +883,19 @@ pub enum CallChainLayout {
Default,
/// A nested call chain element that uses fluent style.
Fluent(AttributeState),
Fluent,
/// A nested call chain element not using fluent style.
NonFluent,
}
/// Records information about the current position within
/// a call chain.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AttributeState {
/// Stores the number of calls or subscripts
/// to the left of the current position in a chain.
///
/// Consecutive calls/subscripts on a single
/// object only count once. For example, if we are at
/// `c` in `a.b()[0]()().c()` then this number would be 1.
///
/// Caveat: If the root of the chain is parenthesized,
/// it contributes +1 to this count, even if it is not
/// a call or subscript. But the name
/// `CallLikeOrParenthesizedRootPreceding`
/// is a tad unwieldy, and this also rarely occurs.
CallLikePreceding(u32),
/// Indicates that we are at the first called or
/// subscripted object in the chain
///
/// For example, if we are at `b` in `a.b()[0]()().c()`
FirstCallLike,
/// Indicates that we are to the left of the first
/// called or subscripted object in the chain, and therefore
/// need not break.
///
/// For example, if we are at `a` in `a.b()[0]()().c()`
BeforeFirstCallLike,
}
impl CallChainLayout {
/// Returns a new state, decreasing the count of remaining calls/subscripts
/// to traverse, or the state `FirstCallLike`, as appropriate.
#[must_use]
pub(crate) fn decrement_call_like_count(self) -> Self {
match self {
Self::Fluent(AttributeState::CallLikePreceding(x)) => {
if x > 1 {
// Recall that we traverse call chains from right to
// left. So after moving from a call/subscript into
// an attribute, we _decrease_ the count of
// _remaining_ calls or subscripts to the left of our
// current position.
Self::Fluent(AttributeState::CallLikePreceding(x - 1))
} else {
Self::Fluent(AttributeState::FirstCallLike)
}
}
_ => self,
}
}
/// Returns with the state change
/// `FirstCallLike` -> `BeforeFirstCallLike`
/// and otherwise returns unchanged.
#[must_use]
pub(crate) fn transition_after_attribute(self) -> Self {
match self {
Self::Fluent(AttributeState::FirstCallLike) => {
Self::Fluent(AttributeState::BeforeFirstCallLike)
}
_ => self,
}
}
pub(crate) fn is_first_call_like(self) -> bool {
matches!(self, Self::Fluent(AttributeState::FirstCallLike))
}
/// Returns either `Fluent` or `NonFluent` depending on a
/// heuristic computed for the whole chain.
///
/// Explicitly, the criterion to return `Fluent` is
/// as follows:
///
/// 1. Beginning from the right (i.e. the `expr` itself),
/// traverse inwards past calls, subscripts, and attribute
/// expressions until we meet the first expression that is
/// either none of these or else is parenthesized. This will
/// be the _root_ of the call chain.
/// 2. Count the number of _attribute values_ that are _called
/// or subscripted_ in the chain (note that this includes the
/// root but excludes the rightmost attribute in the chain since
/// it is not the _value_ of some attribute).
/// 3. If the root is parenthesized, add 1 to that value.
/// 4. If the total is at least 2, return `Fluent`. Otherwise
/// return `NonFluent`
pub(crate) fn from_expression(
mut expr: ExprRef,
comment_ranges: &CommentRanges,
source: &str,
) -> Self {
// TODO(dylan): Once the fluent layout preview style is
// stabilized, see if it is possible to simplify some of
// the logic around parenthesized roots. (While supporting
// both styles it is more difficult to do this.)
// Count of attribute _values_ which are called or
// subscripted, after the leftmost parenthesized
// value.
//
// Examples:
// ```
// # Count of 3 - notice that .d()
// # does not contribute
// a().b().c[0]()().d()
// # Count of 2 - notice that a()
// # does not contribute
// (a()).b().c[0].d
// ```
let mut computed_attribute_values_after_parentheses = 0;
// Similar to the above, but instead looks at all calls
// and subscripts rather than looking only at those on
// _attribute values_. So this count can differ from the
// above.
//
// Examples of `computed_attribute_values_after_parentheses` vs
// `call_like_count`:
//
// a().b ---> 1 vs 1
// a.b().c --> 1 vs 1
// a.b() ---> 0 vs 1
let mut call_like_count = 0;
// Going from right to left, we traverse calls, subscripts,
// and attributes until we get to an expression of a different
// kind _or_ to a parenthesized expression. This records
// the case where we end the traversal at a parenthesized expression.
//
// In these cases, the inferred semantics of the chain are different.
// We interpret this as the user indicating:
// "this parenthesized value is the object of interest and we are
// doing transformations on it". This increases our confidence that
// this should be fluently formatted, and also means we should make
// our first break after this value.
let mut root_value_parenthesized = false;
let mut attributes_after_parentheses = 0;
loop {
match expr {
ExprRef::Attribute(ast::ExprAttribute { value, .. }) => {
@ -1053,10 +907,10 @@ impl CallChainLayout {
// ```
if is_expression_parenthesized(value.into(), comment_ranges, source) {
// `(a).b`. We preserve these parentheses so don't recurse
root_value_parenthesized = true;
attributes_after_parentheses += 1;
break;
} else if matches!(value.as_ref(), Expr::Call(_) | Expr::Subscript(_)) {
computed_attribute_values_after_parentheses += 1;
attributes_after_parentheses += 1;
}
expr = ExprRef::from(value.as_ref());
@ -1071,68 +925,31 @@ impl CallChainLayout {
// ```
ExprRef::Call(ast::ExprCall { func: inner, .. })
| ExprRef::Subscript(ast::ExprSubscript { value: inner, .. }) => {
// We preserve these parentheses so don't recurse
// e.g. (a)[0].x().y().z()
// ^stop here
if is_expression_parenthesized(inner.into(), comment_ranges, source) {
break;
}
// Accumulate the `call_like_count`, but we only
// want to count things like `a()[0]()()` once.
if !inner.is_call_expr() && !inner.is_subscript_expr() {
call_like_count += 1;
}
expr = ExprRef::from(inner.as_ref());
}
_ => {
// We want to format the following in fluent style:
// ```
// f2 = (a).w().t(1,)
// ^ expr
// ```
if is_expression_parenthesized(expr, comment_ranges, source) {
attributes_after_parentheses += 1;
}
break;
}
}
}
if computed_attribute_values_after_parentheses + u32::from(root_value_parenthesized) < 2 {
// We preserve these parentheses so don't recurse
if is_expression_parenthesized(expr, comment_ranges, source) {
break;
}
}
if attributes_after_parentheses < 2 {
CallChainLayout::NonFluent
} else {
CallChainLayout::Fluent(AttributeState::CallLikePreceding(
// We count a parenthesized root value as an extra
// call for the purposes of tracking state.
//
// The reason is that, in this case, we want the first
// "special" break to happen right after the root, as
// opposed to right after the first called/subscripted
// attribute.
//
// For example:
//
// ```
// (object_of_interest)
// .data.filter()
// .agg()
// .etc()
// ```
//
// instead of (in preview):
//
// ```
// (object_of_interest)
// .data
// .filter()
// .etc()
// ```
//
// For comparison, if we didn't have parentheses around
// the root, we want (and get, in preview):
//
// ```
// object_of_interest.data
// .filter()
// .agg()
// .etc()
// ```
call_like_count + u32::from(root_value_parenthesized),
))
CallChainLayout::Fluent
}
}
@ -1155,13 +972,9 @@ impl CallChainLayout {
CallChainLayout::NonFluent
}
}
layout @ (CallChainLayout::Fluent(_) | CallChainLayout::NonFluent) => layout,
layout @ (CallChainLayout::Fluent | CallChainLayout::NonFluent) => layout,
}
}
pub(crate) fn is_fluent(self) -> bool {
matches!(self, CallChainLayout::Fluent(_))
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
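Restated against Python source, the `from_expression` criterion described above marks a chain as fluent once at least two attribute values are called or subscripted (a parenthesized root counts as one). Examples taken from the removed doc comments:

a().b().c[0]()().d()   # three qualifying attribute values -> Fluent
(a()).b().c[0].d       # parenthesized root plus .b()      -> Fluent
a.b()                  # no called/subscripted attribute value -> NonFluent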
View File
@ -52,17 +52,3 @@ pub(crate) const fn is_avoid_parens_for_long_as_captures_enabled(
) -> bool {
context.is_preview()
}
/// Returns `true` if the
/// [`parenthesize_lambda_bodies`](https://github.com/astral-sh/ruff/pull/21385) preview style is
/// enabled.
pub(crate) const fn is_parenthesize_lambda_bodies_enabled(context: &PyFormatContext) -> bool {
context.is_preview()
}
/// Returns `true` if the
/// [`fluent_layout_split_first_call`](https://github.com/astral-sh/ruff/pull/21369) preview
/// style is enabled.
pub(crate) const fn is_fluent_layout_split_first_call_enabled(context: &PyFormatContext) -> bool {
context.is_preview()
}
View File
@ -9,7 +9,6 @@ use crate::comments::{
Comments, LeadingDanglingTrailingComments, SourceComment, trailing_comments,
};
use crate::context::{NodeLevel, WithNodeLevel};
use crate::expression::expr_lambda::ExprLambdaLayout;
use crate::expression::parentheses::{
NeedsParentheses, OptionalParentheses, Parentheses, Parenthesize, is_expression_parenthesized,
optional_parentheses,
@ -19,7 +18,6 @@ use crate::expression::{
maybe_parenthesize_expression,
};
use crate::other::interpolated_string::InterpolatedStringLayout;
use crate::preview::is_parenthesize_lambda_bodies_enabled;
use crate::statement::trailing_semicolon;
use crate::string::StringLikeExtensions;
use crate::string::implicit::{
@ -305,7 +303,12 @@ impl Format<PyFormatContext<'_>> for FormatStatementsLastExpression<'_> {
&& format_implicit_flat.is_none()
&& format_interpolated_string.is_none()
{
return maybe_parenthesize_value(value, *statement).fmt(f);
return maybe_parenthesize_expression(
value,
*statement,
Parenthesize::IfBreaks,
)
.fmt(f);
}
let comments = f.context().comments().clone();
@ -583,7 +586,11 @@ impl Format<PyFormatContext<'_>> for FormatStatementsLastExpression<'_> {
space(),
operator,
space(),
maybe_parenthesize_value(value, *statement)
maybe_parenthesize_expression(
value,
*statement,
Parenthesize::IfBreaks
)
]
);
}
@ -1362,32 +1369,3 @@ fn is_attribute_with_parenthesized_value(target: &Expr, context: &PyFormatContex
_ => false,
}
}
/// Like [`maybe_parenthesize_expression`] but with special handling for lambdas in preview.
fn maybe_parenthesize_value<'a>(
expression: &'a Expr,
parent: AnyNodeRef<'a>,
) -> MaybeParenthesizeValue<'a> {
MaybeParenthesizeValue { expression, parent }
}
struct MaybeParenthesizeValue<'a> {
expression: &'a Expr,
parent: AnyNodeRef<'a>,
}
impl Format<PyFormatContext<'_>> for MaybeParenthesizeValue<'_> {
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
let MaybeParenthesizeValue { expression, parent } = self;
if is_parenthesize_lambda_bodies_enabled(f.context())
&& let Expr::Lambda(lambda) = expression
&& !f.context().comments().has_leading(lambda)
{
parenthesize_if_expands(&lambda.format().with_options(ExprLambdaLayout::Assignment))
.fmt(f)
} else {
maybe_parenthesize_expression(expression, *parent, Parenthesize::IfBreaks).fmt(f)
}
}
}
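The effect of the removed `maybe_parenthesize_value` path is described by the `ExprLambdaLayout::Assignment` doc comment earlier in this diff: when a lambda is the value of an assignment, prefer parenthesizing its body over wrapping the whole lambda. In Python terms (identifiers illustrative):

# preferred
long_assignment_target = lambda x, y, z: (
    x + y + z
)

# rather than
long_assignment_target = (
    lambda x, y, z: x + y + z
)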
View File
@ -1,7 +1,4 @@
use crate::normalizer::Normalizer;
use anyhow::anyhow;
use datatest_stable::Utf8Path;
use insta::assert_snapshot;
use ruff_db::diagnostic::{
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
DisplayDiagnostics, DummyFileResolver, Severity, Span, SubDiagnostic, SubDiagnosticSeverity,
@ -27,27 +24,26 @@ use std::{fmt, fs};
mod normalizer;
#[expect(clippy::needless_pass_by_value)]
fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
let test_name = input_path
.strip_prefix("./resources/test/fixtures/black")
.unwrap_or(input_path)
.as_str();
#[test]
fn black_compatibility() {
let test_file = |input_path: &Path| {
let content = fs::read_to_string(input_path).unwrap();
let options_path = input_path.with_extension("options.json");
let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
serde_json::from_reader(reader).map_err(|err| {
anyhow!("Expected option file {options_path:?} to be a valid Json file: {err}")
})?
serde_json::from_reader(reader).unwrap_or_else(|_| {
panic!("Expected option file {options_path:?} to be a valid Json file")
})
} else {
PyFormatOptions::from_extension(input_path.as_std_path())
PyFormatOptions::from_extension(input_path)
};
let first_line = content.lines().next().unwrap_or_default();
let formatted_code =
if first_line.starts_with("# flags:") && first_line.contains("--line-ranges=") {
let formatted_code = if first_line.starts_with("# flags:")
&& first_line.contains("--line-ranges=")
{
let line_index = LineIndex::from_source_text(&content);
let ranges = first_line
@ -73,9 +69,13 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
let mut formatted_code = content.clone();
for range in ranges {
let formatted = format_range(&content, range, options.clone()).map_err(|err| {
anyhow!("Range-formatting to succeed but encountered error {err}")
})?;
let formatted =
format_range(&content, range, options.clone()).unwrap_or_else(|err| {
panic!(
"Range-formatting of {} to succeed but encountered error {err}",
input_path.display()
)
});
let range = formatted.source_range();
@ -86,8 +86,12 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
formatted_code
} else {
let printed = format_module_source(&content, options.clone())
.map_err(|err| anyhow!("Formatting to succeed but encountered error {err}"))?;
let printed = format_module_source(&content, options.clone()).unwrap_or_else(|err| {
panic!(
"Formatting of {} to succeed but encountered error {err}",
input_path.display()
)
});
let formatted_code = printed.into_code();
@ -98,7 +102,8 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
let extension = input_path
.extension()
.expect("Test file to have py or pyi extension");
.expect("Test file to have py or pyi extension")
.to_string_lossy();
let expected_path = input_path.with_extension(format!("{extension}.expect"));
let expected_output = fs::read_to_string(&expected_path)
.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
@ -106,18 +111,25 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
let unsupported_syntax_errors =
ensure_unchanged_ast(&content, &formatted_code, &options, input_path);
if formatted_code == expected_output {
// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
// already perfectly captures the expected output.
// The following code mimics insta's logic generating the snapshot name for a test.
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
let full_snapshot_name = format!("black_compatibility@{test_name}.snap",);
let mut components = input_path.components().rev();
let file_name = components.next().unwrap();
let test_suite = components.next().unwrap();
let snapshot_name = format!(
"black_compatibility@{}__{}.snap",
test_suite.as_os_str().to_string_lossy(),
file_name.as_os_str().to_string_lossy()
);
let snapshot_path = Path::new(&workspace_path)
.join("tests/snapshots")
.join(full_snapshot_name);
if formatted_code == expected_output {
.join(snapshot_name);
if snapshot_path.exists() && snapshot_path.is_file() {
// SAFETY: This is a convenience feature. That's why we don't want to abort
// when deleting a no longer needed snapshot fails.
@ -166,33 +178,37 @@ fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stabl
.unwrap();
}
let mut settings = insta::Settings::clone_current();
settings.set_omit_expression(true);
settings.set_input_file(input_path);
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix(test_name);
let _settings = settings.bind_to_scope();
assert_snapshot!(snapshot);
insta::with_settings!({
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
}, {
insta::assert_snapshot!(snapshot);
});
}
Ok(())
};
insta::glob!(
"../resources",
"test/fixtures/black/**/*.{py,pyi}",
test_file
);
}
#[expect(clippy::needless_pass_by_value)]
fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
let test_name = input_path
.strip_prefix("./resources/test/fixtures/ruff")
.unwrap_or(input_path)
.as_str();
#[test]
fn format() {
let test_file = |input_path: &Path| {
let content = fs::read_to_string(input_path).unwrap();
let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
let options_path = input_path.with_extension("options.json");
if let Ok(options_file) = fs::File::open(&options_path) {
let reader = BufReader::new(options_file);
let options: Vec<PyFormatOptions> = serde_json::from_reader(reader).map_err(|_| {
anyhow!("Expected option file {options_path:?} to be a valid Json file")
})?;
let options: Vec<PyFormatOptions> =
serde_json::from_reader(reader).unwrap_or_else(|_| {
panic!("Expected option file {options_path:?} to be a valid Json file")
});
writeln!(snapshot, "## Outputs").unwrap();
@ -248,7 +264,7 @@ fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()>
}
} else {
// We want to capture the differences in the preview style in our fixtures
let options = PyFormatOptions::from_extension(input_path.as_std_path());
let options = PyFormatOptions::from_extension(input_path);
let (formatted_code, unsupported_syntax_errors) =
format_file(&content, &options, input_path);
@ -293,27 +309,26 @@ fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()>
}
}
let mut settings = insta::Settings::clone_current();
settings.set_omit_expression(true);
settings.set_input_file(input_path);
settings.set_prepend_module_to_snapshot(false);
settings.set_snapshot_suffix(test_name);
let _settings = settings.bind_to_scope();
insta::with_settings!({
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
}, {
insta::assert_snapshot!(snapshot);
});
};
assert_snapshot!(snapshot);
Ok(())
}
datatest_stable::harness! {
{ test = black_compatibility, root = "./resources/test/fixtures/black", pattern = r".+\.pyi?$" },
{ test = format, root="./resources/test/fixtures/ruff", pattern = r".+\.pyi?$" }
insta::glob!(
"../resources",
"test/fixtures/ruff/**/*.{py,pyi}",
test_file
);
}
fn format_file(
source: &str,
options: &PyFormatOptions,
input_path: &Utf8Path,
input_path: &Path,
) -> (String, Vec<Diagnostic>) {
let (unformatted, formatted_code) = if source.contains("<RANGE_START>") {
let mut content = source.to_string();
@ -348,7 +363,8 @@ fn format_file(
let formatted =
format_range(&format_input, range, options.clone()).unwrap_or_else(|err| {
panic!(
"Range-formatting of {input_path} to succeed but encountered error {err}",
"Range-formatting of {} to succeed but encountered error {err}",
input_path.display()
)
});
@ -361,7 +377,10 @@ fn format_file(
(Cow::Owned(without_markers), content)
} else {
let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
panic!("Formatting `{input_path} was expected to succeed but it failed: {err}",)
panic!(
"Formatting `{input_path} was expected to succeed but it failed: {err}",
input_path = input_path.display()
)
});
let formatted_code = printed.into_code();
@ -380,20 +399,22 @@ fn format_file(
fn ensure_stability_when_formatting_twice(
formatted_code: &str,
options: &PyFormatOptions,
input_path: &Utf8Path,
input_path: &Path,
) {
let reformatted = match format_module_source(formatted_code, options.clone()) {
Ok(reformatted) => reformatted,
Err(err) => {
let mut diag = Diagnostic::from(&err);
if let Some(range) = err.range() {
let file = SourceFileBuilder::new(input_path.as_str(), formatted_code).finish();
let file =
SourceFileBuilder::new(input_path.to_string_lossy(), formatted_code).finish();
let span = Span::from(file).with_range(range);
diag.annotate(Annotation::primary(span));
}
panic!(
"Expected formatted code of {input_path} to be valid syntax: {err}:\
"Expected formatted code of {} to be valid syntax: {err}:\
\n---\n{formatted_code}---\n{}",
input_path.display(),
diag.display(&DummyFileResolver, &DisplayDiagnosticConfig::default()),
);
}
@ -419,6 +440,7 @@ Formatted once:
Formatted twice:
---
{reformatted}---"#,
input_path = input_path.display(),
options = &DisplayPyOptions(options),
reformatted = reformatted.as_code(),
);
@ -445,7 +467,7 @@ fn ensure_unchanged_ast(
unformatted_code: &str,
formatted_code: &str,
options: &PyFormatOptions,
input_path: &Utf8Path,
input_path: &Path,
) -> Vec<Diagnostic> {
let source_type = options.source_type();
@ -477,7 +499,11 @@ fn ensure_unchanged_ast(
formatted_unsupported_syntax_errors
.retain(|fingerprint, _| !unformatted_unsupported_syntax_errors.contains_key(fingerprint));
let file = SourceFileBuilder::new(input_path.file_name().unwrap(), formatted_code).finish();
let file = SourceFileBuilder::new(
input_path.file_name().unwrap().to_string_lossy(),
formatted_code,
)
.finish();
let diagnostics = formatted_unsupported_syntax_errors
.values()
.map(|error| {
@ -507,10 +533,11 @@ fn ensure_unchanged_ast(
.header("Unformatted", "Formatted")
.to_string();
panic!(
r#"Reformatting the unformatted code of {input_path} resulted in AST changes.
r#"Reformatting the unformatted code of {} resulted in AST changes.
---
{diff}
"#,
input_path.display(),
);
}

View File

@ -192,7 +192,7 @@ class Random:
}
x = {
"foobar": (123) + 456,
@@ -97,24 +94,21 @@
@@ -97,24 +94,20 @@
my_dict = {
@ -221,14 +221,13 @@ class Random:
- .second_call()
- .third_call(some_args="some value")
- )
+ "a key in my dict": MyClass.some_attribute
+ .first_call()
+ "a key in my dict": MyClass.some_attribute.first_call()
+ .second_call()
+ .third_call(some_args="some value")
}
{
@@ -139,17 +133,17 @@
@@ -139,17 +132,17 @@
class Random:
def func():
@ -364,8 +363,7 @@ my_dict = {
/ 100000.0
}
my_dict = {
"a key in my dict": MyClass.some_attribute
.first_call()
"a key in my dict": MyClass.some_attribute.first_call()
.second_call()
.third_call(some_args="some value")
}

View File

@ -906,10 +906,11 @@ x = {
-)
+string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
msg = lambda x: (
-msg = lambda x: (
- f"this is a very very very very long lambda value {x} that doesn't fit on a"
- " single line"
+ f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
+msg = (
+ lambda x: f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
)
dict_with_lambda_values = {
@ -1402,8 +1403,8 @@ string_with_escaped_nameescape = "..............................................
string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
msg = lambda x: (
f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
msg = (
lambda x: f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
)
dict_with_lambda_values = {

View File

@ -375,7 +375,7 @@ a = b if """
# Another use case
data = yaml.load("""\
a: 1
@@ -77,10 +106,12 @@
@@ -77,19 +106,23 @@
b: 2
""",
)
@ -390,7 +390,19 @@ a = b if """
MULTILINE = """
foo
@@ -156,16 +187,24 @@
""".replace("\n", "")
-generated_readme = lambda project_name: """
+generated_readme = (
+ lambda project_name: """
{}
<Add content here!>
""".strip().format(project_name)
+)
parser.usage += """
Custom extra help summary.
@@ -156,16 +189,24 @@
10 LOAD_CONST 0 (None)
12 RETURN_VALUE
""" % (_C.__init__.__code__.co_firstlineno + 1,)
@ -421,7 +433,7 @@ a = b if """
[
"""cow
moos""",
@@ -206,7 +245,9 @@
@@ -206,7 +247,9 @@
"c"
)
@ -432,7 +444,7 @@ a = b if """
assert some_var == expected_result, """
test
@@ -224,10 +265,8 @@
@@ -224,10 +267,8 @@
"""Sxxxxxxx xxxxxxxx, xxxxxxx xx xxxxxxxxx
xxxxxxxxxxxxx xxxxxxx xxxxxxxxx xxx-xxxxxxxxxx xxxxxx xx xxx-xxxxxx"""
),
@ -445,7 +457,7 @@ a = b if """
},
}
@@ -246,14 +285,12 @@
@@ -246,14 +287,12 @@
a
a"""
),
@ -585,11 +597,13 @@ data = yaml.load(
MULTILINE = """
foo
""".replace("\n", "")
generated_readme = lambda project_name: """
generated_readme = (
lambda project_name: """
{}
<Add content here!>
""".strip().format(project_name)
)
parser.usage += """
Custom extra help summary.

View File

@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/await.py
snapshot_kind: text
---
## Input
```python
@ -141,20 +142,3 @@ test_data = await (
.to_list()
)
```
## Preview changes
```diff
--- Stable
+++ Preview
@@ -65,7 +65,8 @@
# https://github.com/astral-sh/ruff/issues/8644
test_data = await (
- Stream.from_async(async_data)
+ Stream
+ .from_async(async_data)
.flat_map_async()
.map()
.filter_async(is_valid_data)
```

View File

@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/call.py
snapshot_kind: text
---
## Input
```python
@ -556,20 +557,3 @@ result = (
result = (object[complicate_caller])("argument").a["b"].test(argument)
```
## Preview changes
```diff
--- Stable
+++ Preview
@@ -57,7 +57,8 @@
# Call chains/fluent interface (https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#call-chains)
result = (
- session.query(models.Customer.id)
+ session
+ .query(models.Customer.id)
.filter(
models.Customer.account_id == 10000,
models.Customer.email == "user@example.org",
```

View File

@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/split_empty_brackets.py
snapshot_kind: text
---
## Input
```python

View File

@ -1,163 +0,0 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/fluent.py
---
## Input
```python
# Fixtures for fluent formatting of call chains
# Note that `fluent.options.json` sets line width to 8
x = a.b()
x = a.b().c()
x = a.b().c().d
x = a.b.c.d().e()
x = a.b.c().d.e().f.g()
# Consecutive calls/subscripts are grouped together
# for the purposes of fluent formatting (though, as of 2025.12.15,
# there may be a break inside of one of these
# calls/subscripts, but that is unrelated to the fluent format.)
x = a()[0]().b().c()
x = a.b()[0].c.d()[1]().e
# Parentheses affect both where the root of the call
# chain is and how many calls we require before applying
# fluent formatting (just 1, in the presence of a parenthesized
# root, as of 2025.12.15.)
x = (a).b()
x = (a()).b()
x = (a.b()).d.e()
x = (a.b().d).e()
```
## Outputs
### Output 1
```
indent-style = space
line-width = 8
indent-width = 4
quote-style = Double
line-ending = LineFeed
magic-trailing-comma = Respect
docstring-code = Disabled
docstring-code-line-width = "dynamic"
preview = Disabled
target_version = 3.10
source_type = Python
```
```python
# Fixtures for fluent formatting of call chains
# Note that `fluent.options.json` sets line width to 8
x = a.b()
x = a.b().c()
x = (
a.b()
.c()
.d
)
x = a.b.c.d().e()
x = (
a.b.c()
.d.e()
.f.g()
)
# Consecutive calls/subscripts are grouped together
# for the purposes of fluent formatting (though, as of 2025.12.15,
# there may be a break inside of one of these
# calls/subscripts, but that is unrelated to the fluent format.)
x = (
a()[
0
]()
.b()
.c()
)
x = (
a.b()[
0
]
.c.d()[
1
]()
.e
)
# Parentheses affect both where the root of the call
# chain is and how many calls we require before applying
# fluent formatting (just 1, in the presence of a parenthesized
# root, as of 2025.12.15.)
x = (
a
).b()
x = (
a()
).b()
x = (
a.b()
).d.e()
x = (
a.b().d
).e()
```
#### Preview changes
```diff
--- Stable
+++ Preview
@@ -7,7 +7,8 @@
x = a.b().c()
x = (
- a.b()
+ a
+ .b()
.c()
.d
)
@@ -15,7 +16,8 @@
x = a.b.c.d().e()
x = (
- a.b.c()
+ a.b
+ .c()
.d.e()
.f.g()
)
@@ -34,7 +36,8 @@
)
x = (
- a.b()[
+ a
+ .b()[
0
]
.c.d()[
```

View File

@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/multiline_string_deviations.py
snapshot_kind: text
---
## Input
```python
@ -105,22 +106,3 @@ generated_readme = (
""".strip().format(project_name)
)
```
## Preview changes
```diff
--- Stable
+++ Preview
@@ -44,10 +44,8 @@
# this by changing `Lambda::needs_parentheses` to return `BestFit` but it causes
# issues when the lambda has comments.
# Let's keep this as a known deviation for now.
-generated_readme = (
- lambda project_name: """
+generated_readme = lambda project_name: """
{}
<Add content here!>
""".strip().format(project_name)
-)
```

View File

@ -1,6 +1,7 @@
---
source: crates/ruff_python_formatter/tests/fixtures.rs
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/parentheses/call_chains.py
snapshot_kind: text
---
## Input
```python
@ -222,72 +223,6 @@ max_message_id = (
.baz()
)
# Note in preview we split at `pl` which some
# folks may dislike. (Similarly with common
# `np` and `pd` invocations).
#
# This is because we cannot reliably predict,
# just from syntax, whether a short identifier
# is being used as a 'namespace' or as an 'object'.
#
# As of 2025.12.15, we do not indent methods in
# fluent formatting. If we ever decide to do so,
# it may make sense to special case call chain roots
# that are shorter than the indent-width (like Prettier does).
# This would have the benefit of handling these common
# two-letter aliases for libraries.
expr = (
pl.scan_parquet("/data/pypi-parquet/*.parquet")
.filter(
[
pl.col("path").str.contains(
r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
),
~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
~pl.col("path").str.contains("/site-packages/", literal=True),
]
)
.with_columns(
month=pl.col("uploaded_on").dt.truncate("1mo"),
ext=pl.col("path")
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
.str.replace_all(pattern="^f.*$", value="Fortran")
.str.replace("rs", "Rust", literal=True)
.str.replace("go", "Go", literal=True)
.str.replace("asm", "Assembly", literal=True)
.replace({"": None}),
)
.group_by(["month", "ext"])
.agg(project_count=pl.col("project_name").n_unique())
.drop_nulls(["ext"])
.sort(["month", "project_count"], descending=True)
)
def indentation_matching_for_loop_in_preview():
if make_this:
if more_nested_because_line_length:
identical_hidden_layer_sizes = all(
current_hidden_layer_sizes == first_hidden_layer_sizes
for current_hidden_layer_sizes in self.component_config[
HIDDEN_LAYERS_SIZES
].values().attr
)
def indentation_matching_walrus_in_preview():
if make_this:
if more_nested_because_line_length:
with self.read_ctx(book_type) as cursor:
if (entry_count := len(names := cursor.execute(
'SELECT name FROM address_book WHERE address=?',
(address,),
).fetchall().some_attr)) == 0 or len(set(names)) > 1:
return
# behavior with parenthesized roots
x = (aaaaaaaaaaaaaaaaaaaaaa).bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc().dddddddddddddddddddddddd().eeeeeeeeeeee
```
## Output
@ -531,237 +466,4 @@ max_message_id = (
.sum()
.baz()
)
# Note in preview we split at `pl` which some
# folks may dislike. (Similarly with common
# `np` and `pd` invocations).
#
# This is because we cannot reliably predict,
# just from syntax, whether a short identifier
# is being used as a 'namespace' or as an 'object'.
#
# As of 2025.12.15, we do not indent methods in
# fluent formatting. If we ever decide to do so,
# it may make sense to special case call chain roots
# that are shorter than the indent-width (like Prettier does).
# This would have the benefit of handling these common
# two-letter aliases for libraries.
expr = (
pl.scan_parquet("/data/pypi-parquet/*.parquet")
.filter(
[
pl.col("path").str.contains(
r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
),
~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
~pl.col("path").str.contains("/site-packages/", literal=True),
]
)
.with_columns(
month=pl.col("uploaded_on").dt.truncate("1mo"),
ext=pl.col("path")
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
.str.replace_all(pattern="^f.*$", value="Fortran")
.str.replace("rs", "Rust", literal=True)
.str.replace("go", "Go", literal=True)
.str.replace("asm", "Assembly", literal=True)
.replace({"": None}),
)
.group_by(["month", "ext"])
.agg(project_count=pl.col("project_name").n_unique())
.drop_nulls(["ext"])
.sort(["month", "project_count"], descending=True)
)
def indentation_matching_for_loop_in_preview():
if make_this:
if more_nested_because_line_length:
identical_hidden_layer_sizes = all(
current_hidden_layer_sizes == first_hidden_layer_sizes
for current_hidden_layer_sizes in self.component_config[
HIDDEN_LAYERS_SIZES
]
.values()
.attr
)
def indentation_matching_walrus_in_preview():
if make_this:
if more_nested_because_line_length:
with self.read_ctx(book_type) as cursor:
if (
entry_count := len(
names := cursor.execute(
"SELECT name FROM address_book WHERE address=?",
(address,),
)
.fetchall()
.some_attr
)
) == 0 or len(set(names)) > 1:
return
# behavior with parenthesized roots
x = (
(aaaaaaaaaaaaaaaaaaaaaa)
.bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc()
.dddddddddddddddddddddddd()
.eeeeeeeeeeee
)
```
## Preview changes
```diff
--- Stable
+++ Preview
@@ -21,7 +21,8 @@
)
raise OsError("") from (
- Blog.objects.filter(
+ Blog.objects
+ .filter(
entry__headline__contains="Lennon",
)
.filter(
@@ -33,7 +34,8 @@
)
raise OsError("sökdjffffsldkfjlhsakfjhalsökafhsöfdahsödfjösaaksjdllllllllllllll") from (
- Blog.objects.filter(
+ Blog.objects
+ .filter(
entry__headline__contains="Lennon",
)
.filter(
@@ -46,7 +48,8 @@
# Break only after calls and indexing
b1 = (
- session.query(models.Customer.id)
+ session
+ .query(models.Customer.id)
.filter(
models.Customer.account_id == account_id, models.Customer.email == email_address
)
@@ -54,7 +57,8 @@
)
b2 = (
- Blog.objects.filter(
+ Blog.objects
+ .filter(
entry__headline__contains="Lennon",
)
.limit_results[:10]
@@ -70,7 +74,8 @@
).filter(
entry__pub_date__year=2008,
)
- + Blog.objects.filter(
+ + Blog.objects
+ .filter(
entry__headline__contains="McCartney",
)
.limit_results[:10]
@@ -89,7 +94,8 @@
d11 = x.e().e().e() #
d12 = x.e().e().e() #
d13 = (
- x.e() #
+ x
+ .e() #
.e()
.e()
)
@@ -101,7 +107,8 @@
# Doesn't fit, fluent style
d3 = (
- x.e() #
+ x
+ .e() #
.esadjkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk()
.esadjkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk()
)
@@ -218,7 +225,8 @@
(
(
- df1_aaaaaaaaaaaa.merge()
+ df1_aaaaaaaaaaaa
+ .merge()
.groupby(
1,
)
@@ -228,7 +236,8 @@
(
(
- df1_aaaaaaaaaaaa.merge()
+ df1_aaaaaaaaaaaa
+ .merge()
.groupby(
1,
)
@@ -255,19 +264,19 @@
expr = (
- pl.scan_parquet("/data/pypi-parquet/*.parquet")
- .filter(
- [
- pl.col("path").str.contains(
- r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
- ),
- ~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
- ~pl.col("path").str.contains("/site-packages/", literal=True),
- ]
- )
+ pl
+ .scan_parquet("/data/pypi-parquet/*.parquet")
+ .filter([
+ pl.col("path").str.contains(
+ r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
+ ),
+ ~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
+ ~pl.col("path").str.contains("/site-packages/", literal=True),
+ ])
.with_columns(
month=pl.col("uploaded_on").dt.truncate("1mo"),
- ext=pl.col("path")
+ ext=pl
+ .col("path")
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
.str.replace_all(pattern="^f.*$", value="Fortran")
@@ -288,9 +297,8 @@
if more_nested_because_line_length:
identical_hidden_layer_sizes = all(
current_hidden_layer_sizes == first_hidden_layer_sizes
- for current_hidden_layer_sizes in self.component_config[
- HIDDEN_LAYERS_SIZES
- ]
+ for current_hidden_layer_sizes in self
+ .component_config[HIDDEN_LAYERS_SIZES]
.values()
.attr
)
@@ -302,7 +310,8 @@
with self.read_ctx(book_type) as cursor:
if (
entry_count := len(
- names := cursor.execute(
+ names := cursor
+ .execute(
"SELECT name FROM address_book WHERE address=?",
(address,),
)
```

View File

@ -592,23 +592,11 @@ impl FormatString {
fn parse_literal(text: &str) -> Result<(FormatPart, &str), FormatParseError> {
let mut cur_text = text;
let mut result_string = String::new();
let mut pending_escape = false;
while !cur_text.is_empty() {
if pending_escape
&& let Some((unicode_string, remaining)) =
FormatString::parse_escaped_unicode_string(cur_text)
{
result_string.push_str(unicode_string);
cur_text = remaining;
pending_escape = false;
continue;
}
match FormatString::parse_literal_single(cur_text) {
Ok((next_char, remaining)) => {
result_string.push(next_char);
cur_text = remaining;
pending_escape = next_char == '\\' && !pending_escape;
}
Err(err) => {
return if result_string.is_empty() {
@ -690,13 +678,6 @@ impl FormatString {
}
Err(FormatParseError::UnmatchedBracket)
}
fn parse_escaped_unicode_string(text: &str) -> Option<(&str, &str)> {
text.strip_prefix("N{")?.find('}').map(|idx| {
let end_idx = idx + 3; // 3 for "N{"
(&text[..end_idx], &text[end_idx..])
})
}
}
pub trait FromTemplate<'a>: Sized {
@ -1039,48 +1020,4 @@ mod tests {
Err(FormatParseError::InvalidCharacterAfterRightBracket)
);
}
#[test]
fn test_format_unicode_escape() {
let expected = Ok(FormatString {
format_parts: vec![FormatPart::Literal("I am a \\N{snowman}".to_owned())],
});
assert_eq!(FormatString::from_str("I am a \\N{snowman}"), expected);
}
#[test]
fn test_format_unicode_escape_with_field() {
let expected = Ok(FormatString {
format_parts: vec![
FormatPart::Literal("I am a \\N{snowman}".to_owned()),
FormatPart::Field {
field_name: "snowman".to_owned(),
conversion_spec: None,
format_spec: String::new(),
},
],
});
assert_eq!(
FormatString::from_str("I am a \\N{snowman}{snowman}"),
expected
);
}
#[test]
fn test_format_multiple_escape_with_field() {
let expected = Ok(FormatString {
format_parts: vec![
FormatPart::Literal("I am a \\\\N".to_owned()),
FormatPart::Field {
field_name: "snowman".to_owned(),
conversion_spec: None,
format_spec: String::new(),
},
],
});
assert_eq!(FormatString::from_str("I am a \\\\N{snowman}"), expected);
}
}
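
The `\N{...}` handling present only on the main side of this hunk mirrors how CPython itself treats these escapes around `str.format`; a quick runnable check (string contents chosen to match the tests above):

```python
# In an ordinary literal the lexer consumes \N{...}, so format() sees no
# replacement field here:
assert "I am a \N{SNOWMAN}".format() == "I am a \u2603"

# With the backslash itself escaped, {snowman} *is* a replacement field:
assert "I am a \\N{snowman}".format(snowman="X") == "I am a \\NX"
```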

View File

@ -12,10 +12,6 @@ license = { workspace = true }
[lib]
[[test]]
name = "fixtures"
harness = false
[dependencies]
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_trivia = { workspace = true }
@ -38,8 +34,7 @@ ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_source_file = { workspace = true }
anyhow = { workspace = true }
datatest-stable = { workspace = true }
insta = { workspace = true }
insta = { workspace = true, features = ["glob"] }
itertools = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

View File

@ -272,9 +272,7 @@ impl SemanticSyntaxChecker {
fn check_annotation<Ctx: SemanticSyntaxContext>(stmt: &ast::Stmt, ctx: &Ctx) {
match stmt {
Stmt::AnnAssign(ast::StmtAnnAssign {
target, annotation, ..
}) => {
Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. }) => {
if ctx.python_version() > PythonVersion::PY313 {
// test_ok valid_annotation_py313
// # parse_options: {"target-version": "3.13"}
@ -299,18 +297,6 @@ impl SemanticSyntaxChecker {
};
visitor.visit_expr(annotation);
}
if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
if let Some(global_stmt) = ctx.global(id.as_str()) {
let global_start = global_stmt.start();
if !ctx.in_module_scope() || target.start() < global_start {
Self::add_error(
ctx,
SemanticSyntaxErrorKind::AnnotatedGlobal(id.to_string()),
target.range(),
);
}
}
}
}
Stmt::FunctionDef(ast::StmtFunctionDef {
type_params,
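
The `AnnotatedGlobal` check that exists only on the main side of this hunk corresponds to a compile-time error CPython raises itself; a small runnable reproduction:

```python
# CPython rejects annotating a name that is declared `global` in the same
# function scope:
src = "def f():\n    global x\n    x: int = 1\n"
try:
    compile(src, "<demo>", "exec")
except SyntaxError as err:
    print(err.msg)  # e.g. "annotated name 'x' can't be global"
```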

View File

@ -1,8 +1,9 @@
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt::{Formatter, Write};
use std::fs;
use std::path::Path;
use datatest_stable::Utf8Path;
use itertools::Itertools;
use ruff_annotate_snippets::{Level, Renderer, Snippet};
use ruff_python_ast::token::{Token, Tokens};
@ -16,49 +17,38 @@ use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, Parsed, parse_unche
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn valid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_valid_syntax(path, &content, "./resources/valid");
Ok(())
#[test]
fn valid_syntax() {
insta::glob!("../resources", "valid/**/*.py", test_valid_syntax);
}
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn invalid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_invalid_syntax(path, &content, "./resources/invalid");
Ok(())
#[test]
fn invalid_syntax() {
insta::glob!("../resources", "invalid/**/*.py", test_invalid_syntax);
}
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_ok(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_valid_syntax(path, &content, "./resources/inline/ok");
Ok(())
#[test]
fn inline_ok() {
insta::glob!("../resources/inline", "ok/**/*.py", test_valid_syntax);
}
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_err(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
test_invalid_syntax(path, &content, "./resources/inline/err");
Ok(())
}
datatest_stable::harness! {
{ test = valid_syntax, root = "./resources/valid", pattern = r"\.pyi?$" },
{ test = inline_ok, root = "./resources/inline/ok", pattern = r"\.pyi?$" },
{ test = invalid_syntax, root = "./resources/invalid", pattern = r"\.pyi?$" },
{ test = inline_err, root="./resources/inline/err", pattern = r"\.pyi?$" }
#[test]
fn inline_err() {
insta::glob!("../resources/inline", "err/**/*.py", test_invalid_syntax);
}
/// Asserts that the parser generates no syntax errors for a valid program.
/// Snapshots the AST.
fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
let options = extract_options(source).unwrap_or_else(|| {
fn test_valid_syntax(input_path: &Path) {
let source = fs::read_to_string(input_path).expect("Expected test file to exist");
let options = extract_options(&source).unwrap_or_else(|| {
ParseOptions::from(Mode::Module).with_target_version(PythonVersion::latest_preview())
});
let parsed = parse_unchecked(source, options.clone());
let parsed = parse_unchecked(&source, options.clone());
if parsed.has_syntax_errors() {
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);
let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
let mut message = "Expected no syntax errors for a valid program but the parser generated the following errors:\n".to_string();
@ -91,8 +81,8 @@ fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
panic!("{input_path:?}: {message}");
}
validate_tokens(parsed.tokens(), source.text_len());
validate_ast(&parsed, source.text_len());
validate_tokens(parsed.tokens(), source.text_len(), input_path);
validate_ast(&parsed, source.text_len(), input_path);
let mut output = String::new();
writeln!(&mut output, "## AST").unwrap();
@ -101,7 +91,7 @@ fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");
let mut visitor =
SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());
SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
for stmt in parsed.suite() {
visitor.visit_stmt(stmt);
@ -112,8 +102,8 @@ fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
if !semantic_syntax_errors.is_empty() {
let mut message = "Expected no semantic syntax errors for a valid program:\n".to_string();
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);
let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
for error in semantic_syntax_errors {
writeln!(
@ -135,7 +125,6 @@ fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
snapshot_suffix => test_name
}, {
insta::assert_snapshot!(output);
});
@ -143,23 +132,22 @@ fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
/// Assert that the parser generates at least one syntax error for the given input file.
/// Snapshots the AST and the error messages.
fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
let options = extract_options(source).unwrap_or_else(|| {
fn test_invalid_syntax(input_path: &Path) {
let source = fs::read_to_string(input_path).expect("Expected test file to exist");
let options = extract_options(&source).unwrap_or_else(|| {
ParseOptions::from(Mode::Module).with_target_version(PythonVersion::PY314)
});
let parsed = parse_unchecked(source, options.clone());
let parsed = parse_unchecked(&source, options.clone());
validate_tokens(parsed.tokens(), source.text_len());
validate_ast(&parsed, source.text_len());
validate_tokens(parsed.tokens(), source.text_len(), input_path);
validate_ast(&parsed, source.text_len(), input_path);
let mut output = String::new();
writeln!(&mut output, "## AST").unwrap();
writeln!(&mut output, "\n```\n{:#?}\n```", parsed.syntax()).unwrap();
let line_index = LineIndex::from_source_text(source);
let source_code = SourceCode::new(source, &line_index);
let line_index = LineIndex::from_source_text(&source);
let source_code = SourceCode::new(&source, &line_index);
if !parsed.errors().is_empty() {
writeln!(&mut output, "## Errors\n").unwrap();
@ -198,7 +186,7 @@ fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");
let mut visitor =
SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());
SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
for stmt in parsed.suite() {
visitor.visit_stmt(stmt);
@ -208,7 +196,7 @@ fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
assert!(
parsed.has_syntax_errors() || !semantic_syntax_errors.is_empty(),
"Expected parser to generate at least one syntax error for a program containing syntax errors."
"{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors."
);
if !semantic_syntax_errors.is_empty() {
@ -232,7 +220,6 @@ fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
omit_expression => true,
input_file => input_path,
prepend_module_to_snapshot => false,
snapshot_suffix => test_name
}, {
insta::assert_snapshot!(output);
});
@ -385,24 +372,26 @@ impl std::fmt::Display for CodeFrame<'_> {
/// Verifies that:
/// * the ranges are strictly increasing when looping over the tokens in insertion order
/// * all ranges are within the length of the source code
fn validate_tokens(tokens: &[Token], source_length: TextSize) {
fn validate_tokens(tokens: &[Token], source_length: TextSize, test_path: &Path) {
let mut previous: Option<&Token> = None;
for token in tokens {
assert!(
token.end() <= source_length,
"Token range exceeds the source code length. Token: {token:#?}",
"{path}: Token range exceeds the source code length. Token: {token:#?}",
path = test_path.display()
);
if let Some(previous) = previous {
assert_eq!(
previous.range().ordering(token.range()),
Ordering::Less,
"Token ranges are not in increasing order
"{path}: Token ranges are not in increasing order
Previous token: {previous:#?}
Current token: {token:#?}
Tokens: {tokens:#?}
",
path = test_path.display(),
);
}
@ -414,9 +403,9 @@ Tokens: {tokens:#?}
/// * the range of the parent node fully encloses all its child nodes
/// * the ranges are strictly increasing when traversing the nodes in pre-order.
/// * all ranges are within the length of the source code.
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize) {
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize, test_path: &Path) {
walk_module(
&mut ValidateAstVisitor::new(parsed.tokens(), source_len),
&mut ValidateAstVisitor::new(parsed.tokens(), source_len, test_path),
parsed.syntax(),
);
}
@ -427,15 +416,17 @@ struct ValidateAstVisitor<'a> {
parents: Vec<AnyNodeRef<'a>>,
previous: Option<AnyNodeRef<'a>>,
source_length: TextSize,
test_path: &'a Path,
}
impl<'a> ValidateAstVisitor<'a> {
fn new(tokens: &'a Tokens, source_length: TextSize) -> Self {
fn new(tokens: &'a Tokens, source_length: TextSize, test_path: &'a Path) -> Self {
Self {
tokens: tokens.iter().peekable(),
parents: Vec::new(),
previous: None,
source_length,
test_path,
}
}
}
@ -453,7 +444,8 @@ impl ValidateAstVisitor<'_> {
// At this point, next_token.end() > node.start()
assert!(
next.start() >= node.start(),
"The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
"{path}: The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
root = self.parents.first()
);
}
@ -472,7 +464,8 @@ impl ValidateAstVisitor<'_> {
// At this point, `next_token.end() > node.end()`
assert!(
next.start() >= node.end(),
"The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
"{path}: The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
root = self.parents.first()
);
}
@ -483,14 +476,16 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
fn enter_node(&mut self, node: AnyNodeRef<'ast>) -> TraversalSignal {
assert!(
node.end() <= self.source_length,
"The range of the node exceeds the length of the source code. Node: {node:#?}",
"{path}: The range of the node exceeds the length of the source code. Node: {node:#?}",
path = self.test_path.display()
);
if let Some(previous) = self.previous {
assert_ne!(
previous.range().ordering(node.range()),
Ordering::Greater,
"The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
"{path}: The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
root = self.parents.first()
);
}
@ -498,7 +493,8 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
if let Some(parent) = self.parents.last() {
assert!(
parent.range().contains_range(node.range()),
"The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
"{path}: The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
path = self.test_path.display(),
root = self.parents.first()
);
}

View File

@ -51,11 +51,5 @@ regex = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }
[features]
default = []
[target.'cfg(all(not(target_os = "macos"), not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true }
[lints]
workspace = true

View File

@ -18,9 +18,9 @@ Valid severities are:
**Type**: `dict[RuleName, "ignore" | "warn" | "error"]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.rules]
possibly-unresolved-reference = "warn"
division-by-zero = "ignore"
@ -45,9 +45,9 @@ configuration setting.
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
extra-paths = ["./shared/my-search-path"]
```
@ -76,9 +76,9 @@ This option can be used to point to virtual or system Python environments.
**Type**: `str`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
python = "./custom-venv-location/.venv"
```
@ -103,9 +103,9 @@ If no platform is specified, ty will use the current platform:
**Type**: `"win32" | "darwin" | "android" | "ios" | "linux" | "all" | str`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
# Tailor type stubs and conditionalized type definitions to windows.
python-platform = "win32"
@ -137,9 +137,9 @@ to reflect the differing contents of the standard library across Python versions
**Type**: `"3.7" | "3.8" | "3.9" | "3.10" | "3.11" | "3.12" | "3.13" | "3.14" | <major>.<minor>`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
python-version = "3.12"
```
@ -158,16 +158,16 @@ If left unspecified, ty will try to detect common project layouts and initialize
* if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
* otherwise, default to `.` (flat layout)
Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
Besides, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
it will also be included in the first party search path.
**Default value**: `null`
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
# Multiple directories (priority order)
root = ["./src", "./lib", "./vendor"]
@ -185,9 +185,9 @@ bundled as a zip file in the binary
**Type**: `str`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.environment]
typeshed = "/path/to/custom/typeshed"
```
@ -200,22 +200,24 @@ Configuration override that applies to specific files based on glob patterns.
An override allows you to apply different rule configurations to specific
files or directories. Multiple overrides can match the same file, with
later overrides taking precedence. Override rules take precedence over global
rules for matching files.
later overrides taking precedence.
For example, to relax enforcement of rules in test files:
### Precedence
- Later overrides in the array take precedence over earlier ones
- Override rules take precedence over global rules for matching files
### Examples
```toml
# Relax rules for test files
[[tool.ty.overrides]]
include = ["tests/**", "**/test_*.py"]
[tool.ty.overrides.rules]
possibly-unresolved-reference = "warn"
```
Or, to ignore a rule in generated files but retain enforcement in an important file:
```toml
# Ignore generated files but still check important ones
[[tool.ty.overrides]]
include = ["generated/**"]
exclude = ["generated/important.py"]
@ -238,9 +240,9 @@ If not specified, defaults to `[]` (excludes no files).
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[[tool.ty.overrides]]
exclude = [
"generated",
@ -266,9 +268,9 @@ If not specified, defaults to `["**"]` (matches all files).
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[[tool.ty.overrides]]
include = [
"src",
@ -290,9 +292,9 @@ severity levels or disable them entirely.
**Type**: `dict[RuleName, "ignore" | "warn" | "error"]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[[tool.ty.overrides]]
include = ["src"]
@ -356,9 +358,9 @@ to re-include `dist` use `exclude = ["!dist"]`
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.src]
exclude = [
"generated",
@ -397,9 +399,9 @@ matches `<project_root>/src` and not `<project_root>/test/src`).
**Type**: `list[str]`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.src]
include = [
"src",
@ -419,9 +421,9 @@ Enabled by default.
**Type**: `bool`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.src]
respect-ignore-files = false
```
@ -430,8 +432,8 @@ respect-ignore-files = false
### `root`
!!! warning "Deprecated"
This option has been deprecated. Use `environment.root` instead.
> [!WARN] "Deprecated"
> This option has been deprecated. Use `environment.root` instead.
The root of the project, used for finding first-party modules.
@ -441,16 +443,16 @@ If left unspecified, ty will try to detect common project layouts and initialize
* if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
* otherwise, default to `.` (flat layout)
Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
Besides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
it will also be included in the first party search path.
**Default value**: `null`
**Type**: `str`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.src]
root = "./app"
```
@ -469,9 +471,9 @@ Defaults to `false`.
**Type**: `bool`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.terminal]
# Error if ty emits any warning-level diagnostics.
error-on-warning = true
@ -489,9 +491,9 @@ Defaults to `full`.
**Type**: `full | concise`
**Example usage**:
**Example usage** (`pyproject.toml`):
```toml title="pyproject.toml"
```toml
[tool.ty.terminal]
output-format = "concise"
```

View File

@ -2,15 +2,6 @@
ty defines and respects the following environment variables:
### `TY_CONFIG_FILE`
Path to a `ty.toml` configuration file to use.
When set, ty will use this file for configuration instead of
discovering configuration files automatically.
Equivalent to the `--config-file` command-line argument.
### `TY_LOG`
If set, ty will use this value as the log level for its `--verbose` output.

201
crates/ty/docs/rules.md generated
View File

@ -39,7 +39,7 @@ def test(): -> "int":
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L135" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L134" target="_blank">View source</a>
</small>
@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L179" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L178" target="_blank">View source</a>
</small>
@ -95,7 +95,7 @@ f(int) # error
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L205" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L204" target="_blank">View source</a>
</small>
@ -126,7 +126,7 @@ a = 1
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L230" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L229" target="_blank">View source</a>
</small>
@ -158,7 +158,7 @@ class C(A, B): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L256" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L255" target="_blank">View source</a>
</small>
@ -190,7 +190,7 @@ class B(A): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-type-alias-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L282" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L281" target="_blank">View source</a>
</small>
@ -218,7 +218,7 @@ type B = A
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L343" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L342" target="_blank">View source</a>
</small>
@ -245,7 +245,7 @@ class B(A, A): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L364" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L363" target="_blank">View source</a>
</small>
@ -357,7 +357,7 @@ def test(): -> "Literal[5]":
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L590" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L589" target="_blank">View source</a>
</small>
@ -387,7 +387,7 @@ class C(A, B): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L614" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L613" target="_blank">View source</a>
</small>
@ -413,7 +413,7 @@ t[3] # IndexError: tuple index out of range
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L396" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L395" target="_blank">View source</a>
</small>
@ -502,7 +502,7 @@ an atypical memory layout.
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L668" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L667" target="_blank">View source</a>
</small>
@ -529,7 +529,7 @@ func("foo") # error: [invalid-argument-type]
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L708" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L707" target="_blank">View source</a>
</small>
@ -557,7 +557,7 @@ a: int = ''
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2003" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1997" target="_blank">View source</a>
</small>
@ -591,7 +591,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-await" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L730" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L729" target="_blank">View source</a>
</small>
@ -627,7 +627,7 @@ asyncio.run(main())
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L760" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L759" target="_blank">View source</a>
</small>
@ -651,7 +651,7 @@ class A(42): ... # error: [invalid-base]
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L811" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L810" target="_blank">View source</a>
</small>
@ -678,7 +678,7 @@ with 1:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L832" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L831" target="_blank">View source</a>
</small>
@ -707,7 +707,7 @@ a: str
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L855" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L854" target="_blank">View source</a>
</small>
@ -751,7 +751,7 @@ except ZeroDivisionError:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.28">0.0.1-alpha.28</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-explicit-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1673" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1667" target="_blank">View source</a>
</small>
@ -787,57 +787,13 @@ class D(A):
def foo(self): ... # fine: overrides `A.foo`
```
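As a quick orientation for this hunk: the `invalid-explicit-override` rule above concerns `@override`-decorated methods that do not actually override anything. A minimal illustrative sketch, not taken from the upstream docs (assumes `typing_extensions`, or `typing.override` on Python 3.12+):
```python
from typing_extensions import override


class A:
    def foo(self) -> None: ...


class B(A):
    @override
    def foo(self) -> None: ...  # fine: decorated and really overrides `A.foo`


class C(A):
    @override
    def bar(self) -> None: ...  # error: `@override`, but no base class defines `bar`
```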
## `invalid-frozen-dataclass-subclass`
<small>
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.35">0.0.1-alpha.35</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-frozen-dataclass-subclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2229" target="_blank">View source</a>
</small>
**What it does**
Checks for dataclasses with invalid frozen inheritance:
- A frozen dataclass cannot inherit from a non-frozen dataclass.
- A non-frozen dataclass cannot inherit from a frozen dataclass.
**Why is this bad?**
Python raises a `TypeError` at runtime when either of these inheritance
patterns occurs.
**Example**
```python
from dataclasses import dataclass
@dataclass
class Base:
x: int
@dataclass(frozen=True)
class Child(Base): # Error raised here
y: int
@dataclass(frozen=True)
class FrozenBase:
x: int
@dataclass
class NonFrozenChild(FrozenBase): # Error raised here
y: int
```
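For contrast with the error cases above, an illustrative valid counterpart (not part of the upstream docs): keeping `frozen` consistent across the hierarchy avoids the runtime `TypeError`.
```python
from dataclasses import dataclass


@dataclass(frozen=True)
class FrozenBase:
    x: int


@dataclass(frozen=True)
class FrozenChild(FrozenBase):  # fine: frozen inherits from frozen
    y: int


@dataclass
class Base:
    x: int


@dataclass
class Child(Base):  # fine: non-frozen inherits from non-frozen
    y: int
```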
## `invalid-generic-class`
<small>
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L891" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L890" target="_blank">View source</a>
</small>
@ -848,21 +804,16 @@ Checks for the creation of invalid generic classes
**Why is this bad?**
There are several requirements that you must follow when defining a generic class.
Many of these result in `TypeError` being raised at runtime if they are violated.
**Examples**
```python
from typing_extensions import Generic, TypeVar
from typing import Generic, TypeVar
T = TypeVar("T")
U = TypeVar("U", default=int)
T = TypeVar("T") # okay
# error: class uses both PEP-695 syntax and legacy syntax
class C[U](Generic[T]): ...
# error: type parameter with default comes before type parameter without default
class D(Generic[U, T]): ...
```
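An illustrative counterpart that satisfies both requirements shown above (a sketch, not part of the upstream docs): pick one syntax rather than mixing them, and put defaulted type parameters after non-defaulted ones. The `default=` argument assumes `typing_extensions` (or `typing` on Python 3.13+).
```python
from typing_extensions import Generic, TypeVar

T = TypeVar("T")
U = TypeVar("U", default=int)


# fine: defaulted type parameter comes after the non-defaulted one
class D(Generic[T, U]): ...


# fine: PEP-695 syntax only (Python 3.12+), not mixed with `Generic[...]`
class C[V]: ...
```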
**References**
@ -875,7 +826,7 @@ class D(Generic[U, T]): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.17">0.0.1-alpha.17</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L635" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L634" target="_blank">View source</a>
</small>
@ -914,7 +865,7 @@ carol = Person(name="Carol", age=25) # typo!
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L922" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L916" target="_blank">View source</a>
</small>
@ -949,7 +900,7 @@ def f(t: TypeVar("U")): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1019" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1013" target="_blank">View source</a>
</small>
@ -983,7 +934,7 @@ class B(metaclass=f): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-method-override" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2131" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2125" target="_blank">View source</a>
</small>
@ -1090,7 +1041,7 @@ Correct use of `@override` is enforced by ty's `invalid-explicit-override` rule.
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-named-tuple" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L542" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L541" target="_blank">View source</a>
</small>
@ -1144,7 +1095,7 @@ AttributeError: Cannot overwrite NamedTuple attribute _asdict
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-newtype" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L995" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L989" target="_blank">View source</a>
</small>
@ -1174,7 +1125,7 @@ Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType`
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1046" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1040" target="_blank">View source</a>
</small>
@ -1224,7 +1175,7 @@ def foo(x: int) -> int: ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1145" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1139" target="_blank">View source</a>
</small>
@ -1250,7 +1201,7 @@ def f(a: int = ''): ...
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-paramspec" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L950" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L944" target="_blank">View source</a>
</small>
@ -1281,7 +1232,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L478" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L477" target="_blank">View source</a>
</small>
@ -1315,7 +1266,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1165" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1159" target="_blank">View source</a>
</small>
@ -1364,7 +1315,7 @@ def g():
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L689" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L688" target="_blank">View source</a>
</small>
@ -1389,7 +1340,7 @@ def func() -> int:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1208" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1202" target="_blank">View source</a>
</small>
@ -1447,7 +1398,7 @@ TODO #14889
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.6">0.0.1-alpha.6</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L974" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L968" target="_blank">View source</a>
</small>
@ -1474,7 +1425,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1440" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1434" target="_blank">View source</a>
</small>
@ -1521,7 +1472,7 @@ Bar[int] # error: too few arguments
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1247" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1241" target="_blank">View source</a>
</small>
@ -1551,7 +1502,7 @@ TYPE_CHECKING = ''
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1271" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1265" target="_blank">View source</a>
</small>
@ -1581,7 +1532,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1323" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1317" target="_blank">View source</a>
</small>
@ -1615,7 +1566,7 @@ f(10) # Error
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1295" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1289" target="_blank">View source</a>
</small>
@ -1649,7 +1600,7 @@ class C:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1351" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1345" target="_blank">View source</a>
</small>
@ -1684,7 +1635,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1380" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1374" target="_blank">View source</a>
</small>
@ -1709,7 +1660,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-typed-dict-key" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2104" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2098" target="_blank">View source</a>
</small>
@ -1742,7 +1693,7 @@ alice["age"] # KeyError
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1399" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1393" target="_blank">View source</a>
</small>
@ -1771,7 +1722,7 @@ func("string") # error: [no-matching-overload]
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20non-subscriptable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1422" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1416" target="_blank">View source</a>
</small>
@ -1795,7 +1746,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1481" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1475" target="_blank">View source</a>
</small>
@ -1821,7 +1772,7 @@ for i in 34: # TypeError: 'int' object is not iterable
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20override-of-final-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1646" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1640" target="_blank">View source</a>
</small>
@ -1854,7 +1805,7 @@ class B(A):
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1532" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1526" target="_blank">View source</a>
</small>
@ -1881,7 +1832,7 @@ f(1, x=2) # Error raised here
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20positional-only-parameter-as-kwarg" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1857" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1851" target="_blank">View source</a>
</small>
@ -1939,7 +1890,7 @@ def test(): -> "int":
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1979" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1973" target="_blank">View source</a>
</small>
@ -1969,7 +1920,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1623" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1617" target="_blank">View source</a>
</small>
@ -1998,7 +1949,7 @@ class B(A): ... # Error raised here
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.30">0.0.1-alpha.30</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20super-call-in-named-tuple-method" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1791" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1785" target="_blank">View source</a>
</small>
@ -2032,7 +1983,7 @@ class F(NamedTuple):
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1731" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1725" target="_blank">View source</a>
</small>
@ -2059,7 +2010,7 @@ f("foo") # Error raised here
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1709" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1703" target="_blank">View source</a>
</small>
@ -2087,7 +2038,7 @@ def _(x: int):
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1752" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1746" target="_blank">View source</a>
</small>
@ -2133,7 +2084,7 @@ class A:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1836" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1830" target="_blank">View source</a>
</small>
@ -2160,7 +2111,7 @@ f(x=1, y=2) # Error raised here
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1878" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1872" target="_blank">View source</a>
</small>
@ -2188,7 +2139,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1900" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1894" target="_blank">View source</a>
</small>
@ -2213,7 +2164,7 @@ import foo # ModuleNotFoundError: No module named 'foo'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1919" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1913" target="_blank">View source</a>
</small>
@ -2238,7 +2189,7 @@ print(x) # NameError: name 'x' is not defined
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1501" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1495" target="_blank">View source</a>
</small>
@ -2275,7 +2226,7 @@ b1 < b2 < b1 # exception raised here
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1938" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1932" target="_blank">View source</a>
</small>
@ -2303,7 +2254,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1960" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1954" target="_blank">View source</a>
</small>
@ -2328,7 +2279,7 @@ l[1:10:0] # ValueError: slice step cannot be zero
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ambiguous-protocol-member" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L507" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L506" target="_blank">View source</a>
</small>
@ -2369,7 +2320,7 @@ class SubProto(BaseProto, Protocol):
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.16">0.0.1-alpha.16</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L322" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L321" target="_blank">View source</a>
</small>
@ -2457,7 +2408,7 @@ a = 20 / 0 # type: ignore
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-attribute" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1553" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1547" target="_blank">View source</a>
</small>
@ -2485,7 +2436,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-implicit-call" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L153" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L152" target="_blank">View source</a>
</small>
@ -2517,7 +2468,7 @@ A()[0] # TypeError: 'A' object is not subscriptable
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-import" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1575" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1569" target="_blank">View source</a>
</small>
@ -2549,7 +2500,7 @@ from module import a # ImportError: cannot import name 'a' from 'module'
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2031" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2025" target="_blank">View source</a>
</small>
@ -2576,7 +2527,7 @@ cast(int, f()) # Redundant
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1818" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1812" target="_blank">View source</a>
</small>
@ -2600,7 +2551,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.15">0.0.1-alpha.15</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2052" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2046" target="_blank">View source</a>
</small>
@ -2658,7 +2609,7 @@ def g():
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.7">0.0.1-alpha.7</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L778" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L777" target="_blank">View source</a>
</small>
@ -2697,7 +2648,7 @@ class D(C): ... # error: [unsupported-base]
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20useless-overload-body" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1089" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1083" target="_blank">View source</a>
</small>
@ -2760,7 +2711,7 @@ def foo(x: int | str) -> int | str:
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a>) ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L304" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L303" target="_blank">View source</a>
</small>
@ -2784,7 +2735,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime.
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference" target="_blank">Related issues</a> ·
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1601" target="_blank">View source</a>
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1595" target="_blank">View source</a>
</small>

View File

@ -9,7 +9,6 @@ use ty_combine::Combine;
use ty_project::metadata::options::{EnvironmentOptions, Options, SrcOptions, TerminalOptions};
use ty_project::metadata::value::{RangedValue, RelativeGlobPattern, RelativePathBuf, ValueSource};
use ty_python_semantic::lint;
use ty_static::EnvVars;
// Configures Clap v3-style help menu colors
const STYLES: Styles = Styles::styled()
@ -122,7 +121,7 @@ pub(crate) struct CheckCommand {
/// The path to a `ty.toml` file to use for configuration.
///
/// While ty configuration can be included in a `pyproject.toml` file, it is not allowed in this context.
#[arg(long, env = EnvVars::TY_CONFIG_FILE, value_name = "PATH")]
#[arg(long, env = "TY_CONFIG_FILE", value_name = "PATH")]
pub(crate) config_file: Option<SystemPathBuf>,
/// The format to use for printing diagnostic messages.

View File

@ -2,22 +2,6 @@ use colored::Colorize;
use std::io;
use ty::{ExitStatus, run};
#[cfg(all(
not(target_os = "macos"),
not(target_os = "windows"),
not(target_os = "openbsd"),
not(target_os = "aix"),
not(target_os = "android"),
any(
target_arch = "x86_64",
target_arch = "aarch64",
target_arch = "powerpc64",
target_arch = "riscv64"
)
))]
#[global_allocator]
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
pub fn main() -> ExitStatus {
run().unwrap_or_else(|error| {
use io::Write;

View File

@ -2390,14 +2390,14 @@ fn default_root_flat_layout() -> anyhow::Result<()> {
fn default_root_tests_folder() -> anyhow::Result<()> {
let case = CliTest::with_files([
("src/foo.py", "foo = 10"),
("tests/bar.py", "baz = 20"),
("tests/bar.py", "bar = 20"),
(
"tests/test_bar.py",
r#"
from foo import foo
from bar import baz
from bar import bar
print(f"{foo} {baz}")
print(f"{foo} {bar}")
"#,
),
])?;

View File

@ -29,11 +29,12 @@ pub fn code_actions(
let mut actions = Vec::new();
// Suggest imports/qualifications for unresolved references (often ideal)
// Suggest imports for unresolved references (often ideal)
// TODO: suggest qualifying with an already imported symbol
let is_unresolved_reference =
lint_id == LintId::of(&UNRESOLVED_REFERENCE) || lint_id == LintId::of(&UNDEFINED_REVEAL);
if is_unresolved_reference
&& let Some(import_quick_fix) = unresolved_fixes(db, file, diagnostic_range)
&& let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range)
{
actions.extend(import_quick_fix);
}
@ -48,7 +49,7 @@ pub fn code_actions(
actions
}
fn unresolved_fixes(
fn create_import_symbol_quick_fix(
db: &dyn Db,
file: File,
diagnostic_range: TextRange,
@ -58,7 +59,7 @@ fn unresolved_fixes(
let symbol = &node.expr_name()?.id;
Some(
completion::unresolved_fixes(db, file, &parsed, symbol, node)
completion::missing_imports(db, file, &parsed, symbol, node)
.into_iter()
.map(|import| QuickFix {
title: import.label,
@ -83,7 +84,6 @@ mod tests {
system::{DbWithWritableSystem, SystemPathBuf},
};
use ruff_diagnostics::Fix;
use ruff_python_trivia::textwrap::dedent;
use ruff_text_size::{TextRange, TextSize};
use ty_project::ProjectMetadata;
use ty_python_semantic::{
@ -149,7 +149,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a / 0 # ty:ignore[division-by-zero]
| ^
@ -157,6 +157,7 @@ mod tests {
1 |
- b = a / 0 # ty:ignore[division-by-zero]
2 + b = a / 0 # ty:ignore[division-by-zero, unresolved-reference]
3 |
");
}
@ -170,7 +171,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a / 0 # ty:ignore[division-by-zero,]
| ^
@ -178,6 +179,7 @@ mod tests {
1 |
- b = a / 0 # ty:ignore[division-by-zero,]
2 + b = a / 0 # ty:ignore[division-by-zero, unresolved-reference]
3 |
");
}
@ -191,7 +193,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a / 0 # ty:ignore[division-by-zero ]
| ^
@ -199,6 +201,7 @@ mod tests {
1 |
- b = a / 0 # ty:ignore[division-by-zero ]
2 + b = a / 0 # ty:ignore[division-by-zero, unresolved-reference ]
3 |
");
}
@ -212,7 +215,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a / 0 # ty:ignore[division-by-zero] some explanation
| ^
@ -220,6 +223,7 @@ mod tests {
1 |
- b = a / 0 # ty:ignore[division-by-zero] some explanation
2 + b = a / 0 # ty:ignore[division-by-zero] some explanation # ty:ignore[unresolved-reference]
3 |
");
}
@ -237,13 +241,13 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:9
--> main.py:3:21
|
2 | b = (
3 | / a # ty:ignore[division-by-zero]
4 | | /
5 | | 0
| |_________^
| |_____________________^
6 | )
|
1 |
@ -270,13 +274,13 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:9
--> main.py:3:21
|
2 | b = (
3 | / a
4 | | /
5 | | 0 # ty:ignore[division-by-zero]
| |_________^
| |_____________________^
6 | )
|
2 | b = (
@ -285,6 +289,7 @@ mod tests {
- 0 # ty:ignore[division-by-zero]
5 + 0 # ty:ignore[division-by-zero, unresolved-reference]
6 | )
7 |
");
}
@ -302,13 +307,13 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:9
--> main.py:3:21
|
2 | b = (
3 | / a # ty:ignore[division-by-zero]
4 | | /
5 | | 0 # ty:ignore[division-by-zero]
| |_________^
| |_____________________^
6 | )
|
1 |
@ -334,7 +339,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:6
--> main.py:3:18
|
2 | b = f"""
3 | {a}
@ -347,6 +352,7 @@ mod tests {
4 | more text
- """
5 + """ # ty:ignore[unresolved-reference]
6 |
"#);
}
@ -365,7 +371,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:4:5
--> main.py:4:17
|
2 | b = f"""
3 | {
@ -397,7 +403,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a + """
| ^
@ -409,6 +415,7 @@ mod tests {
3 | more text
- """
4 + """ # ty:ignore[unresolved-reference]
5 |
"#);
}
@ -423,7 +430,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:5
--> main.py:2:17
|
2 | b = a \
| ^
@ -433,6 +440,7 @@ mod tests {
2 | b = a \
- + "test"
3 + + "test" # ty:ignore[unresolved-reference]
4 |
"#);
}
@ -446,7 +454,7 @@ mod tests {
assert_snapshot!(test.code_actions(&UNDEFINED_REVEAL), @r"
info[code-action]: import typing.reveal_type
--> main.py:2:1
--> main.py:2:13
|
2 | reveal_type(1)
| ^^^^^^^^^^^
@ -455,9 +463,10 @@ mod tests {
1 + from typing import reveal_type
2 |
3 | reveal_type(1)
4 |
info[code-action]: Ignore 'undefined-reveal' for this line
--> main.py:2:1
--> main.py:2:13
|
2 | reveal_type(1)
| ^^^^^^^^^^^
@ -465,230 +474,7 @@ mod tests {
1 |
- reveal_type(1)
2 + reveal_type(1) # ty:ignore[undefined-reveal]
");
}
#[test]
fn unresolved_deprecated() {
let test = CodeActionTest::with_source(
r#"
@<START>deprecated<END>("do not use")
def my_func(): ...
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: import warnings.deprecated
--> main.py:2:2
|
2 | @deprecated("do not use")
| ^^^^^^^^^^
3 | def my_func(): ...
|
help: This is a preferred code action
1 + from warnings import deprecated
2 |
3 | @deprecated("do not use")
4 | def my_func(): ...
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:2
|
2 | @deprecated("do not use")
| ^^^^^^^^^^
3 | def my_func(): ...
|
1 |
- @deprecated("do not use")
2 + @deprecated("do not use") # ty:ignore[unresolved-reference]
3 | def my_func(): ...
"#);
}
#[test]
fn unresolved_deprecated_warnings_imported() {
let test = CodeActionTest::with_source(
r#"
import warnings
@<START>deprecated<END>("do not use")
def my_func(): ...
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r#"
info[code-action]: import warnings.deprecated
--> main.py:4:2
|
2 | import warnings
3 |
4 | @deprecated("do not use")
| ^^^^^^^^^^
5 | def my_func(): ...
|
help: This is a preferred code action
1 + from warnings import deprecated
2 |
3 | import warnings
4 |
info[code-action]: qualify warnings.deprecated
--> main.py:4:2
|
2 | import warnings
3 |
4 | @deprecated("do not use")
| ^^^^^^^^^^
5 | def my_func(): ...
|
help: This is a preferred code action
1 |
2 | import warnings
3 |
- @deprecated("do not use")
4 + @warnings.deprecated("do not use")
5 | def my_func(): ...
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:4:2
|
2 | import warnings
3 |
4 | @deprecated("do not use")
| ^^^^^^^^^^
5 | def my_func(): ...
|
1 |
2 | import warnings
3 |
- @deprecated("do not use")
4 + @deprecated("do not use") # ty:ignore[unresolved-reference]
5 | def my_func(): ...
"#);
}
// using `importlib.abc.ExecutionLoader` when no imports are in scope
#[test]
fn unresolved_loader() {
let test = CodeActionTest::with_source(
r#"
<START>ExecutionLoader<END>
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: import importlib.abc.ExecutionLoader
--> main.py:2:1
|
2 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
help: This is a preferred code action
1 + from importlib.abc import ExecutionLoader
2 |
3 | ExecutionLoader
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:2:1
|
2 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
1 |
- ExecutionLoader
2 + ExecutionLoader # ty:ignore[unresolved-reference]
");
}
// using `importlib.abc.ExecutionLoader` when `import importlib` is in scope
//
// TODO: `importlib.abc` is available whenever `importlib` is, so qualifying
// `importlib.abc.ExecutionLoader` without adding imports is actually legal here!
#[test]
fn unresolved_loader_importlib_imported() {
let test = CodeActionTest::with_source(
r#"
import importlib
<START>ExecutionLoader<END>
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: import importlib.abc.ExecutionLoader
--> main.py:3:1
|
2 | import importlib
3 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
help: This is a preferred code action
1 + from importlib.abc import ExecutionLoader
2 |
3 | import importlib
4 | ExecutionLoader
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:1
|
2 | import importlib
3 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
1 |
2 | import importlib
- ExecutionLoader
3 + ExecutionLoader # ty:ignore[unresolved-reference]
");
}
// Using `importlib.abc.ExecutionLoader` when `import importlib.abc` is in scope
#[test]
fn unresolved_loader_abc_imported() {
let test = CodeActionTest::with_source(
r#"
import importlib.abc
<START>ExecutionLoader<END>
"#,
);
assert_snapshot!(test.code_actions(&UNRESOLVED_REFERENCE), @r"
info[code-action]: import importlib.abc.ExecutionLoader
--> main.py:3:1
|
2 | import importlib.abc
3 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
help: This is a preferred code action
1 + from importlib.abc import ExecutionLoader
2 |
3 | import importlib.abc
4 | ExecutionLoader
info[code-action]: qualify importlib.abc.ExecutionLoader
--> main.py:3:1
|
2 | import importlib.abc
3 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
help: This is a preferred code action
1 |
2 | import importlib.abc
- ExecutionLoader
3 + importlib.abc.ExecutionLoader
info[code-action]: Ignore 'unresolved-reference' for this line
--> main.py:3:1
|
2 | import importlib.abc
3 | ExecutionLoader
| ^^^^^^^^^^^^^^^
|
1 |
2 | import importlib.abc
- ExecutionLoader
3 + ExecutionLoader # ty:ignore[unresolved-reference]
");
}
@ -707,7 +493,7 @@ mod tests {
db.init_program().unwrap();
let mut cleansed = dedent(source).to_string();
let mut cleansed = source.to_string();
let start = cleansed
.find("<START>")

View File

@ -67,7 +67,6 @@ impl<'db> Completions<'db> {
self.items
}
// Convert this collection into a list of "import..." fixes
fn into_imports(mut self) -> Vec<ImportEdit> {
self.items.sort_by(compare_suggestions);
self.items
@ -83,28 +82,6 @@ impl<'db> Completions<'db> {
.collect()
}
// Convert this collection into a list of "qualify..." fixes
fn into_qualifications(mut self, range: TextRange) -> Vec<ImportEdit> {
self.items.sort_by(compare_suggestions);
self.items
.dedup_by(|c1, c2| (&c1.name, c1.module_name) == (&c2.name, c2.module_name));
self.items
.into_iter()
.filter_map(|item| {
// If we would have to actually import something, don't suggest the qualification
// (we could, maybe we should, but for now, we don't)
if item.import.is_some() {
return None;
}
Some(ImportEdit {
label: format!("qualify {}", item.insert.as_ref()?),
edit: Edit::replacement(item.insert?.into_string(), range.start(), range.end()),
})
})
.collect()
}
/// Attempts to add the given completion to this collection.
///
/// When added, `true` is returned.
@ -490,17 +467,6 @@ pub fn completion<'db>(
!ty.is_notimplemented(db)
});
}
if is_specifying_for_statement_iterable(&parsed, offset, typed.as_deref()) {
// Remove all keywords that don't make sense given the context,
// even if they are syntactically valid, e.g. `None`.
completions.retain(|item| {
let Some(kind) = item.kind else { return true };
if kind != CompletionKind::Keyword {
return true;
}
matches!(item.name.as_str(), "await" | "lambda" | "yield")
});
}
completions.into_completions()
}
@ -515,18 +481,6 @@ fn detect_function_arg_completions<'db>(
parsed: &ParsedModuleRef,
offset: TextSize,
) -> Option<Vec<Completion<'db>>> {
// But be careful: this isn't as simple as just finding a call
// expression. We also have to make sure we are in the "arguments"
// portion of the call. Otherwise we risk incorrectly returning
// something for `(<CURSOR>)(arg1, arg2)`-style expressions.
if !covering_node(parsed.syntax().into(), TextRange::empty(offset))
.ancestors()
.take_while(|node| !node.is_statement())
.any(|node| node.is_arguments())
{
return None;
}
let sig_help = signature_help(db, file, offset)?;
let set_function_args = detect_set_function_args(parsed, offset);
@ -601,19 +555,15 @@ pub(crate) struct ImportEdit {
pub edit: Edit,
}
/// Get fixes that would resolve an unresolved reference
pub(crate) fn unresolved_fixes(
pub(crate) fn missing_imports(
db: &dyn Db,
file: File,
parsed: &ParsedModuleRef,
symbol: &str,
node: AnyNodeRef,
) -> Vec<ImportEdit> {
let mut results = Vec::new();
let scoped = ScopedTarget { node };
// Request imports we could add to put the symbol in scope
let mut completions = Completions::exactly(db, symbol);
let scoped = ScopedTarget { node };
add_unimported_completions(
db,
file,
@ -624,23 +574,8 @@ pub(crate) fn unresolved_fixes(
},
&mut completions,
);
results.extend(completions.into_imports());
// Request qualifications we could apply to the symbol to make it resolve
let mut completions = Completions::exactly(db, symbol);
add_unimported_completions(
db,
file,
parsed,
scoped,
|module_name: &ModuleName, symbol: &str| {
ImportRequest::import(module_name.as_str(), symbol).force()
},
&mut completions,
);
results.extend(completions.into_qualifications(node.range()));
results
completions.into_imports()
}
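The function above is where the two fix families differ between the branches: on main, `unresolved_fixes` returns both "import ..." and "qualify ..." edits, while the 0.14.9 side only returns import edits via `missing_imports`. A rough call-site sketch; the surrounding variables (`db`, `file`, `parsed`, `node`) and the printing loop are assumptions for illustration, only the signature and the `ImportEdit { label, edit }` shape come from the hunks above:

```rust
// Hypothetical consumer of the main-branch `unresolved_fixes`.
let fixes = unresolved_fixes(db, file, &parsed, "ExecutionLoader", node);
for fix in &fixes {
    // Each fix pairs a human-readable label ("import ..." or "qualify ...")
    // with the text edit that implements it.
    println!("{}", fix.label);
}
```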
/// Adds completions derived from keywords.
@ -1630,7 +1565,12 @@ fn is_in_definition_place(
/// Returns true when the cursor sits on a binding statement.
/// E.g., naming a parameter, a type parameter, or the target of a `for <name>` loop.
fn is_in_variable_binding(parsed: &ParsedModuleRef, offset: TextSize, typed: Option<&str>) -> bool {
let range = typed_text_range(typed, offset);
let range = if let Some(typed) = typed {
let start = offset.saturating_sub(typed.text_len());
TextRange::new(start, offset)
} else {
TextRange::empty(offset)
};
let covering = covering_node(parsed.syntax().into(), range);
covering.ancestors().any(|node| match node {
@ -1685,36 +1625,6 @@ fn is_raising_exception(tokens: &[Token]) -> bool {
false
}
/// Returns true when the cursor is after the `in` keyword in a
/// `for x in <CURSOR>` statement.
fn is_specifying_for_statement_iterable(
parsed: &ParsedModuleRef,
offset: TextSize,
typed: Option<&str>,
) -> bool {
let range = typed_text_range(typed, offset);
let covering = covering_node(parsed.syntax().into(), range);
covering.parent().is_some_and(|node| {
matches!(
node, ast::AnyNodeRef::StmtFor(stmt_for) if stmt_for.iter.range().contains_range(range)
)
})
}
/// Returns the `TextRange` of the `typed` text.
///
/// `typed` should be the text immediately before the
/// provided cursor `offset`.
fn typed_text_range(typed: Option<&str>, offset: TextSize) -> TextRange {
if let Some(typed) = typed {
let start = offset.saturating_sub(typed.text_len());
TextRange::new(start, offset)
} else {
TextRange::empty(offset)
}
}
/// Order completions according to the following rules:
///
/// 1) Names with no underscore prefix
@ -2606,7 +2516,9 @@ def frob(): ...
assert_snapshot!(
builder.skip_keywords().skip_builtins().build().snapshot(),
@"<No completions found after filtering out completions>",
@r"
foo
",
);
}
@ -2620,7 +2532,9 @@ def frob(): ...
assert_snapshot!(
builder.skip_keywords().skip_builtins().build().snapshot(),
@"<No completions found after filtering out completions>",
@r"
foo
",
);
}
@ -3212,7 +3126,7 @@ quux.<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r"
count :: bound method Quux.count(value: Any, /) -> int
index :: bound method Quux.index(value: Any, start: SupportsIndex = 0, stop: SupportsIndex = ..., /) -> int
index :: bound method Quux.index(value: Any, start: SupportsIndex = Literal[0], stop: SupportsIndex = int, /) -> int
x :: int
y :: str
__add__ :: Overload[(value: tuple[int | str, ...], /) -> tuple[int | str, ...], (value: tuple[_T@__add__, ...], /) -> tuple[int | str | _T@__add__, ...]]
@ -3277,7 +3191,7 @@ bar(o<CURSOR>
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@r"
foo
okay=
okay
"
);
}
@ -3298,7 +3212,7 @@ bar(o<CURSOR>
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@r"
foo
okay=
okay
"
);
}
@ -3316,9 +3230,9 @@ foo(b<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@r"
bar=
barbaz=
baz=
bar
barbaz
baz
"
);
}
@ -3335,7 +3249,9 @@ foo(bar=1, b<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"baz="
@r"
baz
"
);
}
@ -3353,7 +3269,9 @@ abc(o<CURSOR>
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"okay="
@r"
okay
"
);
}
@ -3369,7 +3287,9 @@ abc(okay=1, ba<CURSOR> baz=5
assert_snapshot!(
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@"bar="
@r"
bar
"
);
}
@ -3413,9 +3333,9 @@ bar(o<CURSOR>
builder.skip_keywords().skip_builtins().skip_auto_import().build().snapshot(),
@r"
foo
okay=
okay_abc=
okay_okay=
okay
okay_abc
okay_okay
"
);
}
@ -3435,7 +3355,7 @@ bar(<CURSOR>
assert_snapshot!(builder.skip_keywords().skip_builtins().build().snapshot(), @r"
bar
foo
okay=
okay
");
}
@ -4791,7 +4711,8 @@ from os.<CURSOR>
let last_nonunderscore = test
.completions()
.iter()
.rfind(|c| !c.name.starts_with('_'))
.filter(|c| !c.name.starts_with('_'))
.next_back()
.unwrap();
assert_eq!(&last_nonunderscore.name, "type_check_only");
@ -5903,62 +5824,6 @@ def foo(param: s<CURSOR>)
.contains("str");
}
#[test]
fn no_statement_keywords_in_for_statement_simple1() {
completion_test_builder(
"\
for x in a<CURSOR>
",
)
.build()
.contains("lambda")
.contains("await")
.not_contains("raise")
.not_contains("False");
}
#[test]
fn no_statement_keywords_in_for_statement_simple2() {
completion_test_builder(
"\
for x, y, _ in a<CURSOR>
",
)
.build()
.contains("lambda")
.contains("await")
.not_contains("raise")
.not_contains("False");
}
#[test]
fn no_statement_keywords_in_for_statement_simple3() {
completion_test_builder(
"\
for i, (x, y, z) in a<CURSOR>
",
)
.build()
.contains("lambda")
.contains("await")
.not_contains("raise")
.not_contains("False");
}
#[test]
fn no_statement_keywords_in_for_statement_complex() {
completion_test_builder(
"\
for i, (obj.x, (a[0], b['k']), _), *rest in a<CURSOR>
",
)
.build()
.contains("lambda")
.contains("await")
.not_contains("raise")
.not_contains("False");
}
#[test]
fn favour_symbols_currently_imported() {
let snapshot = CursorTest::builder()
@ -6717,27 +6582,6 @@ def f(zqzqzq: str):
);
}
#[test]
fn auto_import_prioritizes_reusing_import_from_statements() {
let builder = completion_test_builder(
"\
import typing
from typing import Callable
TypedDi<CURSOR>
",
);
assert_snapshot!(
builder.imports().build().snapshot(),
@r"
TypedDict :: , TypedDict
is_typeddict :: , is_typeddict
_FilterConfigurationTypedDict :: from logging.config import _FilterConfigurationTypedDict
_FormatterConfigurationTypedDict :: from logging.config import _FormatterConfigurationTypedDict
",
);
}
/// A way to create a simple single-file (named `main.py`) completion test
/// builder.
///
@ -6763,7 +6607,6 @@ TypedDi<CURSOR>
skip_builtins: bool,
skip_keywords: bool,
type_signatures: bool,
imports: bool,
module_names: bool,
// This doesn't seem like a "very complex" type to me... ---AG
#[allow(clippy::type_complexity)]
@ -6796,7 +6639,6 @@ TypedDi<CURSOR>
original,
filtered,
type_signatures: self.type_signatures,
imports: self.imports,
module_names: self.module_names,
}
}
@ -6857,15 +6699,6 @@ TypedDi<CURSOR>
self
}
/// When set, include the import associated with the
/// completion.
///
/// Not enabled by default.
fn imports(mut self) -> CompletionTestBuilder {
self.imports = true;
self
}
/// When set, the module name for each symbol is included
/// in the snapshot (if available).
fn module_names(mut self) -> CompletionTestBuilder {
@ -6898,9 +6731,6 @@ TypedDi<CURSOR>
/// Whether type signatures should be included in the snapshot
/// generated by `CompletionTest::snapshot`.
type_signatures: bool,
/// Whether to show the import that will be inserted when this
/// completion is selected.
imports: bool,
/// Whether module names should be included in the snapshot
/// generated by `CompletionTest::snapshot`.
module_names: bool,
@ -6922,7 +6752,7 @@ TypedDi<CURSOR>
self.filtered
.iter()
.map(|c| {
let mut snapshot = c.insert.as_deref().unwrap_or(c.name.as_str()).to_string();
let mut snapshot = c.name.as_str().to_string();
if self.type_signatures {
let ty =
c.ty.map(|ty| ty.display(self.db).to_string())
@ -6936,17 +6766,6 @@ TypedDi<CURSOR>
.unwrap_or("<no import required>");
snapshot = format!("{snapshot} :: {module_name}");
}
if self.imports {
if let Some(ref edit) = c.import {
if let Some(import) = edit.content() {
snapshot = format!("{snapshot} :: {import}");
} else {
snapshot = format!("{snapshot} :: <import deletion>");
}
} else {
snapshot = format!("{snapshot} :: <no import edit>");
}
}
snapshot
})
.collect::<Vec<String>>()
@ -6995,7 +6814,6 @@ TypedDi<CURSOR>
skip_builtins: false,
skip_keywords: false,
type_signatures: false,
imports: false,
module_names: false,
predicate: None,
}

View File

@ -151,19 +151,14 @@ impl fmt::Display for DisplayHoverContent<'_, '_> {
Some(TypeVarVariance::Bivariant) => " (bivariant)",
None => "",
};
// Special types like `<special-form of whatever 'blahblah' with 'florps'>`
// render poorly with python syntax-highlighting but well as xml
let ty_string = ty
.display_with(self.db, DisplaySettings::default().multiline())
.to_string();
let syntax = if ty_string.starts_with('<') {
"xml"
} else {
"python"
};
self.kind
.fenced_code_block(format!("{ty_string}{variance}"), syntax)
.fenced_code_block(
format!(
"{}{variance}",
ty.display_with(self.db, DisplaySettings::default().multiline())
),
"python",
)
.fmt(f)
}
HoverContent::Docstring(docstring) => docstring.render(self.kind).fmt(f),
@ -363,7 +358,7 @@ mod tests {
Everyone loves my class!!
---------------------------------------------
```xml
```python
<class 'MyClass'>
```
---
@ -425,7 +420,7 @@ mod tests {
Everyone loves my class!!
---------------------------------------------
```xml
```python
<class 'MyClass'>
```
---
@ -485,7 +480,7 @@ mod tests {
initializes MyClass (perfectly)
---------------------------------------------
```xml
```python
<class 'MyClass'>
```
---
@ -541,7 +536,7 @@ mod tests {
initializes MyClass (perfectly)
---------------------------------------------
```xml
```python
<class 'MyClass'>
```
---
@ -600,7 +595,7 @@ mod tests {
Everyone loves my class!!
---------------------------------------------
```xml
```python
<class 'MyClass'>
```
---
@ -1685,7 +1680,7 @@ def ab(a: int, *, c: int):
Wow this module rocks.
---------------------------------------------
```xml
```python
<module 'lib'>
```
---
@ -2034,7 +2029,7 @@ def function():
assert_snapshot!(test.hover(), @r"
<class 'Click'>
---------------------------------------------
```xml
```python
<class 'Click'>
```
---------------------------------------------
@ -2239,7 +2234,7 @@ def function():
Wow this module rocks.
---------------------------------------------
```xml
```python
<module 'lib'>
```
---
@ -3062,10 +3057,10 @@ def function():
);
assert_snapshot!(test.hover(), @r"
TypeVar
typing.TypeVar
---------------------------------------------
```python
TypeVar
typing.TypeVar
```
---------------------------------------------
info[hover]: Hovered content is
@ -3125,10 +3120,10 @@ def function():
);
assert_snapshot!(test.hover(), @r"
TypeVar
typing.TypeVar
---------------------------------------------
```python
TypeVar
typing.TypeVar
```
---------------------------------------------
info[hover]: Hovered content is
@ -3348,7 +3343,7 @@ def function():
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```xml
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
@ -3390,7 +3385,7 @@ def function():
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```xml
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
@ -3474,7 +3469,7 @@ def function():
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg.submod'>
---------------------------------------------
```xml
```python
<module 'mypackage.subpkg.submod'>
```
---------------------------------------------
@ -3515,7 +3510,7 @@ def function():
assert_snapshot!(test.hover(), @r"
<module 'mypackage.subpkg'>
---------------------------------------------
```xml
```python
<module 'mypackage.subpkg'>
```
---------------------------------------------
@ -3629,37 +3624,6 @@ def function():
assert_snapshot!(test.hover(), @"Hover provided no content");
}
#[test]
fn hover_named_expression_target() {
let test = CursorTest::builder()
.source(
"mymod.py",
r#"
if a<CURSOR> := 10:
pass
"#,
)
.build();
assert_snapshot!(test.hover(), @r###"
Literal[10]
---------------------------------------------
```python
Literal[10]
```
---------------------------------------------
info[hover]: Hovered content is
--> mymod.py:2:4
|
2 | if a := 10:
| ^- Cursor offset
| |
| source
3 | pass
|
"###);
}
impl CursorTest {
fn hover(&self) -> String {
use std::fmt::Write;

View File

@ -745,17 +745,8 @@ impl ImportResponseKind<'_> {
fn priority(&self) -> usize {
match *self {
ImportResponseKind::Unqualified { .. } => 0,
ImportResponseKind::Partial(_) => 1,
// N.B. When given the choice between adding a
// name to an existing `from ... import ...`
// statement and using an existing `import ...`
// in a qualified manner, we currently choose
// the former. Originally we preferred qualification,
// but there is some evidence that this violates
// expectations.
//
// Ref: https://github.com/astral-sh/ty/issues/1274#issuecomment-3352233790
ImportResponseKind::Qualified { .. } => 2,
ImportResponseKind::Qualified { .. } => 1,
ImportResponseKind::Partial(_) => 2,
}
}
}
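These priority values only matter relative to one another. A minimal sketch of how they could be consumed (the `best` helper is an assumption for illustration; only `priority` comes from the hunk above):

```rust
// Hypothetical helper: pick the most-preferred response, where a lower
// `priority()` value wins. On main this prefers Unqualified, then Partial
// (extend an existing `from ... import ...`), then Qualified.
fn best(mut candidates: Vec<ImportResponseKind<'_>>) -> Option<ImportResponseKind<'_>> {
    candidates.sort_by_key(|kind| kind.priority());
    candidates.into_iter().next()
}
```

The snapshot just below reflects this ordering: the main branch extends the existing `from collections import OrderedDict` rather than qualifying `collections.defaultdict`.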
@ -1341,9 +1332,9 @@ import collections
);
assert_snapshot!(
test.import("collections", "defaultdict"), @r"
from collections import OrderedDict, defaultdict
from collections import OrderedDict
import collections
defaultdict
collections.defaultdict
");
}

View File

@ -6165,28 +6165,11 @@ mod tests {
test.with_extra_file("foo.py", "'''Foo module'''");
assert_snapshot!(test.inlay_hints(), @r#"
assert_snapshot!(test.inlay_hints(), @r"
import foo
a[: <module 'foo'>] = foo
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:423:7
|
422 | @disjoint_base
423 | class ModuleType:
| ^^^^^^^^^^
424 | """Create a module object.
|
info: Source
--> main2.py:4:6
|
2 | import foo
3 |
4 | a[: <module 'foo'>] = foo
| ^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> foo.py:1:1
|
@ -6194,14 +6177,14 @@ mod tests {
| ^^^^^^^^^^^^^^^^
|
info: Source
--> main2.py:4:14
--> main2.py:4:5
|
2 | import foo
3 |
4 | a[: <module 'foo'>] = foo
| ^^^
| ^^^^^^^^^^^^^^
|
"#);
");
}
#[test]
@ -6216,598 +6199,10 @@ mod tests {
assert_snapshot!(test.inlay_hints(), @r#"
from typing import Literal
a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:351:1
|
349 | Final: _SpecialForm
350 |
351 | Literal: _SpecialForm
| ^^^^^^^
352 | TypedDict: _SpecialForm
|
info: Source
--> main2.py:4:20
|
2 | from typing import Literal
3 |
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
| ^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:915:7
|
914 | @disjoint_base
915 | class str(Sequence[str]):
| ^^^
916 | """str(object='') -> str
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
info: Source
--> main2.py:4:28
|
2 | from typing import Literal
3 |
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
| ^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:915:7
|
914 | @disjoint_base
915 | class str(Sequence[str]):
| ^^^
916 | """str(object='') -> str
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
info: Source
--> main2.py:4:33
|
2 | from typing import Literal
3 |
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
| ^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:915:7
|
914 | @disjoint_base
915 | class str(Sequence[str]):
| ^^^
916 | """str(object='') -> str
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
info: Source
--> main2.py:4:38
|
2 | from typing import Literal
3 |
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
| ^^^
|
a[: <special form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
"#);
}
#[test]
fn test_wrapper_descriptor_inlay_hint() {
let mut test = inlay_hint_test(
"
from types import FunctionType
a = FunctionType.__get__",
);
assert_snapshot!(test.inlay_hints(), @r#"
from types import FunctionType
a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:670:7
|
669 | @final
670 | class WrapperDescriptorType:
| ^^^^^^^^^^^^^^^^^^^^^
671 | @property
672 | def __name__(self) -> str: ...
|
info: Source
--> main2.py:4:6
|
2 | from types import FunctionType
3 |
4 | a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
| ^^^^^^^^^^^^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:77:7
|
75 | # Make sure this class definition stays roughly in line with `builtins.function`
76 | @final
77 | class FunctionType:
| ^^^^^^^^^^^^
78 | """Create a function object.
|
info: Source
--> main2.py:4:39
|
2 | from types import FunctionType
3 |
4 | a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
| ^^^^^^^^
|
"#);
}
#[test]
fn test_method_wrapper_inlay_hint() {
let mut test = inlay_hint_test(
"
def f(): ...
a = f.__call__",
);
assert_snapshot!(test.inlay_hints(), @r#"
def f(): ...
a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:684:7
|
683 | @final
684 | class MethodWrapperType:
| ^^^^^^^^^^^^^^^^^
685 | @property
686 | def __self__(self) -> object: ...
|
info: Source
--> main2.py:4:6
|
2 | def f(): ...
3 |
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
| ^^^^^^^^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:134:9
|
132 | ) -> Self: ...
133 |
134 | def __call__(self, *args: Any, **kwargs: Any) -> Any:
| ^^^^^^^^
135 | """Call self as a function."""
|
info: Source
--> main2.py:4:22
|
2 | def f(): ...
3 |
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
| ^^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/types.pyi:77:7
|
75 | # Make sure this class definition stays roughly in line with `builtins.function`
76 | @final
77 | class FunctionType:
| ^^^^^^^^^^^^
78 | """Create a function object.
|
info: Source
--> main2.py:4:35
|
2 | def f(): ...
3 |
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
| ^^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:5
|
2 | def f(): ...
| ^
3 |
4 | a = f.__call__
|
info: Source
--> main2.py:4:45
|
2 | def f(): ...
3 |
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
| ^
|
"#);
}
#[test]
fn test_newtype_inlay_hint() {
let mut test = inlay_hint_test(
"
from typing import NewType
N = NewType('N', str)
Y = N",
);
assert_snapshot!(test.inlay_hints(), @r#"
from typing import NewType
N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
Y[: <NewType pseudo-class 'N'>] = N
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:615:11
|
613 | TypeGuard: _SpecialForm
614 |
615 | class NewType:
| ^^^^^^^
616 | """NewType creates simple unique types with almost zero runtime overhead.
|
info: Source
--> main2.py:4:6
|
2 | from typing import NewType
3 |
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
| ^^^^^^^
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:4:1
|
2 | from typing import NewType
3 |
4 | N = NewType('N', str)
| ^
5 |
6 | Y = N
|
info: Source
--> main2.py:4:28
|
2 | from typing import NewType
3 |
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
| ^
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:637:28
|
635 | """
636 |
637 | def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm
| ^^^^
638 | if sys.version_info >= (3, 11):
639 | @staticmethod
|
info: Source
--> main2.py:4:44
|
2 | from typing import NewType
3 |
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
| ^^^^
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:637:39
|
635 | """
636 |
637 | def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm
| ^^
638 | if sys.version_info >= (3, 11):
639 | @staticmethod
|
info: Source
--> main2.py:4:56
|
2 | from typing import NewType
3 |
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
| ^^
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:615:11
|
613 | TypeGuard: _SpecialForm
614 |
615 | class NewType:
| ^^^^^^^
616 | """NewType creates simple unique types with almost zero runtime overhead.
|
info: Source
--> main2.py:6:6
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
| ^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:4:1
|
2 | from typing import NewType
3 |
4 | N = NewType('N', str)
| ^
5 |
6 | Y = N
|
info: Source
--> main2.py:6:28
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
5 |
6 | Y[: <NewType pseudo-class 'N'>] = N
| ^
|
"#);
}
#[test]
fn test_meta_typevar_inlay_hint() {
let mut test = inlay_hint_test(
"
def f[T](x: type[T]):
y = x",
);
assert_snapshot!(test.inlay_hints(), @r#"
def f[T](x: type[T]):
y[: type[T@f]] = x
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/builtins.pyi:247:7
|
246 | @disjoint_base
247 | class type:
| ^^^^
248 | """type(object) -> the object's type
249 | type(name, bases, dict, **kwds) -> a new type
|
info: Source
--> main2.py:3:9
|
2 | def f[T](x: type[T]):
3 | y[: type[T@f]] = x
| ^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:2:7
|
2 | def f[T](x: type[T]):
| ^
3 | y = x
|
info: Source
--> main2.py:3:14
|
2 | def f[T](x: type[T]):
3 | y[: type[T@f]] = x
| ^^^
|
---------------------------------------------
info[inlay-hint-edit]: File after edits
info: Source
def f[T](x: type[T]):
y: type[T@f] = x
"#);
}
#[test]
fn test_subscripted_protocol_inlay_hint() {
let mut test = inlay_hint_test(
"
from typing import Protocol, TypeVar
T = TypeVar('T')
Strange = Protocol[T]",
);
assert_snapshot!(test.inlay_hints(), @r"
from typing import Protocol, TypeVar
T = TypeVar([name=]'T')
Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:276:13
|
274 | def __new__(
275 | cls,
276 | name: str,
| ^^^^
277 | *constraints: Any, # AnnotationForm
278 | bound: Any | None = None, # AnnotationForm
|
info: Source
--> main2.py:3:14
|
2 | from typing import Protocol, TypeVar
3 | T = TypeVar([name=]'T')
| ^^^^
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:341:1
|
340 | Union: _SpecialForm
341 | Protocol: _SpecialForm
| ^^^^^^^^
342 | Callable: _SpecialForm
343 | Type: _SpecialForm
|
info: Source
--> main2.py:4:26
|
2 | from typing import Protocol, TypeVar
3 | T = TypeVar([name=]'T')
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
| ^^^^^^^^^^^^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> main.py:3:1
|
2 | from typing import Protocol, TypeVar
3 | T = TypeVar('T')
| ^
4 | Strange = Protocol[T]
|
info: Source
--> main2.py:4:42
|
2 | from typing import Protocol, TypeVar
3 | T = TypeVar([name=]'T')
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
| ^
|
");
}
#[test]
fn test_paramspec_creation_inlay_hint() {
let mut test = inlay_hint_test(
"
from typing import ParamSpec
P = ParamSpec('P')",
);
assert_snapshot!(test.inlay_hints(), @r"
from typing import ParamSpec
P = ParamSpec([name=]'P')
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:552:17
|
550 | def __new__(
551 | cls,
552 | name: str,
| ^^^^
553 | *,
554 | bound: Any | None = None, # AnnotationForm
|
info: Source
--> main2.py:3:16
|
2 | from typing import ParamSpec
3 | P = ParamSpec([name=]'P')
| ^^^^
|
");
}
#[test]
fn test_typealiastype_creation_inlay_hint() {
let mut test = inlay_hint_test(
"
from typing_extensions import TypeAliasType
A = TypeAliasType('A', str)",
);
assert_snapshot!(test.inlay_hints(), @r#"
from typing_extensions import TypeAliasType
A = TypeAliasType([name=]'A', [value=]str)
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:2032:26
|
2030 | """
2031 |
2032 | def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ...
| ^^^^
2033 | @property
2034 | def __value__(self) -> Any: ... # AnnotationForm
|
info: Source
--> main2.py:3:20
|
2 | from typing_extensions import TypeAliasType
3 | A = TypeAliasType([name=]'A', [value=]str)
| ^^^^
|
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:2032:37
|
2030 | """
2031 |
2032 | def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ...
| ^^^^^
2033 | @property
2034 | def __value__(self) -> Any: ... # AnnotationForm
|
info: Source
--> main2.py:3:32
|
2 | from typing_extensions import TypeAliasType
3 | A = TypeAliasType([name=]'A', [value=]str)
| ^^^^^
|
"#);
}
#[test]
fn test_typevartuple_creation_inlay_hint() {
let mut test = inlay_hint_test(
"
from typing_extensions import TypeVarTuple
Ts = TypeVarTuple('Ts')",
);
assert_snapshot!(test.inlay_hints(), @r"
from typing_extensions import TypeVarTuple
Ts = TypeVarTuple([name=]'Ts')
---------------------------------------------
info[inlay-hint-location]: Inlay Hint Target
--> stdlib/typing.pyi:412:30
|
410 | def has_default(self) -> bool: ...
411 | if sys.version_info >= (3, 13):
412 | def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm
| ^^^^
413 | elif sys.version_info >= (3, 12):
414 | def __new__(cls, name: str) -> Self: ...
|
info: Source
--> main2.py:3:20
|
2 | from typing_extensions import TypeVarTuple
3 | Ts = TypeVarTuple([name=]'Ts')
| ^^^^
|
");
}
struct InlayHintLocationDiagnostic {
source: FileRange,
target: FileRange,

View File

@ -84,7 +84,7 @@ pub fn rename(
/// Helper function to check if a file is included in the project.
fn is_file_in_project(db: &dyn Db, file: File) -> bool {
file.path(db).is_system_virtual_path() || db.project().files(db).contains(&file)
db.project().files(db).contains(&file)
}
#[cfg(test)]

View File

@ -254,9 +254,7 @@ impl<'db> SemanticTokenVisitor<'db> {
}
fn is_constant_name(name: &str) -> bool {
name.chars()
.all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
&& name.len() > 1
name.chars().all(|c| c.is_uppercase() || c == '_') && name.len() > 1
}
fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) {
@ -304,25 +302,17 @@ impl<'db> SemanticTokenVisitor<'db> {
let parsed = parsed_module(db, definition.file(db));
let ty = parameter.node(&parsed.load(db)).inferred_type(&model);
if let Some(ty) = ty {
let type_var = match ty {
Type::TypeVar(type_var) => Some((type_var, false)),
Type::SubclassOf(subclass_of) => {
subclass_of.into_type_var().map(|var| (var, true))
}
_ => None,
};
if let Some((type_var, is_cls)) = type_var
&& matches!(type_var.typevar(db).kind(db), TypeVarKind::TypingSelf)
if let Some(ty) = ty
&& let Type::TypeVar(type_var) = ty
{
let kind = if is_cls {
SemanticTokenType::ClsParameter
} else {
SemanticTokenType::SelfParameter
};
return Some((kind, modifiers));
match type_var.typevar(db).kind(db) {
TypeVarKind::TypingSelf => {
return Some((SemanticTokenType::SelfParameter, modifiers));
}
TypeVarKind::Legacy
| TypeVarKind::ParamSpec
| TypeVarKind::Pep695ParamSpec
| TypeVarKind::Pep695 => {}
}
}
@ -1213,7 +1203,7 @@ class MyClass:
"
class MyClass:
@classmethod
def method(cls, x): print(cls)
def method(cls, x): pass
",
);
@ -1225,8 +1215,6 @@ class MyClass:
"method" @ 41..47: Method [definition]
"cls" @ 48..51: ClsParameter [definition]
"x" @ 53..54: Parameter [definition]
"print" @ 57..62: Function
"cls" @ 63..66: ClsParameter
"#);
}
@ -1258,7 +1246,7 @@ class MyClass:
class MyClass:
def method(instance, x): pass
@classmethod
def other(klass, y): print(klass)
def other(klass, y): pass
def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass
",
);
@ -1274,15 +1262,13 @@ class MyClass:
"other" @ 75..80: Method [definition]
"klass" @ 81..86: ClsParameter [definition]
"y" @ 88..89: Parameter [definition]
"print" @ 92..97: Function
"klass" @ 98..103: ClsParameter
"complex_method" @ 113..127: Method [definition]
"instance" @ 128..136: SelfParameter [definition]
"posonly" @ 138..145: Parameter [definition]
"regular" @ 150..157: Parameter [definition]
"args" @ 160..164: Parameter [definition]
"kwonly" @ 166..172: Parameter [definition]
"kwargs" @ 176..182: Parameter [definition]
"complex_method" @ 105..119: Method [definition]
"instance" @ 120..128: SelfParameter [definition]
"posonly" @ 130..137: Parameter [definition]
"regular" @ 142..149: Parameter [definition]
"args" @ 152..156: Parameter [definition]
"kwonly" @ 158..164: Parameter [definition]
"kwargs" @ 168..174: Parameter [definition]
"#);
}
@ -2232,49 +2218,6 @@ class MyClass:
"###);
}
#[test]
fn test_constant_variations() {
let test = SemanticTokenTest::new(
r#"
A = 1
AB = 1
ABC = 1
A1 = 1
AB1 = 1
ABC1 = 1
A_B = 1
A1_B = 1
A_B1 = 1
A_1 = 1
"#,
);
let tokens = test.highlight_file();
assert_snapshot!(test.to_snapshot(&tokens), @r#"
"A" @ 1..2: Variable [definition]
"1" @ 5..6: Number
"AB" @ 7..9: Variable [definition, readonly]
"1" @ 12..13: Number
"ABC" @ 14..17: Variable [definition, readonly]
"1" @ 20..21: Number
"A1" @ 22..24: Variable [definition, readonly]
"1" @ 27..28: Number
"AB1" @ 29..32: Variable [definition, readonly]
"1" @ 35..36: Number
"ABC1" @ 37..41: Variable [definition, readonly]
"1" @ 44..45: Number
"A_B" @ 46..49: Variable [definition, readonly]
"1" @ 52..53: Number
"A1_B" @ 54..58: Variable [definition, readonly]
"1" @ 61..62: Number
"A_B1" @ 63..67: Variable [definition, readonly]
"1" @ 70..71: Number
"A_1" @ 72..75: Variable [definition, readonly]
"1" @ 78..79: Number
"#);
}
#[test]
fn test_implicitly_concatenated_strings() {
let test = SemanticTokenTest::new(

View File

@ -124,11 +124,6 @@ fn get_call_expr(
})?;
// Find the covering node at the given position that is a function call.
// Note that we are okay with the range being anywhere within a call
// expression, even if it's not in the arguments portion of the call
// expression. This is because, e.g., a user can request signature
// information at a call site, and this should ideally work anywhere
// within the call site, even at the function name.
let call = covering_node(root_node, token.range())
.find_first(|node| {
if !node.is_expr_call() {

View File

@ -10,10 +10,10 @@ use ruff_db::files::File;
use ruff_db::parsed::parsed_module;
use ruff_index::{IndexVec, newtype_index};
use ruff_python_ast as ast;
use ruff_python_ast::name::{Name, UnqualifiedName};
use ruff_python_ast::name::Name;
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor};
use ruff_text_size::{Ranged, TextRange};
use rustc_hash::{FxHashMap, FxHashSet};
use rustc_hash::FxHashSet;
use ty_project::Db;
use ty_python_semantic::{ModuleName, resolve_module};
@ -375,11 +375,7 @@ pub(crate) fn symbols_for_file(db: &dyn Db, file: File) -> FlatSymbols {
/// While callers can convert this into a hierarchical collection of
/// symbols, it won't result in anything meaningful since the flat list
/// returned doesn't include children.
#[salsa::tracked(
returns(ref),
cycle_initial=symbols_for_file_global_only_cycle_initial,
heap_size=ruff_memory_usage::heap_size,
)]
#[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)]
pub(crate) fn symbols_for_file_global_only(db: &dyn Db, file: File) -> FlatSymbols {
let parsed = parsed_module(db, file);
let module = parsed.load(db);
@ -398,14 +394,6 @@ pub(crate) fn symbols_for_file_global_only(db: &dyn Db, file: File) -> FlatSymbo
visitor.into_flat_symbols()
}
fn symbols_for_file_global_only_cycle_initial(
_db: &dyn Db,
_id: salsa::Id,
_file: File,
) -> FlatSymbols {
FlatSymbols::default()
}
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
struct SymbolTree {
parent: Option<SymbolId>,
@ -423,189 +411,6 @@ enum ImportKind {
Wildcard,
}
/// An abstraction for managing module scope imports.
///
/// This is meant to recognize the following idioms for updating
/// `__all__` in module scope:
///
/// ```ignore
/// __all__ += submodule.__all__
/// __all__.extend(submodule.__all__)
/// ```
///
/// # Correctness
///
/// The approach used here is not correct 100% of the time.
/// For example, it is somewhat easy to defeat it:
///
/// ```ignore
/// from numpy import *
/// from importlib import resources
/// import numpy as np
/// np = resources
/// __all__ = []
/// __all__ += np.__all__
/// ```
///
/// In this example, `np` will still be resolved to the `numpy`
/// module instead of the `importlib.resources` module. Namely, this
/// abstraction doesn't track all definitions. This would result in a
/// silently incorrect `__all__`.
///
/// This abstraction does handle the case when submodules are imported.
/// Namely, we do get this case correct:
///
/// ```ignore
/// from importlib.resources import *
/// from importlib import resources
/// __all__ = []
/// __all__ += resources.__all__
/// ```
///
/// We do this by treating all imports in a `from ... import ...`
/// statement as *possible* modules. Then when we lookup `resources`,
/// we attempt to resolve it to an actual module. If that fails, then
/// we consider `__all__` invalid.
///
/// There are likely many other cases that we don't handle but that
/// ty does (it has its own `__all__` parsing that uses types to deal
/// with them). We can add handling for those as they come up in real
/// world examples.
///
/// # Performance
///
/// This abstraction recognizes that, compared to all possible imports,
/// it is very rare to use one of them to update `__all__`. Therefore,
/// we are careful not to do too much work up-front (like eagerly
/// manifesting `ModuleName` values).
#[derive(Clone, Debug, Default, get_size2::GetSize)]
struct Imports<'db> {
/// A map from the name that a module is available
/// under to its actual module name (and our level
/// of certainty that it ought to be treated as a module).
module_names: FxHashMap<&'db str, ImportModuleKind<'db>>,
}
impl<'db> Imports<'db> {
/// Track the imports from the given `import ...` statement.
fn add_import(&mut self, import: &'db ast::StmtImport) {
for alias in &import.names {
let asname = alias
.asname
.as_ref()
.map(|ident| &ident.id)
.unwrap_or(&alias.name.id);
let module_name = ImportModuleName::Import(&alias.name.id);
self.module_names
.insert(asname, ImportModuleKind::Definitive(module_name));
}
}
/// Track the imports from the given `from ... import ...` statement.
fn add_import_from(&mut self, import_from: &'db ast::StmtImportFrom) {
for alias in &import_from.names {
if &alias.name == "*" {
// FIXME: We'd ideally include the names
// imported from the module, but we don't
// want to do this eagerly. So supporting
// this requires more infrastructure in
// `Imports`.
continue;
}
let asname = alias
.asname
.as_ref()
.map(|ident| &ident.id)
.unwrap_or(&alias.name.id);
let module_name = ImportModuleName::ImportFrom {
parent: import_from,
child: &alias.name.id,
};
self.module_names
.insert(asname, ImportModuleKind::Possible(module_name));
}
}
/// Return the symbols exported by the module referred to by `name`.
///
/// e.g., This can be used to resolve `__all__ += submodule.__all__`,
/// where `name` is `submodule`.
fn get_module_symbols(
&self,
db: &'db dyn Db,
importing_file: File,
name: &Name,
) -> Option<&'db FlatSymbols> {
let module_name = match self.module_names.get(name.as_str())? {
ImportModuleKind::Definitive(name) | ImportModuleKind::Possible(name) => {
name.to_module_name(db, importing_file)?
}
};
let module = resolve_module(db, importing_file, &module_name)?;
Some(symbols_for_file_global_only(db, module.file(db)?))
}
}
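Put together, the removed `Imports` helper is used roughly as in the following sketch; the statement variables and the surrounding `__all__` bookkeeping are assumptions for illustration, while the three methods are the ones defined above:

```rust
// Hypothetical usage while walking a module body: record module-scope
// imports as they are visited, then lazily resolve `np.__all__`-style
// references when an `__all__ +=` / `__all__.extend(...)` idiom is seen.
let mut imports = Imports::default();
imports.add_import(import_stmt);            // e.g. `import numpy as np`
imports.add_import_from(import_from_stmt);  // e.g. `from importlib import resources`

if let Some(symbols) = imports.get_module_symbols(db, file, &Name::new("np")) {
    if let Some(all) = &symbols.all_names {
        // Extend this module's `__all__` with the referenced module's names.
        all_names.extend(all.iter().cloned());
    }
}
```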
/// Describes the level of certainty that an import is a module.
///
/// For example, given `import foo`, `foo` is definitively a module.
/// But given `from quux import foo`, `quux.foo` is only possibly a module.
#[derive(Debug, Clone, Copy, get_size2::GetSize)]
enum ImportModuleKind<'db> {
Definitive(ImportModuleName<'db>),
Possible(ImportModuleName<'db>),
}
/// A representation of something that can be turned into a
/// `ModuleName`.
///
/// We don't do this eagerly, and instead represent the constituent
/// pieces, in order to avoid the work needed to build a `ModuleName`.
/// In particular, it is somewhat rare for the visitor to need
/// to access the imports found in a module. At time of writing
/// (2025-12-10), this only happens when referencing a submodule
/// to augment an `__all__` definition. For example, as found in
/// `matplotlib`:
///
/// ```ignore
/// import numpy as np
/// __all__ = ['rand', 'randn', 'repmat']
/// __all__ += np.__all__
/// ```
///
/// This construct is somewhat rare and it would be sad to allocate a
/// `ModuleName` for every imported item unnecessarily.
#[derive(Debug, Clone, Copy, get_size2::GetSize)]
enum ImportModuleName<'db> {
/// The `foo` in `import quux, foo as blah, baz`.
Import(&'db Name),
/// A possible module in a `from ... import ...` statement.
ImportFrom {
/// The `..foo` in `from ..foo import quux`.
parent: &'db ast::StmtImportFrom,
/// The `foo` in `from quux import foo`.
child: &'db Name,
},
}
impl<'db> ImportModuleName<'db> {
/// Converts the lazy representation of a module name into an
/// actual `ModuleName` that can be used for module resolution.
fn to_module_name(self, db: &'db dyn Db, importing_file: File) -> Option<ModuleName> {
match self {
ImportModuleName::Import(name) => ModuleName::new(name),
ImportModuleName::ImportFrom { parent, child } => {
let mut module_name =
ModuleName::from_import_statement(db, importing_file, parent).ok()?;
let child_module_name = ModuleName::new(child)?;
module_name.extend(&child_module_name);
Some(module_name)
}
}
}
}
/// A visitor over all symbols in a single file.
///
/// This guarantees that child symbols have a symbol ID greater
@ -626,11 +431,7 @@ struct SymbolVisitor<'db> {
/// This is true even when we're inside a function definition
/// that is inside a class.
in_class: bool,
/// When enabled, the visitor should only try to extract
/// symbols from a module that we believe form the "exported"
/// interface for that module. That is, `__all__` is only respected
/// when this is enabled; it's otherwise ignored.
exports_only: bool,
global_only: bool,
/// The origin of an `__all__` variable, if found.
all_origin: Option<DunderAllOrigin>,
/// A set of names extracted from `__all__`.
@ -639,11 +440,6 @@ struct SymbolVisitor<'db> {
/// `__all__` idioms or there are any invalid elements in
/// `__all__`.
all_invalid: bool,
/// A collection of imports found while visiting the AST.
///
/// These are used to help resolve references to modules
/// in some limited cases.
imports: Imports<'db>,
}
impl<'db> SymbolVisitor<'db> {
@ -655,27 +451,21 @@ impl<'db> SymbolVisitor<'db> {
symbol_stack: vec![],
in_function: false,
in_class: false,
exports_only: false,
global_only: false,
all_origin: None,
all_names: FxHashSet::default(),
all_invalid: false,
imports: Imports::default(),
}
}
fn globals(db: &'db dyn Db, file: File) -> Self {
Self {
exports_only: true,
global_only: true,
..Self::tree(db, file)
}
}
fn into_flat_symbols(mut self) -> FlatSymbols {
// If `__all__` was found but wasn't recognized,
// then we emit a diagnostic message saying as much.
if self.all_invalid {
tracing::debug!("Invalid `__all__` in `{}`", self.file.path(self.db));
}
// We want to filter out some of the symbols we collected.
// Specifically, to respect conventions around library
// interface.
@ -684,28 +474,12 @@ impl<'db> SymbolVisitor<'db> {
// their position in a sequence. So when we filter some
// out, we need to remap the identifiers.
//
// We also want to deduplicate when `exports_only` is
// `true`. In particular, dealing with `__all__` can
// result in cycles, and we need to make sure our output
// is stable for that reason.
//
// N.B. The remapping could be skipped when `exports_only` is
// N.B. The remapping could be skipped when `global_only` is
// true, since in that case, none of the symbols have a parent
// ID by construction.
let mut remap = IndexVec::with_capacity(self.symbols.len());
let mut seen = self.exports_only.then(FxHashSet::default);
let mut new = IndexVec::with_capacity(self.symbols.len());
for mut symbol in std::mem::take(&mut self.symbols) {
// If we're deduplicating and we've already seen
// this symbol, then skip it.
//
// FIXME: We should do this without copying every
// symbol name. ---AG
if let Some(ref mut seen) = seen {
if !seen.insert(symbol.name.clone()) {
continue;
}
}
if !self.is_part_of_library_interface(&symbol) {
remap.push(None);
continue;
@ -736,7 +510,7 @@ impl<'db> SymbolVisitor<'db> {
}
}
fn visit_body(&mut self, body: &'db [ast::Stmt]) {
fn visit_body(&mut self, body: &[ast::Stmt]) {
for stmt in body {
self.visit_stmt(stmt);
}
@ -811,11 +585,6 @@ impl<'db> SymbolVisitor<'db> {
///
/// If the assignment isn't for `__all__`, then this is a no-op.
fn add_all_assignment(&mut self, targets: &[ast::Expr], value: Option<&ast::Expr>) {
// We don't care about `__all__` unless we're
// specifically looking for exported symbols.
if !self.exports_only {
return;
}
if self.in_function || self.in_class {
return;
}
@ -866,31 +635,6 @@ impl<'db> SymbolVisitor<'db> {
ast::Expr::List(ast::ExprList { elts, .. })
| ast::Expr::Tuple(ast::ExprTuple { elts, .. })
| ast::Expr::Set(ast::ExprSet { elts, .. }) => self.add_all_names(elts),
// `__all__ += module.__all__`
// `__all__.extend(module.__all__)`
ast::Expr::Attribute(ast::ExprAttribute { .. }) => {
let Some(unqualified) = UnqualifiedName::from_expr(expr) else {
return false;
};
let Some((&attr, rest)) = unqualified.segments().split_last() else {
return false;
};
if attr != "__all__" {
return false;
}
let possible_module_name = Name::new(rest.join("."));
let Some(symbols) =
self.imports
.get_module_symbols(self.db, self.file, &possible_module_name)
else {
return false;
};
let Some(ref all) = symbols.all_names else {
return false;
};
self.all_names.extend(all.iter().cloned());
true
}
_ => false,
}
}
@ -1057,12 +801,15 @@ impl<'db> SymbolVisitor<'db> {
// if a name should be part of the exported API of a module
// or not. When there is `__all__`, we currently follow it
// strictly.
//
if self.all_origin.is_some() {
// If `__all__` is somehow invalid, ignore it and fall
// through as if `__all__` didn't exist.
if self.all_origin.is_some() && !self.all_invalid {
if self.all_invalid {
tracing::debug!("Invalid `__all__` in `{}`", self.file.path(self.db));
} else {
return self.all_names.contains(&*symbol.name);
}
}
// "Imported symbols are considered private by default. A fixed
// set of import forms re-export imported symbols." Specifically:
@ -1092,8 +839,8 @@ impl<'db> SymbolVisitor<'db> {
}
}
impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
fn visit_stmt(&mut self, stmt: &'db ast::Stmt) {
impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
fn visit_stmt(&mut self, stmt: &ast::Stmt) {
match stmt {
ast::Stmt::FunctionDef(func_def) => {
let kind = if self
@ -1118,7 +865,7 @@ impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
import_kind: None,
};
if self.exports_only {
if self.global_only {
self.add_symbol(symbol);
// If global_only, don't walk function bodies
return;
@ -1147,7 +894,7 @@ impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
import_kind: None,
};
if self.exports_only {
if self.global_only {
self.add_symbol(symbol);
// If global_only, don't walk class bodies
return;
@ -1196,12 +943,6 @@ impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
ast::Stmt::AugAssign(ast::StmtAugAssign {
target, op, value, ..
}) => {
// We don't care about `__all__` unless we're
// specifically looking for exported symbols.
if !self.exports_only {
return;
}
if self.all_origin.is_none() {
// We can't update `__all__` if it doesn't already
// exist.
@ -1220,12 +961,6 @@ impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
}
}
ast::Stmt::Expr(expr) => {
// We don't care about `__all__` unless we're
// specifically looking for exported symbols.
if !self.exports_only {
return;
}
if self.all_origin.is_none() {
// We can't update `__all__` if it doesn't already exist.
return;
@ -1255,33 +990,19 @@ impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
source_order::walk_stmt(self, stmt);
}
ast::Stmt::Import(import) => {
// We ignore any names introduced by imports
// unless we're specifically looking for the
// set of exported symbols.
if !self.exports_only {
return;
}
// We only consider imports in global scope.
if self.in_function {
return;
}
self.imports.add_import(import);
for alias in &import.names {
self.add_import_alias(stmt, alias);
}
}
ast::Stmt::ImportFrom(import_from) => {
// We ignore any names introduced by imports
// unless we're specifically looking for the
// set of exported symbols.
if !self.exports_only {
return;
}
// We only consider imports in global scope.
if self.in_function {
return;
}
self.imports.add_import_from(import_from);
for alias in &import_from.names {
if &alias.name == "*" {
self.add_exported_from_wildcard(import_from);
@ -2254,363 +1975,6 @@ class X:
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_plus_equals() {
let test = PublicTestBuilder::default()
.source(
"foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import foo
from foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += foo.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_extend() {
let test = PublicTestBuilder::default()
.source(
"foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import foo
from foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__.extend(foo.__all__)
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_alias() {
let test = PublicTestBuilder::default()
.source(
"foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import foo as blah
from foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += blah.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_nested_alias() {
let test = PublicTestBuilder::default()
.source("parent/__init__.py", "")
.source(
"parent/foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import parent.foo as blah
from parent.foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += blah.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_from_statement_plus_equals() {
let test = PublicTestBuilder::default()
.source("parent/__init__.py", "")
.source(
"parent/foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"from parent import foo
from parent.foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += foo.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_from_statement_nested_module_reference() {
let test = PublicTestBuilder::default()
.source("parent/__init__.py", "")
.source(
"parent/foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import parent.foo
from parent.foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += parent.foo.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_from_statement_extend() {
let test = PublicTestBuilder::default()
.source("parent/__init__.py", "")
.source(
"parent/foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"import parent.foo
from parent.foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__.extend(parent.foo.__all__)
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_from_statement_alias() {
let test = PublicTestBuilder::default()
.source("parent/__init__.py", "")
.source(
"parent/foo.py",
"
_ZQZQZQ = 1
__all__ = ['_ZQZQZQ']
",
)
.source(
"test.py",
"from parent import foo as blah
from parent.foo import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__ += blah.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZQZQZQ :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_cycle1() {
let test = PublicTestBuilder::default()
.source(
"a.py",
"from b import *
import b
_ZAZAZA = 1
__all__ = ['_ZAZAZA']
__all__ += b.__all__
",
)
.source(
"b.py",
"
from a import *
import a
_ZBZBZB = 1
__all__ = ['_ZBZBZB']
__all__ += a.__all__
",
)
.build();
insta::assert_snapshot!(
test.exports_for("a.py"),
@r"
_ZBZBZB :: Constant
_ZAZAZA :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_failure1() {
let test = PublicTestBuilder::default()
.source(
"foo.py",
"
_ZFZFZF = 1
__all__ = ['_ZFZFZF']
",
)
.source(
"bar.py",
"
_ZBZBZB = 1
__all__ = ['_ZBZBZB']
",
)
.source(
"test.py",
"import foo
import bar
from foo import *
from bar import *
foo = bar
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__.extend(foo.__all__)
",
)
.build();
// In this test, we resolve `foo.__all__` to the `__all__`
// attribute in module `foo` instead of in `bar`. This is
// because we don't track redefinitions of imports (as of
// 2025-12-11). Handling this correctly would mean exporting
// `_ZBZBZB` instead of `_ZFZFZF`.
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZFZFZF :: Constant
_ZYZYZY :: Constant
",
);
}
#[test]
fn reexport_and_extend_from_submodule_import_statement_failure2() {
let test = PublicTestBuilder::default()
.source(
"parent/__init__.py",
"import parent.foo as foo
__all__ = ['foo']
",
)
.source(
"parent/foo.py",
"
_ZFZFZF = 1
__all__ = ['_ZFZFZF']
",
)
.source(
"test.py",
"from parent.foo import *
from parent import *
_ZYZYZY = 1
__all__ = ['_ZYZYZY']
__all__.extend(foo.__all__)
",
)
.build();
// This is not quite right either because we end up
// considering the `__all__` in `test.py` to be invalid.
// Namely, we don't pick up the `foo` that is in scope
// from the `from parent import *` import. The correct
// answer should just be `_ZFZFZF` and `_ZYZYZY`.
insta::assert_snapshot!(
test.exports_for("test.py"),
@r"
_ZFZFZF :: Constant
foo :: Module
_ZYZYZY :: Constant
__all__ :: Variable
",
);
}
fn matches(query: &str, symbol: &str) -> bool {
super::QueryPattern::fuzzy(query).is_match_symbol_name(symbol)
}

View File

@ -150,62 +150,6 @@ class Test:
");
}
#[test]
fn ignore_all() {
let test = CursorTest::builder()
.source(
"utils.py",
"
__all__ = []
class Test:
def from_path(): ...
<CURSOR>",
)
.build();
assert_snapshot!(test.workspace_symbols("from"), @r"
info[workspace-symbols]: WorkspaceSymbolInfo
--> utils.py:4:9
|
2 | __all__ = []
3 | class Test:
4 | def from_path(): ...
| ^^^^^^^^^
|
info: Method from_path
");
}
#[test]
fn ignore_imports() {
let test = CursorTest::builder()
.source(
"utils.py",
"
import re
import json as json
from collections import defaultdict
foo = 1
<CURSOR>",
)
.build();
assert_snapshot!(test.workspace_symbols("foo"), @r"
info[workspace-symbols]: WorkspaceSymbolInfo
--> utils.py:5:1
|
3 | import json as json
4 | from collections import defaultdict
5 | foo = 1
| ^^^
|
info: Variable foo
");
assert_snapshot!(test.workspace_symbols("re"), @"No symbols found");
assert_snapshot!(test.workspace_symbols("json"), @"No symbols found");
assert_snapshot!(test.workspace_symbols("default"), @"No symbols found");
}
impl CursorTest {
fn workspace_symbols(&self, query: &str) -> String {
let symbols = workspace_symbols(&self.db, query);

View File

@ -27,6 +27,7 @@ use std::iter::FusedIterator;
use std::panic::{AssertUnwindSafe, UnwindSafe};
use std::sync::Arc;
use thiserror::Error;
use tracing::error;
use ty_python_semantic::add_inferred_python_version_hint_to_diagnostic;
use ty_python_semantic::lint::RuleSelection;
use ty_python_semantic::types::check_types;

View File

@ -285,6 +285,22 @@ impl Options {
roots.push(python);
}
// To accommodate pytest's test discovery conventions,
// we also include the `tests` directory if it exists and is not a package.
let tests_dir = project_root.join("tests");
if system.is_directory(&tests_dir)
&& !system.is_file(&tests_dir.join("__init__.py"))
&& !system.is_file(&tests_dir.join("__init__.pyi"))
&& !roots.contains(&tests_dir)
{
// If the `tests` directory exists and is not a package, include it as a source root.
tracing::debug!(
"Including `./tests` in `environment.root` because a `./tests` directory exists"
);
roots.push(tests_dir);
}
// The project root should always be included, and should always come
// after any subdirectories such as `./src`, `./tests` and/or `./python`.
roots.push(project_root.to_path_buf());
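For illustration, the tail end of this heuristic (the non-package `./python`/`./tests` directories followed by the project root) can be restated in Python. This is a rough sketch, not the actual implementation; the real logic is the Rust code shown here:

```py
from pathlib import Path

def append_trailing_roots(project_root: Path, roots: list[Path]) -> list[Path]:
    # `roots` already contains any `./src`-style directories picked by the
    # earlier layout heuristics; this sketch only adds the trailing steps.
    for name in ("python", "tests"):
        candidate = project_root / name
        is_package = (candidate / "__init__.py").is_file() or (candidate / "__init__.pyi").is_file()
        if candidate.is_dir() and not is_package and candidate not in roots:
            roots.append(candidate)
    # The project root always comes last, after `./src`, `./tests`, and/or `./python`.
    roots.append(project_root)
    return roots
```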
@ -516,7 +532,7 @@ pub struct EnvironmentOptions {
/// * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
/// * otherwise, default to `.` (flat layout)
///
/// Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
/// Additionally, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
/// it will also be included in the first party search path.
#[serde(skip_serializing_if = "Option::is_none")]
#[option(
@ -658,7 +674,7 @@ pub struct SrcOptions {
/// * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
/// * otherwise, default to `.` (flat layout)
///
/// Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
/// Additionally, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
/// it will also be included in the first party search path.
#[serde(skip_serializing_if = "Option::is_none")]
#[option(
@ -1225,22 +1241,24 @@ pub struct TerminalOptions {
///
/// An override allows you to apply different rule configurations to specific
/// files or directories. Multiple overrides can match the same file, with
/// later overrides taking precedence. Override rules take precedence over global
/// rules for matching files.
/// later overrides taking precedence.
///
/// For example, to relax enforcement of rules in test files:
/// ### Precedence
///
/// - Later overrides in the array take precedence over earlier ones
/// - Override rules take precedence over global rules for matching files
///
/// ### Examples
///
/// ```toml
/// # Relax rules for test files
/// [[tool.ty.overrides]]
/// include = ["tests/**", "**/test_*.py"]
///
/// [tool.ty.overrides.rules]
/// possibly-unresolved-reference = "warn"
/// ```
///
/// Or, to ignore a rule in generated files but retain enforcement in an important file:
///
/// ```toml
/// # Ignore generated files but still check important ones
/// [[tool.ty.overrides]]
/// include = ["generated/**"]
/// exclude = ["generated/important.py"]

View File

@ -1,7 +0,0 @@
from typing import TypeAlias, TypeVar
T = TypeVar("T", bound="A[0]")
A: TypeAlias = T
def _(x: A):
if x:
pass

View File

@ -1 +0,0 @@
def _[T: (T if cond else U)[0], U](): pass

View File

@ -1,3 +0,0 @@
def _[T: T[0]](x: T):
if x:
pass

View File

@ -1,4 +0,0 @@
class _[T: (0, T[0])]:
def _(x: T):
if x:
pass

View File

@ -169,13 +169,13 @@ def f(x: Any[int]):
`Any` cannot be called (this leads to a `TypeError` at runtime):
```py
Any() # error: [call-non-callable] "Object of type `<special-form 'typing.Any'>` is not callable"
Any() # error: [call-non-callable] "Object of type `<special form 'typing.Any'>` is not callable"
```
`Any` also cannot be used as a metaclass (under the hood, this leads to an implicit call to `Any`):
```py
class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `<special-form 'typing.Any'>` is not callable"
class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `<special form 'typing.Any'>` is not callable"
```
And `Any` cannot be used in `isinstance()` checks:

View File

@ -407,22 +407,4 @@ def f_okay(c: Callable[[], None]):
c.__qualname__ = "my_callable" # okay
```
## From a class
### Subclasses should return themselves, not the superclass
```py
from ty_extensions import into_callable
class Base:
def __init__(self) -> None:
pass
class A(Base):
pass
# revealed: () -> A
reveal_type(into_callable(A))
```
[gradual form]: https://typing.python.org/en/latest/spec/glossary.html#term-gradual-form

View File

@ -59,7 +59,7 @@ python-version = "3.11"
```py
from typing import Never
reveal_type(Never) # revealed: <special-form 'typing.Never'>
reveal_type(Never) # revealed: <special form 'typing.Never'>
```
### Python 3.10

View File

@ -146,10 +146,9 @@ Foo = NewType(name, int)
reveal_type(Foo) # revealed: <NewType pseudo-class 'Foo'>
```
## The base must be a class type or another newtype
## The second argument must be a class type or another newtype
Other typing constructs like `Union` are not _generally_ allowed. (However, see the next section for
a couple special cases.)
Other typing constructs like `Union` are not allowed.
```py
from typing_extensions import NewType
@ -168,61 +167,6 @@ on top of that:
Foo = NewType("Foo", 42)
```
## `float` and `complex` special cases
`float` and `complex` are subject to a special case in the typing spec, which we currently interpret
to mean that `float` in type position is `int | float`, and `complex` in type position is
`int | float | complex`. This is awkward for `NewType`, because as we just tested above, unions
aren't generally valid `NewType` bases. However, `float` and `complex` _are_ valid `NewType` bases,
and we accept the unions they expand into.
```py
from typing import NewType
Foo = NewType("Foo", float)
Foo(3.14)
Foo(42)
Foo("hello") # error: [invalid-argument-type] "Argument is incorrect: Expected `int | float`, found `Literal["hello"]`"
reveal_type(Foo(3.14).__class__) # revealed: type[int] | type[float]
reveal_type(Foo(42).__class__) # revealed: type[int] | type[float]
Bar = NewType("Bar", complex)
Bar(1 + 2j)
Bar(3.14)
Bar(42)
Bar("goodbye") # error: [invalid-argument-type]
reveal_type(Bar(1 + 2j).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(3.14).__class__) # revealed: type[int] | type[float] | type[complex]
reveal_type(Bar(42).__class__) # revealed: type[int] | type[float] | type[complex]
```
We don't currently try to distinguish between an implicit union (e.g. `float`) and the equivalent
explicit union (e.g. `int | float`), so these two explicit unions are also allowed. But again, most
unions are not allowed:
```py
Baz = NewType("Baz", int | float)
Baz = NewType("Baz", int | float | complex)
Baz = NewType("Baz", int | str) # error: [invalid-newtype] "invalid base for `typing.NewType`"
```
Similarly, a `NewType` of `float` or `complex` is valid as a `Callable` of the corresponding union
type:
```py
from collections.abc import Callable
def f(_: Callable[[int | float], Foo]): ...
f(Foo)
def g(_: Callable[[int | float | complex], Bar]): ...
g(Bar)
```
## A `NewType` definition must be a simple variable assignment
```py
@ -235,7 +179,7 @@ N: NewType = NewType("N", int) # error: [invalid-newtype] "A `NewType` definiti
Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's
important that we don't go into infinite recursive loops and crash with a stack overflow. In fact,
this is _why_ base type evaluation is deferred; otherwise Salsa itself would crash.
this is *why* base type evaluation is deferred; otherwise Salsa itself would crash.
```py
from typing_extensions import NewType, reveal_type, cast

View File

@ -194,7 +194,7 @@ reveal_type(B().name_does_not_matter()) # revealed: B
reveal_type(B().positional_only(1)) # revealed: B
reveal_type(B().keyword_only(x=1)) # revealed: B
# TODO: This should ideally be `B`
reveal_type(B().decorated_method()) # revealed: Self@decorated_method
reveal_type(B().decorated_method()) # revealed: Unknown
reveal_type(B().a_property) # revealed: B

View File

@ -152,20 +152,6 @@ The expressions in these string annotations aren't valid expressions in this con
shouldn't panic.
```py
# Regression test for https://github.com/astral-sh/ty/issues/1865
# error: [fstring-type-annotation]
stringified_fstring_with_conditional: "f'{1 if 1 else 1}'"
# error: [fstring-type-annotation]
stringified_fstring_with_boolean_expression: "f'{1 or 2}'"
# error: [fstring-type-annotation]
stringified_fstring_with_generator_expression: "f'{(i for i in range(5))}'"
# error: [fstring-type-annotation]
stringified_fstring_with_list_comprehension: "f'{[i for i in range(5)]}'"
# error: [fstring-type-annotation]
stringified_fstring_with_dict_comprehension: "f'{ {i: i for i in range(5)} }'"
# error: [fstring-type-annotation]
stringified_fstring_with_set_comprehension: "f'{ {i for i in range(5)} }'"
a: "1 or 2"
b: "(x := 1)"
# error: [invalid-type-form]

View File

@ -38,8 +38,6 @@ reveal_type(x) # revealed: int
## Unsupported types
<!-- snapshot-diagnostics -->
```py
class C:
def __isub__(self, other: str) -> int:

View File

@ -43,7 +43,9 @@ async def main():
loop = asyncio.get_event_loop()
with concurrent.futures.ThreadPoolExecutor() as pool:
result = await loop.run_in_executor(pool, blocking_function)
reveal_type(result) # revealed: int
# TODO: should be `int`
reveal_type(result) # revealed: Unknown
```
### `asyncio.Task`

View File

@ -1208,7 +1208,7 @@ def _(flag: bool):
reveal_type(C1.y) # revealed: int | str
C1.y = 100
# error: [invalid-assignment] "Object of type `Literal["problematic"]` is not assignable to attribute `y` on type `<class 'mdtest_snippet.<locals of function '_'>.C1 @ src/mdtest_snippet.py:3'> | <class 'mdtest_snippet.<locals of function '_'>.C1 @ src/mdtest_snippet.py:8'>`"
# error: [invalid-assignment] "Object of type `Literal["problematic"]` is not assignable to attribute `y` on type `<class 'C1'> | <class 'C1'>`"
C1.y = "problematic"
class C2:
@ -2162,8 +2162,8 @@ Some attributes are special-cased, however:
import types
from ty_extensions import static_assert, TypeOf, is_subtype_of
reveal_type(f.__get__) # revealed: <method-wrapper '__get__' of function 'f'>
reveal_type(f.__call__) # revealed: <method-wrapper '__call__' of function 'f'>
reveal_type(f.__get__) # revealed: <method-wrapper `__get__` of `f`>
reveal_type(f.__call__) # revealed: <method-wrapper `__call__` of `f`>
static_assert(is_subtype_of(TypeOf[f.__get__], types.MethodWrapperType))
static_assert(is_subtype_of(TypeOf[f.__call__], types.MethodWrapperType))
```

View File

@ -13,7 +13,7 @@ python-version = "3.10"
class A: ...
class B: ...
reveal_type(A | B) # revealed: <types.UnionType special-form 'A | B'>
reveal_type(A | B) # revealed: <types.UnionType special form 'A | B'>
```
## Union of two classes (prior to 3.10)
@ -43,14 +43,14 @@ class A: ...
class B: ...
def _(sub_a: type[A], sub_b: type[B]):
reveal_type(A | sub_b) # revealed: <types.UnionType special-form>
reveal_type(sub_a | B) # revealed: <types.UnionType special-form>
reveal_type(sub_a | sub_b) # revealed: <types.UnionType special-form>
reveal_type(A | sub_b) # revealed: <types.UnionType special form>
reveal_type(sub_a | B) # revealed: <types.UnionType special form>
reveal_type(sub_a | sub_b) # revealed: <types.UnionType special form>
class C[T]: ...
class D[T]: ...
reveal_type(C | D) # revealed: <types.UnionType special-form 'C[Unknown] | D[Unknown]'>
reveal_type(C | D) # revealed: <types.UnionType special form 'C[Unknown] | D[Unknown]'>
reveal_type(C[int] | D[str]) # revealed: <types.UnionType special-form 'C[int] | D[str]'>
reveal_type(C[int] | D[str]) # revealed: <types.UnionType special form 'C[int] | D[str]'>
```

View File

@ -79,31 +79,31 @@ reveal_type(Sub() & Sub()) # revealed: Literal["&"]
reveal_type(Sub() // Sub()) # revealed: Literal["//"]
# No does not implement any of the dunder methods.
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `No` and `No`"
reveal_type(No() + No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `-` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `-` is not supported between objects of type `No` and `No`"
reveal_type(No() - No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `*` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `*` is not supported between objects of type `No` and `No`"
reveal_type(No() * No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `@` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `@` is not supported between objects of type `No` and `No`"
reveal_type(No() @ No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `/` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `/` is not supported between objects of type `No` and `No`"
reveal_type(No() / No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `%` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `%` is not supported between objects of type `No` and `No`"
reveal_type(No() % No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `**` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `**` is not supported between objects of type `No` and `No`"
reveal_type(No() ** No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `<<` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `No` and `No`"
reveal_type(No() << No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `>>` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `No` and `No`"
reveal_type(No() >> No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `|` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `|` is not supported between objects of type `No` and `No`"
reveal_type(No() | No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `^` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `^` is not supported between objects of type `No` and `No`"
reveal_type(No() ^ No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `&` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `&` is not supported between objects of type `No` and `No`"
reveal_type(No() & No()) # revealed: Unknown
# error: [unsupported-operator] "Operator `//` is not supported between two objects of type `No`"
# error: [unsupported-operator] "Operator `//` is not supported between objects of type `No` and `No`"
reveal_type(No() // No()) # revealed: Unknown
# Yes does not implement any of the reflected dunder methods.
@ -293,8 +293,6 @@ reveal_type(Yes() // No()) # revealed: Literal["//"]
## Classes
<!-- snapshot-diagnostics -->
Dunder methods defined in a class are available to instances of that class, but not to the class
itself. (For these operators to work on the class itself, they would have to be defined on the
class's type, i.e. the metaclass, which is `type` by default.)
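To illustrate that distinction concretely (hypothetical classes, not from the original file), an operator works on a class object only when the dunder lives on the metaclass:

```py
class Meta(type):
    def __add__(cls, other: type) -> str:
        return "+"

class WithMetaAdd(metaclass=Meta): ...

# `WithMetaAdd + WithMetaAdd` looks up `__add__` on `type(WithMetaAdd)` (the
# metaclass), so it succeeds; an `__add__` defined inside `WithMetaAdd` itself
# would only apply to instances.
print(WithMetaAdd + WithMetaAdd)  # "+"
```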
@ -309,11 +307,11 @@ class Yes:
class Sub(Yes): ...
class No: ...
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'Yes'>`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'Yes'>` and `<class 'Yes'>`"
reveal_type(Yes + Yes) # revealed: Unknown
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'Sub'>`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'Sub'>` and `<class 'Sub'>`"
reveal_type(Sub + Sub) # revealed: Unknown
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'No'>`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'No'>` and `<class 'No'>`"
reveal_type(No + No) # revealed: Unknown
```
@ -338,11 +336,11 @@ def sub() -> type[Sub]:
def no() -> type[No]:
return No
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[Yes]`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Yes]` and `type[Yes]`"
reveal_type(yes() + yes()) # revealed: Unknown
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[Sub]`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Sub]` and `type[Sub]`"
reveal_type(sub() + sub()) # revealed: Unknown
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[No]`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[No]` and `type[No]`"
reveal_type(no() + no()) # revealed: Unknown
```
@ -352,54 +350,30 @@ reveal_type(no() + no()) # revealed: Unknown
def f():
pass
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f + f) # revealed: Unknown
# error: [unsupported-operator] "Operator `-` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `-` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f - f) # revealed: Unknown
# error: [unsupported-operator] "Operator `*` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `*` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f * f) # revealed: Unknown
# error: [unsupported-operator] "Operator `@` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `@` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f @ f) # revealed: Unknown
# error: [unsupported-operator] "Operator `/` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `/` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f / f) # revealed: Unknown
# error: [unsupported-operator] "Operator `%` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `%` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f % f) # revealed: Unknown
# error: [unsupported-operator] "Operator `**` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `**` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f**f) # revealed: Unknown
# error: [unsupported-operator] "Operator `<<` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f << f) # revealed: Unknown
# error: [unsupported-operator] "Operator `>>` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f >> f) # revealed: Unknown
# error: [unsupported-operator] "Operator `|` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `|` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f | f) # revealed: Unknown
# error: [unsupported-operator] "Operator `^` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `^` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f ^ f) # revealed: Unknown
# error: [unsupported-operator] "Operator `&` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `&` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f & f) # revealed: Unknown
# error: [unsupported-operator] "Operator `//` is not supported between two objects of type `def f() -> Unknown`"
# error: [unsupported-operator] "Operator `//` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
reveal_type(f // f) # revealed: Unknown
```
## Classes from different modules with the same name
We use the fully qualified names in diagnostics if the two classes have the same unqualified name,
but are nonetheless different.
<!-- snapshot-diagnostics -->
`mod1.py`:
```py
class A: ...
```
`mod2.py`:
```py
import mod1
class A: ...
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `mod2.A` and `mod1.A`"
A() + mod1.A()
```

View File

@ -412,7 +412,7 @@ class A:
def __init__(self):
self.__add__ = add_impl
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `A`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `A` and `A`"
# revealed: Unknown
reveal_type(A() + A())
```

View File

@ -18,7 +18,7 @@ cannot be added, because that would require addition of `int` and `str` or vice
def f2(i: int, s: str, int_or_str: int | str):
i + i
s + s
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `int | str`"
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | str` and `int | str`"
reveal_type(int_or_str + int_or_str) # revealed: Unknown
```

Some files were not shown because too many files have changed in this diff.