mirror of https://github.com/astral-sh/ruff
Compare commits
72 Commits
| SHA1 |
|---|
| 0bd7a94c27 |
| 421f88bb32 |
| b0eb39d112 |
| 260f463edd |
| 52849a5e68 |
| 2a61fe2353 |
| 764ad8b29b |
| 85af715880 |
| b0bc990cbf |
| ad3de4e488 |
| 2214a46139 |
| c02bd11b93 |
| eeaaa8e9fe |
| 7f7485d608 |
| d755f3b522 |
| 83168a1bb1 |
| 0f373603eb |
| cc23af944f |
| 0589700ca1 |
| 43d983ecae |
| 5c69bb564c |
| 89fed85a8d |
| 051f6896ac |
| 5b1d3ac9b9 |
| b2b0ad38ea |
| 01c0a3e960 |
| 5c942119f8 |
| 2acf1cc0fd |
| 4fdbe26445 |
| 682d29c256 |
| 8e13765b57 |
| 7d3b7c5754 |
| d6a5bbd91c |
| 1df6544ad8 |
| 4e1cf5747a |
| cbfecfaf41 |
| 8f530a7ab0 |
| 5372bb3440 |
| d08e414179 |
| 0b918ae4d5 |
| 9838f81baf |
| ba47349c2e |
| 04f9949711 |
| 8bc753b842 |
| c7eea1f2e3 |
| be8eb92946 |
| a544c59186 |
| bb464ed924 |
| f57917becd |
| 82a7598aa8 |
| e2ec2bc306 |
| b413a6dec4 |
| e19c050386 |
| 5a2aba237b |
| ca5f099481 |
| a722df6a73 |
| dec4154c8a |
| 69d1bfbebc |
| 90b29c9e87 |
| 0ebdebddd8 |
| d5546508cf |
| 3ac58b47bd |
| a2b138e789 |
| ff0ed4e752 |
| bc8efa2fd8 |
| 4249736d74 |
| 0181568fb5 |
| 8cc7c993de |
| 315bf80eed |
| 0138cd238a |
| 5e42926eee |
| ddb7645e9d |
@@ -60,7 +60,7 @@ jobs:
 env:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
 with:
 persist-credentials: false
 submodules: recursive
@@ -123,7 +123,7 @@ jobs:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
 steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
 with:
 persist-credentials: false
 submodules: recursive
@@ -174,7 +174,7 @@ jobs:
 outputs:
 val: ${{ steps.host.outputs.manifest }}
 steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
 with:
 persist-credentials: false
 submodules: recursive
@@ -250,7 +250,7 @@ jobs:
 env:
 GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 steps:
-- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3
+- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
 with:
 persist-credentials: false
 submodules: recursive
@@ -67,7 +67,7 @@ jobs:

 cd ..

-uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
+uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"

 ecosystem-analyzer \
 --repository ruff \
@@ -52,7 +52,7 @@ jobs:

 cd ..

-uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@55df3c868f3fa9ab34cff0498dd6106722aac205"
+uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@2e1816eac09c90140b1ba51d19afc5f59da460f5"

 ecosystem-analyzer \
 --verbose \
@@ -254,6 +254,21 @@ dependencies = [
 "syn",
 ]

+[[package]]
+name = "bit-set"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7"
+
 [[package]]
 name = "bitflags"
 version = "1.3.2"
@@ -944,6 +959,18 @@ dependencies = [
 "parking_lot_core",
 ]

+[[package]]
+name = "datatest-stable"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a867d7322eb69cf3a68a5426387a25b45cb3b9c5ee41023ee6cea92e2afadd82"
+dependencies = [
+ "camino",
+ "fancy-regex",
+ "libtest-mimic 0.8.1",
+ "walkdir",
+]
+
 [[package]]
 name = "derive-where"
 version = "1.6.0"
@@ -1138,6 +1165,17 @@ dependencies = [
 "windows-sys 0.61.0",
 ]

+[[package]]
+name = "fancy-regex"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e24cb5a94bcae1e5408b0effca5cd7172ea3c5755049c5f3af4cd283a165298"
+dependencies = [
+ "bit-set",
+ "regex-automata",
+ "regex-syntax",
+]
+
 [[package]]
 name = "fastrand"
 version = "2.3.0"
@@ -1625,7 +1663,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0"
 dependencies = [
 "console 0.15.11",
-"globset",
 "once_cell",
 "pest",
 "pest_derive",
@@ -1633,7 +1670,6 @@ dependencies = [
 "ron",
 "serde",
 "similar",
-"walkdir",
 ]

 [[package]]
@@ -1919,6 +1955,18 @@ dependencies = [
 "threadpool",
 ]

+[[package]]
+name = "libtest-mimic"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5297962ef19edda4ce33aaa484386e0a5b3d7f2f4e037cbeee00503ef6b29d33"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "clap",
+ "escape8259",
+]
+
 [[package]]
 name = "linux-raw-sys"
 version = "0.11.0"
@@ -3278,6 +3326,7 @@ dependencies = [
 "anyhow",
 "clap",
 "countme",
+"datatest-stable",
 "insta",
 "itertools 0.14.0",
 "memchr",
@@ -3347,6 +3396,7 @@ dependencies = [
 "bitflags 2.10.0",
 "bstr",
 "compact_str",
+"datatest-stable",
 "get-size2",
 "insta",
 "itertools 0.14.0",
@@ -4311,7 +4361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fe242ee9e646acec9ab73a5c540e8543ed1b107f0ce42be831e0775d423c396"
 dependencies = [
 "ignore",
-"libtest-mimic",
+"libtest-mimic 0.7.3",
 "snapbox",
 ]

@@ -4340,6 +4390,7 @@ dependencies = [
 "ruff_python_trivia",
 "salsa",
 "tempfile",
+"tikv-jemallocator",
 "toml",
 "tracing",
 "tracing-flame",
@@ -5,7 +5,7 @@ resolver = "2"
 [workspace.package]
 # Please update rustfmt.toml when bumping the Rust edition
 edition = "2024"
-rust-version = "1.89"
+rust-version = "1.90"
 homepage = "https://docs.astral.sh/ruff"
 documentation = "https://docs.astral.sh/ruff"
 repository = "https://github.com/astral-sh/ruff"
@@ -81,6 +81,7 @@ compact_str = "0.9.0"
 criterion = { version = "0.7.0", default-features = false }
 crossbeam = { version = "0.8.4" }
 dashmap = { version = "6.0.1" }
+datatest-stable = { version = "0.3.3" }
 dir-test = { version = "0.4.0" }
 dunce = { version = "1.0.5" }
 drop_bomb = { version = "0.1.5" }
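The `datatest-stable` dependency added above generates one libtest-compatible test per fixture file found on disk. As a rough sketch of the underlying idea only — this deliberately avoids the crate's own macros, and the directory, file extension, and check below are made up for illustration — a hand-rolled file-driven check looks like this:

```rust
use std::{fs, io, path::Path};

// Hypothetical per-file check: here we only require the fixture to be
// non-empty; a real harness would run the parser or linter on `source`.
fn check_fixture(source: &str) -> Result<(), String> {
    if source.trim().is_empty() {
        return Err("fixture is empty".to_string());
    }
    Ok(())
}

// Walk `dir` (non-recursively, to keep the sketch short) and run the check
// against every `.py` file, collecting failures instead of stopping early.
fn run_all(dir: &Path) -> io::Result<Vec<String>> {
    let mut failures = Vec::new();
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.extension().is_some_and(|ext| ext == "py") {
            let source = fs::read_to_string(&path)?;
            if let Err(msg) = check_fixture(&source) {
                failures.push(format!("{}: {msg}", path.display()));
            }
        }
    }
    Ok(failures)
}

fn main() -> io::Result<()> {
    // Directory name is illustrative only.
    let failures = run_all(Path::new("resources/test/fixtures"))?;
    assert!(failures.is_empty(), "failing fixtures:\n{}", failures.join("\n"));
    Ok(())
}
```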
@@ -57,8 +57,11 @@ Ruff is extremely actively developed and used in major open-source projects like

 ...and [many more](#whos-using-ruff).

-Ruff is backed by [Astral](https://astral.sh). Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff),
-or the original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
+Ruff is backed by [Astral](https://astral.sh), the creators of
+[uv](https://github.com/astral-sh/uv) and [ty](https://github.com/astral-sh/ty).

+Read the [launch post](https://astral.sh/blog/announcing-astral-the-company-behind-ruff), or the
+original [project announcement](https://notes.crmarsh.com/python-tooling-could-be-much-much-faster).
+
 ## Testimonials

@@ -10,7 +10,7 @@ use anyhow::bail;
 use clap::builder::Styles;
 use clap::builder::styling::{AnsiColor, Effects};
 use clap::builder::{TypedValueParser, ValueParserFactory};
-use clap::{Parser, Subcommand, command};
+use clap::{Parser, Subcommand};
 use colored::Colorize;
 use itertools::Itertools;
 use path_absolutize::path_dedot;
@@ -9,7 +9,7 @@ use std::sync::mpsc::channel;
 use anyhow::Result;
 use clap::CommandFactory;
 use colored::Colorize;
-use log::{error, warn};
+use log::error;
 use notify::{RecursiveMode, Watcher, recommended_watcher};

 use args::{GlobalConfigArgs, ServerCommand};
@@ -194,7 +194,7 @@ static SYMPY: Benchmark = Benchmark::new(
 max_dep_date: "2025-06-17",
 python_version: PythonVersion::PY312,
 },
-13030,
+13100,
 );

 static TANJUN: Benchmark = Benchmark::new(
@@ -223,7 +223,7 @@ static STATIC_FRAME: Benchmark = Benchmark::new(
 max_dep_date: "2025-08-09",
 python_version: PythonVersion::PY311,
 },
-950,
+1100,
 );

 #[track_caller]
@@ -144,8 +144,8 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');

 if let Some(deprecated) = &field.deprecated {
-output.push_str("> [!WARN] \"Deprecated\"\n");
-output.push_str("> This option has been deprecated");
+output.push_str("!!! warning \"Deprecated\"\n");
+output.push_str(" This option has been deprecated");

 if let Some(since) = deprecated.since {
 write!(output, " in {since}").unwrap();
@@ -166,8 +166,9 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');
 let _ = writeln!(output, "**Type**: `{}`", field.value_type);
 output.push('\n');
-output.push_str("**Example usage** (`pyproject.toml`):\n\n");
+output.push_str("**Example usage**:\n\n");
 output.push_str(&format_example(
+"pyproject.toml",
 &format_header(
 field.scope,
 field.example,
@@ -179,11 +180,11 @@ fn emit_field(output: &mut String, name: &str, field: &OptionField, parents: &[S
 output.push('\n');
 }

-fn format_example(header: &str, content: &str) -> String {
+fn format_example(title: &str, header: &str, content: &str) -> String {
 if header.is_empty() {
-format!("```toml\n{content}\n```\n",)
+format!("```toml title=\"{title}\"\n{content}\n```\n",)
 } else {
-format!("```toml\n{header}\n{content}\n```\n",)
+format!("```toml title=\"{title}\"\n{header}\n{content}\n```\n",)
 }
 }

@@ -39,7 +39,7 @@ impl Edit {

 /// Creates an edit that replaces the content in `range` with `content`.
 pub fn range_replacement(content: String, range: TextRange) -> Self {
-debug_assert!(!content.is_empty(), "Prefer `Fix::deletion`");
+debug_assert!(!content.is_empty(), "Prefer `Edit::deletion`");

 Self {
 content: Some(Box::from(content)),
@@ -337,7 +337,7 @@ macro_rules! best_fitting {
 #[cfg(test)]
 mod tests {
 use crate::prelude::*;
-use crate::{FormatState, SimpleFormatOptions, VecBuffer, write};
+use crate::{FormatState, SimpleFormatOptions, VecBuffer};

 struct TestFormat;

@@ -385,8 +385,8 @@ mod tests {

 #[test]
 fn best_fitting_variants_print_as_lists() {
+use crate::Formatted;
 use crate::prelude::*;
-use crate::{Formatted, format, format_args};

 // The second variant below should be selected when printing at a width of 30
 let formatted_best_fitting = format!(
@@ -132,7 +132,6 @@ async def c():
 # Non-errors
 ###

-# False-negative: RustPython doesn't parse the `\N{snowman}`.
 "\N{snowman} {}".format(a)

 "{".format(a)
@@ -276,3 +275,6 @@ if __name__ == "__main__":
 number = 0
 string = "{}".format(number := number + 1)
 print(string)
+
+# Unicode escape
+"\N{angle}AOB = {angle}°".format(angle=180)
@@ -138,5 +138,6 @@ with open("file.txt", encoding="utf-8") as f:
 with open("file.txt", encoding="utf-8") as f:
 contents = process_contents(f.read())

-with open("file.txt", encoding="utf-8") as f:
+with open("file1.txt", encoding="utf-8") as f:
 contents: str = process_contents(f.read())
@@ -0,0 +1,8 @@
+from pathlib import Path
+
+with Path("file.txt").open() as f:
+    contents = f.read()
+
+with Path("file.txt").open("r") as f:
+    contents = f.read()
@@ -0,0 +1,26 @@
+from pathlib import Path
+
+with Path("file.txt").open("w") as f:
+    f.write("test")
+
+with Path("file.txt").open("wb") as f:
+    f.write(b"test")
+
+with Path("file.txt").open(mode="w") as f:
+    f.write("test")
+
+with Path("file.txt").open("w", encoding="utf8") as f:
+    f.write("test")
+
+with Path("file.txt").open("w", errors="ignore") as f:
+    f.write("test")
+
+with Path(foo()).open("w") as f:
+    f.write("test")
+
+p = Path("file.txt")
+with p.open("w") as f:
+    f.write("test")
+
+with Path("foo", "bar", "baz").open("w") as f:
+    f.write("test")
crates/ruff_linter/resources/test/fixtures/semantic_errors/annotated_global.py (new file, 38 lines, vendored)
@@ -0,0 +1,38 @@
+a: int = 1
+def f1():
+    global a
+    a: str = "foo" # error
+
+b: int = 1
+def outer():
+    def inner():
+        global b
+        b: str = "nested" # error
+
+c: int = 1
+def f2():
+    global c
+    c: list[str] = [] # error
+
+d: int = 1
+def f3():
+    global d
+    d: str # error
+
+e: int = 1
+def f4():
+    e: str = "happy" # okay
+
+global f
+f: int = 1 # okay
+
+g: int = 1
+global g # error
+
+class C:
+    x: str
+    global x # error
+
+class D:
+    global x # error
+    x: str
@@ -286,12 +286,7 @@ pub(crate) fn add_argument(argument: &str, arguments: &Arguments, tokens: &Token

 /// Generic function to add a (regular) parameter to a function definition.
 pub(crate) fn add_parameter(parameter: &str, parameters: &Parameters, source: &str) -> Edit {
-if let Some(last) = parameters
-.args
-.iter()
-.filter(|arg| arg.default.is_none())
-.next_back()
-{
+if let Some(last) = parameters.args.iter().rfind(|arg| arg.default.is_none()) {
 // Case 1: at least one regular parameter, so append after the last one.
 Edit::insertion(format!(", {parameter}"), last.end())
 } else if !parameters.args.is_empty() {
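The refactor above replaces a `filter(...).next_back()` chain with `Iterator::rfind`, which searches a double-ended iterator from the back and returns the first element matching the predicate. A small self-contained sketch of the equivalence (standard library only; the data is made up for illustration):

```rust
fn main() {
    // Model parameters as (name, has_default) pairs; the real code inspects
    // `arg.default.is_none()` on AST nodes instead.
    let params = [("a", false), ("b", false), ("c", true)];

    // Old style: keep only parameters without a default, then take the last one.
    let via_filter = params.iter().filter(|p| !p.1).next_back();

    // New style: search from the back for the first parameter without a default.
    let via_rfind = params.iter().rfind(|p| !p.1);

    // Both find ("b", false), the last parameter without a default value.
    assert_eq!(via_filter, via_rfind);
    println!("{via_rfind:?}");
}
```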
@@ -1001,6 +1001,7 @@ mod tests {
 #[test_case(Path::new("write_to_debug.py"), PythonVersion::PY310)]
 #[test_case(Path::new("invalid_expression.py"), PythonVersion::PY312)]
 #[test_case(Path::new("global_parameter.py"), PythonVersion::PY310)]
+#[test_case(Path::new("annotated_global.py"), PythonVersion::PY314)]
 fn test_semantic_errors(path: &Path, python_version: PythonVersion) -> Result<()> {
 let snapshot = format!(
 "semantic_syntax_error_{}_{}",
@@ -70,7 +70,7 @@ fn is_open_call(func: &Expr, semantic: &SemanticModel) -> bool {
 }

 /// Returns `true` if an expression resolves to a call to `pathlib.Path.open`.
-fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
+pub(crate) fn is_open_call_from_pathlib(func: &Expr, semantic: &SemanticModel) -> bool {
 let Expr::Attribute(ast::ExprAttribute { attr, value, .. }) = func else {
 return false;
 };
@@ -18,7 +18,7 @@ mod async_zero_sleep;
 mod blocking_http_call;
 mod blocking_http_call_httpx;
 mod blocking_input;
-mod blocking_open_call;
+pub(crate) mod blocking_open_call;
 mod blocking_path_methods;
 mod blocking_process_invocation;
 mod blocking_sleep;
@@ -146,7 +146,7 @@ fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Resu
 let left = (*comparison.left).clone();

 // Copy the right side to the left side.
-comparison.left = Box::new(comparison.comparisons[0].comparator.clone());
+*comparison.left = comparison.comparisons[0].comparator.clone();

 // Copy the left side to the right side.
 comparison.comparisons[0].comparator = left;
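The change above writes the new value through the existing `Box` (`*comparison.left = ...`) instead of allocating a fresh `Box` and overwriting the field, so the original heap allocation is reused. A minimal illustration of the difference, using a made-up struct rather than the real AST types:

```rust
#[derive(Debug)]
struct Node {
    left: Box<i32>, // stand-in for the boxed expression in the real AST
}

fn main() {
    let mut node = Node { left: Box::new(1) };

    // Re-boxing: drops the old allocation and creates a new one.
    node.left = Box::new(2);

    // Writing through the box: reuses the existing allocation in place.
    *node.left = 3;

    println!("{node:?}"); // Node { left: 3 }
}
```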
@@ -902,56 +902,76 @@ help: Convert to f-string
[Snapshot update: a new UP032 diagnostic is added at UP032_0.py:135:1 for `"\N{snowman} {}".format(a)` with the fix `f"\N{snowman} {a}"`, and the existing diagnostics at lines 160 and 164 shift up by one line (now 159 and 163); their code excerpts and suggested f-string fixes are otherwise unchanged.]
@@ -960,392 +980,408 @@ help: Convert to f-string
[Snapshot update, continued: the remaining UP032 diagnostics (UP032_0.py:174 through :277 in the old snapshot) shift up by one line (now :173 through :276), with their code excerpts and suggested f-string fixes otherwise unchanged, and a new diagnostic is added at UP032_0.py:280:1 for `"\N{angle}AOB = {angle}°".format(angle=180)` with the fix `f"\N{angle}AOB = {180}°"`.]
@@ -3,10 +3,11 @@ use std::borrow::Cow;
 use ruff_python_ast::PythonVersion;
 use ruff_python_ast::{self as ast, Expr, name::Name, token::parenthesized_range};
 use ruff_python_codegen::Generator;
-use ruff_python_semantic::{BindingId, ResolvedReference, SemanticModel};
+use ruff_python_semantic::{ResolvedReference, SemanticModel};
 use ruff_text_size::{Ranged, TextRange};

 use crate::checkers::ast::Checker;
+use crate::rules::flake8_async::rules::blocking_open_call::is_open_call_from_pathlib;
 use crate::{Applicability, Edit, Fix};

 /// Format a code snippet to call `name.method()`.
@@ -119,14 +120,13 @@ impl OpenMode {
 pub(super) struct FileOpen<'a> {
 /// With item where the open happens, we use it for the reporting range.
 pub(super) item: &'a ast::WithItem,
-/// Filename expression used as the first argument in `open`, we use it in the diagnostic message.
-pub(super) filename: &'a Expr,
 /// The file open mode.
 pub(super) mode: OpenMode,
 /// The file open keywords.
 pub(super) keywords: Vec<&'a ast::Keyword>,
 /// We only check `open` operations whose file handles are used exactly once.
 pub(super) reference: &'a ResolvedReference,
+pub(super) argument: OpenArgument<'a>,
 }

 impl FileOpen<'_> {
@ -137,6 +137,45 @@ impl FileOpen<'_> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
|
pub(super) enum OpenArgument<'a> {
|
||||||
|
/// The filename argument to `open`, e.g. "foo.txt" in:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// f = open("foo.txt")
|
||||||
|
/// ```
|
||||||
|
Builtin { filename: &'a Expr },
|
||||||
|
/// The `Path` receiver of a `pathlib.Path.open` call, e.g. the `p` in the
|
||||||
|
/// context manager in:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// p = Path("foo.txt")
|
||||||
|
/// with p.open() as f: ...
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// or `Path("foo.txt")` in
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// with Path("foo.txt").open() as f: ...
|
||||||
|
/// ```
|
||||||
|
Pathlib { path: &'a Expr },
|
||||||
|
}
|
||||||
|
|
||||||
|
impl OpenArgument<'_> {
|
||||||
|
pub(super) fn display<'src>(&self, source: &'src str) -> &'src str {
|
||||||
|
&source[self.range()]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ranged for OpenArgument<'_> {
|
||||||
|
fn range(&self) -> TextRange {
|
||||||
|
match self {
|
||||||
|
OpenArgument::Builtin { filename } => filename.range(),
|
||||||
|
OpenArgument::Pathlib { path } => path.range(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Find and return all `open` operations in the given `with` statement.
|
/// Find and return all `open` operations in the given `with` statement.
|
||||||
pub(super) fn find_file_opens<'a>(
|
pub(super) fn find_file_opens<'a>(
|
||||||
with: &'a ast::StmtWith,
|
with: &'a ast::StmtWith,
|
||||||
|
|
@ -146,10 +185,65 @@ pub(super) fn find_file_opens<'a>(
|
||||||
) -> Vec<FileOpen<'a>> {
|
) -> Vec<FileOpen<'a>> {
|
||||||
with.items
|
with.items
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|item| find_file_open(item, with, semantic, read_mode, python_version))
|
.filter_map(|item| {
|
||||||
|
find_file_open(item, with, semantic, read_mode, python_version)
|
||||||
|
.or_else(|| find_path_open(item, with, semantic, read_mode, python_version))
|
||||||
|
})
|
||||||
.collect()
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn resolve_file_open<'a>(
|
||||||
|
item: &'a ast::WithItem,
|
||||||
|
with: &'a ast::StmtWith,
|
||||||
|
semantic: &'a SemanticModel<'a>,
|
||||||
|
read_mode: bool,
|
||||||
|
mode: OpenMode,
|
||||||
|
keywords: Vec<&'a ast::Keyword>,
|
||||||
|
argument: OpenArgument<'a>,
|
||||||
|
) -> Option<FileOpen<'a>> {
|
||||||
|
match mode {
|
||||||
|
OpenMode::ReadText | OpenMode::ReadBytes => {
|
||||||
|
if !read_mode {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
OpenMode::WriteText | OpenMode::WriteBytes => {
|
||||||
|
if read_mode {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let var = item.optional_vars.as_deref()?.as_name_expr()?;
|
||||||
|
let scope = semantic.current_scope();
|
||||||
|
|
||||||
|
let binding = scope.get_all(var.id.as_str()).find_map(|id| {
|
||||||
|
let b = semantic.binding(id);
|
||||||
|
(b.range() == var.range()).then_some(b)
|
||||||
|
})?;
|
||||||
|
let references: Vec<&ResolvedReference> = binding
|
||||||
|
.references
|
||||||
|
.iter()
|
||||||
|
.map(|id| semantic.reference(*id))
|
||||||
|
.filter(|reference| with.range().contains_range(reference.range()))
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let [reference] = references.as_slice() else {
|
||||||
|
return None;
|
||||||
|
};
|
||||||
|
|
||||||
|
Some(FileOpen {
|
||||||
|
item,
|
||||||
|
mode,
|
||||||
|
keywords,
|
||||||
|
reference,
|
||||||
|
argument,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
/// Find `open` operation in the given `with` item.
|
/// Find `open` operation in the given `with` item.
|
||||||
fn find_file_open<'a>(
|
fn find_file_open<'a>(
|
||||||
item: &'a ast::WithItem,
|
item: &'a ast::WithItem,
|
||||||
|
|
@ -165,8 +259,6 @@ fn find_file_open<'a>(
|
||||||
..
|
..
|
||||||
} = item.context_expr.as_call_expr()?;
|
} = item.context_expr.as_call_expr()?;
|
||||||
|
|
||||||
let var = item.optional_vars.as_deref()?.as_name_expr()?;
|
|
||||||
|
|
||||||
// Ignore calls with `*args` and `**kwargs`. In the exact case of `open(*filename, mode="w")`,
|
// Ignore calls with `*args` and `**kwargs`. In the exact case of `open(*filename, mode="w")`,
|
||||||
// it could be a match; but in all other cases, the call _could_ contain unsupported keyword
|
// it could be a match; but in all other cases, the call _could_ contain unsupported keyword
|
||||||
// arguments, like `buffering`.
|
// arguments, like `buffering`.
|
||||||
|
|
@ -187,58 +279,57 @@ fn find_file_open<'a>(
|
||||||
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
|
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
|
||||||
|
|
||||||
let mode = kw_mode.unwrap_or(pos_mode);
|
let mode = kw_mode.unwrap_or(pos_mode);
|
||||||
|
resolve_file_open(
|
||||||
match mode {
|
|
||||||
OpenMode::ReadText | OpenMode::ReadBytes => {
|
|
||||||
if !read_mode {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
OpenMode::WriteText | OpenMode::WriteBytes => {
|
|
||||||
if read_mode {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Path.read_bytes and Path.write_bytes do not support any kwargs.
|
|
||||||
if matches!(mode, OpenMode::ReadBytes | OpenMode::WriteBytes) && !keywords.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Now we need to find what is this variable bound to...
|
|
||||||
let scope = semantic.current_scope();
|
|
||||||
let bindings: Vec<BindingId> = scope.get_all(var.id.as_str()).collect();
|
|
||||||
|
|
||||||
let binding = bindings
|
|
||||||
.iter()
|
|
||||||
.map(|id| semantic.binding(*id))
|
|
||||||
// We might have many bindings with the same name, but we only care
|
|
||||||
// for the one we are looking at right now.
|
|
||||||
.find(|binding| binding.range() == var.range())?;
|
|
||||||
|
|
||||||
// Since many references can share the same binding, we can limit our attention span
|
|
||||||
// exclusively to the body of the current `with` statement.
|
|
||||||
let references: Vec<&ResolvedReference> = binding
|
|
||||||
.references
|
|
||||||
.iter()
|
|
||||||
.map(|id| semantic.reference(*id))
|
|
||||||
.filter(|reference| with.range().contains_range(reference.range()))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
// And even with all these restrictions, if the file handle gets used not exactly once,
|
|
||||||
// it doesn't fit the bill.
|
|
||||||
let [reference] = references.as_slice() else {
|
|
||||||
return None;
|
|
||||||
};
|
|
||||||
|
|
||||||
Some(FileOpen {
|
|
||||||
item,
|
item,
|
||||||
filename,
|
with,
|
||||||
|
semantic,
|
||||||
|
read_mode,
|
||||||
mode,
|
mode,
|
||||||
keywords,
|
keywords,
|
||||||
reference,
|
OpenArgument::Builtin { filename },
|
||||||
})
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn find_path_open<'a>(
|
||||||
|
item: &'a ast::WithItem,
|
||||||
|
with: &'a ast::StmtWith,
|
||||||
|
semantic: &'a SemanticModel<'a>,
|
||||||
|
read_mode: bool,
|
||||||
|
python_version: PythonVersion,
|
||||||
|
) -> Option<FileOpen<'a>> {
|
||||||
|
let ast::ExprCall {
|
||||||
|
func,
|
||||||
|
arguments: ast::Arguments { args, keywords, .. },
|
||||||
|
..
|
||||||
|
} = item.context_expr.as_call_expr()?;
|
||||||
|
if args.iter().any(Expr::is_starred_expr)
|
||||||
|
|| keywords.iter().any(|keyword| keyword.arg.is_none())
|
||||||
|
{
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
if !is_open_call_from_pathlib(func, semantic) {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
let attr = func.as_attribute_expr()?;
|
||||||
|
let mode = if args.is_empty() {
|
||||||
|
OpenMode::ReadText
|
||||||
|
} else {
|
||||||
|
match_open_mode(args.first()?)?
|
||||||
|
};
|
||||||
|
|
||||||
|
let (keywords, kw_mode) = match_open_keywords(keywords, read_mode, python_version)?;
|
||||||
|
let mode = kw_mode.unwrap_or(mode);
|
||||||
|
resolve_file_open(
|
||||||
|
item,
|
||||||
|
with,
|
||||||
|
semantic,
|
||||||
|
read_mode,
|
||||||
|
mode,
|
||||||
|
keywords,
|
||||||
|
OpenArgument::Pathlib {
|
||||||
|
path: attr.value.as_ref(),
|
||||||
|
},
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Match positional arguments. Return expression for the file name and open mode.
|
/// Match positional arguments. Return expression for the file name and open mode.
|
||||||
|
|
|
||||||
|
|
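For orientation, a minimal Python sketch of the two `with` shapes the new `OpenArgument` variants distinguish; the file name here is illustrative, not taken from the diff:

```python
from pathlib import Path

# OpenArgument::Builtin — the path is the first argument to the `open` builtin.
with open("data.txt") as f:
    contents = f.read()

# OpenArgument::Pathlib — the receiver of `Path.open()` carries the path instead.
with Path("data.txt").open() as f:
    contents = f.read()
```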
@@ -15,7 +15,8 @@ mod tests {
     use crate::test::test_path;
     use crate::{assert_diagnostics, settings};

-    #[test_case(Rule::ReadWholeFile, Path::new("FURB101.py"))]
+    #[test_case(Rule::ReadWholeFile, Path::new("FURB101_0.py"))]
+    #[test_case(Rule::ReadWholeFile, Path::new("FURB101_1.py"))]
     #[test_case(Rule::RepeatedAppend, Path::new("FURB113.py"))]
     #[test_case(Rule::IfExpInsteadOfOrOperator, Path::new("FURB110.py"))]
     #[test_case(Rule::ReimplementedOperator, Path::new("FURB118.py"))]
@@ -46,7 +47,8 @@ mod tests {
     #[test_case(Rule::MetaClassABCMeta, Path::new("FURB180.py"))]
     #[test_case(Rule::HashlibDigestHex, Path::new("FURB181.py"))]
     #[test_case(Rule::ListReverseCopy, Path::new("FURB187.py"))]
-    #[test_case(Rule::WriteWholeFile, Path::new("FURB103.py"))]
+    #[test_case(Rule::WriteWholeFile, Path::new("FURB103_0.py"))]
+    #[test_case(Rule::WriteWholeFile, Path::new("FURB103_1.py"))]
     #[test_case(Rule::FStringNumberFormat, Path::new("FURB116.py"))]
     #[test_case(Rule::SortedMinMax, Path::new("FURB192.py"))]
     #[test_case(Rule::SliceToRemovePrefixOrSuffix, Path::new("FURB188.py"))]
@@ -65,7 +67,7 @@ mod tests {
     #[test]
     fn write_whole_file_python_39() -> Result<()> {
         let diagnostics = test_path(
-            Path::new("refurb/FURB103.py"),
+            Path::new("refurb/FURB103_0.py"),
             &settings::LinterSettings::for_rule(Rule::WriteWholeFile)
                 .with_target_version(PythonVersion::PY39),
         )?;
@@ -10,7 +10,7 @@ use ruff_text_size::{Ranged, TextRange};
 use crate::checkers::ast::Checker;
 use crate::fix::snippet::SourceCodeSnippet;
 use crate::importer::ImportRequest;
-use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
+use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
 use crate::{FixAvailability, Violation};

 /// ## What it does
@@ -42,27 +42,41 @@ use crate::{FixAvailability, Violation};
 /// - [Python documentation: `Path.read_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.read_text)
 #[derive(ViolationMetadata)]
 #[violation_metadata(preview_since = "v0.1.2")]
-pub(crate) struct ReadWholeFile {
+pub(crate) struct ReadWholeFile<'a> {
     filename: SourceCodeSnippet,
     suggestion: SourceCodeSnippet,
+    argument: OpenArgument<'a>,
 }

-impl Violation for ReadWholeFile {
+impl Violation for ReadWholeFile<'_> {
     const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

     #[derive_message_formats]
     fn message(&self) -> String {
         let filename = self.filename.truncated_display();
         let suggestion = self.suggestion.truncated_display();
-        format!("`open` and `read` should be replaced by `Path({filename}).{suggestion}`")
+        match self.argument {
+            OpenArgument::Pathlib { .. } => {
+                format!(
+                    "`Path.open()` followed by `read()` can be replaced by `{filename}.{suggestion}`"
+                )
+            }
+            OpenArgument::Builtin { .. } => {
+                format!("`open` and `read` should be replaced by `Path({filename}).{suggestion}`")
+            }
+        }
     }

     fn fix_title(&self) -> Option<String> {
-        Some(format!(
-            "Replace with `Path({}).{}`",
-            self.filename.truncated_display(),
-            self.suggestion.truncated_display(),
-        ))
+        let filename = self.filename.truncated_display();
+        let suggestion = self.suggestion.truncated_display();
+        match self.argument {
+            OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
+            OpenArgument::Builtin { .. } => {
+                Some(format!("Replace with `Path({filename}).{suggestion}`"))
+            }
+        }
     }
 }

@@ -114,13 +128,13 @@ impl<'a> Visitor<'a> for ReadMatcher<'a, '_> {
             .position(|open| open.is_ref(read_from))
         {
             let open = self.candidates.remove(open);
+            let filename_display = open.argument.display(self.checker.source());
             let suggestion = make_suggestion(&open, self.checker.generator());
             let mut diagnostic = self.checker.report_diagnostic(
                 ReadWholeFile {
-                    filename: SourceCodeSnippet::from_str(
-                        &self.checker.generator().expr(open.filename),
-                    ),
+                    filename: SourceCodeSnippet::from_str(filename_display),
                     suggestion: SourceCodeSnippet::from_str(&suggestion),
+                    argument: open.argument,
                 },
                 open.item.range(),
             );
@@ -188,8 +202,6 @@ fn generate_fix(

     let locator = checker.locator();

-    let filename_code = locator.slice(open.filename.range());
-
     let (import_edit, binding) = checker
         .importer()
         .get_or_import_symbol(
@@ -206,10 +218,15 @@ fn generate_fix(
         [Stmt::Assign(ast::StmtAssign { targets, value, .. })] if value.range() == expr.range() => {
             match targets.as_slice() {
                 [Expr::Name(name)] => {
-                    format!(
-                        "{name} = {binding}({filename_code}).{suggestion}",
-                        name = name.id
-                    )
+                    let target = match open.argument {
+                        OpenArgument::Builtin { filename } => {
+                            let filename_code = locator.slice(filename.range());
+                            format!("{binding}({filename_code})")
+                        }
+                        OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
+                    };
+
+                    format!("{name} = {target}.{suggestion}", name = name.id)
                 }
                 _ => return None,
             }
@@ -223,8 +240,16 @@ fn generate_fix(
             }),
         ] if value.range() == expr.range() => match target.as_ref() {
             Expr::Name(name) => {
+                let target = match open.argument {
+                    OpenArgument::Builtin { filename } => {
+                        let filename_code = locator.slice(filename.range());
+                        format!("{binding}({filename_code})")
+                    }
+                    OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
+                };
+
                 format!(
-                    "{var}: {ann} = {binding}({filename_code}).{suggestion}",
+                    "{var}: {ann} = {target}.{suggestion}",
                     var = name.id,
                     ann = locator.slice(annotation.range())
                 )
@@ -9,7 +9,7 @@ use ruff_text_size::Ranged;
 use crate::checkers::ast::Checker;
 use crate::fix::snippet::SourceCodeSnippet;
 use crate::importer::ImportRequest;
-use crate::rules::refurb::helpers::{FileOpen, find_file_opens};
+use crate::rules::refurb::helpers::{FileOpen, OpenArgument, find_file_opens};
 use crate::{FixAvailability, Locator, Violation};

 /// ## What it does
@@ -42,26 +42,40 @@ use crate::{FixAvailability, Locator, Violation};
 /// - [Python documentation: `Path.write_text`](https://docs.python.org/3/library/pathlib.html#pathlib.Path.write_text)
 #[derive(ViolationMetadata)]
 #[violation_metadata(preview_since = "v0.3.6")]
-pub(crate) struct WriteWholeFile {
+pub(crate) struct WriteWholeFile<'a> {
     filename: SourceCodeSnippet,
     suggestion: SourceCodeSnippet,
+    argument: OpenArgument<'a>,
 }

-impl Violation for WriteWholeFile {
+impl Violation for WriteWholeFile<'_> {
     const FIX_AVAILABILITY: FixAvailability = FixAvailability::Sometimes;

     #[derive_message_formats]
     fn message(&self) -> String {
         let filename = self.filename.truncated_display();
         let suggestion = self.suggestion.truncated_display();
-        format!("`open` and `write` should be replaced by `Path({filename}).{suggestion}`")
+        match self.argument {
+            OpenArgument::Pathlib { .. } => {
+                format!(
+                    "`Path.open()` followed by `write()` can be replaced by `{filename}.{suggestion}`"
+                )
+            }
+            OpenArgument::Builtin { .. } => {
+                format!("`open` and `write` should be replaced by `Path({filename}).{suggestion}`")
+            }
+        }
     }
     fn fix_title(&self) -> Option<String> {
-        Some(format!(
-            "Replace with `Path({}).{}`",
-            self.filename.truncated_display(),
-            self.suggestion.truncated_display(),
-        ))
+        let filename = self.filename.truncated_display();
+        let suggestion = self.suggestion.truncated_display();
+        match self.argument {
+            OpenArgument::Pathlib { .. } => Some(format!("Replace with `{filename}.{suggestion}`")),
+            OpenArgument::Builtin { .. } => {
+                Some(format!("Replace with `Path({filename}).{suggestion}`"))
+            }
+        }
     }
 }

@@ -125,16 +139,15 @@ impl<'a> Visitor<'a> for WriteMatcher<'a, '_> {
             .position(|open| open.is_ref(write_to))
         {
             let open = self.candidates.remove(open);

             if self.loop_counter == 0 {
+                let filename_display = open.argument.display(self.checker.source());
                 let suggestion = make_suggestion(&open, content, self.checker.locator());

                 let mut diagnostic = self.checker.report_diagnostic(
                     WriteWholeFile {
-                        filename: SourceCodeSnippet::from_str(
-                            &self.checker.generator().expr(open.filename),
-                        ),
+                        filename: SourceCodeSnippet::from_str(filename_display),
                         suggestion: SourceCodeSnippet::from_str(&suggestion),
+                        argument: open.argument,
                     },
                     open.item.range(),
                 );
@@ -198,7 +211,6 @@ fn generate_fix(
     }

     let locator = checker.locator();
-    let filename_code = locator.slice(open.filename.range());

     let (import_edit, binding) = checker
         .importer()
@@ -209,7 +221,15 @@ fn generate_fix(
     )
     .ok()?;

-    let replacement = format!("{binding}({filename_code}).{suggestion}");
+    let target = match open.argument {
+        OpenArgument::Builtin { filename } => {
+            let filename_code = locator.slice(filename.range());
+            format!("{binding}({filename_code})")
+        }
+        OpenArgument::Pathlib { path } => locator.slice(path.range()).to_string(),
+    };
+
+    let replacement = format!("{target}.{suggestion}");

     let applicability = if checker.comment_ranges().intersects(with_stmt.range()) {
         Applicability::Unsafe
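A rough before/after sketch of the fix the extended check now offers for the `pathlib` form; the snapshot files below show the exact messages:

```python
from pathlib import Path

# Before: flagged, since the handle from Path.open() is only used for one write.
with Path("file.txt").open("w") as f:
    f.write("test")

# After: the suggested replacement keeps the original Path expression as the receiver.
Path("file.txt").write_text("test")
```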
@@ -2,7 +2,7 @@
 source: crates/ruff_linter/src/rules/refurb/mod.rs
 ---
 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
- --> FURB101.py:12:6
+ --> FURB101_0.py:12:6
   |
 11 | # FURB101
 12 | with open("file.txt") as f:
@@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").read_text()`
 16 | with open("file.txt", "rb") as f:

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
- --> FURB101.py:16:6
+ --> FURB101_0.py:16:6
   |
 15 | # FURB101
 16 | with open("file.txt", "rb") as f:
@@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").read_bytes()`
 20 | with open("file.txt", mode="rb") as f:

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_bytes()`
- --> FURB101.py:20:6
+ --> FURB101_0.py:20:6
   |
 19 | # FURB101
 20 | with open("file.txt", mode="rb") as f:
@@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").read_bytes()`
 24 | with open("file.txt", encoding="utf8") as f:

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf8")`
- --> FURB101.py:24:6
+ --> FURB101_0.py:24:6
   |
 23 | # FURB101
 24 | with open("file.txt", encoding="utf8") as f:
@@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf8")`
 28 | with open("file.txt", errors="ignore") as f:

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(errors="ignore")`
- --> FURB101.py:28:6
+ --> FURB101_0.py:28:6
   |
 27 | # FURB101
 28 | with open("file.txt", errors="ignore") as f:
@@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").read_text(errors="ignore")`
 32 | with open("file.txt", mode="r") as f: # noqa: FURB120

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text()`
- --> FURB101.py:32:6
+ --> FURB101_0.py:32:6
   |
 31 | # FURB101
 32 | with open("file.txt", mode="r") as f: # noqa: FURB120
@@ -147,7 +147,7 @@ help: Replace with `Path("file.txt").read_text()`
 note: This is an unsafe fix and may change runtime behavior

 FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
- --> FURB101.py:36:6
+ --> FURB101_0.py:36:6
   |
 35 | # FURB101
 36 | with open(foo(), "rb") as f:
@@ -158,7 +158,7 @@ FURB101 `open` and `read` should be replaced by `Path(foo()).read_bytes()`
 help: Replace with `Path(foo()).read_bytes()`

 FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
- --> FURB101.py:44:6
+ --> FURB101_0.py:44:6
   |
 43 | # FURB101
 44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@@ -169,7 +169,7 @@ FURB101 `open` and `read` should be replaced by `Path("a.txt").read_text()`
 help: Replace with `Path("a.txt").read_text()`

 FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
- --> FURB101.py:44:26
+ --> FURB101_0.py:44:26
   |
 43 | # FURB101
 44 | with open("a.txt") as a, open("b.txt", "rb") as b:
@@ -180,7 +180,7 @@ FURB101 `open` and `read` should be replaced by `Path("b.txt").read_bytes()`
 help: Replace with `Path("b.txt").read_bytes()`

 FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
- --> FURB101.py:49:18
+ --> FURB101_0.py:49:18
   |
 48 | # FURB101
 49 | with foo() as a, open("file.txt") as b, foo() as c:
@@ -191,7 +191,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text()`
 help: Replace with `Path("file.txt").read_text()`

 FURB101 [*] `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
- --> FURB101.py:130:6
+ --> FURB101_0.py:130:6
   |
 129 | # FURB101
 130 | with open("file.txt", encoding="utf-8") as f:
@@ -215,7 +215,7 @@ help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
 134 | with open("file.txt", encoding="utf-8") as f:

 FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
- --> FURB101.py:134:6
+ --> FURB101_0.py:134:6
   |
 133 | # FURB101 but no fix because it would remove the assignment to `x`
 134 | with open("file.txt", encoding="utf-8") as f:
@@ -225,7 +225,7 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
 help: Replace with `Path("file.txt").read_text(encoding="utf-8")`

 FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
- --> FURB101.py:138:6
+ --> FURB101_0.py:138:6
   |
 137 | # FURB101 but no fix because it would remove the `process_contents` call
 138 | with open("file.txt", encoding="utf-8") as f:
@@ -234,13 +234,13 @@ FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(enco
   |
 help: Replace with `Path("file.txt").read_text(encoding="utf-8")`

-FURB101 `open` and `read` should be replaced by `Path("file.txt").read_text(encoding="utf-8")`
+FURB101 `open` and `read` should be replaced by `Path("file1.txt").read_text(encoding="utf-8")`
- --> FURB101.py:141:6
+ --> FURB101_0.py:141:6
   |
 139 | contents = process_contents(f.read())
 140 |
-141 | with open("file.txt", encoding="utf-8") as f:
+141 | with open("file1.txt", encoding="utf-8") as f:
-  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 142 | contents: str = process_contents(f.read())
   |
-help: Replace with `Path("file.txt").read_text(encoding="utf-8")`
+help: Replace with `Path("file1.txt").read_text(encoding="utf-8")`
@@ -0,0 +1,39 @@
+---
+source: crates/ruff_linter/src/rules/refurb/mod.rs
+---
+FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
+ --> FURB101_1.py:4:6
+  |
+2 | from pathlib import Path
+3 |
+4 | with Path("file.txt").open() as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+5 | contents = f.read()
+  |
+help: Replace with `Path("file.txt").read_text()`
+1 |
+2 | from pathlib import Path
+3 |
+- with Path("file.txt").open() as f:
+- contents = f.read()
+4 + contents = Path("file.txt").read_text()
+5 |
+6 | with Path("file.txt").open("r") as f:
+7 | contents = f.read()
+
+FURB101 [*] `Path.open()` followed by `read()` can be replaced by `Path("file.txt").read_text()`
+ --> FURB101_1.py:7:6
+  |
+5 | contents = f.read()
+6 |
+7 | with Path("file.txt").open("r") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+8 | contents = f.read()
+  |
+help: Replace with `Path("file.txt").read_text()`
+4 | with Path("file.txt").open() as f:
+5 | contents = f.read()
+6 |
+- with Path("file.txt").open("r") as f:
+- contents = f.read()
+7 + contents = Path("file.txt").read_text()
@@ -2,7 +2,7 @@
 source: crates/ruff_linter/src/rules/refurb/mod.rs
 ---
 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
- --> FURB103.py:12:6
+ --> FURB103_0.py:12:6
   |
 11 | # FURB103
 12 | with open("file.txt", "w") as f:
@@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
 16 | with open("file.txt", "wb") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
- --> FURB103.py:16:6
+ --> FURB103_0.py:16:6
   |
 15 | # FURB103
 16 | with open("file.txt", "wb") as f:
@@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
 20 | with open("file.txt", mode="wb") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
- --> FURB103.py:20:6
+ --> FURB103_0.py:20:6
   |
 19 | # FURB103
 20 | with open("file.txt", mode="wb") as f:
@@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
 24 | with open("file.txt", "w", encoding="utf8") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
- --> FURB103.py:24:6
+ --> FURB103_0.py:24:6
   |
 23 | # FURB103
 24 | with open("file.txt", "w", encoding="utf8") as f:
@@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
 28 | with open("file.txt", "w", errors="ignore") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
- --> FURB103.py:28:6
+ --> FURB103_0.py:28:6
   |
 27 | # FURB103
 28 | with open("file.txt", "w", errors="ignore") as f:
@@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
 32 | with open("file.txt", mode="w") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
- --> FURB103.py:32:6
+ --> FURB103_0.py:32:6
   |
 31 | # FURB103
 32 | with open("file.txt", mode="w") as f:
@@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
 36 | with open(foo(), "wb") as f:

 FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
- --> FURB103.py:36:6
+ --> FURB103_0.py:36:6
   |
 35 | # FURB103
 36 | with open(foo(), "wb") as f:
@@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
 help: Replace with `Path(foo()).write_bytes(bar())`

 FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
- --> FURB103.py:44:6
+ --> FURB103_0.py:44:6
   |
 43 | # FURB103
 44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
 help: Replace with `Path("a.txt").write_text(x)`

 FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
- --> FURB103.py:44:31
+ --> FURB103_0.py:44:31
   |
 43 | # FURB103
 44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:
@@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
 help: Replace with `Path("b.txt").write_bytes(y)`

 FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
- --> FURB103.py:49:18
+ --> FURB103_0.py:49:18
   |
 48 | # FURB103
 49 | with foo() as a, open("file.txt", "w") as b, foo() as c:
@@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
 help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
- --> FURB103.py:58:6
+ --> FURB103_0.py:58:6
   |
 57 | # FURB103
 58 | with open("file.txt", "w", newline="\r\n") as f:
@@ -214,7 +214,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
 62 | import builtins

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
- --> FURB103.py:66:6
+ --> FURB103_0.py:66:6
   |
 65 | # FURB103
 66 | with builtins.open("file.txt", "w", newline="\r\n") as f:
@@ -237,7 +237,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
 70 | from builtins import open as o

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, newline="\r\n")`
- --> FURB103.py:74:6
+ --> FURB103_0.py:74:6
   |
 73 | # FURB103
 74 | with o("file.txt", "w", newline="\r\n") as f:
@@ -260,7 +260,7 @@ help: Replace with `Path("file.txt").write_text(foobar, newline="\r\n")`
 78 |

 FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
- --> FURB103.py:154:6
+ --> FURB103_0.py:154:6
   |
 152 | data = {"price": 100}
 153 |
@@ -284,7 +284,7 @@ help: Replace with `Path("test.json")....`
 158 | with open("tmp_path/pyproject.toml", "w") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
- --> FURB103.py:158:6
+ --> FURB103_0.py:158:6
   |
 157 | # See: https://github.com/astral-sh/ruff/issues/21381
 158 | with open("tmp_path/pyproject.toml", "w") as f:
@@ -0,0 +1,157 @@
+---
+source: crates/ruff_linter/src/rules/refurb/mod.rs
+---
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
+ --> FURB103_1.py:3:6
+  |
+1 | from pathlib import Path
+2 |
+3 | with Path("file.txt").open("w") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+4 | f.write("test")
+  |
+help: Replace with `Path("file.txt").write_text("test")`
+1 | from pathlib import Path
+2 |
+- with Path("file.txt").open("w") as f:
+- f.write("test")
+3 + Path("file.txt").write_text("test")
+4 |
+5 | with Path("file.txt").open("wb") as f:
+6 | f.write(b"test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_bytes(b"test")`
+ --> FURB103_1.py:6:6
+  |
+4 | f.write("test")
+5 |
+6 | with Path("file.txt").open("wb") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+7 | f.write(b"test")
+  |
+help: Replace with `Path("file.txt").write_bytes(b"test")`
+3 | with Path("file.txt").open("w") as f:
+4 | f.write("test")
+5 |
+- with Path("file.txt").open("wb") as f:
+- f.write(b"test")
+6 + Path("file.txt").write_bytes(b"test")
+7 |
+8 | with Path("file.txt").open(mode="w") as f:
+9 | f.write("test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test")`
+ --> FURB103_1.py:9:6
+  |
+7 | f.write(b"test")
+8 |
+9 | with Path("file.txt").open(mode="w") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+10 | f.write("test")
+  |
+help: Replace with `Path("file.txt").write_text("test")`
+6 | with Path("file.txt").open("wb") as f:
+7 | f.write(b"test")
+8 |
+- with Path("file.txt").open(mode="w") as f:
+- f.write("test")
+9 + Path("file.txt").write_text("test")
+10 |
+11 | with Path("file.txt").open("w", encoding="utf8") as f:
+12 | f.write("test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", encoding="utf8")`
+ --> FURB103_1.py:12:6
+  |
+10 | f.write("test")
+11 |
+12 | with Path("file.txt").open("w", encoding="utf8") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+13 | f.write("test")
+  |
+help: Replace with `Path("file.txt").write_text("test", encoding="utf8")`
+9 | with Path("file.txt").open(mode="w") as f:
+10 | f.write("test")
+11 |
+- with Path("file.txt").open("w", encoding="utf8") as f:
+- f.write("test")
+12 + Path("file.txt").write_text("test", encoding="utf8")
+13 |
+14 | with Path("file.txt").open("w", errors="ignore") as f:
+15 | f.write("test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("file.txt").write_text("test", errors="ignore")`
+ --> FURB103_1.py:15:6
+  |
+13 | f.write("test")
+14 |
+15 | with Path("file.txt").open("w", errors="ignore") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+16 | f.write("test")
+  |
+help: Replace with `Path("file.txt").write_text("test", errors="ignore")`
+12 | with Path("file.txt").open("w", encoding="utf8") as f:
+13 | f.write("test")
+14 |
+- with Path("file.txt").open("w", errors="ignore") as f:
+- f.write("test")
+15 + Path("file.txt").write_text("test", errors="ignore")
+16 |
+17 | with Path(foo()).open("w") as f:
+18 | f.write("test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path(foo()).write_text("test")`
+ --> FURB103_1.py:18:6
+  |
+16 | f.write("test")
+17 |
+18 | with Path(foo()).open("w") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+19 | f.write("test")
+  |
+help: Replace with `Path(foo()).write_text("test")`
+15 | with Path("file.txt").open("w", errors="ignore") as f:
+16 | f.write("test")
+17 |
+- with Path(foo()).open("w") as f:
+- f.write("test")
+18 + Path(foo()).write_text("test")
+19 |
+20 | p = Path("file.txt")
+21 | with p.open("w") as f:
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `p.write_text("test")`
+ --> FURB103_1.py:22:6
+  |
+21 | p = Path("file.txt")
+22 | with p.open("w") as f:
+  | ^^^^^^^^^^^^^^^^
+23 | f.write("test")
+  |
+help: Replace with `p.write_text("test")`
+19 | f.write("test")
+20 |
+21 | p = Path("file.txt")
+- with p.open("w") as f:
+- f.write("test")
+22 + p.write_text("test")
+23 |
+24 | with Path("foo", "bar", "baz").open("w") as f:
+25 | f.write("test")
+
+FURB103 [*] `Path.open()` followed by `write()` can be replaced by `Path("foo", "bar", "baz").write_text("test")`
+ --> FURB103_1.py:25:6
+  |
+23 | f.write("test")
+24 |
+25 | with Path("foo", "bar", "baz").open("w") as f:
+  | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+26 | f.write("test")
+  |
+help: Replace with `Path("foo", "bar", "baz").write_text("test")`
+22 | with p.open("w") as f:
+23 | f.write("test")
+24 |
+- with Path("foo", "bar", "baz").open("w") as f:
+- f.write("test")
+25 + Path("foo", "bar", "baz").write_text("test")
@ -2,7 +2,7 @@
|
||||||
source: crates/ruff_linter/src/rules/refurb/mod.rs
|
source: crates/ruff_linter/src/rules/refurb/mod.rs
|
||||||
---
|
---
|
||||||
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
|
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text("test")`
|
||||||
--> FURB103.py:12:6
|
--> FURB103_0.py:12:6
|
||||||
|
|
|
|
||||||
11 | # FURB103
|
11 | # FURB103
|
||||||
12 | with open("file.txt", "w") as f:
|
12 | with open("file.txt", "w") as f:
|
||||||
|
|
@ -26,7 +26,7 @@ help: Replace with `Path("file.txt").write_text("test")`
|
||||||
16 | with open("file.txt", "wb") as f:
|
16 | with open("file.txt", "wb") as f:
|
||||||
|
|
||||||
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
|
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(foobar)`
|
||||||
--> FURB103.py:16:6
|
--> FURB103_0.py:16:6
|
||||||
|
|
|
|
||||||
15 | # FURB103
|
15 | # FURB103
|
||||||
16 | with open("file.txt", "wb") as f:
|
16 | with open("file.txt", "wb") as f:
|
||||||
|
|
@ -50,7 +50,7 @@ help: Replace with `Path("file.txt").write_bytes(foobar)`
|
||||||
20 | with open("file.txt", mode="wb") as f:
|
20 | with open("file.txt", mode="wb") as f:
|
||||||
|
|
||||||
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
|
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_bytes(b"abc")`
|
||||||
--> FURB103.py:20:6
|
--> FURB103_0.py:20:6
|
||||||
|
|
|
|
||||||
19 | # FURB103
|
19 | # FURB103
|
||||||
20 | with open("file.txt", mode="wb") as f:
|
20 | with open("file.txt", mode="wb") as f:
|
||||||
|
|
@ -74,7 +74,7 @@ help: Replace with `Path("file.txt").write_bytes(b"abc")`
|
||||||
24 | with open("file.txt", "w", encoding="utf8") as f:
|
24 | with open("file.txt", "w", encoding="utf8") as f:
|
||||||
|
|
||||||
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
|
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, encoding="utf8")`
|
||||||
--> FURB103.py:24:6
|
--> FURB103_0.py:24:6
|
||||||
|
|
|
|
||||||
23 | # FURB103
|
23 | # FURB103
|
||||||
24 | with open("file.txt", "w", encoding="utf8") as f:
|
24 | with open("file.txt", "w", encoding="utf8") as f:
|
||||||
|
|
@ -98,7 +98,7 @@ help: Replace with `Path("file.txt").write_text(foobar, encoding="utf8")`
|
||||||
28 | with open("file.txt", "w", errors="ignore") as f:
|
28 | with open("file.txt", "w", errors="ignore") as f:
|
||||||
|
|
||||||
FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
|
 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar, errors="ignore")`
-  --> FURB103.py:28:6
+  --> FURB103_0.py:28:6
    |
 27 | # FURB103
 28 | with open("file.txt", "w", errors="ignore") as f:
    |

@ -122,7 +122,7 @@ help: Replace with `Path("file.txt").write_text(foobar, errors="ignore")`
 32 | with open("file.txt", mode="w") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("file.txt").write_text(foobar)`
-  --> FURB103.py:32:6
+  --> FURB103_0.py:32:6
    |
 31 | # FURB103
 32 | with open("file.txt", mode="w") as f:

@ -146,7 +146,7 @@ help: Replace with `Path("file.txt").write_text(foobar)`
 36 | with open(foo(), "wb") as f:

 FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())`
-  --> FURB103.py:36:6
+  --> FURB103_0.py:36:6
    |
 35 | # FURB103
 36 | with open(foo(), "wb") as f:

@ -157,7 +157,7 @@ FURB103 `open` and `write` should be replaced by `Path(foo()).write_bytes(bar())
 help: Replace with `Path(foo()).write_bytes(bar())`

 FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
-  --> FURB103.py:44:6
+  --> FURB103_0.py:44:6
    |
 43 | # FURB103
 44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:

@ -168,7 +168,7 @@ FURB103 `open` and `write` should be replaced by `Path("a.txt").write_text(x)`
 help: Replace with `Path("a.txt").write_text(x)`

 FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
-  --> FURB103.py:44:31
+  --> FURB103_0.py:44:31
    |
 43 | # FURB103
 44 | with open("a.txt", "w") as a, open("b.txt", "wb") as b:

@ -179,7 +179,7 @@ FURB103 `open` and `write` should be replaced by `Path("b.txt").write_bytes(y)`
 help: Replace with `Path("b.txt").write_bytes(y)`

 FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(bar(bar(a + x)))`
-  --> FURB103.py:49:18
+  --> FURB103_0.py:49:18
    |
 48 | # FURB103
 49 | with foo() as a, open("file.txt", "w") as b, foo() as c:

@ -190,7 +190,7 @@ FURB103 `open` and `write` should be replaced by `Path("file.txt").write_text(ba
 help: Replace with `Path("file.txt").write_text(bar(bar(a + x)))`

 FURB103 [*] `open` and `write` should be replaced by `Path("test.json")....`
-  --> FURB103.py:154:6
+  --> FURB103_0.py:154:6
    |
 152 | data = {"price": 100}
 153 |

@ -214,7 +214,7 @@ help: Replace with `Path("test.json")....`
 158 | with open("tmp_path/pyproject.toml", "w") as f:

 FURB103 [*] `open` and `write` should be replaced by `Path("tmp_path/pyproject.toml")....`
-  --> FURB103.py:158:6
+  --> FURB103_0.py:158:6
    |
 157 | # See: https://github.com/astral-sh/ruff/issues/21381
 158 | with open("tmp_path/pyproject.toml", "w") as f:
    |
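For context, FURB103 (as the snapshot messages above show) flags a `with open(..., "w")` block whose only effect is a single `write` call and suggests the `pathlib` one-liner instead. A minimal sketch of the before/after pair referenced by the first diagnostic:

```python
from pathlib import Path

foobar = "example text"

# Flagged by FURB103: open in write mode followed by a single write call.
with open("file.txt", "w", errors="ignore") as f:
    f.write(foobar)

# Suggested replacement from the diagnostic above.
Path("file.txt").write_text(foobar, errors="ignore")
```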
@ -0,0 +1,74 @@
---
source: crates/ruff_linter/src/linter.rs
---
invalid-syntax: annotated name `a` can't be global
 --> resources/test/fixtures/semantic_errors/annotated_global.py:4:5
  |
2 | def f1():
3 |     global a
4 |     a: str = "foo" # error
  |     ^
5 |
6 | b: int = 1
  |

invalid-syntax: annotated name `b` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:10:9
   |
 8 |     def inner():
 9 |         global b
10 |         b: str = "nested" # error
   |         ^
11 |
12 | c: int = 1
   |

invalid-syntax: annotated name `c` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:15:5
   |
13 | def f2():
14 |     global c
15 |     c: list[str] = [] # error
   |     ^
16 |
17 | d: int = 1
   |

invalid-syntax: annotated name `d` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:20:5
   |
18 | def f3():
19 |     global d
20 |     d: str # error
   |     ^
21 |
22 | e: int = 1
   |

invalid-syntax: annotated name `g` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:29:1
   |
27 | f: int = 1 # okay
28 |
29 | g: int = 1
   | ^
30 | global g # error
   |

invalid-syntax: annotated name `x` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:33:5
   |
32 | class C:
33 |     x: str
   |     ^
34 |     global x # error
   |

invalid-syntax: annotated name `x` can't be global
  --> resources/test/fixtures/semantic_errors/annotated_global.py:38:5
   |
36 | class D:
37 |     global x # error
38 |     x: str
   |     ^
   |
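The new snapshot above exercises the CPython rule that a name declared `global` in a scope cannot also carry an annotation in that scope. A quick, runnable reproduction of the same error (the fixture path above is the real test input; the message text follows CPython's wording):

```python
import textwrap

source = textwrap.dedent(
    """
    def f1():
        global a
        a: str = "foo"
    """
)

try:
    compile(source, "<annotated_global>", "exec")
except SyntaxError as error:
    print(error.msg)  # annotated name 'a' can't be global
```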
@ -1247,6 +1247,7 @@ impl<'a> Generator<'a> {
                 self.p_bytes_repr(&bytes_literal.value, bytes_literal.flags);
             }
         }
+        #[expect(clippy::eq_op)]
         Expr::NumberLiteral(ast::ExprNumberLiteral { value, .. }) => {
             static INF_STR: &str = "1e309";
             assert_eq!(f64::MAX_10_EXP, 308);
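The arm shown above asserts `f64::MAX_10_EXP == 308` and uses the literal `"1e309"` for infinity: any decimal literal beyond the largest double exponent parses as `inf`, so it round-trips an infinite float. The same fact, checked quickly in Python (whose floats are also IEEE 754 doubles):

```python
import sys

# 1e309 is beyond the largest double exponent (10**308), so it parses as infinity.
print(sys.float_info.max_10_exp)   # 308
print(float("1e309"))              # inf
print(1e309 == float("inf"))       # True
```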
@ -43,7 +43,8 @@ tracing = { workspace = true }
 [dev-dependencies]
 ruff_formatter = { workspace = true }

-insta = { workspace = true, features = ["glob"] }
+datatest-stable = { workspace = true }
+insta = { workspace = true }
 regex = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }

@ -54,8 +55,8 @@ similar = { workspace = true }
 ignored = ["ruff_cache"]

 [[test]]
-name = "ruff_python_formatter_fixtures"
-path = "tests/fixtures.rs"
+name = "fixtures"
+harness = false
 test = true
 required-features = ["serde"]
@ -125,6 +125,13 @@ lambda a, /, c: a
|
||||||
*x: x
|
*x: x
|
||||||
)
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda
|
||||||
|
# comment
|
||||||
|
*x,
|
||||||
|
**y: x
|
||||||
|
)
|
||||||
|
|
||||||
(
|
(
|
||||||
lambda
|
lambda
|
||||||
# comment 1
|
# comment 1
|
||||||
|
|
@ -196,6 +203,17 @@ lambda: ( # comment
|
||||||
x
|
x
|
||||||
)
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # 1
|
||||||
|
# 2
|
||||||
|
x, # 3
|
||||||
|
# 4
|
||||||
|
y
|
||||||
|
: # 5
|
||||||
|
# 6
|
||||||
|
x
|
||||||
|
)
|
||||||
|
|
||||||
(
|
(
|
||||||
lambda
|
lambda
|
||||||
x,
|
x,
|
||||||
|
|
@ -204,6 +222,71 @@ lambda: ( # comment
|
||||||
z
|
z
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Leading
|
||||||
|
lambda x: (
|
||||||
|
lambda y: lambda z: x
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ y
|
||||||
|
+ z # Trailing
|
||||||
|
) # Trailing
|
||||||
|
|
||||||
|
|
||||||
|
# Leading
|
||||||
|
lambda x: lambda y: lambda z: [
|
||||||
|
x,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
y,
|
||||||
|
z
|
||||||
|
] # Trailing
|
||||||
|
# Trailing
|
||||||
|
|
||||||
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs), e=1, f=2, g=2: d
|
lambda self, araa, kkkwargs=aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs), e=1, f=2, g=2: d
|
||||||
|
|
||||||
# Regression tests for https://github.com/astral-sh/ruff/issues/8179
|
# Regression tests for https://github.com/astral-sh/ruff/issues/8179
|
||||||
|
|
@ -228,6 +311,441 @@ def a():
|
||||||
g = 10
|
g = 10
|
||||||
)
|
)
|
||||||
|
|
||||||
|
def a():
|
||||||
|
return b(
|
||||||
|
c,
|
||||||
|
d,
|
||||||
|
e,
|
||||||
|
f=lambda self, *args, **kwargs: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
|
||||||
|
*args, **kwargs
|
||||||
|
) + 1,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Additional ecosystem cases from https://github.com/astral-sh/ruff/pull/21385
|
||||||
|
class C:
|
||||||
|
def foo():
|
||||||
|
mock_service.return_value.bucket.side_effect = lambda name: (
|
||||||
|
source_bucket
|
||||||
|
if name == source_bucket_name
|
||||||
|
else storage.Bucket(mock_service, destination_bucket_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
class C:
|
||||||
|
function_dict: Dict[Text, Callable[[CRFToken], Any]] = {
|
||||||
|
CRFEntityExtractorOptions.POS2: lambda crf_token: crf_token.pos_tag[:2]
|
||||||
|
if crf_token.pos_tag is not None
|
||||||
|
else None,
|
||||||
|
}
|
||||||
|
|
||||||
|
name = re.sub(r"[^\x21\x23-\x5b\x5d-\x7e]...............", lambda m: f"\\{m.group(0)}", p["name"])
|
||||||
|
|
||||||
|
def foo():
|
||||||
|
if True:
|
||||||
|
if True:
|
||||||
|
return (
|
||||||
|
lambda x: np.exp(cs(np.log(x.to(u.MeV).value))) * u.MeV * u.cm**2 / u.g
|
||||||
|
)
|
||||||
|
|
||||||
|
class C:
|
||||||
|
_is_recognized_dtype: Callable[[DtypeObj], bool] = lambda x: lib.is_np_dtype(
|
||||||
|
x, "M"
|
||||||
|
) or isinstance(x, DatetimeTZDtype)
|
||||||
|
|
||||||
|
class C:
|
||||||
|
def foo():
|
||||||
|
if True:
|
||||||
|
transaction_count = self._query_txs_for_range(
|
||||||
|
get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
|
||||||
|
chain_id=_chain_id,
|
||||||
|
from_ts=from_ts,
|
||||||
|
to_ts=to_ts,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
transaction_count = self._query_txs_for_range(
|
||||||
|
get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range[_chain_id, from_ts, to_ts],
|
||||||
|
)
|
||||||
|
|
||||||
|
def ddb():
|
||||||
|
sql = (
|
||||||
|
lambda var, table, n=N: f"""
|
||||||
|
CREATE TABLE {table} AS
|
||||||
|
SELECT ROW_NUMBER() OVER () AS id, {var}
|
||||||
|
FROM (
|
||||||
|
SELECT {var}
|
||||||
|
FROM RANGE({n}) _ ({var})
|
||||||
|
ORDER BY RANDOM()
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
long_assignment_target.with_attribute.and_a_slice[with_an_index] = ( # 1
|
||||||
|
# 2
|
||||||
|
lambda x, y, z: # 3
|
||||||
|
# 4
|
||||||
|
x + y + z # 5
|
||||||
|
# 6
|
||||||
|
)
|
||||||
|
|
||||||
|
long_assignment_target.with_attribute.and_a_slice[with_an_index] = (
|
||||||
|
lambda x, y, z: x + y + z
|
||||||
|
)
|
||||||
|
|
||||||
|
long_assignment_target.with_attribute.and_a_slice[with_an_index] = lambda x, y, z: x + y + z
|
||||||
|
|
||||||
|
very_long_variable_name_x, very_long_variable_name_y = lambda a: a + some_very_long_expression, lambda b: b * another_very_long_expression_here
|
||||||
|
|
||||||
|
very_long_variable_name_for_result += lambda x: very_long_function_call_that_should_definitely_be_parenthesized_now(x, more_args, additional_parameters)
|
||||||
|
|
||||||
|
|
||||||
|
if 1:
|
||||||
|
if 2:
|
||||||
|
if 3:
|
||||||
|
if self.location in EVM_EVMLIKE_LOCATIONS and database is not None:
|
||||||
|
exported_dict["notes"] = EVM_ADDRESS_REGEX.sub(
|
||||||
|
repl=lambda matched_address: self._maybe_add_label_with_address(
|
||||||
|
database=database,
|
||||||
|
matched_address=matched_address,
|
||||||
|
),
|
||||||
|
string=exported_dict["notes"],
|
||||||
|
)
|
||||||
|
|
||||||
|
class C:
|
||||||
|
def f():
|
||||||
|
return dict(
|
||||||
|
filter(
|
||||||
|
lambda intent_response: self.is_retrieval_intent_response(
|
||||||
|
intent_response
|
||||||
|
),
|
||||||
|
self.responses.items(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"op",
|
||||||
|
[
|
||||||
|
# Not fluent
|
||||||
|
param(
|
||||||
|
lambda left, right: (
|
||||||
|
ibis.timestamp("2017-04-01")
|
||||||
|
),
|
||||||
|
),
|
||||||
|
# These four are fluent and fit on one line inside the parenthesized
|
||||||
|
# lambda body
|
||||||
|
param(
|
||||||
|
lambda left, right: (
|
||||||
|
ibis.timestamp("2017-04-01").cast(dt.date)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
param(
|
||||||
|
lambda left, right: (
|
||||||
|
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date)),
|
||||||
|
param(lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)),
|
||||||
|
# This is too long on one line in the lambda body and gets wrapped
|
||||||
|
# inside the body.
|
||||||
|
param(
|
||||||
|
lambda left, right: (
|
||||||
|
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right).between(left, right)
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_string_temporal_compare_between(con, op, left, right): ...
|
||||||
|
|
||||||
|
[
|
||||||
|
(
|
||||||
|
lambda eval_df, _: MetricValue(
|
||||||
|
scores=eval_df["prediction"].tolist(),
|
||||||
|
aggregate_results={"prediction_sum": sum(eval_df["prediction"])},
|
||||||
|
)
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
# reuses the list parentheses
|
||||||
|
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
|
||||||
|
|
||||||
|
# adds parentheses around the body
|
||||||
|
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
|
||||||
|
|
||||||
|
# removes parentheses around the body
|
||||||
|
lambda xxxxxxxxxxxxxxxxxxxx: (xxxxxxxxxxxxxxxxxxxx + 1)
|
||||||
|
|
||||||
|
mapper = lambda x: dict_with_default[np.nan if isinstance(x, float) and np.isnan(x) else x]
|
||||||
|
|
||||||
|
lambda x, y, z: (
|
||||||
|
x + y + z
|
||||||
|
)
|
||||||
|
|
||||||
|
lambda x, y, z: (
|
||||||
|
x + y + z
|
||||||
|
# trailing body
|
||||||
|
)
|
||||||
|
|
||||||
|
lambda x, y, z: (
|
||||||
|
x + y + z # trailing eol body
|
||||||
|
)
|
||||||
|
|
||||||
|
lambda x, y, z: (
|
||||||
|
x + y + z
|
||||||
|
) # trailing lambda
|
||||||
|
|
||||||
|
lambda x, y, z: (
|
||||||
|
# leading body
|
||||||
|
x + y + z
|
||||||
|
)
|
||||||
|
|
||||||
|
lambda x, y, z: ( # leading eol body
|
||||||
|
x + y + z
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda name:
|
||||||
|
source_bucket # trailing eol comment
|
||||||
|
if name == source_bucket_name
|
||||||
|
else storage.Bucket(mock_service, destination_bucket_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda name:
|
||||||
|
# dangling header comment
|
||||||
|
source_bucket
|
||||||
|
if name == source_bucket_name
|
||||||
|
else storage.Bucket(mock_service, destination_bucket_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
x = (
|
||||||
|
lambda name:
|
||||||
|
# dangling header comment
|
||||||
|
source_bucket
|
||||||
|
if name == source_bucket_name
|
||||||
|
else storage.Bucket(mock_service, destination_bucket_name)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda name: # dangling header comment
|
||||||
|
(
|
||||||
|
source_bucket
|
||||||
|
if name == source_bucket_name
|
||||||
|
else storage.Bucket(mock_service, destination_bucket_name)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda from_ts, to_ts, _chain_id=chain_id: # dangling eol header comment
|
||||||
|
db_evmtx.count_transactions_in_range(
|
||||||
|
chain_id=_chain_id,
|
||||||
|
from_ts=from_ts,
|
||||||
|
to_ts=to_ts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda from_ts, to_ts, _chain_id=chain_id:
|
||||||
|
# dangling header comment before call
|
||||||
|
db_evmtx.count_transactions_in_range(
|
||||||
|
chain_id=_chain_id,
|
||||||
|
from_ts=from_ts,
|
||||||
|
to_ts=to_ts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda left, right:
|
||||||
|
# comment
|
||||||
|
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda left, right:
|
||||||
|
ibis.timestamp("2017-04-01") # comment
|
||||||
|
.cast(dt.date)
|
||||||
|
.between(left, right)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy:
|
||||||
|
# comment
|
||||||
|
[xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda x, y:
|
||||||
|
# comment
|
||||||
|
{
|
||||||
|
"key": x,
|
||||||
|
"another": y,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda x, y:
|
||||||
|
# comment
|
||||||
|
(
|
||||||
|
x,
|
||||||
|
y,
|
||||||
|
z
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda x:
|
||||||
|
# comment
|
||||||
|
dict_with_default[np.nan if isinstance(x, float) and np.isnan(x) else x]
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda from_ts, to_ts, _chain_id=chain_id:
|
||||||
|
db_evmtx.count_transactions_in_range[
|
||||||
|
# comment
|
||||||
|
_chain_id, from_ts, to_ts
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda
|
||||||
|
# comment
|
||||||
|
*args, **kwargs:
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # comment
|
||||||
|
*args, **kwargs:
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # comment 1
|
||||||
|
# comment 2
|
||||||
|
*args, **kwargs: # comment 3
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # comment 1
|
||||||
|
*args, **kwargs: # comment 3
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda *args, **kwargs:
|
||||||
|
# comment 1
|
||||||
|
( # comment 2
|
||||||
|
# comment 3
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*args, **kwargs) + 1 # comment 4
|
||||||
|
# comment 5
|
||||||
|
) # comment 6
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda *brgs, **kwargs:
|
||||||
|
# comment 1
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa( # comment 2
|
||||||
|
# comment 3
|
||||||
|
*brgs, **kwargs) + 1 # comment 4
|
||||||
|
# comment 5
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda *crgs, **kwargs: # comment 1
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*crgs, **kwargs) + 1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda *drgs, **kwargs: # comment 1
|
||||||
|
(
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*drgs, **kwargs) + 1
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda * # comment 1
|
||||||
|
ergs, **
|
||||||
|
# comment 2
|
||||||
|
kwargs # comment 3
|
||||||
|
: # comment 4
|
||||||
|
(
|
||||||
|
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(*ergs, **kwargs) + 1
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # 1
|
||||||
|
# 2
|
||||||
|
left, # 3
|
||||||
|
# 4
|
||||||
|
right: # 5
|
||||||
|
# 6
|
||||||
|
ibis.timestamp("2017-04-01").cast(dt.date).between(left, right)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda x: # outer comment 1
|
||||||
|
(
|
||||||
|
lambda y: # inner comment 1
|
||||||
|
# inner comment 2
|
||||||
|
lambda z: (
|
||||||
|
# innermost comment
|
||||||
|
x + y + z
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo(
|
||||||
|
lambda from_ts, # comment prevents collapsing the parameters to one line
|
||||||
|
to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
|
||||||
|
chain_id=_chain_id,
|
||||||
|
from_ts=from_ts,
|
||||||
|
to_ts=to_ts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
foo(
|
||||||
|
lambda from_ts, # but still wrap the body if it gets too long
|
||||||
|
to_ts,
|
||||||
|
_chain_id=chain_id: db_evmtx.count_transactions_in_rangeeeeeeeeeeeeeeeeeeeeeeeeeeeee(
|
||||||
|
chain_id=_chain_id,
|
||||||
|
from_ts=from_ts,
|
||||||
|
to_ts=to_ts,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
transform = lambda left, right: ibis.timestamp("2017-04-01").cast(dt.date).between(left, right).between(left, right) # trailing comment
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda: # comment
|
||||||
|
1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # comment
|
||||||
|
:
|
||||||
|
1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda:
|
||||||
|
# comment
|
||||||
|
1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda: # comment 1
|
||||||
|
# comment 2
|
||||||
|
1
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda # comment 1
|
||||||
|
# comment 2
|
||||||
|
: # comment 3
|
||||||
|
# comment 4
|
||||||
|
1
|
||||||
|
)
|
||||||
|
|
||||||
(
|
(
|
||||||
lambda
|
lambda
|
||||||
* # comment 2
|
* # comment 2
|
||||||
|
|
@ -271,3 +789,18 @@ def a():
|
||||||
x:
|
x:
|
||||||
x
|
x
|
||||||
)
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda: # dangling-end-of-line
|
||||||
|
# dangling-own-line
|
||||||
|
( # leading-body-end-of-line
|
||||||
|
x
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
(
|
||||||
|
lambda: # dangling-end-of-line
|
||||||
|
( # leading-body-end-of-line
|
||||||
|
x
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
||||||
crates/ruff_python_formatter/resources/test/fixtures/ruff/fluent.options.json (new file, vendored)
@ -0,0 +1 @@
[{"line_width":8}]
@ -0,0 +1,35 @@
# Fixtures for fluent formatting of call chains
# Note that `fluent.options.json` sets line width to 8


x = a.b()

x = a.b().c()

x = a.b().c().d

x = a.b.c.d().e()

x = a.b.c().d.e().f.g()

# Consecutive calls/subscripts are grouped together
# for the purposes of fluent formatting (though, as 2025.12.15,
# there may be a break inside of one of these
# calls/subscripts, but that is unrelated to the fluent format.)

x = a()[0]().b().c()

x = a.b()[0].c.d()[1]().e

# Parentheses affect both where the root of the call
# chain is and how many calls we require before applying
# fluent formatting (just 1, in the presence of a parenthesized
# root, as of 2025.12.15.)

x = (a).b()

x = (a()).b()

x = (a.b()).d.e()

x = (a.b().d).e()
@ -216,3 +216,69 @@ max_message_id = (
|
||||||
.baz()
|
.baz()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Note in preview we split at `pl` which some
|
||||||
|
# folks may dislike. (Similarly with common
|
||||||
|
# `np` and `pd` invocations).
|
||||||
|
#
|
||||||
|
# This is because we cannot reliably predict,
|
||||||
|
# just from syntax, whether a short identifier
|
||||||
|
# is being used as a 'namespace' or as an 'object'.
|
||||||
|
#
|
||||||
|
# As of 2025.12.15, we do not indent methods in
|
||||||
|
# fluent formatting. If we ever decide to do so,
|
||||||
|
# it may make sense to special case call chain roots
|
||||||
|
# that are shorter than the indent-width (like Prettier does).
|
||||||
|
# This would have the benefit of handling these common
|
||||||
|
# two-letter aliases for libraries.
|
||||||
|
|
||||||
|
|
||||||
|
expr = (
|
||||||
|
pl.scan_parquet("/data/pypi-parquet/*.parquet")
|
||||||
|
.filter(
|
||||||
|
[
|
||||||
|
pl.col("path").str.contains(
|
||||||
|
r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
|
||||||
|
),
|
||||||
|
~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
|
||||||
|
~pl.col("path").str.contains("/site-packages/", literal=True),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
.with_columns(
|
||||||
|
month=pl.col("uploaded_on").dt.truncate("1mo"),
|
||||||
|
ext=pl.col("path")
|
||||||
|
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
|
||||||
|
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
|
||||||
|
.str.replace_all(pattern="^f.*$", value="Fortran")
|
||||||
|
.str.replace("rs", "Rust", literal=True)
|
||||||
|
.str.replace("go", "Go", literal=True)
|
||||||
|
.str.replace("asm", "Assembly", literal=True)
|
||||||
|
.replace({"": None}),
|
||||||
|
)
|
||||||
|
.group_by(["month", "ext"])
|
||||||
|
.agg(project_count=pl.col("project_name").n_unique())
|
||||||
|
.drop_nulls(["ext"])
|
||||||
|
.sort(["month", "project_count"], descending=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
def indentation_matching_for_loop_in_preview():
|
||||||
|
if make_this:
|
||||||
|
if more_nested_because_line_length:
|
||||||
|
identical_hidden_layer_sizes = all(
|
||||||
|
current_hidden_layer_sizes == first_hidden_layer_sizes
|
||||||
|
for current_hidden_layer_sizes in self.component_config[
|
||||||
|
HIDDEN_LAYERS_SIZES
|
||||||
|
].values().attr
|
||||||
|
)
|
||||||
|
|
||||||
|
def indentation_matching_walrus_in_preview():
|
||||||
|
if make_this:
|
||||||
|
if more_nested_because_line_length:
|
||||||
|
with self.read_ctx(book_type) as cursor:
|
||||||
|
if (entry_count := len(names := cursor.execute(
|
||||||
|
'SELECT name FROM address_book WHERE address=?',
|
||||||
|
(address,),
|
||||||
|
).fetchall().some_attr)) == 0 or len(set(names)) > 1:
|
||||||
|
return
|
||||||
|
|
||||||
|
# behavior with parenthesized roots
|
||||||
|
x = (aaaaaaaaaaaaaaaaaaaaaa).bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc().dddddddddddddddddddddddd().eeeeeeeeeeee
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
use ruff_formatter::{Argument, Arguments, write};
|
use ruff_formatter::{Argument, Arguments, format_args, write};
|
||||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||||
|
|
||||||
use crate::context::{NodeLevel, WithNodeLevel};
|
use crate::context::{NodeLevel, WithNodeLevel};
|
||||||
|
|
@ -33,20 +33,27 @@ impl<'ast> Format<PyFormatContext<'ast>> for ParenthesizeIfExpands<'_, 'ast> {
|
||||||
{
|
{
|
||||||
let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f);
|
let mut f = WithNodeLevel::new(NodeLevel::ParenthesizedExpression, f);
|
||||||
|
|
||||||
write!(
|
if self.indent {
|
||||||
f,
|
let parens_id = f.group_id("indented_parenthesize_if_expands");
|
||||||
[group(&format_with(|f| {
|
group(&format_args![
|
||||||
if_group_breaks(&token("(")).fmt(f)?;
|
if_group_breaks(&token("(")),
|
||||||
|
indent_if_group_breaks(
|
||||||
if self.indent {
|
&format_args![soft_line_break(), &Arguments::from(&self.inner)],
|
||||||
soft_block_indent(&Arguments::from(&self.inner)).fmt(f)?;
|
parens_id
|
||||||
} else {
|
),
|
||||||
Arguments::from(&self.inner).fmt(f)?;
|
soft_line_break(),
|
||||||
}
|
if_group_breaks(&token(")"))
|
||||||
|
])
|
||||||
if_group_breaks(&token(")")).fmt(f)
|
.with_id(Some(parens_id))
|
||||||
}))]
|
.fmt(&mut f)
|
||||||
)
|
} else {
|
||||||
|
group(&format_args![
|
||||||
|
if_group_breaks(&token("(")),
|
||||||
|
Arguments::from(&self.inner),
|
||||||
|
if_group_breaks(&token(")")),
|
||||||
|
])
|
||||||
|
.fmt(&mut f)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
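The rewritten `ParenthesizeIfExpands` only matters when the wrapped content does not fit on one line. As a user-level illustration (the identifiers are made up and defined only so the snippet runs), the intended behavior is to leave a short value flat and to wrap a long one in parentheses with an indented body:

```python
a_very_long_operand_name = another_very_long_operand_name = a_third_long_operand = 1

# Fits: no parentheses are added.
short_sum = a_very_long_operand_name + 1

# Too long for the line: the formatter is expected to produce roughly
#
# long_sum = (
#     a_very_long_operand_name + another_very_long_operand_name + a_third_long_operand
# )
long_sum = a_very_long_operand_name + another_very_long_operand_name + a_third_long_operand
print(short_sum, long_sum)
```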
@ -3,7 +3,7 @@
 use std::path::{Path, PathBuf};

 use anyhow::{Context, Result};
-use clap::{Parser, ValueEnum, command};
+use clap::{Parser, ValueEnum};

 use ruff_formatter::SourceCode;
 use ruff_python_ast::{PySourceType, PythonVersion};
@ -10,6 +10,7 @@ use crate::expression::parentheses::{
     NeedsParentheses, OptionalParentheses, Parentheses, is_expression_parenthesized,
 };
 use crate::prelude::*;
+use crate::preview::is_fluent_layout_split_first_call_enabled;

 #[derive(Default)]
 pub struct FormatExprAttribute {

@ -47,20 +48,26 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
             )
         };

-        if call_chain_layout == CallChainLayout::Fluent {
+        if call_chain_layout.is_fluent() {
             if parenthesize_value {
                 // Don't propagate the call chain layout.
                 value.format().with_options(Parentheses::Always).fmt(f)?;
             } else {
                 match value.as_ref() {
                     Expr::Attribute(expr) => {
-                        expr.format().with_options(call_chain_layout).fmt(f)?;
+                        expr.format()
+                            .with_options(call_chain_layout.transition_after_attribute())
+                            .fmt(f)?;
                     }
                     Expr::Call(expr) => {
-                        expr.format().with_options(call_chain_layout).fmt(f)?;
+                        expr.format()
+                            .with_options(call_chain_layout.transition_after_attribute())
+                            .fmt(f)?;
                     }
                     Expr::Subscript(expr) => {
-                        expr.format().with_options(call_chain_layout).fmt(f)?;
+                        expr.format()
+                            .with_options(call_chain_layout.transition_after_attribute())
+                            .fmt(f)?;
                     }
                     _ => {
                         value.format().with_options(Parentheses::Never).fmt(f)?;

@ -105,8 +112,30 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
         // Allow the `.` on its own line if this is a fluent call chain
         // and the value either requires parenthesizing or is a call or subscript expression
         // (it's a fluent chain but not the first element).
-        else if call_chain_layout == CallChainLayout::Fluent {
-            if parenthesize_value || value.is_call_expr() || value.is_subscript_expr() {
+        //
+        // In preview we also break _at_ the first call in the chain.
+        // For example:
+        //
+        // ```diff
+        // # stable formatting vs. preview
+        // x = (
+        // -    df.merge()
+        // +    df
+        // +    .merge()
+        //      .groupby()
+        //      .agg()
+        //      .filter()
+        // )
+        // ```
+        else if call_chain_layout.is_fluent() {
+            if parenthesize_value
+                || value.is_call_expr()
+                || value.is_subscript_expr()
+                // Remember to update the doc-comment above when
+                // stabilizing this behavior.
+                || (is_fluent_layout_split_first_call_enabled(f.context())
+                    && call_chain_layout.is_first_call_like())
+            {
                 soft_line_break().fmt(f)?;
             }
         }

@ -148,8 +177,8 @@ impl FormatNodeRule<ExprAttribute> for FormatExprAttribute {
             )
         });

-        let is_call_chain_root = self.call_chain_layout == CallChainLayout::Default
-            && call_chain_layout == CallChainLayout::Fluent;
+        let is_call_chain_root =
+            self.call_chain_layout == CallChainLayout::Default && call_chain_layout.is_fluent();
         if is_call_chain_root {
             write!(f, [group(&format_inner)])
         } else {

@ -169,7 +198,8 @@ impl NeedsParentheses for ExprAttribute {
             self.into(),
             context.comments().ranges(),
             context.source(),
-        ) == CallChainLayout::Fluent
+        )
+        .is_fluent()
         {
             OptionalParentheses::Multiline
         } else if context.comments().has_dangling(self) {
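The comment added above describes the preview change gated by `is_fluent_layout_split_first_call_enabled`: in a fluent chain, a break is also allowed before the first call. A rough illustration (the chain below uses real `str` methods so it runs as-is; the expected layout is paraphrased from the comment, not taken from a snapshot):

```python
text = "  Hello, Ruff  "

result = (
    text.strip()
    .lower()
    .replace("ruff", "formatter")
    .title()
)

# With the preview split-first-call behavior, the same chain is also allowed to
# break before the first call, roughly:
#
# result = (
#     text
#     .strip()
#     .lower()
#     .replace("ruff", "formatter")
#     .title()
# )
print(result)
```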
@ -47,7 +47,10 @@ impl FormatNodeRule<ExprCall> for FormatExprCall {
             func.format().with_options(Parentheses::Always).fmt(f)
         } else {
             match func.as_ref() {
-                Expr::Attribute(expr) => expr.format().with_options(call_chain_layout).fmt(f),
+                Expr::Attribute(expr) => expr
+                    .format()
+                    .with_options(call_chain_layout.decrement_call_like_count())
+                    .fmt(f),
                 Expr::Call(expr) => expr.format().with_options(call_chain_layout).fmt(f),
                 Expr::Subscript(expr) => expr.format().with_options(call_chain_layout).fmt(f),
                 _ => func.format().with_options(Parentheses::Never).fmt(f),

@ -67,9 +70,7 @@ impl FormatNodeRule<ExprCall> for FormatExprCall {
             // queryset.distinct().order_by(field.name).values_list(field_name_flat_long_long=True)
             // )
             // ```
-            if call_chain_layout == CallChainLayout::Fluent
-                && self.call_chain_layout == CallChainLayout::Default
-            {
+            if call_chain_layout.is_fluent() && self.call_chain_layout == CallChainLayout::Default {
                 group(&fmt_func).fmt(f)
             } else {
                 fmt_func.fmt(f)

@ -87,7 +88,8 @@ impl NeedsParentheses for ExprCall {
             self.into(),
             context.comments().ranges(),
             context.source(),
-        ) == CallChainLayout::Fluent
+        )
+        .is_fluent()
         {
             OptionalParentheses::Multiline
         } else if context.comments().has_dangling(self) {
@ -1,15 +1,21 @@
|
||||||
use ruff_formatter::write;
|
use ruff_formatter::{FormatRuleWithOptions, RemoveSoftLinesBuffer, format_args, write};
|
||||||
use ruff_python_ast::AnyNodeRef;
|
use ruff_python_ast::{AnyNodeRef, Expr, ExprLambda};
|
||||||
use ruff_python_ast::ExprLambda;
|
|
||||||
use ruff_text_size::Ranged;
|
use ruff_text_size::Ranged;
|
||||||
|
|
||||||
use crate::comments::dangling_comments;
|
use crate::builders::parenthesize_if_expands;
|
||||||
use crate::expression::parentheses::{NeedsParentheses, OptionalParentheses};
|
use crate::comments::{SourceComment, dangling_comments, leading_comments, trailing_comments};
|
||||||
|
use crate::expression::parentheses::{
|
||||||
|
NeedsParentheses, OptionalParentheses, Parentheses, is_expression_parenthesized,
|
||||||
|
};
|
||||||
|
use crate::expression::{CallChainLayout, has_own_parentheses};
|
||||||
use crate::other::parameters::ParametersParentheses;
|
use crate::other::parameters::ParametersParentheses;
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
use crate::preview::is_parenthesize_lambda_bodies_enabled;
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct FormatExprLambda;
|
pub struct FormatExprLambda {
|
||||||
|
layout: ExprLambdaLayout,
|
||||||
|
}
|
||||||
|
|
||||||
impl FormatNodeRule<ExprLambda> for FormatExprLambda {
|
impl FormatNodeRule<ExprLambda> for FormatExprLambda {
|
||||||
fn fmt_fields(&self, item: &ExprLambda, f: &mut PyFormatter) -> FormatResult<()> {
|
fn fmt_fields(&self, item: &ExprLambda, f: &mut PyFormatter) -> FormatResult<()> {
|
||||||
|
|
@ -20,13 +26,19 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
|
||||||
body,
|
body,
|
||||||
} = item;
|
} = item;
|
||||||
|
|
||||||
|
let body = &**body;
|
||||||
|
let parameters = parameters.as_deref();
|
||||||
|
|
||||||
let comments = f.context().comments().clone();
|
let comments = f.context().comments().clone();
|
||||||
let dangling = comments.dangling(item);
|
let dangling = comments.dangling(item);
|
||||||
|
let preview = is_parenthesize_lambda_bodies_enabled(f.context());
|
||||||
|
|
||||||
write!(f, [token("lambda")])?;
|
write!(f, [token("lambda")])?;
|
||||||
|
|
||||||
if let Some(parameters) = parameters {
|
// Format any dangling comments before the parameters, but save any dangling comments after
|
||||||
// In this context, a dangling comment can either be a comment between the `lambda` the
|
// the parameters/after the header to be formatted with the body below.
|
||||||
|
let dangling_header_comments = if let Some(parameters) = parameters {
|
||||||
|
// In this context, a dangling comment can either be a comment between the `lambda` and the
|
||||||
// parameters, or a comment between the parameters and the body.
|
// parameters, or a comment between the parameters and the body.
|
||||||
let (dangling_before_parameters, dangling_after_parameters) = dangling
|
let (dangling_before_parameters, dangling_after_parameters) = dangling
|
||||||
.split_at(dangling.partition_point(|comment| comment.end() < parameters.start()));
|
.split_at(dangling.partition_point(|comment| comment.end() < parameters.start()));
|
||||||
|
|
@ -86,7 +98,7 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
|
||||||
// *x: x
|
// *x: x
|
||||||
// )
|
// )
|
||||||
// ```
|
// ```
|
||||||
if comments.has_leading(&**parameters) {
|
if comments.has_leading(parameters) {
|
||||||
hard_line_break().fmt(f)?;
|
hard_line_break().fmt(f)?;
|
||||||
} else {
|
} else {
|
||||||
write!(f, [space()])?;
|
write!(f, [space()])?;
|
||||||
|
|
@ -95,32 +107,90 @@ impl FormatNodeRule<ExprLambda> for FormatExprLambda {
|
||||||
write!(f, [dangling_comments(dangling_before_parameters)])?;
|
write!(f, [dangling_comments(dangling_before_parameters)])?;
|
||||||
}
|
}
|
||||||
|
|
||||||
write!(
|
// Try to keep the parameters on a single line, unless there are intervening comments.
|
||||||
f,
|
if preview && !comments.contains_comments(parameters.into()) {
|
||||||
[parameters
|
let mut buffer = RemoveSoftLinesBuffer::new(f);
|
||||||
.format()
|
write!(
|
||||||
.with_options(ParametersParentheses::Never)]
|
buffer,
|
||||||
)?;
|
[parameters
|
||||||
|
.format()
|
||||||
write!(f, [token(":")])?;
|
.with_options(ParametersParentheses::Never)]
|
||||||
|
)?;
|
||||||
if dangling_after_parameters.is_empty() {
|
|
||||||
write!(f, [space()])?;
|
|
||||||
} else {
|
} else {
|
||||||
write!(f, [dangling_comments(dangling_after_parameters)])?;
|
write!(
|
||||||
|
f,
|
||||||
|
[parameters
|
||||||
|
.format()
|
||||||
|
.with_options(ParametersParentheses::Never)]
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dangling_after_parameters
|
||||||
} else {
|
} else {
|
||||||
write!(f, [token(":")])?;
|
dangling
|
||||||
|
};
|
||||||
|
|
||||||
// In this context, a dangling comment is a comment between the `lambda` and the body.
|
write!(f, [token(":")])?;
|
||||||
if dangling.is_empty() {
|
|
||||||
write!(f, [space()])?;
|
if dangling_header_comments.is_empty() {
|
||||||
} else {
|
write!(f, [space()])?;
|
||||||
write!(f, [dangling_comments(dangling)])?;
|
} else if !preview {
|
||||||
}
|
write!(f, [dangling_comments(dangling_header_comments)])?;
|
||||||
}
|
}
|
||||||
|
|
||||||
write!(f, [body.format()])
|
if !preview {
|
||||||
|
return body.format().fmt(f);
|
||||||
|
}
|
||||||
|
|
||||||
|
let fmt_body = FormatBody {
|
||||||
|
body,
|
||||||
|
dangling_header_comments,
|
||||||
|
};
|
||||||
|
|
||||||
|
match self.layout {
|
||||||
|
ExprLambdaLayout::Assignment => fits_expanded(&fmt_body).fmt(f),
|
||||||
|
ExprLambdaLayout::Default => fmt_body.fmt(f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Default, Copy, Clone)]
|
||||||
|
pub enum ExprLambdaLayout {
|
||||||
|
#[default]
|
||||||
|
Default,
|
||||||
|
|
||||||
|
/// The [`ExprLambda`] is the direct child of an assignment expression, so it needs to use
|
||||||
|
/// `fits_expanded` to prefer parenthesizing its own body before the assignment tries to
|
||||||
|
/// parenthesize the whole lambda. For example, we want this formatting:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// long_assignment_target = lambda x, y, z: (
|
||||||
|
/// x + y + z
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// instead of either of these:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// long_assignment_target = (
|
||||||
|
/// lambda x, y, z: (
|
||||||
|
/// x + y + z
|
||||||
|
/// )
|
||||||
|
/// )
|
||||||
|
///
|
||||||
|
/// long_assignment_target = (
|
||||||
|
/// lambda x, y, z: x + y + z
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
Assignment,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FormatRuleWithOptions<ExprLambda, PyFormatContext<'_>> for FormatExprLambda {
|
||||||
|
type Options = ExprLambdaLayout;
|
||||||
|
|
||||||
|
fn with_options(mut self, options: Self::Options) -> Self {
|
||||||
|
self.layout = options;
|
||||||
|
self
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -137,3 +207,267 @@ impl NeedsParentheses for ExprLambda {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
struct FormatBody<'a> {
|
||||||
|
body: &'a Expr,
|
||||||
|
|
||||||
|
/// Dangling comments attached to the lambda header that should be formatted with the body.
|
||||||
|
///
|
||||||
|
/// These can include both own-line and end-of-line comments. For lambdas with parameters, this
|
||||||
|
/// means comments after the parameters:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// (
|
||||||
|
/// lambda x, y # 1
|
||||||
|
/// # 2
|
||||||
|
/// : # 3
|
||||||
|
/// # 4
|
||||||
|
/// x + y
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// Or all dangling comments for lambdas without parameters:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// (
|
||||||
|
/// lambda # 1
|
||||||
|
/// # 2
|
||||||
|
/// : # 3
|
||||||
|
/// # 4
|
||||||
|
/// 1
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// In most cases these should formatted within the parenthesized body, as in:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// (
|
||||||
|
/// lambda: ( # 1
|
||||||
|
/// # 2
|
||||||
|
/// # 3
|
||||||
|
/// # 4
|
||||||
|
/// 1
|
||||||
|
/// )
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// or without `# 2`:
|
||||||
|
///
|
||||||
|
/// ```py
|
||||||
|
/// (
|
||||||
|
/// lambda: ( # 1 # 3
|
||||||
|
/// # 4
|
||||||
|
/// 1
|
||||||
|
/// )
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
|
dangling_header_comments: &'a [SourceComment],
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Format<PyFormatContext<'_>> for FormatBody<'_> {
|
||||||
|
fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
|
||||||
|
let FormatBody {
|
||||||
|
dangling_header_comments,
|
||||||
|
body,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
let body = *body;
|
||||||
|
let comments = f.context().comments().clone();
|
||||||
|
let body_comments = comments.leading_dangling_trailing(body);
|
||||||
|
|
||||||
|
if !dangling_header_comments.is_empty() {
|
||||||
|
// Split the dangling header comments into trailing comments formatted with the lambda
|
||||||
|
// header (1) and leading comments formatted with the body (2, 3, 4).
|
||||||
|
//
|
||||||
|
// ```python
|
||||||
|
// (
|
||||||
|
// lambda # 1
|
||||||
|
// # 2
|
||||||
|
// : # 3
|
||||||
|
// # 4
|
||||||
|
// y
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// Note that these are split based on their line position rather than using
|
||||||
|
// `partition_point` based on a range, for example.
|
||||||
|
let (trailing_header_comments, leading_body_comments) = dangling_header_comments
|
||||||
|
.split_at(
|
||||||
|
dangling_header_comments
|
||||||
|
.iter()
|
||||||
|
.position(|comment| comment.line_position().is_own_line())
|
||||||
|
.unwrap_or(dangling_header_comments.len()),
|
||||||
|
);
|
||||||
|
|
||||||
|
// If the body is parenthesized and has its own leading comments, preserve the
|
||||||
|
// separation between the dangling lambda comments and the body comments. For
|
||||||
|
// example, preserve this comment positioning:
|
||||||
|
//
|
||||||
|
// ```python
|
||||||
|
// (
|
||||||
|
// lambda: # 1
|
||||||
|
// # 2
|
||||||
|
// ( # 3
|
||||||
|
// x
|
||||||
|
// )
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// 1 and 2 are dangling on the lambda and emitted first, followed by a hard line
|
||||||
|
// break and the parenthesized body with its leading comments.
|
||||||
|
//
|
||||||
|
// However, when removing 2, 1 and 3 can instead be formatted on the same line:
|
||||||
|
//
|
||||||
|
// ```python
|
||||||
|
// (
|
||||||
|
// lambda: ( # 1 # 3
|
||||||
|
// x
|
||||||
|
// )
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
let comments = f.context().comments();
|
||||||
|
if is_expression_parenthesized(body.into(), comments.ranges(), f.context().source())
|
||||||
|
&& comments.has_leading(body)
|
||||||
|
{
|
||||||
|
trailing_comments(dangling_header_comments).fmt(f)?;
|
||||||
|
|
||||||
|
// Note that `leading_body_comments` have already been formatted as part of
|
||||||
|
// `dangling_header_comments` above, but their presence still determines the spacing
|
||||||
|
// here.
|
||||||
|
if leading_body_comments.is_empty() {
|
||||||
|
space().fmt(f)?;
|
||||||
|
} else {
|
||||||
|
hard_line_break().fmt(f)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
body.format().with_options(Parentheses::Always).fmt(f)
|
||||||
|
} else {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
[
|
||||||
|
space(),
|
||||||
|
token("("),
|
||||||
|
trailing_comments(trailing_header_comments),
|
||||||
|
block_indent(&format_args!(
|
||||||
|
leading_comments(leading_body_comments),
|
||||||
|
body.format().with_options(Parentheses::Never)
|
||||||
|
)),
|
||||||
|
token(")")
|
||||||
|
]
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// If the body has comments, we always want to preserve the parentheses. This also
|
||||||
|
// ensures that we correctly handle parenthesized comments, and don't need to worry
|
||||||
|
// about them in the implementation below.
|
||||||
|
else if body_comments.has_leading() || body_comments.has_trailing_own_line() {
|
||||||
|
body.format().with_options(Parentheses::Always).fmt(f)
|
||||||
|
}
|
||||||
|
// Calls and subscripts require special formatting because they have their own
|
||||||
|
// parentheses, but they can also have an arbitrary amount of text before the
|
||||||
|
// opening parenthesis. We want to avoid cases where we keep a long callable on the
|
||||||
|
// same line as the lambda parameters. For example, `db_evmtx...` in:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// transaction_count = self._query_txs_for_range(
|
||||||
|
// get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: db_evmtx.count_transactions_in_range(
|
||||||
|
// chain_id=_chain_id,
|
||||||
|
// from_ts=from_ts,
|
||||||
|
// to_ts=to_ts,
|
||||||
|
// ),
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// should cause the whole lambda body to be parenthesized instead:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// transaction_count = self._query_txs_for_range(
|
||||||
|
// get_count_fn=lambda from_ts, to_ts, _chain_id=chain_id: (
|
||||||
|
// db_evmtx.count_transactions_in_range(
|
||||||
|
// chain_id=_chain_id,
|
||||||
|
// from_ts=from_ts,
|
||||||
|
// to_ts=to_ts,
|
||||||
|
// )
|
||||||
|
// ),
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
else if matches!(body, Expr::Call(_) | Expr::Subscript(_)) {
|
||||||
|
let unparenthesized = body.format().with_options(Parentheses::Never);
|
||||||
|
if CallChainLayout::from_expression(
|
||||||
|
body.into(),
|
||||||
|
comments.ranges(),
|
||||||
|
f.context().source(),
|
||||||
|
)
|
||||||
|
.is_fluent()
|
||||||
|
{
|
||||||
|
parenthesize_if_expands(&unparenthesized).fmt(f)
|
||||||
|
} else {
|
||||||
|
let unparenthesized = unparenthesized.memoized();
|
||||||
|
if unparenthesized.inspect(f)?.will_break() {
|
||||||
|
expand_parent().fmt(f)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
best_fitting![
|
||||||
|
// body all flat
|
||||||
|
unparenthesized,
|
||||||
|
// body expanded
|
||||||
|
group(&unparenthesized).should_expand(true),
|
||||||
|
// parenthesized
|
||||||
|
format_args![token("("), block_indent(&unparenthesized), token(")")]
|
||||||
|
]
|
||||||
|
.fmt(f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// For other cases with their own parentheses, such as lists, sets, dicts, tuples,
|
||||||
|
// etc., we can just format the body directly. Their own formatting results in the
|
||||||
|
// lambda being formatted well too. For example:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz]
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// gets formatted as:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: [
|
||||||
|
// xxxxxxxxxxxxxxxxxxxx,
|
||||||
|
// yyyyyyyyyyyyyyyyyyyy,
|
||||||
|
// zzzzzzzzzzzzzzzzzzzz
|
||||||
|
// ]
|
||||||
|
// ```
|
||||||
|
else if has_own_parentheses(body, f.context()).is_some() {
|
||||||
|
body.format().fmt(f)
|
||||||
|
}
|
||||||
|
// Finally, for expressions without their own parentheses, use
|
||||||
|
// `parenthesize_if_expands` to add parentheses around the body, only if it expands
|
||||||
|
// across multiple lines. The `Parentheses::Never` here also removes unnecessary
|
||||||
|
// parentheses around lambda bodies that fit on one line. For example:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// is formatted as:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx, yyyyyyyyyyyyyyyyyyyy, zzzzzzzzzzzzzzzzzzzz: (
|
||||||
|
// xxxxxxxxxxxxxxxxxxxx + yyyyyyyyyyyyyyyyyyyy + zzzzzzzzzzzzzzzzzzzz
|
||||||
|
// )
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// while
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx: (xxxxxxxxxxxxxxxxxxxx + 1)
|
||||||
|
// ```
|
||||||
|
//
|
||||||
|
// is formatted as:
|
||||||
|
//
|
||||||
|
// ```py
|
||||||
|
// lambda xxxxxxxxxxxxxxxxxxxx: xxxxxxxxxxxxxxxxxxxx + 1
|
||||||
|
// ```
|
||||||
|
else {
|
||||||
|
parenthesize_if_expands(&body.format().with_options(Parentheses::Never)).fmt(f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
||||||
|
|
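The `ExprLambda` changes above (gated by `is_parenthesize_lambda_bodies_enabled`) prefer parenthesizing the lambda body over parenthesizing the whole lambda when a line overflows. A hedged illustration of the intended shape, using made-up names defined only so the snippet runs:

```python
some_very_long_assignment_target_name = lambda x, y, z: x + y + z

# When the right-hand side no longer fits, the preview style described above is
# expected to keep the `lambda` on the assignment line and wrap only the body:
#
# some_very_long_assignment_target_name = lambda first_value, second_value, third_value: (
#     first_value + second_value + third_value
# )
print(some_very_long_assignment_target_name(1, 2, 3))
```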
@ -51,7 +51,10 @@ impl FormatNodeRule<ExprSubscript> for FormatExprSubscript {
             value.format().with_options(Parentheses::Always).fmt(f)
         } else {
             match value.as_ref() {
-                Expr::Attribute(expr) => expr.format().with_options(call_chain_layout).fmt(f),
+                Expr::Attribute(expr) => expr
+                    .format()
+                    .with_options(call_chain_layout.decrement_call_like_count())
+                    .fmt(f),
                 Expr::Call(expr) => expr.format().with_options(call_chain_layout).fmt(f),
                 Expr::Subscript(expr) => expr.format().with_options(call_chain_layout).fmt(f),
                 _ => value.format().with_options(Parentheses::Never).fmt(f),

@ -71,8 +74,8 @@ impl FormatNodeRule<ExprSubscript> for FormatExprSubscript {
             .fmt(f)
         });

-        let is_call_chain_root = self.call_chain_layout == CallChainLayout::Default
-            && call_chain_layout == CallChainLayout::Fluent;
+        let is_call_chain_root =
+            self.call_chain_layout == CallChainLayout::Default && call_chain_layout.is_fluent();
         if is_call_chain_root {
             write!(f, [group(&format_inner)])
         } else {

@ -92,7 +95,8 @@ impl NeedsParentheses for ExprSubscript {
             self.into(),
             context.comments().ranges(),
             context.source(),
-        ) == CallChainLayout::Fluent
+        )
+        .is_fluent()
         {
             OptionalParentheses::Multiline
         } else if is_expression_parenthesized(
@ -876,6 +876,22 @@ impl<'a> First<'a> {
|
||||||
/// )
|
/// )
|
||||||
/// ).all()
|
/// ).all()
|
||||||
/// ```
|
/// ```
|
||||||
|
///
|
||||||
|
/// In [`preview`](crate::preview::is_fluent_layout_split_first_call_enabled), we also track the position of the leftmost call or
|
||||||
|
/// subscript on an attribute in the chain and break just before the dot.
|
||||||
|
///
|
||||||
|
/// So, for example, the right-hand summand in the above expression
|
||||||
|
/// would get formatted as:
|
||||||
|
/// ```python
|
||||||
|
/// Blog.objects
|
||||||
|
/// .filter(
|
||||||
|
/// entry__headline__contains="McCartney",
|
||||||
|
/// )
|
||||||
|
/// .limit_results[:10]
|
||||||
|
/// .filter(
|
||||||
|
/// entry__pub_date__year=2010,
|
||||||
|
/// )
|
||||||
|
/// ```
|
||||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
|
||||||
pub enum CallChainLayout {
|
pub enum CallChainLayout {
|
||||||
/// The root of a call chain
|
/// The root of a call chain
|
||||||
|
|
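The hunk above documents the preview fluent layout, and the next hunk adds `AttributeState`, which tracks how many calls or subscripts sit to the left of the current position while the chain is traversed from right to left. A rough, illustrative re-implementation of that counting idea in Python (this is not the formatter's code, and it ignores the parenthesized-root caveat described below):

```python
import ast

def call_like_attribute_values(expr: ast.expr) -> int:
    """Count attribute values in a call chain that are themselves calls or subscripts."""
    count = 0
    node = expr
    while isinstance(node, (ast.Call, ast.Subscript, ast.Attribute)):
        if isinstance(node, ast.Attribute) and isinstance(node.value, (ast.Call, ast.Subscript)):
            count += 1
        # Walk from right to left: a call steps to its callee, an attribute or
        # subscript steps to the object it is applied to.
        node = node.func if isinstance(node, ast.Call) else node.value
    return count

chain = ast.parse("a().b().c[0]()().d()", mode="eval").body
print(call_like_attribute_values(chain))  # 3, matching the example in the comments below
```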
@ -883,19 +899,149 @@ pub enum CallChainLayout {
|
||||||
Default,
|
Default,
|
||||||
|
|
||||||
/// A nested call chain element that uses fluent style.
|
/// A nested call chain element that uses fluent style.
|
||||||
Fluent,
|
Fluent(AttributeState),
|
||||||
|
|
||||||
/// A nested call chain element not using fluent style.
|
/// A nested call chain element not using fluent style.
|
||||||
NonFluent,
|
NonFluent,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Records information about the current position within
|
||||||
|
/// a call chain.
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||||
|
pub enum AttributeState {
|
||||||
|
/// Stores the number of calls or subscripts
|
||||||
|
/// to the left of the current position in a chain.
|
||||||
|
///
|
||||||
|
/// Consecutive calls/subscripts on a single
|
||||||
|
/// object only count once. For example, if we are at
|
||||||
|
/// `c` in `a.b()[0]()().c()` then this number would be 1.
|
||||||
|
///
|
||||||
|
/// Caveat: If the root of the chain is parenthesized,
|
||||||
|
/// it contributes +1 to this count, even if it is not
|
||||||
|
/// a call or subscript. But the name
|
||||||
|
/// `CallLikeOrParenthesizedRootPreceding`
|
||||||
|
/// is a tad unwieldy, and this also rarely occurs.
|
||||||
|
CallLikePreceding(u32),
|
||||||
|
/// Indicates that we are at the first called or
|
||||||
|
/// subscripted object in the chain
|
||||||
|
///
|
||||||
|
/// For example, if we are at `b` in `a.b()[0]()().c()`
|
||||||
|
FirstCallLike,
|
||||||
|
/// Indicates that we are to the left of the first
|
||||||
|
/// called or subscripted object in the chain, and therefore
|
||||||
|
/// need not break.
|
||||||
|
///
|
||||||
|
/// For example, if we are at `a` in `a.b()[0]()().c()`
|
||||||
|
BeforeFirstCallLike,
|
||||||
|
}
|
||||||
|
|
||||||
 impl CallChainLayout {
+    /// Returns a new state with the count of remaining calls/subscripts
+    /// to traverse decremented, or the state `FirstCallLike`, as appropriate.
+    #[must_use]
+    pub(crate) fn decrement_call_like_count(self) -> Self {
+        match self {
+            Self::Fluent(AttributeState::CallLikePreceding(x)) => {
+                if x > 1 {
+                    // Recall that we traverse call chains from right to
+                    // left. So after moving from a call/subscript into
+                    // an attribute, we _decrease_ the count of
+                    // _remaining_ calls or subscripts to the left of our
+                    // current position.
+                    Self::Fluent(AttributeState::CallLikePreceding(x - 1))
+                } else {
+                    Self::Fluent(AttributeState::FirstCallLike)
+                }
+            }
+            _ => self,
+        }
+    }
+
+    /// Returns with the state change
+    /// `FirstCallLike` -> `BeforeFirstCallLike`
+    /// and otherwise returns unchanged.
+    #[must_use]
+    pub(crate) fn transition_after_attribute(self) -> Self {
+        match self {
+            Self::Fluent(AttributeState::FirstCallLike) => {
+                Self::Fluent(AttributeState::BeforeFirstCallLike)
+            }
+            _ => self,
+        }
+    }
+
+    pub(crate) fn is_first_call_like(self) -> bool {
+        matches!(self, Self::Fluent(AttributeState::FirstCallLike))
+    }
+
+    /// Returns either `Fluent` or `NonFluent` depending on a
+    /// heuristic computed for the whole chain.
+    ///
+    /// Explicitly, the criterion to return `Fluent` is
+    /// as follows:
+    ///
+    /// 1. Beginning from the right (i.e. the `expr` itself),
+    ///    traverse inwards past calls, subscripts, and attribute
+    ///    expressions until we meet the first expression that is
+    ///    either none of these or else is parenthesized. This will
+    ///    be the _root_ of the call chain.
+    /// 2. Count the number of _attribute values_ that are _called
+    ///    or subscripted_ in the chain (note that this includes the
+    ///    root but excludes the rightmost attribute in the chain since
+    ///    it is not the _value_ of some attribute).
+    /// 3. If the root is parenthesized, add 1 to that value.
+    /// 4. If the total is at least 2, return `Fluent`. Otherwise
+    ///    return `NonFluent`.
     pub(crate) fn from_expression(
         mut expr: ExprRef,
         comment_ranges: &CommentRanges,
         source: &str,
     ) -> Self {
-        let mut attributes_after_parentheses = 0;
+        // TODO(dylan): Once the fluent layout preview style is
+        // stabilized, see if it is possible to simplify some of
+        // the logic around parenthesized roots. (While supporting
+        // both styles it is more difficult to do this.)
+
+        // Count of attribute _values_ which are called or
+        // subscripted, after the leftmost parenthesized
+        // value.
+        //
+        // Examples:
+        // ```
+        // # Count of 3 - notice that .d()
+        // # does not contribute
+        // a().b().c[0]()().d()
+        // # Count of 2 - notice that a()
+        // # does not contribute
+        // (a()).b().c[0].d
+        // ```
+        let mut computed_attribute_values_after_parentheses = 0;
+
+        // Similar to the above, but instead looks at all calls
+        // and subscripts rather than looking only at those on
+        // _attribute values_. So this count can differ from the
+        // above.
+        //
+        // Examples of `computed_attribute_values_after_parentheses` vs
+        // `call_like_count`:
+        //
+        // a().b ---> 1 vs 1
+        // a.b().c --> 1 vs 1
+        // a.b() ---> 0 vs 1
+        let mut call_like_count = 0;
+
+        // Going from right to left, we traverse calls, subscripts,
+        // and attributes until we get to an expression of a different
+        // kind _or_ to a parenthesized expression. This records
+        // the case where we end the traversal at a parenthesized expression.
+        //
+        // In these cases, the inferred semantics of the chain are different.
+        // We interpret this as the user indicating:
+        // "this parenthesized value is the object of interest and we are
+        // doing transformations on it". This increases our confidence that
+        // this should be fluently formatted, and also means we should make
+        // our first break after this value.
+        let mut root_value_parenthesized = false;
         loop {
             match expr {
                 ExprRef::Attribute(ast::ExprAttribute { value, .. }) => {
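To make the classification heuristic described in the `from_expression` doc comment easier to follow, here is a small stand-alone sketch in Python using the standard `ast` module. It mirrors step 2 of the heuristic, counting attribute values that are themselves calls or subscripts, and is only an illustration of the rule as documented, not the formatter's actual code. Because Python's `ast` drops parentheses, the "+1 for a parenthesized root" caveat cannot be modelled here.

```python
import ast


def called_attribute_values(expr: ast.expr) -> int:
    """Count attribute *values* in a call chain that are calls or subscripts,
    walking from right to left, as described in the doc comment above."""
    count = 0
    while True:
        if isinstance(expr, ast.Attribute):
            if isinstance(expr.value, (ast.Call, ast.Subscript)):
                count += 1
            expr = expr.value
        elif isinstance(expr, ast.Call):
            expr = expr.func
        elif isinstance(expr, ast.Subscript):
            expr = expr.value
        else:
            # Anything else is the root of the chain.
            break
    return count


def is_fluent(source: str) -> bool:
    expr = ast.parse(source, mode="eval").body
    # The real heuristic additionally adds 1 for a parenthesized root.
    return called_attribute_values(expr) >= 2


# Matches the examples in the comments above:
assert called_attribute_values(ast.parse("a().b().c[0]()().d()", mode="eval").body) == 3
assert is_fluent("a.b().c().d")  # two called attribute values -> fluent candidate
assert not is_fluent("a.b()")    # zero -> non-fluent
```

Chains such as `a.b().c().d` count two called attribute values and therefore qualify for fluent layout, which matches the `fluent.py` fixture output added later in this comparison.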
@@ -907,10 +1053,10 @@ impl CallChainLayout {
                     // ```
                     if is_expression_parenthesized(value.into(), comment_ranges, source) {
                         // `(a).b`. We preserve these parentheses so don't recurse
-                        attributes_after_parentheses += 1;
+                        root_value_parenthesized = true;
                         break;
                     } else if matches!(value.as_ref(), Expr::Call(_) | Expr::Subscript(_)) {
-                        attributes_after_parentheses += 1;
+                        computed_attribute_values_after_parentheses += 1;
                     }
 
                     expr = ExprRef::from(value.as_ref());
@@ -925,31 +1071,68 @@ impl CallChainLayout {
                 // ```
                 ExprRef::Call(ast::ExprCall { func: inner, .. })
                 | ExprRef::Subscript(ast::ExprSubscript { value: inner, .. }) => {
+                    // We preserve these parentheses so don't recurse
+                    // e.g. (a)[0].x().y().z()
+                    //      ^stop here
+                    if is_expression_parenthesized(inner.into(), comment_ranges, source) {
+                        break;
+                    }
+
+                    // Accumulate the `call_like_count`, but we only
+                    // want to count things like `a()[0]()()` once.
+                    if !inner.is_call_expr() && !inner.is_subscript_expr() {
+                        call_like_count += 1;
+                    }
+
                     expr = ExprRef::from(inner.as_ref());
                 }
                 _ => {
-                    // We to format the following in fluent style:
-                    // ```
-                    // f2 = (a).w().t(1,)
-                    //      ^ expr
-                    // ```
-                    if is_expression_parenthesized(expr, comment_ranges, source) {
-                        attributes_after_parentheses += 1;
-                    }
-
                     break;
                 }
             }
-
-            // We preserve these parentheses so don't recurse
-            if is_expression_parenthesized(expr, comment_ranges, source) {
-                break;
-            }
         }
-        if attributes_after_parentheses < 2 {
+
+        if computed_attribute_values_after_parentheses + u32::from(root_value_parenthesized) < 2 {
             CallChainLayout::NonFluent
         } else {
-            CallChainLayout::Fluent
+            CallChainLayout::Fluent(AttributeState::CallLikePreceding(
+                // We count a parenthesized root value as an extra
+                // call for the purposes of tracking state.
+                //
+                // The reason is that, in this case, we want the first
+                // "special" break to happen right after the root, as
+                // opposed to right after the first called/subscripted
+                // attribute.
+                //
+                // For example:
+                //
+                // ```
+                // (object_of_interest)
+                //     .data.filter()
+                //     .agg()
+                //     .etc()
+                // ```
+                //
+                // instead of (in preview):
+                //
+                // ```
+                // (object_of_interest)
+                //     .data
+                //     .filter()
+                //     .etc()
+                // ```
+                //
+                // For comparison, if we didn't have parentheses around
+                // the root, we want (and get, in preview):
+                //
+                // ```
+                // object_of_interest.data
+                //     .filter()
+                //     .agg()
+                //     .etc()
+                // ```
+                call_like_count + u32::from(root_value_parenthesized),
+            ))
         }
     }
 
@@ -972,9 +1155,13 @@ impl CallChainLayout {
                     CallChainLayout::NonFluent
                 }
             }
-            layout @ (CallChainLayout::Fluent | CallChainLayout::NonFluent) => layout,
+            layout @ (CallChainLayout::Fluent(_) | CallChainLayout::NonFluent) => layout,
         }
     }
+
+    pub(crate) fn is_fluent(self) -> bool {
+        matches!(self, CallChainLayout::Fluent(_))
+    }
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
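The two transition methods above form a small state machine that is easiest to see end to end. The sketch below mirrors it in Python for the chain `a.b()[0]()().c()` used in the `AttributeState` docs; the class names shadow the Rust variants, and the rest is hypothetical scaffolding for illustration rather than formatter code.

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class CallLikePreceding:
    remaining: int  # call-like groups still to the left of the current position


@dataclass(frozen=True)
class FirstCallLike:
    pass


@dataclass(frozen=True)
class BeforeFirstCallLike:
    pass


def decrement_call_like_count(state):
    # Moving left past a call/subscript group into the attribute it belongs to:
    # one fewer call-like group remains on our left.
    if isinstance(state, CallLikePreceding):
        if state.remaining > 1:
            return CallLikePreceding(state.remaining - 1)
        return FirstCallLike()
    return state


def transition_after_attribute(state):
    # Once the first called/subscripted object has been passed, everything
    # further left no longer needs to break.
    if isinstance(state, FirstCallLike):
        return BeforeFirstCallLike()
    return state


# For `a.b()[0]()().c()`: at `c` there is exactly one call-like group to the
# left (`b()[0]()()` counts once), matching the doc comment above.
state = CallLikePreceding(1)
state = decrement_call_like_count(state)   # now at `b`, the first call-like object
assert isinstance(state, FirstCallLike)
state = transition_after_attribute(state)  # now at `a`, left of the first call-like object
assert isinstance(state, BeforeFirstCallLike)
```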
@@ -52,3 +52,17 @@ pub(crate) const fn is_avoid_parens_for_long_as_captures_enabled(
 ) -> bool {
     context.is_preview()
 }
+
+/// Returns `true` if the
+/// [`parenthesize_lambda_bodies`](https://github.com/astral-sh/ruff/pull/21385) preview style is
+/// enabled.
+pub(crate) const fn is_parenthesize_lambda_bodies_enabled(context: &PyFormatContext) -> bool {
+    context.is_preview()
+}
+
+/// Returns `true` if the
+/// [`fluent_layout_split_first_call`](https://github.com/astral-sh/ruff/pull/21369) preview
+/// style is enabled.
+pub(crate) const fn is_fluent_layout_split_first_call_enabled(context: &PyFormatContext) -> bool {
+    context.is_preview()
+}
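For orientation, this is what the `fluent_layout_split_first_call` preview style changes in the emitted Python, based on the `await.py` and `call.py` snapshot diffs further down in this comparison. The stub `Stream` class below is hypothetical and only there so the snippet runs; the two layouts in the comments are the point.

```python
class Stream:
    """Minimal stand-in for a fluent API (hypothetical, for illustration only)."""

    @classmethod
    def from_async(cls, _data):
        return cls()

    def flat_map_async(self):
        return self

    def map(self):
        return self


async_data = []

# Stable fluent layout: the first attribute access stays glued to the root.
test_data = (
    Stream.from_async(async_data)
    .flat_map_async()
    .map()
)

# Preview (`fluent_layout_split_first_call`): the chain also breaks before the
# first call, so the root stands alone on the first line.
test_data = (
    Stream
    .from_async(async_data)
    .flat_map_async()
    .map()
)
```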
@@ -9,6 +9,7 @@ use crate::comments::{
     Comments, LeadingDanglingTrailingComments, SourceComment, trailing_comments,
 };
 use crate::context::{NodeLevel, WithNodeLevel};
+use crate::expression::expr_lambda::ExprLambdaLayout;
 use crate::expression::parentheses::{
     NeedsParentheses, OptionalParentheses, Parentheses, Parenthesize, is_expression_parenthesized,
     optional_parentheses,
@@ -18,6 +19,7 @@ use crate::expression::{
     maybe_parenthesize_expression,
 };
 use crate::other::interpolated_string::InterpolatedStringLayout;
+use crate::preview::is_parenthesize_lambda_bodies_enabled;
 use crate::statement::trailing_semicolon;
 use crate::string::StringLikeExtensions;
 use crate::string::implicit::{
@@ -303,12 +305,7 @@ impl Format<PyFormatContext<'_>> for FormatStatementsLastExpression<'_> {
             && format_implicit_flat.is_none()
             && format_interpolated_string.is_none()
         {
-            return maybe_parenthesize_expression(
-                value,
-                *statement,
-                Parenthesize::IfBreaks,
-            )
-            .fmt(f);
+            return maybe_parenthesize_value(value, *statement).fmt(f);
         }
 
         let comments = f.context().comments().clone();
@@ -586,11 +583,7 @@ impl Format<PyFormatContext<'_>> for FormatStatementsLastExpression<'_> {
                     space(),
                     operator,
                     space(),
-                    maybe_parenthesize_expression(
-                        value,
-                        *statement,
-                        Parenthesize::IfBreaks
-                    )
+                    maybe_parenthesize_value(value, *statement)
                 ]
             );
         }
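`Parenthesize::IfBreaks`, which both call sites above previously spelled out and which `maybe_parenthesize_value` still applies in its non-lambda branch, only adds parentheses when the assigned value has to break across lines. A minimal Python illustration of the two resulting shapes follows; the long variable names are hypothetical and chosen only to force a break at a typical line width.

```python
first_operand_with_a_very_long_descriptive_name = 1
second_operand_with_a_very_long_descriptive_name = 2
third_operand_with_a_very_long_descriptive_name = 3

# The value fits on one line, so no parentheses are added.
short = first_operand_with_a_very_long_descriptive_name + 1

# The value would exceed the line width, so it is parenthesized and broken.
long_value = (
    first_operand_with_a_very_long_descriptive_name
    + second_operand_with_a_very_long_descriptive_name
    + third_operand_with_a_very_long_descriptive_name
)
```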
@@ -1369,3 +1362,32 @@ fn is_attribute_with_parenthesized_value(target: &Expr, context: &PyFormatContex
         _ => false,
     }
 }
+
+/// Like [`maybe_parenthesize_expression`] but with special handling for lambdas in preview.
+fn maybe_parenthesize_value<'a>(
+    expression: &'a Expr,
+    parent: AnyNodeRef<'a>,
+) -> MaybeParenthesizeValue<'a> {
+    MaybeParenthesizeValue { expression, parent }
+}
+
+struct MaybeParenthesizeValue<'a> {
+    expression: &'a Expr,
+    parent: AnyNodeRef<'a>,
+}
+
+impl Format<PyFormatContext<'_>> for MaybeParenthesizeValue<'_> {
+    fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
+        let MaybeParenthesizeValue { expression, parent } = self;
+
+        if is_parenthesize_lambda_bodies_enabled(f.context())
+            && let Expr::Lambda(lambda) = expression
+            && !f.context().comments().has_leading(lambda)
+        {
+            parenthesize_if_expands(&lambda.format().with_options(ExprLambdaLayout::Assignment))
+                .fmt(f)
+        } else {
+            maybe_parenthesize_expression(expression, *parent, Parenthesize::IfBreaks).fmt(f)
+        }
+    }
+}
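In Python output terms, the lambda special case above changes where the parentheses go when an assigned lambda is too long for one line. The sketch below shows both shapes; the direction (the earlier behavior wraps the whole lambda, while the `parenthesize_lambda_bodies` preview style keeps the lambda header on the assignment line and parenthesizes only its body) is inferred from the snapshot diffs in this comparison rather than from running the formatter, so treat it as an illustration.

```python
# Earlier output shape: the whole lambda is wrapped when the assignment breaks.
msg = (
    lambda x: f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
)

# Preview output shape: the lambda header stays on the assignment line and only
# its body is parenthesized.
msg = lambda x: (
    f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
)
```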
|
|
@ -1,4 +1,7 @@
|
||||||
use crate::normalizer::Normalizer;
|
use crate::normalizer::Normalizer;
|
||||||
|
use anyhow::anyhow;
|
||||||
|
use datatest_stable::Utf8Path;
|
||||||
|
use insta::assert_snapshot;
|
||||||
use ruff_db::diagnostic::{
|
use ruff_db::diagnostic::{
|
||||||
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
|
Annotation, Diagnostic, DiagnosticFormat, DiagnosticId, DisplayDiagnosticConfig,
|
||||||
DisplayDiagnostics, DummyFileResolver, Severity, Span, SubDiagnostic, SubDiagnosticSeverity,
|
DisplayDiagnostics, DummyFileResolver, Severity, Span, SubDiagnostic, SubDiagnosticSeverity,
|
||||||
|
|
@ -24,26 +27,27 @@ use std::{fmt, fs};
|
||||||
|
|
||||||
mod normalizer;
|
mod normalizer;
|
||||||
|
|
||||||
#[test]
|
#[expect(clippy::needless_pass_by_value)]
|
||||||
fn black_compatibility() {
|
fn black_compatibility(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
|
||||||
let test_file = |input_path: &Path| {
|
let test_name = input_path
|
||||||
let content = fs::read_to_string(input_path).unwrap();
|
.strip_prefix("./resources/test/fixtures/black")
|
||||||
|
.unwrap_or(input_path)
|
||||||
|
.as_str();
|
||||||
|
|
||||||
let options_path = input_path.with_extension("options.json");
|
let options_path = input_path.with_extension("options.json");
|
||||||
|
|
||||||
let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
|
let options: PyFormatOptions = if let Ok(options_file) = fs::File::open(&options_path) {
|
||||||
let reader = BufReader::new(options_file);
|
let reader = BufReader::new(options_file);
|
||||||
serde_json::from_reader(reader).unwrap_or_else(|_| {
|
serde_json::from_reader(reader).map_err(|err| {
|
||||||
panic!("Expected option file {options_path:?} to be a valid Json file")
|
anyhow!("Expected option file {options_path:?} to be a valid Json file: {err}")
|
||||||
})
|
})?
|
||||||
} else {
|
} else {
|
||||||
PyFormatOptions::from_extension(input_path)
|
PyFormatOptions::from_extension(input_path.as_std_path())
|
||||||
};
|
};
|
||||||
|
|
||||||
let first_line = content.lines().next().unwrap_or_default();
|
let first_line = content.lines().next().unwrap_or_default();
|
||||||
let formatted_code = if first_line.starts_with("# flags:")
|
let formatted_code =
|
||||||
&& first_line.contains("--line-ranges=")
|
if first_line.starts_with("# flags:") && first_line.contains("--line-ranges=") {
|
||||||
{
|
|
||||||
let line_index = LineIndex::from_source_text(&content);
|
let line_index = LineIndex::from_source_text(&content);
|
||||||
|
|
||||||
let ranges = first_line
|
let ranges = first_line
|
||||||
|
|
@ -69,13 +73,9 @@ fn black_compatibility() {
|
||||||
let mut formatted_code = content.clone();
|
let mut formatted_code = content.clone();
|
||||||
|
|
||||||
for range in ranges {
|
for range in ranges {
|
||||||
let formatted =
|
let formatted = format_range(&content, range, options.clone()).map_err(|err| {
|
||||||
format_range(&content, range, options.clone()).unwrap_or_else(|err| {
|
anyhow!("Range-formatting to succeed but encountered error {err}")
|
||||||
panic!(
|
})?;
|
||||||
"Range-formatting of {} to succeed but encountered error {err}",
|
|
||||||
input_path.display()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
let range = formatted.source_range();
|
let range = formatted.source_range();
|
||||||
|
|
||||||
|
|
@ -86,12 +86,8 @@ fn black_compatibility() {
|
||||||
|
|
||||||
formatted_code
|
formatted_code
|
||||||
} else {
|
} else {
|
||||||
let printed = format_module_source(&content, options.clone()).unwrap_or_else(|err| {
|
let printed = format_module_source(&content, options.clone())
|
||||||
panic!(
|
.map_err(|err| anyhow!("Formatting to succeed but encountered error {err}"))?;
|
||||||
"Formatting of {} to succeed but encountered error {err}",
|
|
||||||
input_path.display()
|
|
||||||
)
|
|
||||||
});
|
|
||||||
|
|
||||||
let formatted_code = printed.into_code();
|
let formatted_code = printed.into_code();
|
||||||
|
|
||||||
|
|
@ -100,191 +96,133 @@ fn black_compatibility() {
|
||||||
formatted_code
|
formatted_code
|
||||||
};
|
};
|
||||||
|
|
||||||
let extension = input_path
|
let extension = input_path
|
||||||
.extension()
|
.extension()
|
||||||
.expect("Test file to have py or pyi extension")
|
.expect("Test file to have py or pyi extension");
|
||||||
.to_string_lossy();
|
let expected_path = input_path.with_extension(format!("{extension}.expect"));
|
||||||
let expected_path = input_path.with_extension(format!("{extension}.expect"));
|
let expected_output = fs::read_to_string(&expected_path)
|
||||||
let expected_output = fs::read_to_string(&expected_path)
|
.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
|
||||||
.unwrap_or_else(|_| panic!("Expected Black output file '{expected_path:?}' to exist"));
|
|
||||||
|
|
||||||
let unsupported_syntax_errors =
|
let unsupported_syntax_errors =
|
||||||
ensure_unchanged_ast(&content, &formatted_code, &options, input_path);
|
ensure_unchanged_ast(&content, &formatted_code, &options, input_path);
|
||||||
|
|
||||||
if formatted_code == expected_output {
|
// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
|
||||||
// Black and Ruff formatting matches. Delete any existing snapshot files because the Black output
|
// already perfectly captures the expected output.
|
||||||
// already perfectly captures the expected output.
|
// The following code mimics insta's logic generating the snapshot name for a test.
|
||||||
// The following code mimics insta's logic generating the snapshot name for a test.
|
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
|
||||||
let workspace_path = std::env::var("CARGO_MANIFEST_DIR").unwrap();
|
|
||||||
|
|
||||||
let mut components = input_path.components().rev();
|
let full_snapshot_name = format!("black_compatibility@{test_name}.snap",);
|
||||||
let file_name = components.next().unwrap();
|
|
||||||
let test_suite = components.next().unwrap();
|
|
||||||
|
|
||||||
let snapshot_name = format!(
|
let snapshot_path = Path::new(&workspace_path)
|
||||||
"black_compatibility@{}__{}.snap",
|
.join("tests/snapshots")
|
||||||
test_suite.as_os_str().to_string_lossy(),
|
.join(full_snapshot_name);
|
||||||
file_name.as_os_str().to_string_lossy()
|
|
||||||
);
|
|
||||||
|
|
||||||
let snapshot_path = Path::new(&workspace_path)
|
if formatted_code == expected_output {
|
||||||
.join("tests/snapshots")
|
if snapshot_path.exists() && snapshot_path.is_file() {
|
||||||
.join(snapshot_name);
|
// SAFETY: This is a convenience feature. That's why we don't want to abort
|
||||||
if snapshot_path.exists() && snapshot_path.is_file() {
|
// when deleting a no longer needed snapshot fails.
|
||||||
// SAFETY: This is a convenience feature. That's why we don't want to abort
|
fs::remove_file(&snapshot_path).ok();
|
||||||
// when deleting a no longer needed snapshot fails.
|
|
||||||
fs::remove_file(&snapshot_path).ok();
|
|
||||||
}
|
|
||||||
|
|
||||||
let new_snapshot_path = snapshot_path.with_extension("snap.new");
|
|
||||||
if new_snapshot_path.exists() && new_snapshot_path.is_file() {
|
|
||||||
// SAFETY: This is a convenience feature. That's why we don't want to abort
|
|
||||||
// when deleting a no longer needed snapshot fails.
|
|
||||||
fs::remove_file(&new_snapshot_path).ok();
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// Black and Ruff have different formatting. Write out a snapshot that covers the differences
|
|
||||||
// today.
|
|
||||||
let mut snapshot = String::new();
|
|
||||||
write!(snapshot, "{}", Header::new("Input")).unwrap();
|
|
||||||
write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();
|
|
||||||
|
|
||||||
write!(snapshot, "{}", Header::new("Black Differences")).unwrap();
|
|
||||||
|
|
||||||
let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
|
|
||||||
.unified_diff()
|
|
||||||
.header("Black", "Ruff")
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();
|
|
||||||
|
|
||||||
write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
|
|
||||||
write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();
|
|
||||||
|
|
||||||
write!(snapshot, "{}", Header::new("Black Output")).unwrap();
|
|
||||||
write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();
|
|
||||||
|
|
||||||
if !unsupported_syntax_errors.is_empty() {
|
|
||||||
write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
|
|
||||||
writeln!(
|
|
||||||
snapshot,
|
|
||||||
"{}",
|
|
||||||
DisplayDiagnostics::new(
|
|
||||||
&DummyFileResolver,
|
|
||||||
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
|
||||||
&unsupported_syntax_errors
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
insta::with_settings!({
|
|
||||||
omit_expression => true,
|
|
||||||
input_file => input_path,
|
|
||||||
prepend_module_to_snapshot => false,
|
|
||||||
}, {
|
|
||||||
insta::assert_snapshot!(snapshot);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
insta::glob!(
|
let new_snapshot_path = snapshot_path.with_extension("snap.new");
|
||||||
"../resources",
|
if new_snapshot_path.exists() && new_snapshot_path.is_file() {
|
||||||
"test/fixtures/black/**/*.{py,pyi}",
|
// SAFETY: This is a convenience feature. That's why we don't want to abort
|
||||||
test_file
|
// when deleting a no longer needed snapshot fails.
|
||||||
);
|
fs::remove_file(&new_snapshot_path).ok();
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Black and Ruff have different formatting. Write out a snapshot that covers the differences
|
||||||
|
// today.
|
||||||
|
let mut snapshot = String::new();
|
||||||
|
write!(snapshot, "{}", Header::new("Input")).unwrap();
|
||||||
|
write!(snapshot, "{}", CodeFrame::new("python", &content)).unwrap();
|
||||||
|
|
||||||
|
write!(snapshot, "{}", Header::new("Black Differences")).unwrap();
|
||||||
|
|
||||||
|
let diff = TextDiff::from_lines(expected_output.as_str(), &formatted_code)
|
||||||
|
.unified_diff()
|
||||||
|
.header("Black", "Ruff")
|
||||||
|
.to_string();
|
||||||
|
|
||||||
|
write!(snapshot, "{}", CodeFrame::new("diff", &diff)).unwrap();
|
||||||
|
|
||||||
|
write!(snapshot, "{}", Header::new("Ruff Output")).unwrap();
|
||||||
|
write!(snapshot, "{}", CodeFrame::new("python", &formatted_code)).unwrap();
|
||||||
|
|
||||||
|
write!(snapshot, "{}", Header::new("Black Output")).unwrap();
|
||||||
|
write!(snapshot, "{}", CodeFrame::new("python", &expected_output)).unwrap();
|
||||||
|
|
||||||
|
if !unsupported_syntax_errors.is_empty() {
|
||||||
|
write!(snapshot, "{}", Header::new("New Unsupported Syntax Errors")).unwrap();
|
||||||
|
writeln!(
|
||||||
|
snapshot,
|
||||||
|
"{}",
|
||||||
|
DisplayDiagnostics::new(
|
||||||
|
&DummyFileResolver,
|
||||||
|
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
||||||
|
&unsupported_syntax_errors
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut settings = insta::Settings::clone_current();
|
||||||
|
settings.set_omit_expression(true);
|
||||||
|
settings.set_input_file(input_path);
|
||||||
|
settings.set_prepend_module_to_snapshot(false);
|
||||||
|
settings.set_snapshot_suffix(test_name);
|
||||||
|
let _settings = settings.bind_to_scope();
|
||||||
|
|
||||||
|
assert_snapshot!(snapshot);
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[expect(clippy::needless_pass_by_value)]
|
||||||
fn format() {
|
fn format(input_path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
|
||||||
let test_file = |input_path: &Path| {
|
let test_name = input_path
|
||||||
let content = fs::read_to_string(input_path).unwrap();
|
.strip_prefix("./resources/test/fixtures/ruff")
|
||||||
|
.unwrap_or(input_path)
|
||||||
|
.as_str();
|
||||||
|
|
||||||
let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
|
let mut snapshot = format!("## Input\n{}", CodeFrame::new("python", &content));
|
||||||
let options_path = input_path.with_extension("options.json");
|
let options_path = input_path.with_extension("options.json");
|
||||||
|
|
||||||
if let Ok(options_file) = fs::File::open(&options_path) {
|
if let Ok(options_file) = fs::File::open(&options_path) {
|
||||||
let reader = BufReader::new(options_file);
|
let reader = BufReader::new(options_file);
|
||||||
let options: Vec<PyFormatOptions> =
|
let options: Vec<PyFormatOptions> = serde_json::from_reader(reader).map_err(|_| {
|
||||||
serde_json::from_reader(reader).unwrap_or_else(|_| {
|
anyhow!("Expected option file {options_path:?} to be a valid Json file")
|
||||||
panic!("Expected option file {options_path:?} to be a valid Json file")
|
})?;
|
||||||
});
|
|
||||||
|
|
||||||
writeln!(snapshot, "## Outputs").unwrap();
|
writeln!(snapshot, "## Outputs").unwrap();
|
||||||
|
|
||||||
for (i, options) in options.into_iter().enumerate() {
|
for (i, options) in options.into_iter().enumerate() {
|
||||||
let (formatted_code, unsupported_syntax_errors) =
|
|
||||||
format_file(&content, &options, input_path);
|
|
||||||
|
|
||||||
writeln!(
|
|
||||||
snapshot,
|
|
||||||
"### Output {}\n{}{}",
|
|
||||||
i + 1,
|
|
||||||
CodeFrame::new("", &DisplayPyOptions(&options)),
|
|
||||||
CodeFrame::new("python", &formatted_code)
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
if options.preview().is_enabled() {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
// We want to capture the differences in the preview style in our fixtures
|
|
||||||
let options_preview = options.with_preview(PreviewMode::Enabled);
|
|
||||||
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
|
|
||||||
|
|
||||||
if formatted_code != formatted_preview {
|
|
||||||
// Having both snapshots makes it hard to see the difference, so we're keeping only
|
|
||||||
// diff.
|
|
||||||
writeln!(
|
|
||||||
snapshot,
|
|
||||||
"#### Preview changes\n{}",
|
|
||||||
CodeFrame::new(
|
|
||||||
"diff",
|
|
||||||
TextDiff::from_lines(&formatted_code, &formatted_preview)
|
|
||||||
.unified_diff()
|
|
||||||
.header("Stable", "Preview")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
if !unsupported_syntax_errors.is_empty() {
|
|
||||||
writeln!(
|
|
||||||
snapshot,
|
|
||||||
"### Unsupported Syntax Errors\n{}",
|
|
||||||
DisplayDiagnostics::new(
|
|
||||||
&DummyFileResolver,
|
|
||||||
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
|
||||||
&unsupported_syntax_errors
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// We want to capture the differences in the preview style in our fixtures
|
|
||||||
let options = PyFormatOptions::from_extension(input_path);
|
|
||||||
let (formatted_code, unsupported_syntax_errors) =
|
let (formatted_code, unsupported_syntax_errors) =
|
||||||
format_file(&content, &options, input_path);
|
format_file(&content, &options, input_path);
|
||||||
|
|
||||||
|
writeln!(
|
||||||
|
snapshot,
|
||||||
|
"### Output {}\n{}{}",
|
||||||
|
i + 1,
|
||||||
|
CodeFrame::new("", &DisplayPyOptions(&options)),
|
||||||
|
CodeFrame::new("python", &formatted_code)
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
if options.preview().is_enabled() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We want to capture the differences in the preview style in our fixtures
|
||||||
let options_preview = options.with_preview(PreviewMode::Enabled);
|
let options_preview = options.with_preview(PreviewMode::Enabled);
|
||||||
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
|
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
|
||||||
|
|
||||||
if formatted_code == formatted_preview {
|
if formatted_code != formatted_preview {
|
||||||
writeln!(
|
|
||||||
snapshot,
|
|
||||||
"## Output\n{}",
|
|
||||||
CodeFrame::new("python", &formatted_code)
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
} else {
|
|
||||||
// Having both snapshots makes it hard to see the difference, so we're keeping only
|
// Having both snapshots makes it hard to see the difference, so we're keeping only
|
||||||
// diff.
|
// diff.
|
||||||
writeln!(
|
writeln!(
|
||||||
snapshot,
|
snapshot,
|
||||||
"## Output\n{}\n## Preview changes\n{}",
|
"#### Preview changes\n{}",
|
||||||
CodeFrame::new("python", &formatted_code),
|
|
||||||
CodeFrame::new(
|
CodeFrame::new(
|
||||||
"diff",
|
"diff",
|
||||||
TextDiff::from_lines(&formatted_code, &formatted_preview)
|
TextDiff::from_lines(&formatted_code, &formatted_preview)
|
||||||
|
|
@ -298,7 +236,7 @@ fn format() {
|
||||||
if !unsupported_syntax_errors.is_empty() {
|
if !unsupported_syntax_errors.is_empty() {
|
||||||
writeln!(
|
writeln!(
|
||||||
snapshot,
|
snapshot,
|
||||||
"## Unsupported Syntax Errors\n{}",
|
"### Unsupported Syntax Errors\n{}",
|
||||||
DisplayDiagnostics::new(
|
DisplayDiagnostics::new(
|
||||||
&DummyFileResolver,
|
&DummyFileResolver,
|
||||||
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
||||||
|
|
@ -308,27 +246,74 @@ fn format() {
|
||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
|
// We want to capture the differences in the preview style in our fixtures
|
||||||
|
let options = PyFormatOptions::from_extension(input_path.as_std_path());
|
||||||
|
let (formatted_code, unsupported_syntax_errors) =
|
||||||
|
format_file(&content, &options, input_path);
|
||||||
|
|
||||||
insta::with_settings!({
|
let options_preview = options.with_preview(PreviewMode::Enabled);
|
||||||
omit_expression => true,
|
let (formatted_preview, _) = format_file(&content, &options_preview, input_path);
|
||||||
input_file => input_path,
|
|
||||||
prepend_module_to_snapshot => false,
|
|
||||||
}, {
|
|
||||||
insta::assert_snapshot!(snapshot);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
insta::glob!(
|
if formatted_code == formatted_preview {
|
||||||
"../resources",
|
writeln!(
|
||||||
"test/fixtures/ruff/**/*.{py,pyi}",
|
snapshot,
|
||||||
test_file
|
"## Output\n{}",
|
||||||
);
|
CodeFrame::new("python", &formatted_code)
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
} else {
|
||||||
|
// Having both snapshots makes it hard to see the difference, so we're keeping only
|
||||||
|
// diff.
|
||||||
|
writeln!(
|
||||||
|
snapshot,
|
||||||
|
"## Output\n{}\n## Preview changes\n{}",
|
||||||
|
CodeFrame::new("python", &formatted_code),
|
||||||
|
CodeFrame::new(
|
||||||
|
"diff",
|
||||||
|
TextDiff::from_lines(&formatted_code, &formatted_preview)
|
||||||
|
.unified_diff()
|
||||||
|
.header("Stable", "Preview")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
if !unsupported_syntax_errors.is_empty() {
|
||||||
|
writeln!(
|
||||||
|
snapshot,
|
||||||
|
"## Unsupported Syntax Errors\n{}",
|
||||||
|
DisplayDiagnostics::new(
|
||||||
|
&DummyFileResolver,
|
||||||
|
&DisplayDiagnosticConfig::default().format(DiagnosticFormat::Full),
|
||||||
|
&unsupported_syntax_errors
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut settings = insta::Settings::clone_current();
|
||||||
|
settings.set_omit_expression(true);
|
||||||
|
settings.set_input_file(input_path);
|
||||||
|
settings.set_prepend_module_to_snapshot(false);
|
||||||
|
settings.set_snapshot_suffix(test_name);
|
||||||
|
let _settings = settings.bind_to_scope();
|
||||||
|
|
||||||
|
assert_snapshot!(snapshot);
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
datatest_stable::harness! {
|
||||||
|
{ test = black_compatibility, root = "./resources/test/fixtures/black", pattern = r".+\.pyi?$" },
|
||||||
|
{ test = format, root="./resources/test/fixtures/ruff", pattern = r".+\.pyi?$" }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn format_file(
|
fn format_file(
|
||||||
source: &str,
|
source: &str,
|
||||||
options: &PyFormatOptions,
|
options: &PyFormatOptions,
|
||||||
input_path: &Path,
|
input_path: &Utf8Path,
|
||||||
) -> (String, Vec<Diagnostic>) {
|
) -> (String, Vec<Diagnostic>) {
|
||||||
let (unformatted, formatted_code) = if source.contains("<RANGE_START>") {
|
let (unformatted, formatted_code) = if source.contains("<RANGE_START>") {
|
||||||
let mut content = source.to_string();
|
let mut content = source.to_string();
|
||||||
|
|
@ -363,8 +348,7 @@ fn format_file(
|
||||||
let formatted =
|
let formatted =
|
||||||
format_range(&format_input, range, options.clone()).unwrap_or_else(|err| {
|
format_range(&format_input, range, options.clone()).unwrap_or_else(|err| {
|
||||||
panic!(
|
panic!(
|
||||||
"Range-formatting of {} to succeed but encountered error {err}",
|
"Range-formatting of {input_path} to succeed but encountered error {err}",
|
||||||
input_path.display()
|
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -377,10 +361,7 @@ fn format_file(
|
||||||
(Cow::Owned(without_markers), content)
|
(Cow::Owned(without_markers), content)
|
||||||
} else {
|
} else {
|
||||||
let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
|
let printed = format_module_source(source, options.clone()).unwrap_or_else(|err| {
|
||||||
panic!(
|
panic!("Formatting `{input_path} was expected to succeed but it failed: {err}",)
|
||||||
"Formatting `{input_path} was expected to succeed but it failed: {err}",
|
|
||||||
input_path = input_path.display()
|
|
||||||
)
|
|
||||||
});
|
});
|
||||||
let formatted_code = printed.into_code();
|
let formatted_code = printed.into_code();
|
||||||
|
|
||||||
|
|
@ -399,22 +380,20 @@ fn format_file(
|
||||||
fn ensure_stability_when_formatting_twice(
|
fn ensure_stability_when_formatting_twice(
|
||||||
formatted_code: &str,
|
formatted_code: &str,
|
||||||
options: &PyFormatOptions,
|
options: &PyFormatOptions,
|
||||||
input_path: &Path,
|
input_path: &Utf8Path,
|
||||||
) {
|
) {
|
||||||
let reformatted = match format_module_source(formatted_code, options.clone()) {
|
let reformatted = match format_module_source(formatted_code, options.clone()) {
|
||||||
Ok(reformatted) => reformatted,
|
Ok(reformatted) => reformatted,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let mut diag = Diagnostic::from(&err);
|
let mut diag = Diagnostic::from(&err);
|
||||||
if let Some(range) = err.range() {
|
if let Some(range) = err.range() {
|
||||||
let file =
|
let file = SourceFileBuilder::new(input_path.as_str(), formatted_code).finish();
|
||||||
SourceFileBuilder::new(input_path.to_string_lossy(), formatted_code).finish();
|
|
||||||
let span = Span::from(file).with_range(range);
|
let span = Span::from(file).with_range(range);
|
||||||
diag.annotate(Annotation::primary(span));
|
diag.annotate(Annotation::primary(span));
|
||||||
}
|
}
|
||||||
panic!(
|
panic!(
|
||||||
"Expected formatted code of {} to be valid syntax: {err}:\
|
"Expected formatted code of {input_path} to be valid syntax: {err}:\
|
||||||
\n---\n{formatted_code}---\n{}",
|
\n---\n{formatted_code}---\n{}",
|
||||||
input_path.display(),
|
|
||||||
diag.display(&DummyFileResolver, &DisplayDiagnosticConfig::default()),
|
diag.display(&DummyFileResolver, &DisplayDiagnosticConfig::default()),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
@ -440,7 +419,6 @@ Formatted once:
|
||||||
Formatted twice:
|
Formatted twice:
|
||||||
---
|
---
|
||||||
{reformatted}---"#,
|
{reformatted}---"#,
|
||||||
input_path = input_path.display(),
|
|
||||||
options = &DisplayPyOptions(options),
|
options = &DisplayPyOptions(options),
|
||||||
reformatted = reformatted.as_code(),
|
reformatted = reformatted.as_code(),
|
||||||
);
|
);
|
||||||
|
|
@ -467,7 +445,7 @@ fn ensure_unchanged_ast(
|
||||||
unformatted_code: &str,
|
unformatted_code: &str,
|
||||||
formatted_code: &str,
|
formatted_code: &str,
|
||||||
options: &PyFormatOptions,
|
options: &PyFormatOptions,
|
||||||
input_path: &Path,
|
input_path: &Utf8Path,
|
||||||
) -> Vec<Diagnostic> {
|
) -> Vec<Diagnostic> {
|
||||||
let source_type = options.source_type();
|
let source_type = options.source_type();
|
||||||
|
|
||||||
|
|
@ -499,11 +477,7 @@ fn ensure_unchanged_ast(
|
||||||
formatted_unsupported_syntax_errors
|
formatted_unsupported_syntax_errors
|
||||||
.retain(|fingerprint, _| !unformatted_unsupported_syntax_errors.contains_key(fingerprint));
|
.retain(|fingerprint, _| !unformatted_unsupported_syntax_errors.contains_key(fingerprint));
|
||||||
|
|
||||||
let file = SourceFileBuilder::new(
|
let file = SourceFileBuilder::new(input_path.file_name().unwrap(), formatted_code).finish();
|
||||||
input_path.file_name().unwrap().to_string_lossy(),
|
|
||||||
formatted_code,
|
|
||||||
)
|
|
||||||
.finish();
|
|
||||||
let diagnostics = formatted_unsupported_syntax_errors
|
let diagnostics = formatted_unsupported_syntax_errors
|
||||||
.values()
|
.values()
|
||||||
.map(|error| {
|
.map(|error| {
|
||||||
|
|
@ -533,11 +507,10 @@ fn ensure_unchanged_ast(
|
||||||
.header("Unformatted", "Formatted")
|
.header("Unformatted", "Formatted")
|
||||||
.to_string();
|
.to_string();
|
||||||
panic!(
|
panic!(
|
||||||
r#"Reformatting the unformatted code of {} resulted in AST changes.
|
r#"Reformatting the unformatted code of {input_path} resulted in AST changes.
|
||||||
---
|
---
|
||||||
{diff}
|
{diff}
|
||||||
"#,
|
"#,
|
||||||
input_path.display(),
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -192,7 +192,7 @@ class Random:
|
||||||
}
|
}
|
||||||
x = {
|
x = {
|
||||||
"foobar": (123) + 456,
|
"foobar": (123) + 456,
|
||||||
@@ -97,24 +94,20 @@
|
@@ -97,24 +94,21 @@
|
||||||
|
|
||||||
|
|
||||||
my_dict = {
|
my_dict = {
|
||||||
|
|
@ -221,13 +221,14 @@ class Random:
|
||||||
- .second_call()
|
- .second_call()
|
||||||
- .third_call(some_args="some value")
|
- .third_call(some_args="some value")
|
||||||
- )
|
- )
|
||||||
+ "a key in my dict": MyClass.some_attribute.first_call()
|
+ "a key in my dict": MyClass.some_attribute
|
||||||
|
+ .first_call()
|
||||||
+ .second_call()
|
+ .second_call()
|
||||||
+ .third_call(some_args="some value")
|
+ .third_call(some_args="some value")
|
||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
@@ -139,17 +132,17 @@
|
@@ -139,17 +133,17 @@
|
||||||
|
|
||||||
class Random:
|
class Random:
|
||||||
def func():
|
def func():
|
||||||
|
|
@ -363,7 +364,8 @@ my_dict = {
|
||||||
/ 100000.0
|
/ 100000.0
|
||||||
}
|
}
|
||||||
my_dict = {
|
my_dict = {
|
||||||
"a key in my dict": MyClass.some_attribute.first_call()
|
"a key in my dict": MyClass.some_attribute
|
||||||
|
.first_call()
|
||||||
.second_call()
|
.second_call()
|
||||||
.third_call(some_args="some value")
|
.third_call(some_args="some value")
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -906,11 +906,10 @@ x = {
|
||||||
-)
|
-)
|
||||||
+string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
|
+string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
|
||||||
|
|
||||||
-msg = lambda x: (
|
msg = lambda x: (
|
||||||
- f"this is a very very very very long lambda value {x} that doesn't fit on a"
|
- f"this is a very very very very long lambda value {x} that doesn't fit on a"
|
||||||
- " single line"
|
- " single line"
|
||||||
+msg = (
|
+ f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
|
||||||
+ lambda x: f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
dict_with_lambda_values = {
|
dict_with_lambda_values = {
|
||||||
|
|
@ -1403,8 +1402,8 @@ string_with_escaped_nameescape = "..............................................
|
||||||
|
|
||||||
string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
|
string_with_escaped_nameescape = "........................................................................... \\N{LAO KO LA}"
|
||||||
|
|
||||||
msg = (
|
msg = lambda x: (
|
||||||
lambda x: f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
|
f"this is a very very very very long lambda value {x} that doesn't fit on a single line"
|
||||||
)
|
)
|
||||||
|
|
||||||
dict_with_lambda_values = {
|
dict_with_lambda_values = {
|
||||||
|
|
|
||||||
|
|
@ -375,7 +375,7 @@ a = b if """
|
||||||
# Another use case
|
# Another use case
|
||||||
data = yaml.load("""\
|
data = yaml.load("""\
|
||||||
a: 1
|
a: 1
|
||||||
@@ -77,19 +106,23 @@
|
@@ -77,10 +106,12 @@
|
||||||
b: 2
|
b: 2
|
||||||
""",
|
""",
|
||||||
)
|
)
|
||||||
|
|
@ -390,19 +390,7 @@ a = b if """
|
||||||
|
|
||||||
MULTILINE = """
|
MULTILINE = """
|
||||||
foo
|
foo
|
||||||
""".replace("\n", "")
|
@@ -156,16 +187,24 @@
|
||||||
-generated_readme = lambda project_name: """
|
|
||||||
+generated_readme = (
|
|
||||||
+ lambda project_name: """
|
|
||||||
{}
|
|
||||||
|
|
||||||
<Add content here!>
|
|
||||||
""".strip().format(project_name)
|
|
||||||
+)
|
|
||||||
parser.usage += """
|
|
||||||
Custom extra help summary.
|
|
||||||
|
|
||||||
@@ -156,16 +189,24 @@
|
|
||||||
10 LOAD_CONST 0 (None)
|
10 LOAD_CONST 0 (None)
|
||||||
12 RETURN_VALUE
|
12 RETURN_VALUE
|
||||||
""" % (_C.__init__.__code__.co_firstlineno + 1,)
|
""" % (_C.__init__.__code__.co_firstlineno + 1,)
|
||||||
|
|
@ -433,7 +421,7 @@ a = b if """
|
||||||
[
|
[
|
||||||
"""cow
|
"""cow
|
||||||
moos""",
|
moos""",
|
||||||
@@ -206,7 +247,9 @@
|
@@ -206,7 +245,9 @@
|
||||||
"c"
|
"c"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
@ -444,7 +432,7 @@ a = b if """
|
||||||
|
|
||||||
assert some_var == expected_result, """
|
assert some_var == expected_result, """
|
||||||
test
|
test
|
||||||
@@ -224,10 +267,8 @@
|
@@ -224,10 +265,8 @@
|
||||||
"""Sxxxxxxx xxxxxxxx, xxxxxxx xx xxxxxxxxx
|
"""Sxxxxxxx xxxxxxxx, xxxxxxx xx xxxxxxxxx
|
||||||
xxxxxxxxxxxxx xxxxxxx xxxxxxxxx xxx-xxxxxxxxxx xxxxxx xx xxx-xxxxxx"""
|
xxxxxxxxxxxxx xxxxxxx xxxxxxxxx xxx-xxxxxxxxxx xxxxxx xx xxx-xxxxxx"""
|
||||||
),
|
),
|
||||||
|
|
@ -457,7 +445,7 @@ a = b if """
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -246,14 +287,12 @@
|
@@ -246,14 +285,12 @@
|
||||||
a
|
a
|
||||||
a"""
|
a"""
|
||||||
),
|
),
|
||||||
|
|
@ -597,13 +585,11 @@ data = yaml.load(
|
||||||
MULTILINE = """
|
MULTILINE = """
|
||||||
foo
|
foo
|
||||||
""".replace("\n", "")
|
""".replace("\n", "")
|
||||||
generated_readme = (
|
generated_readme = lambda project_name: """
|
||||||
lambda project_name: """
|
|
||||||
{}
|
{}
|
||||||
|
|
||||||
<Add content here!>
|
<Add content here!>
|
||||||
""".strip().format(project_name)
|
""".strip().format(project_name)
|
||||||
)
|
|
||||||
parser.usage += """
|
parser.usage += """
|
||||||
Custom extra help summary.
|
Custom extra help summary.
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/await.py
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/await.py
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
## Input
|
## Input
|
||||||
```python
|
```python
|
||||||
|
|
@ -142,3 +141,20 @@ test_data = await (
|
||||||
.to_list()
|
.to_list()
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Preview changes
|
||||||
|
```diff
|
||||||
|
--- Stable
|
||||||
|
+++ Preview
|
||||||
|
@@ -65,7 +65,8 @@
|
||||||
|
|
||||||
|
# https://github.com/astral-sh/ruff/issues/8644
|
||||||
|
test_data = await (
|
||||||
|
- Stream.from_async(async_data)
|
||||||
|
+ Stream
|
||||||
|
+ .from_async(async_data)
|
||||||
|
.flat_map_async()
|
||||||
|
.map()
|
||||||
|
.filter_async(is_valid_data)
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/call.py
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/call.py
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
## Input
|
## Input
|
||||||
```python
|
```python
|
||||||
|
|
@ -557,3 +556,20 @@ result = (
|
||||||
|
|
||||||
result = (object[complicate_caller])("argument").a["b"].test(argument)
|
result = (object[complicate_caller])("argument").a["b"].test(argument)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Preview changes
|
||||||
|
```diff
|
||||||
|
--- Stable
|
||||||
|
+++ Preview
|
||||||
|
@@ -57,7 +57,8 @@
|
||||||
|
|
||||||
|
# Call chains/fluent interface (https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#call-chains)
|
||||||
|
result = (
|
||||||
|
- session.query(models.Customer.id)
|
||||||
|
+ session
|
||||||
|
+ .query(models.Customer.id)
|
||||||
|
.filter(
|
||||||
|
models.Customer.account_id == 10000,
|
||||||
|
models.Customer.email == "user@example.org",
|
||||||
|
```
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
|
|
@ -1,7 +1,6 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/split_empty_brackets.py
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/expression/split_empty_brackets.py
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
## Input
|
## Input
|
||||||
```python
|
```python
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,163 @@
|
||||||
|
---
|
||||||
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/fluent.py
|
||||||
|
---
|
||||||
|
## Input
|
||||||
|
```python
|
||||||
|
# Fixtures for fluent formatting of call chains
|
||||||
|
# Note that `fluent.options.json` sets line width to 8
|
||||||
|
|
||||||
|
|
||||||
|
x = a.b()
|
||||||
|
|
||||||
|
x = a.b().c()
|
||||||
|
|
||||||
|
x = a.b().c().d
|
||||||
|
|
||||||
|
x = a.b.c.d().e()
|
||||||
|
|
||||||
|
x = a.b.c().d.e().f.g()
|
||||||
|
|
||||||
|
# Consecutive calls/subscripts are grouped together
|
||||||
|
# for the purposes of fluent formatting (though, as 2025.12.15,
|
||||||
|
# there may be a break inside of one of these
|
||||||
|
# calls/subscripts, but that is unrelated to the fluent format.)
|
||||||
|
|
||||||
|
x = a()[0]().b().c()
|
||||||
|
|
||||||
|
x = a.b()[0].c.d()[1]().e
|
||||||
|
|
||||||
|
# Parentheses affect both where the root of the call
|
||||||
|
# chain is and how many calls we require before applying
|
||||||
|
# fluent formatting (just 1, in the presence of a parenthesized
|
||||||
|
# root, as of 2025.12.15.)
|
||||||
|
|
||||||
|
x = (a).b()
|
||||||
|
|
||||||
|
x = (a()).b()
|
||||||
|
|
||||||
|
x = (a.b()).d.e()
|
||||||
|
|
||||||
|
x = (a.b().d).e()
|
||||||
|
```
|
||||||
|
|
||||||
|
## Outputs
|
||||||
|
### Output 1
|
||||||
|
```
|
||||||
|
indent-style = space
|
||||||
|
line-width = 8
|
||||||
|
indent-width = 4
|
||||||
|
quote-style = Double
|
||||||
|
line-ending = LineFeed
|
||||||
|
magic-trailing-comma = Respect
|
||||||
|
docstring-code = Disabled
|
||||||
|
docstring-code-line-width = "dynamic"
|
||||||
|
preview = Disabled
|
||||||
|
target_version = 3.10
|
||||||
|
source_type = Python
|
||||||
|
```
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Fixtures for fluent formatting of call chains
|
||||||
|
# Note that `fluent.options.json` sets line width to 8
|
||||||
|
|
||||||
|
|
||||||
|
x = a.b()
|
||||||
|
|
||||||
|
x = a.b().c()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a.b()
|
||||||
|
.c()
|
||||||
|
.d
|
||||||
|
)
|
||||||
|
|
||||||
|
x = a.b.c.d().e()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a.b.c()
|
||||||
|
.d.e()
|
||||||
|
.f.g()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Consecutive calls/subscripts are grouped together
|
||||||
|
# for the purposes of fluent formatting (though, as 2025.12.15,
|
||||||
|
# there may be a break inside of one of these
|
||||||
|
# calls/subscripts, but that is unrelated to the fluent format.)
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a()[
|
||||||
|
0
|
||||||
|
]()
|
||||||
|
.b()
|
||||||
|
.c()
|
||||||
|
)
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a.b()[
|
||||||
|
0
|
||||||
|
]
|
||||||
|
.c.d()[
|
||||||
|
1
|
||||||
|
]()
|
||||||
|
.e
|
||||||
|
)
|
||||||
|
|
||||||
|
# Parentheses affect both where the root of the call
|
||||||
|
# chain is and how many calls we require before applying
|
||||||
|
# fluent formatting (just 1, in the presence of a parenthesized
|
||||||
|
# root, as of 2025.12.15.)
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a
|
||||||
|
).b()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a()
|
||||||
|
).b()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a.b()
|
||||||
|
).d.e()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
a.b().d
|
||||||
|
).e()
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
#### Preview changes
|
||||||
|
```diff
|
||||||
|
--- Stable
|
||||||
|
+++ Preview
|
||||||
|
@@ -7,7 +7,8 @@
|
||||||
|
x = a.b().c()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
- a.b()
|
||||||
|
+ a
|
||||||
|
+ .b()
|
||||||
|
.c()
|
||||||
|
.d
|
||||||
|
)
|
||||||
|
@@ -15,7 +16,8 @@
|
||||||
|
x = a.b.c.d().e()
|
||||||
|
|
||||||
|
x = (
|
||||||
|
- a.b.c()
|
||||||
|
+ a.b
|
||||||
|
+ .c()
|
||||||
|
.d.e()
|
||||||
|
.f.g()
|
||||||
|
)
|
||||||
|
@@ -34,7 +36,8 @@
|
||||||
|
)
|
||||||
|
|
||||||
|
x = (
|
||||||
|
- a.b()[
|
||||||
|
+ a
|
||||||
|
+ .b()[
|
||||||
|
0
|
||||||
|
]
|
||||||
|
.c.d()[
|
||||||
|
```
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/multiline_string_deviations.py
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/multiline_string_deviations.py
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
## Input
|
## Input
|
||||||
```python
|
```python
|
||||||
|
|
@ -106,3 +105,22 @@ generated_readme = (
|
||||||
""".strip().format(project_name)
|
""".strip().format(project_name)
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Preview changes
|
||||||
|
```diff
|
||||||
|
--- Stable
|
||||||
|
+++ Preview
|
||||||
|
@@ -44,10 +44,8 @@
|
||||||
|
# this by changing `Lambda::needs_parentheses` to return `BestFit` but it causes
|
||||||
|
# issues when the lambda has comments.
|
||||||
|
# Let's keep this as a known deviation for now.
|
||||||
|
-generated_readme = (
|
||||||
|
- lambda project_name: """
|
||||||
|
+generated_readme = lambda project_name: """
|
||||||
|
{}
|
||||||
|
|
||||||
|
<Add content here!>
|
||||||
|
""".strip().format(project_name)
|
||||||
|
-)
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,6 @@
|
||||||
---
|
---
|
||||||
source: crates/ruff_python_formatter/tests/fixtures.rs
|
source: crates/ruff_python_formatter/tests/fixtures.rs
|
||||||
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/parentheses/call_chains.py
|
input_file: crates/ruff_python_formatter/resources/test/fixtures/ruff/parentheses/call_chains.py
|
||||||
snapshot_kind: text
|
|
||||||
---
|
---
|
||||||
## Input
|
## Input
|
||||||
```python
|
```python
|
||||||
|
|
@ -223,6 +222,72 @@ max_message_id = (
|
||||||
.baz()
|
.baz()
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# Note in preview we split at `pl` which some
|
||||||
|
# folks may dislike. (Similarly with common
|
||||||
|
# `np` and `pd` invocations).
|
||||||
|
#
|
||||||
|
# This is because we cannot reliably predict,
|
||||||
|
# just from syntax, whether a short identifier
|
||||||
|
# is being used as a 'namespace' or as an 'object'.
|
||||||
|
#
|
||||||
|
# As of 2025.12.15, we do not indent methods in
|
||||||
|
# fluent formatting. If we ever decide to do so,
|
||||||
|
# it may make sense to special case call chain roots
|
||||||
|
# that are shorter than the indent-width (like Prettier does).
|
||||||
|
# This would have the benefit of handling these common
|
||||||
|
# two-letter aliases for libraries.
|
||||||
|
|
||||||
|
|
||||||
|
expr = (
|
||||||
|
pl.scan_parquet("/data/pypi-parquet/*.parquet")
|
||||||
|
.filter(
|
||||||
|
[
|
||||||
|
pl.col("path").str.contains(
|
||||||
|
r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
|
||||||
|
),
|
||||||
|
~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
|
||||||
|
~pl.col("path").str.contains("/site-packages/", literal=True),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
.with_columns(
|
||||||
|
month=pl.col("uploaded_on").dt.truncate("1mo"),
|
||||||
|
ext=pl.col("path")
|
||||||
|
.str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
|
||||||
|
.str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
|
||||||
|
.str.replace_all(pattern="^f.*$", value="Fortran")
|
||||||
|
.str.replace("rs", "Rust", literal=True)
|
||||||
|
.str.replace("go", "Go", literal=True)
|
||||||
|
.str.replace("asm", "Assembly", literal=True)
|
||||||
|
.replace({"": None}),
|
||||||
|
)
|
||||||
|
.group_by(["month", "ext"])
|
||||||
|
.agg(project_count=pl.col("project_name").n_unique())
|
||||||
|
.drop_nulls(["ext"])
|
||||||
|
.sort(["month", "project_count"], descending=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
def indentation_matching_for_loop_in_preview():
|
||||||
|
if make_this:
|
||||||
|
if more_nested_because_line_length:
|
||||||
|
identical_hidden_layer_sizes = all(
|
||||||
|
current_hidden_layer_sizes == first_hidden_layer_sizes
|
||||||
|
for current_hidden_layer_sizes in self.component_config[
|
||||||
|
HIDDEN_LAYERS_SIZES
|
||||||
|
].values().attr
|
||||||
|
)
|
||||||
|
|
||||||
|
def indentation_matching_walrus_in_preview():
|
||||||
|
if make_this:
|
||||||
|
if more_nested_because_line_length:
|
||||||
|
with self.read_ctx(book_type) as cursor:
|
||||||
|
if (entry_count := len(names := cursor.execute(
|
||||||
|
'SELECT name FROM address_book WHERE address=?',
|
||||||
|
(address,),
|
||||||
|
).fetchall().some_attr)) == 0 or len(set(names)) > 1:
|
||||||
|
return
|
||||||
|
|
||||||
|
# behavior with parenthesized roots
|
||||||
|
x = (aaaaaaaaaaaaaaaaaaaaaa).bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc().dddddddddddddddddddddddd().eeeeeeeeeeee
|
||||||
```
|
```
|
||||||
|
|
||||||
## Output
|
## Output
|
||||||
|
|
@@ -466,4 +531,237 @@ max_message_id = (
    .sum()
    .baz()
)


# Note in preview we split at `pl` which some
# folks may dislike. (Similarly with common
# `np` and `pd` invocations).
#
# This is because we cannot reliably predict,
# just from syntax, whether a short identifier
# is being used as a 'namespace' or as an 'object'.
#
# As of 2025.12.15, we do not indent methods in
# fluent formatting. If we ever decide to do so,
# it may make sense to special case call chain roots
# that are shorter than the indent-width (like Prettier does).
# This would have the benefit of handling these common
# two-letter aliases for libraries.


expr = (
    pl.scan_parquet("/data/pypi-parquet/*.parquet")
    .filter(
        [
            pl.col("path").str.contains(
                r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
            ),
            ~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
            ~pl.col("path").str.contains("/site-packages/", literal=True),
        ]
    )
    .with_columns(
        month=pl.col("uploaded_on").dt.truncate("1mo"),
        ext=pl.col("path")
        .str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
        .str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
        .str.replace_all(pattern="^f.*$", value="Fortran")
        .str.replace("rs", "Rust", literal=True)
        .str.replace("go", "Go", literal=True)
        .str.replace("asm", "Assembly", literal=True)
        .replace({"": None}),
    )
    .group_by(["month", "ext"])
    .agg(project_count=pl.col("project_name").n_unique())
    .drop_nulls(["ext"])
    .sort(["month", "project_count"], descending=True)
)


def indentation_matching_for_loop_in_preview():
    if make_this:
        if more_nested_because_line_length:
            identical_hidden_layer_sizes = all(
                current_hidden_layer_sizes == first_hidden_layer_sizes
                for current_hidden_layer_sizes in self.component_config[
                    HIDDEN_LAYERS_SIZES
                ]
                .values()
                .attr
            )


def indentation_matching_walrus_in_preview():
    if make_this:
        if more_nested_because_line_length:
            with self.read_ctx(book_type) as cursor:
                if (
                    entry_count := len(
                        names := cursor.execute(
                            "SELECT name FROM address_book WHERE address=?",
                            (address,),
                        )
                        .fetchall()
                        .some_attr
                    )
                ) == 0 or len(set(names)) > 1:
                    return


# behavior with parenthesized roots
x = (
    (aaaaaaaaaaaaaaaaaaaaaa)
    .bbbbbbbbbbbbbbbbbbb.cccccccccccccccccccccccc()
    .dddddddddddddddddddddddd()
    .eeeeeeeeeeee
)
```


## Preview changes
```diff
--- Stable
+++ Preview
@@ -21,7 +21,8 @@
 )

 raise OsError("") from (
-    Blog.objects.filter(
+    Blog.objects
+    .filter(
         entry__headline__contains="Lennon",
     )
     .filter(
@@ -33,7 +34,8 @@
 )

 raise OsError("sökdjffffsldkfjlhsakfjhalsökafhsöfdahsödfjösaaksjdllllllllllllll") from (
-    Blog.objects.filter(
+    Blog.objects
+    .filter(
         entry__headline__contains="Lennon",
     )
     .filter(
@@ -46,7 +48,8 @@

 # Break only after calls and indexing
 b1 = (
-    session.query(models.Customer.id)
+    session
+    .query(models.Customer.id)
     .filter(
         models.Customer.account_id == account_id, models.Customer.email == email_address
     )
@@ -54,7 +57,8 @@
 )

 b2 = (
-    Blog.objects.filter(
+    Blog.objects
+    .filter(
         entry__headline__contains="Lennon",
     )
     .limit_results[:10]
@@ -70,7 +74,8 @@
     ).filter(
         entry__pub_date__year=2008,
     )
-    + Blog.objects.filter(
+    + Blog.objects
+    .filter(
         entry__headline__contains="McCartney",
     )
     .limit_results[:10]
@@ -89,7 +94,8 @@
 d11 = x.e().e().e()  #
 d12 = x.e().e().e()  #
 d13 = (
-    x.e()  #
+    x
+    .e()  #
     .e()
     .e()
 )
@@ -101,7 +107,8 @@

 # Doesn't fit, fluent style
 d3 = (
-    x.e()  #
+    x
+    .e()  #
     .esadjkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk()
     .esadjkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkkk()
 )
@@ -218,7 +225,8 @@

 (
     (
-        df1_aaaaaaaaaaaa.merge()
+        df1_aaaaaaaaaaaa
+        .merge()
         .groupby(
             1,
         )
@@ -228,7 +236,8 @@

 (
     (
-        df1_aaaaaaaaaaaa.merge()
+        df1_aaaaaaaaaaaa
+        .merge()
         .groupby(
             1,
         )
@@ -255,19 +264,19 @@


 expr = (
-    pl.scan_parquet("/data/pypi-parquet/*.parquet")
-    .filter(
-        [
-            pl.col("path").str.contains(
-                r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
-            ),
-            ~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
-            ~pl.col("path").str.contains("/site-packages/", literal=True),
-        ]
-    )
+    pl
+    .scan_parquet("/data/pypi-parquet/*.parquet")
+    .filter([
+        pl.col("path").str.contains(
+            r"\.(asm|c|cc|cpp|cxx|h|hpp|rs|[Ff][0-9]{0,2}(?:or)?|go)$"
+        ),
+        ~pl.col("path").str.contains(r"(^|/)test(|s|ing)"),
+        ~pl.col("path").str.contains("/site-packages/", literal=True),
+    ])
     .with_columns(
         month=pl.col("uploaded_on").dt.truncate("1mo"),
-        ext=pl.col("path")
+        ext=pl
+        .col("path")
         .str.extract(pattern=r"\.([a-z0-9]+)$", group_index=1)
         .str.replace_all(pattern=r"cxx|cpp|cc|c|hpp|h", value="C/C++")
         .str.replace_all(pattern="^f.*$", value="Fortran")
@@ -288,9 +297,8 @@
         if more_nested_because_line_length:
             identical_hidden_layer_sizes = all(
                 current_hidden_layer_sizes == first_hidden_layer_sizes
-                for current_hidden_layer_sizes in self.component_config[
-                    HIDDEN_LAYERS_SIZES
-                ]
+                for current_hidden_layer_sizes in self
+                .component_config[HIDDEN_LAYERS_SIZES]
                 .values()
                 .attr
             )
@@ -302,7 +310,8 @@
             with self.read_ctx(book_type) as cursor:
                 if (
                     entry_count := len(
-                        names := cursor.execute(
+                        names := cursor
+                        .execute(
                             "SELECT name FROM address_book WHERE address=?",
                             (address,),
                         )
```

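To make the stable/preview contrast above easier to scan, here is a minimal, purely illustrative sketch of the two layouts for a short two-letter alias root (the file name and column are invented; this is not part of the snapshot):

```python
import polars as pl  # assumes polars is available

# Stable: the root stays attached to the first call in the chain.
frame = (
    pl.scan_parquet("data.parquet")
    .filter(pl.col("size") > 0)
)

# Preview: the chain also breaks after the bare `pl` root.
frame = (
    pl
    .scan_parquet("data.parquet")
    .filter(pl.col("size") > 0)
)
```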
@@ -592,11 +592,23 @@ impl FormatString
    fn parse_literal(text: &str) -> Result<(FormatPart, &str), FormatParseError> {
        let mut cur_text = text;
        let mut result_string = String::new();
        let mut pending_escape = false;
        while !cur_text.is_empty() {
            if pending_escape
                && let Some((unicode_string, remaining)) =
                    FormatString::parse_escaped_unicode_string(cur_text)
            {
                result_string.push_str(unicode_string);
                cur_text = remaining;
                pending_escape = false;
                continue;
            }

            match FormatString::parse_literal_single(cur_text) {
                Ok((next_char, remaining)) => {
                    result_string.push(next_char);
                    cur_text = remaining;
                    pending_escape = next_char == '\\' && !pending_escape;
                }
                Err(err) => {
                    return if result_string.is_empty() {

@@ -678,6 +690,13 @@ impl FormatString
        }
        Err(FormatParseError::UnmatchedBracket)
    }

    fn parse_escaped_unicode_string(text: &str) -> Option<(&str, &str)> {
        text.strip_prefix("N{")?.find('}').map(|idx| {
            let end_idx = idx + 3; // 3 for "N{"
            (&text[..end_idx], &text[end_idx..])
        })
    }
}

pub trait FromTemplate<'a>: Sized {

@@ -1020,4 +1039,48 @@ mod tests {
            Err(FormatParseError::InvalidCharacterAfterRightBracket)
        );
    }

    #[test]
    fn test_format_unicode_escape() {
        let expected = Ok(FormatString {
            format_parts: vec![FormatPart::Literal("I am a \\N{snowman}".to_owned())],
        });

        assert_eq!(FormatString::from_str("I am a \\N{snowman}"), expected);
    }

    #[test]
    fn test_format_unicode_escape_with_field() {
        let expected = Ok(FormatString {
            format_parts: vec![
                FormatPart::Literal("I am a \\N{snowman}".to_owned()),
                FormatPart::Field {
                    field_name: "snowman".to_owned(),
                    conversion_spec: None,
                    format_spec: String::new(),
                },
            ],
        });

        assert_eq!(
            FormatString::from_str("I am a \\N{snowman}{snowman}"),
            expected
        );
    }

    #[test]
    fn test_format_multiple_escape_with_field() {
        let expected = Ok(FormatString {
            format_parts: vec![
                FormatPart::Literal("I am a \\\\N".to_owned()),
                FormatPart::Field {
                    field_name: "snowman".to_owned(),
                    conversion_spec: None,
                    format_spec: String::new(),
                },
            ],
        });

        assert_eq!(FormatString::from_str("I am a \\\\N{snowman}"), expected);
    }
}

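For context, the `\N{...}` handling above corresponds to Python source along these lines; a minimal sketch mirroring the snowman strings in the new tests (the example itself is illustrative, not part of the change):

```python
# The braces that belong to a unicode-name escape are not a replacement field:
print("I am a \N{snowman}".format())  # -> I am a ☃

# A real replacement field can still follow the escape:
print("I am a \N{snowman}{mood}".format(mood="!"))  # -> I am a ☃!
```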
@@ -12,6 +12,10 @@ license = { workspace = true }

[lib]

[[test]]
name = "fixtures"
harness = false

[dependencies]
ruff_python_ast = { workspace = true, features = ["get-size"] }
ruff_python_trivia = { workspace = true }

@@ -34,7 +38,8 @@ ruff_python_ast = { workspace = true, features = ["serde"] }
ruff_source_file = { workspace = true }

anyhow = { workspace = true }
insta = { workspace = true, features = ["glob"] }
datatest-stable = { workspace = true }
insta = { workspace = true }
itertools = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }

@@ -272,7 +272,9 @@ impl SemanticSyntaxChecker {

    fn check_annotation<Ctx: SemanticSyntaxContext>(stmt: &ast::Stmt, ctx: &Ctx) {
        match stmt {
            Stmt::AnnAssign(ast::StmtAnnAssign { annotation, .. }) => {
            Stmt::AnnAssign(ast::StmtAnnAssign {
                target, annotation, ..
            }) => {
                if ctx.python_version() > PythonVersion::PY313 {
                    // test_ok valid_annotation_py313
                    // # parse_options: {"target-version": "3.13"}

@@ -297,6 +299,18 @@ impl SemanticSyntaxChecker {
                    };
                    visitor.visit_expr(annotation);
                }
                if let Expr::Name(ast::ExprName { id, .. }) = target.as_ref() {
                    if let Some(global_stmt) = ctx.global(id.as_str()) {
                        let global_start = global_stmt.start();
                        if !ctx.in_module_scope() || target.start() < global_start {
                            Self::add_error(
                                ctx,
                                SemanticSyntaxErrorKind::AnnotatedGlobal(id.to_string()),
                                target.range(),
                            );
                        }
                    }
                }
            }
            Stmt::FunctionDef(ast::StmtFunctionDef {
                type_params,

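The `AnnotatedGlobal` error added above matches the pattern CPython itself rejects; a minimal sketch (the names are illustrative):

```python
counter = 0


def bump():
    global counter
    counter: int = 1  # SyntaxError: annotated name 'counter' can't be global
```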
@@ -1,9 +1,8 @@
use std::cell::RefCell;
use std::cmp::Ordering;
use std::fmt::{Formatter, Write};
use std::fs;
use std::path::Path;

use datatest_stable::Utf8Path;
use itertools::Itertools;
use ruff_annotate_snippets::{Level, Renderer, Snippet};
use ruff_python_ast::token::{Token, Tokens};

@@ -17,38 +16,49 @@ use ruff_python_parser::{Mode, ParseErrorType, ParseOptions, Parsed, parse_unche
use ruff_source_file::{LineIndex, OneIndexed, SourceCode};
use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

#[test]
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn valid_syntax() {
fn valid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
    insta::glob!("../resources", "valid/**/*.py", test_valid_syntax);
    test_valid_syntax(path, &content, "./resources/valid");
    Ok(())
}

#[test]
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn invalid_syntax() {
fn invalid_syntax(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
    insta::glob!("../resources", "invalid/**/*.py", test_invalid_syntax);
    test_invalid_syntax(path, &content, "./resources/invalid");
    Ok(())
}

#[test]
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_ok() {
fn inline_ok(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
    insta::glob!("../resources/inline", "ok/**/*.py", test_valid_syntax);
    test_valid_syntax(path, &content, "./resources/inline/ok");
    Ok(())
}

#[test]
#[expect(clippy::needless_pass_by_value, clippy::unnecessary_wraps)]
fn inline_err() {
fn inline_err(path: &Utf8Path, content: String) -> datatest_stable::Result<()> {
    insta::glob!("../resources/inline", "err/**/*.py", test_invalid_syntax);
    test_invalid_syntax(path, &content, "./resources/inline/err");
    Ok(())
}

datatest_stable::harness! {
    { test = valid_syntax, root = "./resources/valid", pattern = r"\.pyi?$" },
    { test = inline_ok, root = "./resources/inline/ok", pattern = r"\.pyi?$" },
    { test = invalid_syntax, root = "./resources/invalid", pattern = r"\.pyi?$" },
    { test = inline_err, root="./resources/inline/err", pattern = r"\.pyi?$" }
}

/// Asserts that the parser generates no syntax errors for a valid program.
/// Snapshots the AST.
fn test_valid_syntax(input_path: &Path) {
fn test_valid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
    let source = fs::read_to_string(input_path).expect("Expected test file to exist");
    let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
    let options = extract_options(&source).unwrap_or_else(|| {
    let options = extract_options(source).unwrap_or_else(|| {
        ParseOptions::from(Mode::Module).with_target_version(PythonVersion::latest_preview())
    });
    let parsed = parse_unchecked(&source, options.clone());
    let parsed = parse_unchecked(source, options.clone());

    if parsed.has_syntax_errors() {
        let line_index = LineIndex::from_source_text(&source);
        let line_index = LineIndex::from_source_text(source);
        let source_code = SourceCode::new(&source, &line_index);
        let source_code = SourceCode::new(source, &line_index);

        let mut message = "Expected no syntax errors for a valid program but the parser generated the following errors:\n".to_string();

@@ -81,8 +91,8 @@ fn test_valid_syntax(input_path: &Path) {
        panic!("{input_path:?}: {message}");
    }

    validate_tokens(parsed.tokens(), source.text_len(), input_path);
    validate_tokens(parsed.tokens(), source.text_len());
    validate_ast(&parsed, source.text_len(), input_path);
    validate_ast(&parsed, source.text_len());

    let mut output = String::new();
    writeln!(&mut output, "## AST").unwrap();

@@ -91,7 +101,7 @@ fn test_valid_syntax(input_path: &Path) {
    let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");

    let mut visitor =
        SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
        SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());

    for stmt in parsed.suite() {
        visitor.visit_stmt(stmt);

@@ -102,8 +112,8 @@ fn test_valid_syntax(input_path: &Path) {
    if !semantic_syntax_errors.is_empty() {
        let mut message = "Expected no semantic syntax errors for a valid program:\n".to_string();

        let line_index = LineIndex::from_source_text(&source);
        let line_index = LineIndex::from_source_text(source);
        let source_code = SourceCode::new(&source, &line_index);
        let source_code = SourceCode::new(source, &line_index);

        for error in semantic_syntax_errors {
            writeln!(

@@ -125,6 +135,7 @@ fn test_valid_syntax(input_path: &Path) {
        omit_expression => true,
        input_file => input_path,
        prepend_module_to_snapshot => false,
        snapshot_suffix => test_name
    }, {
        insta::assert_snapshot!(output);
    });

@@ -132,22 +143,23 @@ fn test_valid_syntax(input_path: &Path) {

/// Assert that the parser generates at least one syntax error for the given input file.
/// Snapshots the AST and the error messages.
fn test_invalid_syntax(input_path: &Path) {
fn test_invalid_syntax(input_path: &Utf8Path, source: &str, root: &str) {
    let source = fs::read_to_string(input_path).expect("Expected test file to exist");
    let test_name = input_path.strip_prefix(root).unwrap_or(input_path).as_str();
    let options = extract_options(&source).unwrap_or_else(|| {
    let options = extract_options(source).unwrap_or_else(|| {
        ParseOptions::from(Mode::Module).with_target_version(PythonVersion::PY314)
    });
    let parsed = parse_unchecked(&source, options.clone());
    let parsed = parse_unchecked(source, options.clone());

    validate_tokens(parsed.tokens(), source.text_len(), input_path);
    validate_tokens(parsed.tokens(), source.text_len());
    validate_ast(&parsed, source.text_len(), input_path);
    validate_ast(&parsed, source.text_len());

    let mut output = String::new();
    writeln!(&mut output, "## AST").unwrap();
    writeln!(&mut output, "\n```\n{:#?}\n```", parsed.syntax()).unwrap();

    let line_index = LineIndex::from_source_text(&source);
    let line_index = LineIndex::from_source_text(source);
    let source_code = SourceCode::new(&source, &line_index);
    let source_code = SourceCode::new(source, &line_index);

    if !parsed.errors().is_empty() {
        writeln!(&mut output, "## Errors\n").unwrap();

@@ -186,7 +198,7 @@ fn test_invalid_syntax(input_path: &Path) {
    let parsed = parsed.try_into_module().expect("Parsed with Mode::Module");

    let mut visitor =
        SemanticSyntaxCheckerVisitor::new(&source).with_python_version(options.target_version());
        SemanticSyntaxCheckerVisitor::new(source).with_python_version(options.target_version());

    for stmt in parsed.suite() {
        visitor.visit_stmt(stmt);

@@ -196,7 +208,7 @@ fn test_invalid_syntax(input_path: &Path) {

    assert!(
        parsed.has_syntax_errors() || !semantic_syntax_errors.is_empty(),
        "{input_path:?}: Expected parser to generate at least one syntax error for a program containing syntax errors."
        "Expected parser to generate at least one syntax error for a program containing syntax errors."
    );

    if !semantic_syntax_errors.is_empty() {

@@ -220,6 +232,7 @@ fn test_invalid_syntax(input_path: &Path) {
        omit_expression => true,
        input_file => input_path,
        prepend_module_to_snapshot => false,
        snapshot_suffix => test_name
    }, {
        insta::assert_snapshot!(output);
    });

@@ -372,26 +385,24 @@ impl std::fmt::Display for CodeFrame<'_> {
/// Verifies that:
/// * the ranges are strictly increasing when loop the tokens in insertion order
/// * all ranges are within the length of the source code
fn validate_tokens(tokens: &[Token], source_length: TextSize, test_path: &Path) {
fn validate_tokens(tokens: &[Token], source_length: TextSize) {
    let mut previous: Option<&Token> = None;

    for token in tokens {
        assert!(
            token.end() <= source_length,
            "{path}: Token range exceeds the source code length. Token: {token:#?}",
            "Token range exceeds the source code length. Token: {token:#?}",
            path = test_path.display()
        );

        if let Some(previous) = previous {
            assert_eq!(
                previous.range().ordering(token.range()),
                Ordering::Less,
                "{path}: Token ranges are not in increasing order
                "Token ranges are not in increasing order
Previous token: {previous:#?}
Current token: {token:#?}
Tokens: {tokens:#?}
",
                path = test_path.display(),
            );
        }

@@ -403,9 +414,9 @@ Tokens: {tokens:#?}
/// * the range of the parent node fully encloses all its child nodes
/// * the ranges are strictly increasing when traversing the nodes in pre-order.
/// * all ranges are within the length of the source code.
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize, test_path: &Path) {
fn validate_ast(parsed: &Parsed<Mod>, source_len: TextSize) {
    walk_module(
        &mut ValidateAstVisitor::new(parsed.tokens(), source_len, test_path),
        &mut ValidateAstVisitor::new(parsed.tokens(), source_len),
        parsed.syntax(),
    );
}

@@ -416,17 +427,15 @@ struct ValidateAstVisitor<'a> {
    parents: Vec<AnyNodeRef<'a>>,
    previous: Option<AnyNodeRef<'a>>,
    source_length: TextSize,
    test_path: &'a Path,
}

impl<'a> ValidateAstVisitor<'a> {
    fn new(tokens: &'a Tokens, source_length: TextSize, test_path: &'a Path) -> Self {
    fn new(tokens: &'a Tokens, source_length: TextSize) -> Self {
        Self {
            tokens: tokens.iter().peekable(),
            parents: Vec::new(),
            previous: None,
            source_length,
            test_path,
        }
    }
}

@@ -444,8 +453,7 @@ impl ValidateAstVisitor<'_> {
            // At this point, next_token.end() > node.start()
            assert!(
                next.start() >= node.start(),
                "{path}: The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
                "The start of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
                path = self.test_path.display(),
                root = self.parents.first()
            );
        }

@@ -464,8 +472,7 @@ impl ValidateAstVisitor<'_> {
            // At this point, `next_token.end() > node.end()`
            assert!(
                next.start() >= node.end(),
                "{path}: The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
                "The end of the node falls within a token.\nNode: {node:#?}\n\nToken: {next:#?}\n\nRoot: {root:#?}",
                path = self.test_path.display(),
                root = self.parents.first()
            );
        }

@@ -476,16 +483,14 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
    fn enter_node(&mut self, node: AnyNodeRef<'ast>) -> TraversalSignal {
        assert!(
            node.end() <= self.source_length,
            "{path}: The range of the node exceeds the length of the source code. Node: {node:#?}",
            "The range of the node exceeds the length of the source code. Node: {node:#?}",
            path = self.test_path.display()
        );

        if let Some(previous) = self.previous {
            assert_ne!(
                previous.range().ordering(node.range()),
                Ordering::Greater,
                "{path}: The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
                "The ranges of the nodes are not strictly increasing when traversing the AST in pre-order.\nPrevious node: {previous:#?}\n\nCurrent node: {node:#?}\n\nRoot: {root:#?}",
                path = self.test_path.display(),
                root = self.parents.first()
            );
        }

@@ -493,8 +498,7 @@ impl<'ast> SourceOrderVisitor<'ast> for ValidateAstVisitor<'ast> {
        if let Some(parent) = self.parents.last() {
            assert!(
                parent.range().contains_range(node.range()),
                "{path}: The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
                "The range of the parent node does not fully enclose the range of the child node.\nParent node: {parent:#?}\n\nChild node: {node:#?}\n\nRoot: {root:#?}",
                path = self.test_path.display(),
                root = self.parents.first()
            );
        }

@@ -51,5 +51,11 @@ regex = { workspace = true }
tempfile = { workspace = true }
toml = { workspace = true }

[features]
default = []

[target.'cfg(all(not(target_os = "macos"), not(target_os = "windows"), not(target_os = "openbsd"), not(target_os = "aix"), not(target_os = "android"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64", target_arch = "riscv64")))'.dependencies]
tikv-jemallocator = { workspace = true }

[lints]
workspace = true

@@ -18,9 +18,9 @@ Valid severities are:

**Type**: `dict[RuleName, "ignore" | "warn" | "error"]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.rules]
possibly-unresolved-reference = "warn"
division-by-zero = "ignore"

@@ -45,9 +45,9 @@ configuration setting.

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
extra-paths = ["./shared/my-search-path"]
```

@@ -76,9 +76,9 @@ This option can be used to point to virtual or system Python environments.

**Type**: `str`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
python = "./custom-venv-location/.venv"
```

@@ -103,9 +103,9 @@ If no platform is specified, ty will use the current platform:

**Type**: `"win32" | "darwin" | "android" | "ios" | "linux" | "all" | str`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
# Tailor type stubs and conditionalized type definitions to windows.
python-platform = "win32"

@@ -137,9 +137,9 @@ to reflect the differing contents of the standard library across Python versions

**Type**: `"3.7" | "3.8" | "3.9" | "3.10" | "3.11" | "3.12" | "3.13" | "3.14" | <major>.<minor>`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
python-version = "3.12"
```

@@ -158,16 +158,16 @@ If left unspecified, ty will try to detect common project layouts and initialize
* if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
* otherwise, default to `.` (flat layout)

Besides, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
it will also be included in the first party search path.

**Default value**: `null`

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
# Multiple directories (priority order)
root = ["./src", "./lib", "./vendor"]

@@ -185,9 +185,9 @@ bundled as a zip file in the binary

**Type**: `str`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.environment]
typeshed = "/path/to/custom/typeshed"
```

@@ -200,24 +200,22 @@ Configuration override that applies to specific files based on glob patterns.

An override allows you to apply different rule configurations to specific
files or directories. Multiple overrides can match the same file, with
later overrides take precedence.
later overrides take precedence. Override rules take precedence over global
rules for matching files.

### Precedence
For example, to relax enforcement of rules in test files:

- Later overrides in the array take precedence over earlier ones
- Override rules take precedence over global rules for matching files

### Examples

```toml
# Relax rules for test files
[[tool.ty.overrides]]
include = ["tests/**", "**/test_*.py"]

[tool.ty.overrides.rules]
possibly-unresolved-reference = "warn"
```

# Ignore generated files but still check important ones
Or, to ignore a rule in generated files but retain enforcement in an important file:

```toml
[[tool.ty.overrides]]
include = ["generated/**"]
exclude = ["generated/important.py"]

@@ -240,9 +238,9 @@ If not specified, defaults to `[]` (excludes no files).

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[[tool.ty.overrides]]
exclude = [
    "generated",

@@ -268,9 +266,9 @@ If not specified, defaults to `["**"]` (matches all files).

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[[tool.ty.overrides]]
include = [
    "src",

@@ -292,9 +290,9 @@ severity levels or disable them entirely.

**Type**: `dict[RuleName, "ignore" | "warn" | "error"]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[[tool.ty.overrides]]
include = ["src"]

@@ -358,9 +356,9 @@ to re-include `dist` use `exclude = ["!dist"]`

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.src]
exclude = [
    "generated",

@@ -399,9 +397,9 @@ matches `<project_root>/src` and not `<project_root>/test/src`).

**Type**: `list[str]`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.src]
include = [
    "src",

@@ -421,9 +419,9 @@ Enabled by default.

**Type**: `bool`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.src]
respect-ignore-files = false
```

@@ -432,8 +430,8 @@ respect-ignore-files = false

### `root`

> [!WARN] "Deprecated"
!!! warning "Deprecated"
> This option has been deprecated. Use `environment.root` instead.
    This option has been deprecated. Use `environment.root` instead.

The root of the project, used for finding first-party modules.

@@ -443,16 +441,16 @@ If left unspecified, ty will try to detect common project layouts and initialize
* if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
* otherwise, default to `.` (flat layout)

Besides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
it will also be included in the first party search path.

**Default value**: `null`

**Type**: `str`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.src]
root = "./app"
```

@@ -471,9 +469,9 @@ Defaults to `false`.

**Type**: `bool`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.terminal]
# Error if ty emits any warning-level diagnostics.
error-on-warning = true

@@ -491,9 +489,9 @@ Defaults to `full`.

**Type**: `full | concise`

**Example usage** (`pyproject.toml`):
**Example usage**:

```toml
```toml title="pyproject.toml"
[tool.ty.terminal]
output-format = "concise"
```

@@ -2,6 +2,15 @@

ty defines and respects the following environment variables:

### `TY_CONFIG_FILE`

Path to a `ty.toml` configuration file to use.

When set, ty will use this file for configuration instead of
discovering configuration files automatically.

Equivalent to the `--config-file` command-line argument.

### `TY_LOG`

If set, ty will use this value as the log level for its `--verbose` output.

@ -39,7 +39,7 @@ def test(): -> "int":
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20call-non-callable" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L134" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L135" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -63,7 +63,7 @@ Calling a non-callable object will raise a `TypeError` at runtime.
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-argument-forms" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L178" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L179" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -95,7 +95,7 @@ f(int) # error
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-declarations" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L204" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L205" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -126,7 +126,7 @@ a = 1
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20conflicting-metaclass" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L229" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L230" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -158,7 +158,7 @@ class C(A, B): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-class-definition" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L255" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L256" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -190,7 +190,7 @@ class B(A): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
|
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-type-alias-definition" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20cyclic-type-alias-definition" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L281" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L282" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -218,7 +218,7 @@ type B = A
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-base" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L342" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L343" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -245,7 +245,7 @@ class B(A, A): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20duplicate-kw-only" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L363" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L364" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -357,7 +357,7 @@ def test(): -> "Literal[5]":
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20inconsistent-mro" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L589" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L590" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -387,7 +387,7 @@ class C(A, B): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20index-out-of-bounds" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L613" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L614" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -413,7 +413,7 @@ t[3] # IndexError: tuple index out of range
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.12">0.0.1-alpha.12</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20instance-layout-conflict" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L395" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L396" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -502,7 +502,7 @@ an atypical memory layout.
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-argument-type" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L667" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L668" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -529,7 +529,7 @@ func("foo") # error: [invalid-argument-type]
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-assignment" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L707" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L708" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -557,7 +557,7 @@ a: int = ''
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-attribute-access" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1997" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2003" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -591,7 +591,7 @@ C.instance_var = 3 # error: Cannot assign to instance variable
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-await" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-await" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L729" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L730" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -627,7 +627,7 @@ asyncio.run(main())
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-base" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L759" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L760" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -651,7 +651,7 @@ class A(42): ... # error: [invalid-base]
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-context-manager" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L810" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L811" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -678,7 +678,7 @@ with 1:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-declaration" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L831" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L832" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -707,7 +707,7 @@ a: str
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-exception-caught" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L854" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L855" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -751,7 +751,7 @@ except ZeroDivisionError:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.28">0.0.1-alpha.28</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.28">0.0.1-alpha.28</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-explicit-override" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-explicit-override" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1667" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1673" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -787,13 +787,57 @@ class D(A):
|
||||||
def foo(self): ... # fine: overrides `A.foo`
|
def foo(self): ... # fine: overrides `A.foo`
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## `invalid-frozen-dataclass-subclass`
|
||||||
|
|
||||||
|
<small>
|
||||||
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.35">0.0.1-alpha.35</a> ·
|
||||||
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-frozen-dataclass-subclass" target="_blank">Related issues</a> ·
|
||||||
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2229" target="_blank">View source</a>
|
||||||
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
**What it does**
|
||||||
|
|
||||||
|
Checks for dataclasses with invalid frozen inheritance:
|
||||||
|
- A frozen dataclass cannot inherit from a non-frozen dataclass.
|
||||||
|
- A non-frozen dataclass cannot inherit from a frozen dataclass.
|
||||||
|
|
||||||
|
**Why is this bad?**
|
||||||
|
|
||||||
|
Python raises a `TypeError` at runtime when either of these inheritance
|
||||||
|
patterns occurs.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
|
||||||
|
```python
|
||||||
|
from dataclasses import dataclass
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Base:
|
||||||
|
x: int
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class Child(Base): # Error raised here
|
||||||
|
y: int
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class FrozenBase:
|
||||||
|
x: int
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class NonFrozenChild(FrozenBase): # Error raised here
|
||||||
|
y: int
|
||||||
|
```
|
||||||
|
|
||||||
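A minimal sketch, not part of the upstream rule text, of one way to satisfy this check — assuming it is acceptable to freeze the whole hierarchy rather than split it between frozen and non-frozen dataclasses:

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class Base:
    x: int


@dataclass(frozen=True)
class Child(Base):  # fine: a frozen dataclass may inherit from a frozen dataclass
    y: int
```

The same applies in the other direction: keeping both `Base` and `Child` non-frozen also avoids the runtime `TypeError`.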
## `invalid-generic-class`
|
## `invalid-generic-class`
|
||||||
|
|
||||||
<small>
|
<small>
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-generic-class" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L890" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L891" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -804,16 +848,21 @@ Checks for the creation of invalid generic classes
|
||||||
**Why is this bad?**
|
**Why is this bad?**
|
||||||
|
|
||||||
There are several requirements that you must follow when defining a generic class.
|
There are several requirements that you must follow when defining a generic class.
|
||||||
|
Many of these result in `TypeError` being raised at runtime if they are violated.
|
||||||
|
|
||||||
**Examples**
|
**Examples**
|
||||||
|
|
||||||
```python
|
```python
|
||||||
from typing import Generic, TypeVar
|
from typing_extensions import Generic, TypeVar
|
||||||
|
|
||||||
T = TypeVar("T") # okay
|
T = TypeVar("T")
|
||||||
|
U = TypeVar("U", default=int)
|
||||||
|
|
||||||
# error: class uses both PEP-695 syntax and legacy syntax
|
# error: class uses both PEP-695 syntax and legacy syntax
|
||||||
class C[U](Generic[T]): ...
|
class C[U](Generic[T]): ...
|
||||||
|
|
||||||
|
# error: type parameter with default comes before type parameter without default
|
||||||
|
class D(Generic[U, T]): ...
|
||||||
```
|
```
|
||||||
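A minimal sketch, not part of the upstream rule text, of definitions that avoid both errors shown above — assuming Python 3.12+ for the PEP-695 syntax and `typing_extensions` for `TypeVar` defaults:

```python
from typing_extensions import Generic, TypeVar

T = TypeVar("T")
U = TypeVar("U", default=int)


# fine: the type parameter with a default comes after the one without
class D(Generic[T, U]): ...


# fine: PEP-695 syntax only, with no legacy `Generic[...]` base
class C[V]: ...
```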
|
|
||||||
**References**
|
**References**
|
||||||
|
|
@ -826,7 +875,7 @@ class C[U](Generic[T]): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.17">0.0.1-alpha.17</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.17">0.0.1-alpha.17</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-key" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-key" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L634" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L635" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -865,7 +914,7 @@ carol = Person(name="Carol", age=25) # typo!
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-legacy-type-variable" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L916" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L922" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -900,7 +949,7 @@ def f(t: TypeVar("U")): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-metaclass" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1013" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1019" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -934,7 +983,7 @@ class B(metaclass=f): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-method-override" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-method-override" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2125" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2131" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1041,7 +1090,7 @@ Correct use of `@override` is enforced by ty's `invalid-explicit-override` rule.
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.19">0.0.1-alpha.19</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-named-tuple" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-named-tuple" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L541" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L542" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1095,7 +1144,7 @@ AttributeError: Cannot overwrite NamedTuple attribute _asdict
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
|
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/1.0.0">1.0.0</a>) ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-newtype" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-newtype" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L989" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L995" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1125,7 +1174,7 @@ Baz = NewType("Baz", int | str) # error: invalid base for `typing.NewType`
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-overload" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1040" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1046" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1175,7 +1224,7 @@ def foo(x: int) -> int: ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-parameter-default" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1139" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1145" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1201,7 +1250,7 @@ def f(a: int = ''): ...
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-paramspec" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-paramspec" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L944" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L950" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1232,7 +1281,7 @@ P2 = ParamSpec("S2") # error: ParamSpec name must match the variable it's assig
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-protocol" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L477" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L478" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1266,7 +1315,7 @@ TypeError: Protocols can only inherit from other protocols, got <class 'int'>
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-raise" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1159" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1165" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1315,7 +1364,7 @@ def g():
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-return-type" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L688" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L689" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1340,7 +1389,7 @@ def func() -> int:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-super-argument" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1202" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1208" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1398,7 +1447,7 @@ TODO #14889
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.6">0.0.1-alpha.6</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.6">0.0.1-alpha.6</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-alias-type" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L968" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L974" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1425,7 +1474,7 @@ NewAlias = TypeAliasType(get_name(), int) # error: TypeAliasType name mus
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-arguments" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-arguments" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1434" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1440" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1472,7 +1521,7 @@ Bar[int] # error: too few arguments
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-checking-constant" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1241" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1247" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1502,7 +1551,7 @@ TYPE_CHECKING = ''
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-form" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1265" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1271" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1532,7 +1581,7 @@ b: Annotated[int] # `Annotated` expects at least two arguments
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-call" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1317" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1323" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1566,7 +1615,7 @@ f(10) # Error
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.11">0.0.1-alpha.11</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-guard-definition" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1289" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1295" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1600,7 +1649,7 @@ class C:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20invalid-type-variable-constraints" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1345" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1351" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1635,7 +1684,7 @@ T = TypeVar('T', bound=str) # valid bound TypeVar
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-argument" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1374" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1380" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1660,7 +1709,7 @@ func() # TypeError: func() missing 1 required positional argument: 'x'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-typed-dict-key" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20missing-typed-dict-key" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2098" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2104" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1693,7 +1742,7 @@ alice["age"] # KeyError
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20no-matching-overload" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1393" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1399" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1722,7 +1771,7 @@ func("string") # error: [no-matching-overload]
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20non-subscriptable" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20non-subscriptable" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1416" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1422" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1746,7 +1795,7 @@ Subscripting an object that does not support it will raise a `TypeError` at runt
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20not-iterable" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1475" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1481" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1772,7 +1821,7 @@ for i in 34: # TypeError: 'int' object is not iterable
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.29">0.0.1-alpha.29</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20override-of-final-method" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20override-of-final-method" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1640" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1646" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1805,7 +1854,7 @@ class B(A):
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20parameter-already-assigned" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1526" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1532" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1832,7 +1881,7 @@ f(1, x=2) # Error raised here
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20positional-only-parameter-as-kwarg" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20positional-only-parameter-as-kwarg" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1851" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1857" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1890,7 +1939,7 @@ def test(): -> "int":
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20static-assert-error" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1973" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1979" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1920,7 +1969,7 @@ static_assert(int(2.0 * 3.0) == 6) # error: does not have a statically known tr
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20subclass-of-final-class" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1617" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1623" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1949,7 +1998,7 @@ class B(A): ... # Error raised here
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.30">0.0.1-alpha.30</a>) ·
|
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.30">0.0.1-alpha.30</a>) ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20super-call-in-named-tuple-method" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20super-call-in-named-tuple-method" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1785" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1791" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1983,7 +2032,7 @@ class F(NamedTuple):
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20too-many-positional-arguments" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1725" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1731" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2010,7 +2059,7 @@ f("foo") # Error raised here
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20type-assertion-failure" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1703" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1709" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2038,7 +2087,7 @@ def _(x: int):
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unavailable-implicit-super-arguments" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1746" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1752" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2084,7 +2133,7 @@ class A:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unknown-argument" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1830" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1836" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2111,7 +2160,7 @@ f(x=1, y=2) # Error raised here
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-attribute" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1872" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1878" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2139,7 +2188,7 @@ A().foo # AttributeError: 'A' object has no attribute 'foo'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-import" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1894" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1900" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2164,7 +2213,7 @@ import foo # ModuleNotFoundError: No module named 'foo'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-reference" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1913" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1919" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2189,7 +2238,7 @@ print(x) # NameError: name 'x' is not defined
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-bool-conversion" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1495" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1501" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2226,7 +2275,7 @@ b1 < b2 < b1 # exception raised here
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-operator" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1932" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1938" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2254,7 +2303,7 @@ A() + A() # TypeError: unsupported operand type(s) for +: 'A' and 'A'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'error'."><code>error</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20zero-stepsize-in-slice" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1954" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1960" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2279,7 +2328,7 @@ l[1:10:0] # ValueError: slice step cannot be zero
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.20">0.0.1-alpha.20</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ambiguous-protocol-member" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20ambiguous-protocol-member" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L506" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L507" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2320,7 +2369,7 @@ class SubProto(BaseProto, Protocol):
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.16">0.0.1-alpha.16</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.16">0.0.1-alpha.16</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20deprecated" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L321" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L322" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2408,7 +2457,7 @@ a = 20 / 0 # type: ignore
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-attribute" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-attribute" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1547" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1553" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2436,7 +2485,7 @@ A.c # AttributeError: type object 'A' has no attribute 'c'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-implicit-call" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-implicit-call" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L152" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L153" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2468,7 +2517,7 @@ A()[0] # TypeError: 'A' object is not subscriptable
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-import" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-missing-import" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1569" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1575" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2500,7 +2549,7 @@ from module import a # ImportError: cannot import name 'a' from 'module'
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20redundant-cast" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2025" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2031" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2527,7 +2576,7 @@ cast(int, f()) # Redundant
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20undefined-reveal" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1812" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1818" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2551,7 +2600,7 @@ reveal_type(1) # NameError: name 'reveal_type' is not defined
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.15">0.0.1-alpha.15</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.15">0.0.1-alpha.15</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unresolved-global" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2046" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L2052" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2609,7 +2658,7 @@ def g():
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.7">0.0.1-alpha.7</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.7">0.0.1-alpha.7</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20unsupported-base" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L777" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L778" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2648,7 +2697,7 @@ class D(C): ... # error: [unsupported-base]
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'warn'."><code>warn</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.22">0.0.1-alpha.22</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20useless-overload-body" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20useless-overload-body" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1083" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1089" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2711,7 +2760,7 @@ def foo(x: int | str) -> int | str:
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
|
||||||
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a>) ·
|
Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a>) ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20division-by-zero" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L303" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L304" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -2735,7 +2784,7 @@ Dividing by zero raises a `ZeroDivisionError` at runtime.
|
||||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
|
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of 'ignore'."><code>ignore</code></a> ·
|
||||||
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
Added in <a href="https://github.com/astral-sh/ty/releases/tag/0.0.1-alpha.1">0.0.1-alpha.1</a> ·
|
||||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference" target="_blank">Related issues</a> ·
|
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20possibly-unresolved-reference" target="_blank">Related issues</a> ·
|
||||||
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1595" target="_blank">View source</a>
|
<a href="https://github.com/astral-sh/ruff/blob/main/crates%2Fty_python_semantic%2Fsrc%2Ftypes%2Fdiagnostic.rs#L1601" target="_blank">View source</a>
|
||||||
</small>
|
</small>
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -9,6 +9,7 @@ use ty_combine::Combine;
 use ty_project::metadata::options::{EnvironmentOptions, Options, SrcOptions, TerminalOptions};
 use ty_project::metadata::value::{RangedValue, RelativeGlobPattern, RelativePathBuf, ValueSource};
 use ty_python_semantic::lint;
+use ty_static::EnvVars;

 // Configures Clap v3-style help menu colors
 const STYLES: Styles = Styles::styled()
@@ -121,7 +122,7 @@ pub(crate) struct CheckCommand {
     /// The path to a `ty.toml` file to use for configuration.
     ///
     /// While ty configuration can be included in a `pyproject.toml` file, it is not allowed in this context.
-    #[arg(long, env = "TY_CONFIG_FILE", value_name = "PATH")]
+    #[arg(long, env = EnvVars::TY_CONFIG_FILE, value_name = "PATH")]
     pub(crate) config_file: Option<SystemPathBuf>,

     /// The format to use for printing diagnostic messages.
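The attribute change swaps a string literal for a constant from `ty_static::EnvVars`, so the variable name is spelled once and a typo becomes a compile error instead of a silently ignored environment variable. A minimal sketch of that pattern; the struct and constant below are illustrative assumptions, not the real `ty_static` source:

    /// Central place for the environment variable names a CLI reads.
    /// Illustrative only; the real `ty_static::EnvVars` may be defined differently.
    pub struct EnvVars;

    impl EnvVars {
        pub const TY_CONFIG_FILE: &'static str = "TY_CONFIG_FILE";
    }

    fn main() {
        // A typo such as `EnvVars::TY_CONFIG_FILLE` fails to compile,
        // while a mistyped string literal would only misbehave at runtime.
        let value = std::env::var(EnvVars::TY_CONFIG_FILE).ok();
        println!("{value:?}");
    }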
@@ -2,6 +2,22 @@ use colored::Colorize;
 use std::io;
 use ty::{ExitStatus, run};

+#[cfg(all(
+    not(target_os = "macos"),
+    not(target_os = "windows"),
+    not(target_os = "openbsd"),
+    not(target_os = "aix"),
+    not(target_os = "android"),
+    any(
+        target_arch = "x86_64",
+        target_arch = "aarch64",
+        target_arch = "powerpc64",
+        target_arch = "riscv64"
+    )
+))]
+#[global_allocator]
+static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
+
 pub fn main() -> ExitStatus {
     run().unwrap_or_else(|error| {
         use io::Write;
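The added block opts the ty binary into jemalloc as its global allocator on the listed targets. As a minimal, platform-independent sketch of the `#[global_allocator]` mechanism itself, using std's `System` allocator so it compiles anywhere (purely illustrative, not ty's configuration):

    use std::alloc::System;

    // Any type implementing `GlobalAlloc` can be registered as the global
    // allocator; every heap allocation in the program then goes through it.
    #[global_allocator]
    static GLOBAL: System = System;

    fn main() {
        let v: Vec<u32> = (0..4).collect(); // allocated via `GLOBAL`
        println!("{}", v.len());
    }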
@@ -2390,14 +2390,14 @@ fn default_root_flat_layout() -> anyhow::Result<()> {
 fn default_root_tests_folder() -> anyhow::Result<()> {
     let case = CliTest::with_files([
         ("src/foo.py", "foo = 10"),
-        ("tests/bar.py", "bar = 20"),
+        ("tests/bar.py", "baz = 20"),
         (
             "tests/test_bar.py",
             r#"
             from foo import foo
-            from bar import bar
+            from bar import baz

-            print(f"{foo} {bar}")
+            print(f"{foo} {baz}")
             "#,
         ),
     ])?;
@@ -29,12 +29,11 @@ pub fn code_actions(
     let mut actions = Vec::new();

-    // Suggest imports for unresolved references (often ideal)
-    // TODO: suggest qualifying with an already imported symbol
+    // Suggest imports/qualifications for unresolved references (often ideal)
     let is_unresolved_reference =
         lint_id == LintId::of(&UNRESOLVED_REFERENCE) || lint_id == LintId::of(&UNDEFINED_REVEAL);
     if is_unresolved_reference
-        && let Some(import_quick_fix) = create_import_symbol_quick_fix(db, file, diagnostic_range)
+        && let Some(import_quick_fix) = unresolved_fixes(db, file, diagnostic_range)
     {
         actions.extend(import_quick_fix);
     }
@@ -49,7 +48,7 @@ pub fn code_actions(
     actions
 }

-fn create_import_symbol_quick_fix(
+fn unresolved_fixes(
     db: &dyn Db,
     file: File,
     diagnostic_range: TextRange,
@@ -59,7 +58,7 @@ fn create_import_symbol_quick_fix(
     let symbol = &node.expr_name()?.id;

     Some(
-        completion::missing_imports(db, file, &parsed, symbol, node)
+        completion::unresolved_fixes(db, file, &parsed, symbol, node)
             .into_iter()
             .map(|import| QuickFix {
                 title: import.label,
@@ -84,6 +83,7 @@ mod tests {
         system::{DbWithWritableSystem, SystemPathBuf},
     };
     use ruff_diagnostics::Fix;
+    use ruff_python_trivia::textwrap::dedent;
     use ruff_text_size::{TextRange, TextSize};
     use ty_project::ProjectMetadata;
     use ty_python_semantic::{
(hunks at test-module lines 149, 171, 193, 215, 241, 274, 307, 339, 371, 403, and 430) Eleven existing snapshot tests for the "Ignore 'unresolved-reference' for this line" code action are re-recorded. Only the reported diagnostic span changes: its start column moves twelve columns to the left (main.py:2:17 -> 2:5, 3:21 -> 3:9, 3:18 -> 3:6, 4:17 -> 4:5), the multi-line underlines shrink to match (`| |_____________________^` -> `| |_________^`), and a trailing empty context line (`3 |`) disappears from several snapshots. This lines up with the test harness now dedenting its raw-string sources (see the `dedent(source)` change further down). The suggested edit in every case is unchanged: append the rule to an existing `# ty:ignore[...]` comment (covering trailing commas, extra spaces, trailing explanations, multi-line expressions, f-strings, and line continuations) or add a new `# ty:ignore[unresolved-reference]` comment at the end of the statement.
(hunk at test-module line 454) The `undefined_reveal` snapshot gets the same span update (main.py:2:13 -> 2:1) for both its "import typing.reveal_type" and "Ignore 'undefined-reveal' for this line" actions, and five new tests are added for the expanded quick fixes on unresolved references:

- unresolved_deprecated: a bare `@deprecated("do not use")` decorator offers "import warnings.deprecated" (a preferred code action inserting `from warnings import deprecated`) plus the usual `# ty:ignore[unresolved-reference]` fix.
- unresolved_deprecated_warnings_imported: with `import warnings` already in scope, an additional "qualify warnings.deprecated" action rewrites the use to `@warnings.deprecated("do not use")` without touching the imports.
- unresolved_loader: a bare `ExecutionLoader` reference offers "import importlib.abc.ExecutionLoader" plus the ignore fix.
- unresolved_loader_importlib_imported: with `import importlib` in scope, only the import fix is offered; a TODO notes that qualifying as `importlib.abc.ExecutionLoader` would also be legal, since `importlib.abc` is available whenever `importlib` is.
- unresolved_loader_abc_imported: with `import importlib.abc` in scope, the import fix is offered and a "qualify importlib.abc.ExecutionLoader" action replaces the bare name with `importlib.abc.ExecutionLoader` in place.

Representative excerpt from the second test's snapshot:

    info[code-action]: qualify warnings.deprecated
     --> main.py:4:2
    help: This is a preferred code action
      - @deprecated("do not use")
    4 + @warnings.deprecated("do not use")
@@ -493,7 +707,7 @@ mod tests {
         db.init_program().unwrap();

-        let mut cleansed = source.to_string();
+        let mut cleansed = dedent(source).to_string();
         let start = cleansed
             .find("<START>")
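The harness now dedents the raw-string test sources before locating the `<START>`/`<END>` markers, which is consistent with the snapshot columns above shifting left. As a rough, standalone sketch of what a dedent step does (ruff's `textwrap::dedent` is the real implementation and handles more edge cases):

    // Rough sketch only: strip the largest common leading-space prefix
    // from all non-empty lines.
    fn dedent(source: &str) -> String {
        let indent = source
            .lines()
            .filter(|line| !line.trim().is_empty())
            .map(|line| line.len() - line.trim_start().len())
            .min()
            .unwrap_or(0);

        source
            .lines()
            .map(|line| if line.len() >= indent { &line[indent..] } else { line })
            .collect::<Vec<_>>()
            .join("\n")
    }

    fn main() {
        let src = "\n            import warnings\n\n            x = 1\n";
        assert_eq!(dedent(src), "\nimport warnings\n\nx = 1");
        println!("{}", dedent(src));
    }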
@@ -67,6 +67,7 @@ impl<'db> Completions<'db> {
         self.items
     }

+    // Convert this collection into a list of "import..." fixes
     fn into_imports(mut self) -> Vec<ImportEdit> {
         self.items.sort_by(compare_suggestions);
         self.items
@@ -82,6 +83,28 @@ impl<'db> Completions<'db> {
             .collect()
     }

+    // Convert this collection into a list of "qualify..." fixes
+    fn into_qualifications(mut self, range: TextRange) -> Vec<ImportEdit> {
+        self.items.sort_by(compare_suggestions);
+        self.items
+            .dedup_by(|c1, c2| (&c1.name, c1.module_name) == (&c2.name, c2.module_name));
+        self.items
+            .into_iter()
+            .filter_map(|item| {
+                // If we would have to actually import something, don't suggest the qualification
+                // (we could, maybe we should, but for now, we don't)
+                if item.import.is_some() {
+                    return None;
+                }
+
+                Some(ImportEdit {
+                    label: format!("qualify {}", item.insert.as_ref()?),
+                    edit: Edit::replacement(item.insert?.into_string(), range.start(), range.end()),
+                })
+            })
+            .collect()
+    }
+
     /// Attempts to adds the given completion to this collection.
     ///
     /// When added, `true` is returned.
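`into_qualifications` sorts before deduplicating because `Vec::dedup_by` only collapses adjacent elements, so the sort is what makes the dedup effective. A tiny standalone illustration of that ordering requirement (std only, made-up data):

    fn main() {
        let mut xs = vec![
            ("deprecated", "warnings"),
            ("deprecated", "typing_extensions"),
            ("deprecated", "warnings"),
        ];

        // Without sorting, the two ("deprecated", "warnings") entries are not
        // adjacent, so dedup_by would keep both of them.
        xs.sort();
        xs.dedup_by(|a, b| a == b);

        assert_eq!(
            xs,
            vec![("deprecated", "typing_extensions"), ("deprecated", "warnings")]
        );
        println!("{xs:?}");
    }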
@@ -467,6 +490,17 @@ pub fn completion<'db>(
             !ty.is_notimplemented(db)
         });
     }
+    if is_specifying_for_statement_iterable(&parsed, offset, typed.as_deref()) {
+        // Remove all keywords that doesn't make sense given the context,
+        // even if they are syntatically valid, e.g. `None`.
+        completions.retain(|item| {
+            let Some(kind) = item.kind else { return true };
+            if kind != CompletionKind::Keyword {
+                return true;
+            }
+            matches!(item.name.as_str(), "await" | "lambda" | "yield")
+        });
+    }
     completions.into_completions()
 }
@@ -481,6 +515,18 @@ fn detect_function_arg_completions<'db>(
     parsed: &ParsedModuleRef,
     offset: TextSize,
 ) -> Option<Vec<Completion<'db>>> {
+    // But be careful: this isn't as simple as just finding a call
+    // expression. We also have to make sure we are in the "arguments"
+    // portion of the call. Otherwise we risk incorrectly returning
+    // something for `(<CURSOR>)(arg1, arg2)`-style expressions.
+    if !covering_node(parsed.syntax().into(), TextRange::empty(offset))
+        .ancestors()
+        .take_while(|node| !node.is_statement())
+        .any(|node| node.is_arguments())
+    {
+        return None;
+    }
+
     let sig_help = signature_help(db, file, offset)?;
     let set_function_args = detect_set_function_args(parsed, offset);
@@ -555,15 +601,19 @@ pub(crate) struct ImportEdit {
     pub edit: Edit,
 }

-pub(crate) fn missing_imports(
+/// Get fixes that would resolve an unresolved reference
+pub(crate) fn unresolved_fixes(
     db: &dyn Db,
     file: File,
     parsed: &ParsedModuleRef,
     symbol: &str,
     node: AnyNodeRef,
 ) -> Vec<ImportEdit> {
-    let mut completions = Completions::exactly(db, symbol);
+    let mut results = Vec::new();
     let scoped = ScopedTarget { node };

+    // Request imports we could add to put the symbol in scope
+    let mut completions = Completions::exactly(db, symbol);
     add_unimported_completions(
         db,
         file,
@@ -574,8 +624,23 @@ pub(crate) fn unresolved_fixes(
         },
         &mut completions,
     );
+    results.extend(completions.into_imports());

-    completions.into_imports()
+    // Request qualifications we could apply to the symbol to make it resolve
+    let mut completions = Completions::exactly(db, symbol);
+    add_unimported_completions(
+        db,
+        file,
+        parsed,
+        scoped,
+        |module_name: &ModuleName, symbol: &str| {
+            ImportRequest::import(module_name.as_str(), symbol).force()
+        },
+        &mut completions,
+    );
+    results.extend(completions.into_qualifications(node.range()));
+
+    results
 }

 /// Adds completions derived from keywords.
@@ -1565,12 +1630,7 @@ fn is_in_definition_place(
 /// Returns true when the cursor sits on a binding statement.
 /// E.g. naming a parameter, type parameter, or `for` <name>).
 fn is_in_variable_binding(parsed: &ParsedModuleRef, offset: TextSize, typed: Option<&str>) -> bool {
-    let range = if let Some(typed) = typed {
-        let start = offset.saturating_sub(typed.text_len());
-        TextRange::new(start, offset)
-    } else {
-        TextRange::empty(offset)
-    };
+    let range = typed_text_range(typed, offset);

     let covering = covering_node(parsed.syntax().into(), range);
     covering.ancestors().any(|node| match node {
@@ -1625,6 +1685,36 @@ fn is_raising_exception(tokens: &[Token]) -> bool {
     false
 }

+/// Returns true when the cursor is after the `in` keyword in a
+/// `for x in <CURSOR>` statement.
+fn is_specifying_for_statement_iterable(
+    parsed: &ParsedModuleRef,
+    offset: TextSize,
+    typed: Option<&str>,
+) -> bool {
+    let range = typed_text_range(typed, offset);
+
+    let covering = covering_node(parsed.syntax().into(), range);
+    covering.parent().is_some_and(|node| {
+        matches!(
+            node, ast::AnyNodeRef::StmtFor(stmt_for) if stmt_for.iter.range().contains_range(range)
+        )
+    })
+}
+
+/// Returns the `TextRange` of the `typed` text.
+///
+/// `typed` should be the text immediately before the
+/// provided cursor `offset`.
+fn typed_text_range(typed: Option<&str>, offset: TextSize) -> TextRange {
+    if let Some(typed) = typed {
+        let start = offset.saturating_sub(typed.text_len());
+        TextRange::new(start, offset)
+    } else {
+        TextRange::empty(offset)
+    }
+}
+
 /// Order completions according to the following rules:
 ///
 /// 1) Names with no underscore prefix
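`typed_text_range` recovers the span of the text the user has typed so far by backing up from the cursor offset. A tiny standalone sketch of the same arithmetic with plain byte offsets instead of the `ruff_text_size` types (illustrative only):

    // Given the cursor offset and the text typed immediately before it,
    // compute the (start, end) byte range of that typed text.
    fn typed_text_range(typed: Option<&str>, offset: usize) -> (usize, usize) {
        match typed {
            // Back up by the typed text's length, saturating at 0.
            Some(typed) => (offset.saturating_sub(typed.len()), offset),
            // Nothing typed yet: an empty range at the cursor.
            None => (offset, offset),
        }
    }

    fn main() {
        // "for x in ab" with the cursor right after "ab" (offset 11):
        assert_eq!(typed_text_range(Some("ab"), 11), (9, 11));
        assert_eq!(typed_text_range(None, 11), (11, 11));
        println!("ok");
    }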
@@ -2516,9 +2606,7 @@ def frob(): ...
     assert_snapshot!(
         builder.skip_keywords().skip_builtins().build().snapshot(),
-        @r"
-        foo
-        ",
+        @"<No completions found after filtering out completions>",
     );
 }
@@ -2532,9 +2620,7 @@ def frob(): ...
     assert_snapshot!(
         builder.skip_keywords().skip_builtins().build().snapshot(),
-        @r"
-        foo
-        ",
+        @"<No completions found after filtering out completions>",
     );
 }
@@ -3126,7 +3212,7 @@ quux.<CURSOR>
     assert_snapshot!(
         builder.skip_keywords().skip_builtins().type_signatures().build().snapshot(), @r"
     count :: bound method Quux.count(value: Any, /) -> int
-    index :: bound method Quux.index(value: Any, start: SupportsIndex = Literal[0], stop: SupportsIndex = int, /) -> int
+    index :: bound method Quux.index(value: Any, start: SupportsIndex = 0, stop: SupportsIndex = ..., /) -> int
     x :: int
     y :: str
     __add__ :: Overload[(value: tuple[int | str, ...], /) -> tuple[int | str, ...], (value: tuple[_T@__add__, ...], /) -> tuple[int | str | _T@__add__, ...]]
(hunks at test-module lines 3191, 3212, 3230, 3249, 3269, 3287, 3333, and 3355) Eight keyword-argument completion snapshots are re-recorded: parameter-name completions now render with a trailing `=` (`okay` -> `okay=`, `bar`/`barbaz`/`baz` -> `bar=`/`barbaz=`/`baz=`, `okay_abc` -> `okay_abc=`, `okay_okay` -> `okay_okay=`), while ordinary value completions such as `foo` and `bar` stay as they are. Snapshots that end up with a single entry collapse to the inline form (e.g. `@"baz="`, `@"okay="`, `@"bar="`). The cursor positions exercised are `bar(o<CURSOR>`, `foo(b<CURSOR>`, `foo(bar=1, b<CURSOR>`, `abc(o<CURSOR>`, `abc(okay=1, ba<CURSOR> baz=5`, and `bar(<CURSOR>`.
@ -4711,8 +4791,7 @@ from os.<CURSOR>
|
||||||
let last_nonunderscore = test
|
let last_nonunderscore = test
|
||||||
.completions()
|
.completions()
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|c| !c.name.starts_with('_'))
|
.rfind(|c| !c.name.starts_with('_'))
|
||||||
.next_back()
|
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
assert_eq!(&last_nonunderscore.name, "type_check_only");
|
assert_eq!(&last_nonunderscore.name, "type_check_only");
|
||||||
|
|
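The test tweak above swaps a `filter(...).next_back()` chain for `Iterator::rfind`, which walks the double-ended iterator from the back and returns the last matching element directly. A self-contained sketch of the same pattern (the sample names are made up for illustration):

```rust
fn main() {
    let completions = ["foo", "_bar", "type_check_only", "__doc__"];
    // `rfind` searches from the end, so this yields the last completion
    // whose name does not start with an underscore.
    let last_public = completions
        .iter()
        .rfind(|name| !name.starts_with('_'))
        .unwrap();
    assert_eq!(*last_public, "type_check_only");
}
```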
@ -5824,6 +5903,62 @@ def foo(param: s<CURSOR>)
|
||||||
.contains("str");
|
.contains("str");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_statement_keywords_in_for_statement_simple1() {
|
||||||
|
completion_test_builder(
|
||||||
|
"\
|
||||||
|
for x in a<CURSOR>
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.build()
|
||||||
|
.contains("lambda")
|
||||||
|
.contains("await")
|
||||||
|
.not_contains("raise")
|
||||||
|
.not_contains("False");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_statement_keywords_in_for_statement_simple2() {
|
||||||
|
completion_test_builder(
|
||||||
|
"\
|
||||||
|
for x, y, _ in a<CURSOR>
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.build()
|
||||||
|
.contains("lambda")
|
||||||
|
.contains("await")
|
||||||
|
.not_contains("raise")
|
||||||
|
.not_contains("False");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_statement_keywords_in_for_statement_simple3() {
|
||||||
|
completion_test_builder(
|
||||||
|
"\
|
||||||
|
for i, (x, y, z) in a<CURSOR>
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.build()
|
||||||
|
.contains("lambda")
|
||||||
|
.contains("await")
|
||||||
|
.not_contains("raise")
|
||||||
|
.not_contains("False");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn no_statement_keywords_in_for_statement_complex() {
|
||||||
|
completion_test_builder(
|
||||||
|
"\
|
||||||
|
for i, (obj.x, (a[0], b['k']), _), *rest in a<CURSOR>
|
||||||
|
",
|
||||||
|
)
|
||||||
|
.build()
|
||||||
|
.contains("lambda")
|
||||||
|
.contains("await")
|
||||||
|
.not_contains("raise")
|
||||||
|
.not_contains("False");
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn favour_symbols_currently_imported() {
|
fn favour_symbols_currently_imported() {
|
||||||
let snapshot = CursorTest::builder()
|
let snapshot = CursorTest::builder()
|
||||||
|
|
@ -6582,6 +6717,27 @@ def f(zqzqzq: str):
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn auto_import_prioritizes_reusing_import_from_statements() {
|
||||||
|
let builder = completion_test_builder(
|
||||||
|
"\
|
||||||
|
import typing
|
||||||
|
from typing import Callable
|
||||||
|
TypedDi<CURSOR>
|
||||||
|
",
|
||||||
|
);
|
||||||
|
assert_snapshot!(
|
||||||
|
builder.imports().build().snapshot(),
|
||||||
|
@r"
|
||||||
|
TypedDict :: , TypedDict
|
||||||
|
is_typeddict :: , is_typeddict
|
||||||
|
_FilterConfigurationTypedDict :: from logging.config import _FilterConfigurationTypedDict
|
||||||
|
|
||||||
|
_FormatterConfigurationTypedDict :: from logging.config import _FormatterConfigurationTypedDict
|
||||||
|
",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
/// A way to create a simple single-file (named `main.py`) completion test
|
/// A way to create a simple single-file (named `main.py`) completion test
|
||||||
/// builder.
|
/// builder.
|
||||||
///
|
///
|
||||||
|
|
@ -6607,6 +6763,7 @@ def f(zqzqzq: str):
|
||||||
skip_builtins: bool,
|
skip_builtins: bool,
|
||||||
skip_keywords: bool,
|
skip_keywords: bool,
|
||||||
type_signatures: bool,
|
type_signatures: bool,
|
||||||
|
imports: bool,
|
||||||
module_names: bool,
|
module_names: bool,
|
||||||
// This doesn't seem like a "very complex" type to me... ---AG
|
// This doesn't seem like a "very complex" type to me... ---AG
|
||||||
#[allow(clippy::type_complexity)]
|
#[allow(clippy::type_complexity)]
|
||||||
|
|
@ -6639,6 +6796,7 @@ def f(zqzqzq: str):
|
||||||
original,
|
original,
|
||||||
filtered,
|
filtered,
|
||||||
type_signatures: self.type_signatures,
|
type_signatures: self.type_signatures,
|
||||||
|
imports: self.imports,
|
||||||
module_names: self.module_names,
|
module_names: self.module_names,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -6699,6 +6857,15 @@ def f(zqzqzq: str):
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// When set, include the import associated with the
|
||||||
|
/// completion.
|
||||||
|
///
|
||||||
|
/// Not enabled by default.
|
||||||
|
fn imports(mut self) -> CompletionTestBuilder {
|
||||||
|
self.imports = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
/// When set, the module name for each symbol is included
|
/// When set, the module name for each symbol is included
|
||||||
/// in the snapshot (if available).
|
/// in the snapshot (if available).
|
||||||
fn module_names(mut self) -> CompletionTestBuilder {
|
fn module_names(mut self) -> CompletionTestBuilder {
|
||||||
|
|
@ -6731,6 +6898,9 @@ def f(zqzqzq: str):
|
||||||
/// Whether type signatures should be included in the snapshot
|
/// Whether type signatures should be included in the snapshot
|
||||||
/// generated by `CompletionTest::snapshot`.
|
/// generated by `CompletionTest::snapshot`.
|
||||||
type_signatures: bool,
|
type_signatures: bool,
|
||||||
|
/// Whether to show the import that will be inserted when this
|
||||||
|
/// completion is selected.
|
||||||
|
imports: bool,
|
||||||
/// Whether module names should be included in the snapshot
|
/// Whether module names should be included in the snapshot
|
||||||
/// generated by `CompletionTest::snapshot`.
|
/// generated by `CompletionTest::snapshot`.
|
||||||
module_names: bool,
|
module_names: bool,
|
||||||
|
|
@ -6752,7 +6922,7 @@ def f(zqzqzq: str):
|
||||||
self.filtered
|
self.filtered
|
||||||
.iter()
|
.iter()
|
||||||
.map(|c| {
|
.map(|c| {
|
||||||
let mut snapshot = c.name.as_str().to_string();
|
let mut snapshot = c.insert.as_deref().unwrap_or(c.name.as_str()).to_string();
|
||||||
if self.type_signatures {
|
if self.type_signatures {
|
||||||
let ty =
|
let ty =
|
||||||
c.ty.map(|ty| ty.display(self.db).to_string())
|
c.ty.map(|ty| ty.display(self.db).to_string())
|
||||||
|
|
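The snapshot-builder change above is what makes the keyword-argument completions earlier in this diff render as `okay=`, `bar=`, and so on: the snapshot now prefers a completion's insert text and only falls back to its display name. A sketch with a hypothetical, pared-down completion struct (not the crate's real type):

```rust
// Hypothetical simplified completion record for illustration only.
struct Completion {
    name: String,
    insert: Option<String>,
}

// Mirrors the fallback in the diff: prefer `insert`, else `name`.
fn snapshot_label(c: &Completion) -> &str {
    c.insert.as_deref().unwrap_or(c.name.as_str())
}

fn main() {
    let kwarg = Completion {
        name: "okay".to_string(),
        insert: Some("okay=".to_string()),
    };
    let plain = Completion {
        name: "foo".to_string(),
        insert: None,
    };
    assert_eq!(snapshot_label(&kwarg), "okay=");
    assert_eq!(snapshot_label(&plain), "foo");
}
```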
@ -6766,6 +6936,17 @@ def f(zqzqzq: str):
|
||||||
.unwrap_or("<no import required>");
|
.unwrap_or("<no import required>");
|
||||||
snapshot = format!("{snapshot} :: {module_name}");
|
snapshot = format!("{snapshot} :: {module_name}");
|
||||||
}
|
}
|
||||||
|
if self.imports {
|
||||||
|
if let Some(ref edit) = c.import {
|
||||||
|
if let Some(import) = edit.content() {
|
||||||
|
snapshot = format!("{snapshot} :: {import}");
|
||||||
|
} else {
|
||||||
|
snapshot = format!("{snapshot} :: <import deletion>");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
snapshot = format!("{snapshot} :: <no import edit>");
|
||||||
|
}
|
||||||
|
}
|
||||||
snapshot
|
snapshot
|
||||||
})
|
})
|
||||||
.collect::<Vec<String>>()
|
.collect::<Vec<String>>()
|
||||||
|
|
@ -6814,6 +6995,7 @@ def f(zqzqzq: str):
|
||||||
skip_builtins: false,
|
skip_builtins: false,
|
||||||
skip_keywords: false,
|
skip_keywords: false,
|
||||||
type_signatures: false,
|
type_signatures: false,
|
||||||
|
imports: false,
|
||||||
module_names: false,
|
module_names: false,
|
||||||
predicate: None,
|
predicate: None,
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -151,14 +151,19 @@ impl fmt::Display for DisplayHoverContent<'_, '_> {
|
||||||
Some(TypeVarVariance::Bivariant) => " (bivariant)",
|
Some(TypeVarVariance::Bivariant) => " (bivariant)",
|
||||||
None => "",
|
None => "",
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Special types like `<special-form of whatever 'blahblah' with 'florps'>`
|
||||||
|
// render poorly with python syntax-highlighting but well as xml
|
||||||
|
let ty_string = ty
|
||||||
|
.display_with(self.db, DisplaySettings::default().multiline())
|
||||||
|
.to_string();
|
||||||
|
let syntax = if ty_string.starts_with('<') {
|
||||||
|
"xml"
|
||||||
|
} else {
|
||||||
|
"python"
|
||||||
|
};
|
||||||
self.kind
|
self.kind
|
||||||
.fenced_code_block(
|
.fenced_code_block(format!("{ty_string}{variance}"), syntax)
|
||||||
format!(
|
|
||||||
"{}{variance}",
|
|
||||||
ty.display_with(self.db, DisplaySettings::default().multiline())
|
|
||||||
),
|
|
||||||
"python",
|
|
||||||
)
|
|
||||||
.fmt(f)
|
.fmt(f)
|
||||||
}
|
}
|
||||||
HoverContent::Docstring(docstring) => docstring.render(self.kind).fmt(f),
|
HoverContent::Docstring(docstring) => docstring.render(self.kind).fmt(f),
|
||||||
|
|
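The hover change above picks the fenced-code language from the rendered type: displays such as `<class 'MyClass'>` or `<module 'lib'>` are not valid Python and highlight badly, so anything starting with `<` is fenced as `xml` instead. A minimal sketch of just that decision:

```rust
fn fence_language(ty_display: &str) -> &'static str {
    // Special renderings like `<class 'MyClass'>` read better with xml
    // highlighting; everything else stays python.
    if ty_display.starts_with('<') { "xml" } else { "python" }
}

fn main() {
    assert_eq!(fence_language("<class 'MyClass'>"), "xml");
    assert_eq!(fence_language("Literal[10]"), "python");
}
```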
@ -358,7 +363,7 @@ mod tests {
|
||||||
Everyone loves my class!!
|
Everyone loves my class!!
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'MyClass'>
|
<class 'MyClass'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -420,7 +425,7 @@ mod tests {
|
||||||
Everyone loves my class!!
|
Everyone loves my class!!
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'MyClass'>
|
<class 'MyClass'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -480,7 +485,7 @@ mod tests {
|
||||||
initializes MyClass (perfectly)
|
initializes MyClass (perfectly)
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'MyClass'>
|
<class 'MyClass'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -536,7 +541,7 @@ mod tests {
|
||||||
initializes MyClass (perfectly)
|
initializes MyClass (perfectly)
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'MyClass'>
|
<class 'MyClass'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -595,7 +600,7 @@ mod tests {
|
||||||
Everyone loves my class!!
|
Everyone loves my class!!
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'MyClass'>
|
<class 'MyClass'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -1680,7 +1685,7 @@ def ab(a: int, *, c: int):
|
||||||
Wow this module rocks.
|
Wow this module rocks.
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'lib'>
|
<module 'lib'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -2029,7 +2034,7 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
<class 'Click'>
|
<class 'Click'>
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<class 'Click'>
|
<class 'Click'>
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
@ -2234,7 +2239,7 @@ def function():
|
||||||
Wow this module rocks.
|
Wow this module rocks.
|
||||||
|
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'lib'>
|
<module 'lib'>
|
||||||
```
|
```
|
||||||
---
|
---
|
||||||
|
|
@ -3057,10 +3062,10 @@ def function():
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
typing.TypeVar
|
TypeVar
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```python
|
||||||
typing.TypeVar
|
TypeVar
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
info[hover]: Hovered content is
|
info[hover]: Hovered content is
|
||||||
|
|
@ -3120,10 +3125,10 @@ def function():
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
typing.TypeVar
|
TypeVar
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```python
|
||||||
typing.TypeVar
|
TypeVar
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
info[hover]: Hovered content is
|
info[hover]: Hovered content is
|
||||||
|
|
@ -3343,7 +3348,7 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
@ -3385,7 +3390,7 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
@ -3469,7 +3474,7 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
<module 'mypackage.subpkg.submod'>
|
<module 'mypackage.subpkg.submod'>
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'mypackage.subpkg.submod'>
|
<module 'mypackage.subpkg.submod'>
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
@ -3510,7 +3515,7 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @r"
|
assert_snapshot!(test.hover(), @r"
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
```python
|
```xml
|
||||||
<module 'mypackage.subpkg'>
|
<module 'mypackage.subpkg'>
|
||||||
```
|
```
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
|
@ -3624,6 +3629,37 @@ def function():
|
||||||
assert_snapshot!(test.hover(), @"Hover provided no content");
|
assert_snapshot!(test.hover(), @"Hover provided no content");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn hover_named_expression_target() {
|
||||||
|
let test = CursorTest::builder()
|
||||||
|
.source(
|
||||||
|
"mymod.py",
|
||||||
|
r#"
|
||||||
|
if a<CURSOR> := 10:
|
||||||
|
pass
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.build();
|
||||||
|
|
||||||
|
assert_snapshot!(test.hover(), @r###"
|
||||||
|
Literal[10]
|
||||||
|
---------------------------------------------
|
||||||
|
```python
|
||||||
|
Literal[10]
|
||||||
|
```
|
||||||
|
---------------------------------------------
|
||||||
|
info[hover]: Hovered content is
|
||||||
|
--> mymod.py:2:4
|
||||||
|
|
|
||||||
|
2 | if a := 10:
|
||||||
|
| ^- Cursor offset
|
||||||
|
| |
|
||||||
|
| source
|
||||||
|
3 | pass
|
||||||
|
|
|
||||||
|
"###);
|
||||||
|
}
|
||||||
|
|
||||||
impl CursorTest {
|
impl CursorTest {
|
||||||
fn hover(&self) -> String {
|
fn hover(&self) -> String {
|
||||||
use std::fmt::Write;
|
use std::fmt::Write;
|
||||||
|
|
|
||||||
|
|
@ -745,8 +745,17 @@ impl ImportResponseKind<'_> {
|
||||||
fn priority(&self) -> usize {
|
fn priority(&self) -> usize {
|
||||||
match *self {
|
match *self {
|
||||||
ImportResponseKind::Unqualified { .. } => 0,
|
ImportResponseKind::Unqualified { .. } => 0,
|
||||||
ImportResponseKind::Qualified { .. } => 1,
|
ImportResponseKind::Partial(_) => 1,
|
||||||
ImportResponseKind::Partial(_) => 2,
|
// N.B. When given the choice between adding a
|
||||||
|
// name to an existing `from ... import ...`
|
||||||
|
// statement and using an existing `import ...`
|
||||||
|
// in a qualified manner, we currently choose
|
||||||
|
// the former. Originally we preferred qualification,
|
||||||
|
// but there is some evidence that this violates
|
||||||
|
// expectations.
|
||||||
|
//
|
||||||
|
// Ref: https://github.com/astral-sh/ty/issues/1274#issuecomment-3352233790
|
||||||
|
ImportResponseKind::Qualified { .. } => 2,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
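The reordered priorities above mean that, when both options exist, adding a name to an existing `from ... import ...` statement now beats qualifying through an existing `import ...` (the `collections` snapshot that follows shows the effect). A sketch with a hypothetical, stripped-down enum rather than the real one, which carries data in its variants:

```rust
// Hypothetical simplified response kinds for illustration only.
#[allow(dead_code)]
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
enum ResponseKind {
    Unqualified, // the name is already importable as-is
    Partial,     // extend an existing `from ... import ...`
    Qualified,   // qualify via an existing `import ...`
}

fn main() {
    let mut candidates = vec![ResponseKind::Qualified, ResponseKind::Partial];
    // Lower priority wins; deriving `Ord` on the declaration order mirrors
    // the 0/1/2 priorities in the diff.
    candidates.sort();
    assert_eq!(candidates[0], ResponseKind::Partial);
}
```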
@ -1332,9 +1341,9 @@ import collections
|
||||||
);
|
);
|
||||||
assert_snapshot!(
|
assert_snapshot!(
|
||||||
test.import("collections", "defaultdict"), @r"
|
test.import("collections", "defaultdict"), @r"
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict, defaultdict
|
||||||
import collections
|
import collections
|
||||||
collections.defaultdict
|
defaultdict
|
||||||
");
|
");
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -6017,9 +6017,9 @@ mod tests {
|
||||||
fn test_function_signature_inlay_hint() {
|
fn test_function_signature_inlay_hint() {
|
||||||
let mut test = inlay_hint_test(
|
let mut test = inlay_hint_test(
|
||||||
"
|
"
|
||||||
def foo(x: int, *y: bool, z: str | int | list[str]): ...
|
def foo(x: int, *y: bool, z: str | int | list[str]): ...
|
||||||
|
|
||||||
a = foo",
|
a = foo",
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_snapshot!(test.inlay_hints(), @r#"
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
|
@ -6158,18 +6158,35 @@ mod tests {
|
||||||
fn test_module_inlay_hint() {
|
fn test_module_inlay_hint() {
|
||||||
let mut test = inlay_hint_test(
|
let mut test = inlay_hint_test(
|
||||||
"
|
"
|
||||||
import foo
|
import foo
|
||||||
|
|
||||||
a = foo",
|
a = foo",
|
||||||
);
|
);
|
||||||
|
|
||||||
test.with_extra_file("foo.py", "'''Foo module'''");
|
test.with_extra_file("foo.py", "'''Foo module'''");
|
||||||
|
|
||||||
assert_snapshot!(test.inlay_hints(), @r"
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
import foo
|
import foo
|
||||||
|
|
||||||
a[: <module 'foo'>] = foo
|
a[: <module 'foo'>] = foo
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:423:7
|
||||||
|
|
|
||||||
|
422 | @disjoint_base
|
||||||
|
423 | class ModuleType:
|
||||||
|
| ^^^^^^^^^^
|
||||||
|
424 | """Create a module object.
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:6
|
||||||
|
|
|
||||||
|
2 | import foo
|
||||||
|
3 |
|
||||||
|
4 | a[: <module 'foo'>] = foo
|
||||||
|
| ^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
info[inlay-hint-location]: Inlay Hint Target
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
--> foo.py:1:1
|
--> foo.py:1:1
|
||||||
|
|
|
|
||||||
|
|
@ -6177,32 +6194,620 @@ mod tests {
|
||||||
| ^^^^^^^^^^^^^^^^
|
| ^^^^^^^^^^^^^^^^
|
||||||
|
|
|
|
||||||
info: Source
|
info: Source
|
||||||
--> main2.py:4:5
|
--> main2.py:4:14
|
||||||
|
|
|
|
||||||
2 | import foo
|
2 | import foo
|
||||||
3 |
|
3 |
|
||||||
4 | a[: <module 'foo'>] = foo
|
4 | a[: <module 'foo'>] = foo
|
||||||
| ^^^^^^^^^^^^^^
|
| ^^^
|
||||||
|
|
|
|
||||||
");
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_literal_type_alias_inlay_hint() {
|
fn test_literal_type_alias_inlay_hint() {
|
||||||
let mut test = inlay_hint_test(
|
let mut test = inlay_hint_test(
|
||||||
"
|
"
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
a = Literal['a', 'b', 'c']",
|
a = Literal['a', 'b', 'c']",
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_snapshot!(test.inlay_hints(), @r#"
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
a[: <special form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:351:1
|
||||||
|
|
|
||||||
|
349 | Final: _SpecialForm
|
||||||
|
350 |
|
||||||
|
351 | Literal: _SpecialForm
|
||||||
|
| ^^^^^^^
|
||||||
|
352 | TypedDict: _SpecialForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:20
|
||||||
|
|
|
||||||
|
2 | from typing import Literal
|
||||||
|
3 |
|
||||||
|
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/builtins.pyi:915:7
|
||||||
|
|
|
||||||
|
914 | @disjoint_base
|
||||||
|
915 | class str(Sequence[str]):
|
||||||
|
| ^^^
|
||||||
|
916 | """str(object='') -> str
|
||||||
|
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:28
|
||||||
|
|
|
||||||
|
2 | from typing import Literal
|
||||||
|
3 |
|
||||||
|
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/builtins.pyi:915:7
|
||||||
|
|
|
||||||
|
914 | @disjoint_base
|
||||||
|
915 | class str(Sequence[str]):
|
||||||
|
| ^^^
|
||||||
|
916 | """str(object='') -> str
|
||||||
|
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:33
|
||||||
|
|
|
||||||
|
2 | from typing import Literal
|
||||||
|
3 |
|
||||||
|
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/builtins.pyi:915:7
|
||||||
|
|
|
||||||
|
914 | @disjoint_base
|
||||||
|
915 | class str(Sequence[str]):
|
||||||
|
| ^^^
|
||||||
|
916 | """str(object='') -> str
|
||||||
|
917 | str(bytes_or_buffer[, encoding[, errors]]) -> str
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:38
|
||||||
|
|
|
||||||
|
2 | from typing import Literal
|
||||||
|
3 |
|
||||||
|
4 | a[: <special-form 'Literal["a", "b", "c"]'>] = Literal['a', 'b', 'c']
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
"#);
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_wrapper_descriptor_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from types import FunctionType
|
||||||
|
|
||||||
|
a = FunctionType.__get__",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
from types import FunctionType
|
||||||
|
|
||||||
|
a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:670:7
|
||||||
|
|
|
||||||
|
669 | @final
|
||||||
|
670 | class WrapperDescriptorType:
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^
|
||||||
|
671 | @property
|
||||||
|
672 | def __name__(self) -> str: ...
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:6
|
||||||
|
|
|
||||||
|
2 | from types import FunctionType
|
||||||
|
3 |
|
||||||
|
4 | a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
|
||||||
|
| ^^^^^^^^^^^^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:77:7
|
||||||
|
|
|
||||||
|
75 | # Make sure this class definition stays roughly in line with `builtins.function`
|
||||||
|
76 | @final
|
||||||
|
77 | class FunctionType:
|
||||||
|
| ^^^^^^^^^^^^
|
||||||
|
78 | """Create a function object.
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:39
|
||||||
|
|
|
||||||
|
2 | from types import FunctionType
|
||||||
|
3 |
|
||||||
|
4 | a[: <wrapper-descriptor '__get__' of 'function' objects>] = FunctionType.__get__
|
||||||
|
| ^^^^^^^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_method_wrapper_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
def f(): ...
|
||||||
|
|
||||||
|
a = f.__call__",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
def f(): ...
|
||||||
|
|
||||||
|
a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:684:7
|
||||||
|
|
|
||||||
|
683 | @final
|
||||||
|
684 | class MethodWrapperType:
|
||||||
|
| ^^^^^^^^^^^^^^^^^
|
||||||
|
685 | @property
|
||||||
|
686 | def __self__(self) -> object: ...
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:6
|
||||||
|
|
|
||||||
|
2 | def f(): ...
|
||||||
|
3 |
|
||||||
|
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
|
||||||
|
| ^^^^^^^^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:134:9
|
||||||
|
|
|
||||||
|
132 | ) -> Self: ...
|
||||||
|
133 |
|
||||||
|
134 | def __call__(self, *args: Any, **kwargs: Any) -> Any:
|
||||||
|
| ^^^^^^^^
|
||||||
|
135 | """Call self as a function."""
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:22
|
||||||
|
|
|
||||||
|
2 | def f(): ...
|
||||||
|
3 |
|
||||||
|
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
|
||||||
|
| ^^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/types.pyi:77:7
|
||||||
|
|
|
||||||
|
75 | # Make sure this class definition stays roughly in line with `builtins.function`
|
||||||
|
76 | @final
|
||||||
|
77 | class FunctionType:
|
||||||
|
| ^^^^^^^^^^^^
|
||||||
|
78 | """Create a function object.
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:35
|
||||||
|
|
|
||||||
|
2 | def f(): ...
|
||||||
|
3 |
|
||||||
|
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
|
||||||
|
| ^^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> main.py:2:5
|
||||||
|
|
|
||||||
|
2 | def f(): ...
|
||||||
|
| ^
|
||||||
|
3 |
|
||||||
|
4 | a = f.__call__
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:45
|
||||||
|
|
|
||||||
|
2 | def f(): ...
|
||||||
|
3 |
|
||||||
|
4 | a[: <method-wrapper '__call__' of function 'f'>] = f.__call__
|
||||||
|
| ^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_newtype_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from typing import NewType
|
||||||
|
|
||||||
|
N = NewType('N', str)
|
||||||
|
|
||||||
|
Y = N",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
from typing import NewType
|
||||||
|
|
||||||
|
N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
|
||||||
|
Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:615:11
|
||||||
|
|
|
||||||
|
613 | TypeGuard: _SpecialForm
|
||||||
|
614 |
|
||||||
|
615 | class NewType:
|
||||||
|
| ^^^^^^^
|
||||||
|
616 | """NewType creates simple unique types with almost zero runtime overhead.
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:6
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
| ^^^^^^^
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> main.py:4:1
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N = NewType('N', str)
|
||||||
|
| ^
|
||||||
|
5 |
|
||||||
|
6 | Y = N
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:28
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
| ^
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:637:28
|
||||||
|
|
|
||||||
|
635 | """
|
||||||
|
636 |
|
||||||
|
637 | def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm
|
||||||
|
| ^^^^
|
||||||
|
638 | if sys.version_info >= (3, 11):
|
||||||
|
639 | @staticmethod
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:44
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
| ^^^^
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:637:39
|
||||||
|
|
|
||||||
|
635 | """
|
||||||
|
636 |
|
||||||
|
637 | def __init__(self, name: str, tp: Any) -> None: ... # AnnotationForm
|
||||||
|
| ^^
|
||||||
|
638 | if sys.version_info >= (3, 11):
|
||||||
|
639 | @staticmethod
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:56
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
| ^^
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:615:11
|
||||||
|
|
|
||||||
|
613 | TypeGuard: _SpecialForm
|
||||||
|
614 |
|
||||||
|
615 | class NewType:
|
||||||
|
| ^^^^^^^
|
||||||
|
616 | """NewType creates simple unique types with almost zero runtime overhead.
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:6:6
|
||||||
|
|
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
| ^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> main.py:4:1
|
||||||
|
|
|
||||||
|
2 | from typing import NewType
|
||||||
|
3 |
|
||||||
|
4 | N = NewType('N', str)
|
||||||
|
| ^
|
||||||
|
5 |
|
||||||
|
6 | Y = N
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:6:28
|
||||||
|
|
|
||||||
|
4 | N[: <NewType pseudo-class 'N'>] = NewType([name=]'N', [tp=]str)
|
||||||
|
5 |
|
||||||
|
6 | Y[: <NewType pseudo-class 'N'>] = N
|
||||||
|
| ^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_meta_typevar_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
def f[T](x: type[T]):
|
||||||
|
y = x",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
def f[T](x: type[T]):
|
||||||
|
y[: type[T@f]] = x
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/builtins.pyi:247:7
|
||||||
|
|
|
||||||
|
246 | @disjoint_base
|
||||||
|
247 | class type:
|
||||||
|
| ^^^^
|
||||||
|
248 | """type(object) -> the object's type
|
||||||
|
249 | type(name, bases, dict, **kwds) -> a new type
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:9
|
||||||
|
|
|
||||||
|
2 | def f[T](x: type[T]):
|
||||||
|
3 | y[: type[T@f]] = x
|
||||||
|
| ^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> main.py:2:7
|
||||||
|
|
|
||||||
|
2 | def f[T](x: type[T]):
|
||||||
|
| ^
|
||||||
|
3 | y = x
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:14
|
||||||
|
|
|
||||||
|
2 | def f[T](x: type[T]):
|
||||||
|
3 | y[: type[T@f]] = x
|
||||||
|
| ^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-edit]: File after edits
|
||||||
|
info: Source
|
||||||
|
|
||||||
|
def f[T](x: type[T]):
|
||||||
|
y: type[T@f] = x
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_subscripted_protocol_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from typing import Protocol, TypeVar
|
||||||
|
T = TypeVar('T')
|
||||||
|
Strange = Protocol[T]",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r"
|
||||||
|
from typing import Protocol, TypeVar
|
||||||
|
T = TypeVar([name=]'T')
|
||||||
|
Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:276:13
|
||||||
|
|
|
||||||
|
274 | def __new__(
|
||||||
|
275 | cls,
|
||||||
|
276 | name: str,
|
||||||
|
| ^^^^
|
||||||
|
277 | *constraints: Any, # AnnotationForm
|
||||||
|
278 | bound: Any | None = None, # AnnotationForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:14
|
||||||
|
|
|
||||||
|
2 | from typing import Protocol, TypeVar
|
||||||
|
3 | T = TypeVar([name=]'T')
|
||||||
|
| ^^^^
|
||||||
|
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:341:1
|
||||||
|
|
|
||||||
|
340 | Union: _SpecialForm
|
||||||
|
341 | Protocol: _SpecialForm
|
||||||
|
| ^^^^^^^^
|
||||||
|
342 | Callable: _SpecialForm
|
||||||
|
343 | Type: _SpecialForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:26
|
||||||
|
|
|
||||||
|
2 | from typing import Protocol, TypeVar
|
||||||
|
3 | T = TypeVar([name=]'T')
|
||||||
|
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
|
||||||
|
| ^^^^^^^^^^^^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> main.py:3:1
|
||||||
|
|
|
||||||
|
2 | from typing import Protocol, TypeVar
|
||||||
|
3 | T = TypeVar('T')
|
||||||
|
| ^
|
||||||
|
4 | Strange = Protocol[T]
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:4:42
|
||||||
|
|
|
||||||
|
2 | from typing import Protocol, TypeVar
|
||||||
|
3 | T = TypeVar([name=]'T')
|
||||||
|
4 | Strange[: <special-form 'typing.Protocol[T]'>] = Protocol[T]
|
||||||
|
| ^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_paramspec_creation_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from typing import ParamSpec
|
||||||
|
P = ParamSpec('P')",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r"
|
||||||
|
from typing import ParamSpec
|
||||||
|
P = ParamSpec([name=]'P')
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:552:17
|
||||||
|
|
|
||||||
|
550 | def __new__(
|
||||||
|
551 | cls,
|
||||||
|
552 | name: str,
|
||||||
|
| ^^^^
|
||||||
|
553 | *,
|
||||||
|
554 | bound: Any | None = None, # AnnotationForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:16
|
||||||
|
|
|
||||||
|
2 | from typing import ParamSpec
|
||||||
|
3 | P = ParamSpec([name=]'P')
|
||||||
|
| ^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_typealiastype_creation_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from typing_extensions import TypeAliasType
|
||||||
|
A = TypeAliasType('A', str)",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r#"
|
||||||
|
from typing_extensions import TypeAliasType
|
||||||
|
A = TypeAliasType([name=]'A', [value=]str)
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:2032:26
|
||||||
|
|
|
||||||
|
2030 | """
|
||||||
|
2031 |
|
||||||
|
2032 | def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ...
|
||||||
|
| ^^^^
|
||||||
|
2033 | @property
|
||||||
|
2034 | def __value__(self) -> Any: ... # AnnotationForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:20
|
||||||
|
|
|
||||||
|
2 | from typing_extensions import TypeAliasType
|
||||||
|
3 | A = TypeAliasType([name=]'A', [value=]str)
|
||||||
|
| ^^^^
|
||||||
|
|
|
||||||
|
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:2032:37
|
||||||
|
|
|
||||||
|
2030 | """
|
||||||
|
2031 |
|
||||||
|
2032 | def __new__(cls, name: str, value: Any, *, type_params: tuple[_TypeParameter, ...] = ()) -> Self: ...
|
||||||
|
| ^^^^^
|
||||||
|
2033 | @property
|
||||||
|
2034 | def __value__(self) -> Any: ... # AnnotationForm
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:32
|
||||||
|
|
|
||||||
|
2 | from typing_extensions import TypeAliasType
|
||||||
|
3 | A = TypeAliasType([name=]'A', [value=]str)
|
||||||
|
| ^^^^^
|
||||||
|
|
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_typevartuple_creation_inlay_hint() {
|
||||||
|
let mut test = inlay_hint_test(
|
||||||
|
"
|
||||||
|
from typing_extensions import TypeVarTuple
|
||||||
|
Ts = TypeVarTuple('Ts')",
|
||||||
|
);
|
||||||
|
|
||||||
|
assert_snapshot!(test.inlay_hints(), @r"
|
||||||
|
from typing_extensions import TypeVarTuple
|
||||||
|
Ts = TypeVarTuple([name=]'Ts')
|
||||||
|
---------------------------------------------
|
||||||
|
info[inlay-hint-location]: Inlay Hint Target
|
||||||
|
--> stdlib/typing.pyi:412:30
|
||||||
|
|
|
||||||
|
410 | def has_default(self) -> bool: ...
|
||||||
|
411 | if sys.version_info >= (3, 13):
|
||||||
|
412 | def __new__(cls, name: str, *, default: Any = ...) -> Self: ... # AnnotationForm
|
||||||
|
| ^^^^
|
||||||
|
413 | elif sys.version_info >= (3, 12):
|
||||||
|
414 | def __new__(cls, name: str) -> Self: ...
|
||||||
|
|
|
||||||
|
info: Source
|
||||||
|
--> main2.py:3:20
|
||||||
|
|
|
||||||
|
2 | from typing_extensions import TypeVarTuple
|
||||||
|
3 | Ts = TypeVarTuple([name=]'Ts')
|
||||||
|
| ^^^^
|
||||||
|
|
|
||||||
|
");
|
||||||
|
}
|
||||||
|
|
||||||
struct InlayHintLocationDiagnostic {
|
struct InlayHintLocationDiagnostic {
|
||||||
source: FileRange,
|
source: FileRange,
|
||||||
target: FileRange,
|
target: FileRange,
|
||||||
|
|
|
||||||
|
|
@ -84,7 +84,7 @@ pub fn rename(
|
||||||
|
|
||||||
/// Helper function to check if a file is included in the project.
|
/// Helper function to check if a file is included in the project.
|
||||||
fn is_file_in_project(db: &dyn Db, file: File) -> bool {
|
fn is_file_in_project(db: &dyn Db, file: File) -> bool {
|
||||||
db.project().files(db).contains(&file)
|
file.path(db).is_system_virtual_path() || db.project().files(db).contains(&file)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
|
|
||||||
|
|
@ -254,7 +254,9 @@ impl<'db> SemanticTokenVisitor<'db> {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_constant_name(name: &str) -> bool {
|
fn is_constant_name(name: &str) -> bool {
|
||||||
name.chars().all(|c| c.is_uppercase() || c == '_') && name.len() > 1
|
name.chars()
|
||||||
|
.all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
|
||||||
|
&& name.len() > 1
|
||||||
}
|
}
|
||||||
|
|
||||||
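The tweak above lets screaming-case names that contain digits (e.g. `ABC1`, `A_1`) be highlighted as constants, while still requiring at least two characters. A standalone copy of the heuristic with a few checks (the assertions are illustrative, not the crate's tests):

```rust
fn is_constant_name(name: &str) -> bool {
    name.chars()
        .all(|c| c.is_uppercase() || c == '_' || c.is_numeric())
        && name.len() > 1
}

fn main() {
    assert!(is_constant_name("ABC1"));
    assert!(is_constant_name("A_1"));
    assert!(!is_constant_name("A")); // single characters are not constants
    assert!(!is_constant_name("Abc")); // lowercase letters disqualify it
}
```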
fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) {
|
fn classify_name(&self, name: &ast::ExprName) -> (SemanticTokenType, SemanticTokenModifier) {
|
||||||
|
|
@ -302,17 +304,25 @@ impl<'db> SemanticTokenVisitor<'db> {
|
||||||
let parsed = parsed_module(db, definition.file(db));
|
let parsed = parsed_module(db, definition.file(db));
|
||||||
let ty = parameter.node(&parsed.load(db)).inferred_type(&model);
|
let ty = parameter.node(&parsed.load(db)).inferred_type(&model);
|
||||||
|
|
||||||
if let Some(ty) = ty
|
if let Some(ty) = ty {
|
||||||
&& let Type::TypeVar(type_var) = ty
|
let type_var = match ty {
|
||||||
{
|
Type::TypeVar(type_var) => Some((type_var, false)),
|
||||||
match type_var.typevar(db).kind(db) {
|
Type::SubclassOf(subclass_of) => {
|
||||||
TypeVarKind::TypingSelf => {
|
subclass_of.into_type_var().map(|var| (var, true))
|
||||||
return Some((SemanticTokenType::SelfParameter, modifiers));
|
|
||||||
}
|
}
|
||||||
TypeVarKind::Legacy
|
_ => None,
|
||||||
| TypeVarKind::ParamSpec
|
};
|
||||||
| TypeVarKind::Pep695ParamSpec
|
|
||||||
| TypeVarKind::Pep695 => {}
|
if let Some((type_var, is_cls)) = type_var
|
||||||
|
&& matches!(type_var.typevar(db).kind(db), TypeVarKind::TypingSelf)
|
||||||
|
{
|
||||||
|
let kind = if is_cls {
|
||||||
|
SemanticTokenType::ClsParameter
|
||||||
|
} else {
|
||||||
|
SemanticTokenType::SelfParameter
|
||||||
|
};
|
||||||
|
|
||||||
|
return Some((kind, modifiers));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1203,7 +1213,7 @@ class MyClass:
|
||||||
"
|
"
|
||||||
class MyClass:
|
class MyClass:
|
||||||
@classmethod
|
@classmethod
|
||||||
def method(cls, x): pass
|
def method(cls, x): print(cls)
|
||||||
",
|
",
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
@ -1215,6 +1225,8 @@ class MyClass:
|
||||||
"method" @ 41..47: Method [definition]
|
"method" @ 41..47: Method [definition]
|
||||||
"cls" @ 48..51: ClsParameter [definition]
|
"cls" @ 48..51: ClsParameter [definition]
|
||||||
"x" @ 53..54: Parameter [definition]
|
"x" @ 53..54: Parameter [definition]
|
||||||
|
"print" @ 57..62: Function
|
||||||
|
"cls" @ 63..66: ClsParameter
|
||||||
"#);
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -1246,7 +1258,7 @@ class MyClass:
|
||||||
class MyClass:
|
class MyClass:
|
||||||
def method(instance, x): pass
|
def method(instance, x): pass
|
||||||
@classmethod
|
@classmethod
|
||||||
def other(klass, y): pass
|
def other(klass, y): print(klass)
|
||||||
def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass
|
def complex_method(instance, posonly, /, regular, *args, kwonly, **kwargs): pass
|
||||||
",
|
",
|
||||||
);
|
);
|
||||||
|
|
@ -1262,13 +1274,15 @@ class MyClass:
|
||||||
"other" @ 75..80: Method [definition]
|
"other" @ 75..80: Method [definition]
|
||||||
"klass" @ 81..86: ClsParameter [definition]
|
"klass" @ 81..86: ClsParameter [definition]
|
||||||
"y" @ 88..89: Parameter [definition]
|
"y" @ 88..89: Parameter [definition]
|
||||||
"complex_method" @ 105..119: Method [definition]
|
"print" @ 92..97: Function
|
||||||
"instance" @ 120..128: SelfParameter [definition]
|
"klass" @ 98..103: ClsParameter
|
||||||
"posonly" @ 130..137: Parameter [definition]
|
"complex_method" @ 113..127: Method [definition]
|
||||||
"regular" @ 142..149: Parameter [definition]
|
"instance" @ 128..136: SelfParameter [definition]
|
||||||
"args" @ 152..156: Parameter [definition]
|
"posonly" @ 138..145: Parameter [definition]
|
||||||
"kwonly" @ 158..164: Parameter [definition]
|
"regular" @ 150..157: Parameter [definition]
|
||||||
"kwargs" @ 168..174: Parameter [definition]
|
"args" @ 160..164: Parameter [definition]
|
||||||
|
"kwonly" @ 166..172: Parameter [definition]
|
||||||
|
"kwargs" @ 176..182: Parameter [definition]
|
||||||
"#);
|
"#);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -2218,6 +2232,49 @@ class MyClass:
|
||||||
"###);
|
"###);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_constant_variations() {
|
||||||
|
let test = SemanticTokenTest::new(
|
||||||
|
r#"
|
||||||
|
A = 1
|
||||||
|
AB = 1
|
||||||
|
ABC = 1
|
||||||
|
A1 = 1
|
||||||
|
AB1 = 1
|
||||||
|
ABC1 = 1
|
||||||
|
A_B = 1
|
||||||
|
A1_B = 1
|
||||||
|
A_B1 = 1
|
||||||
|
A_1 = 1
|
||||||
|
"#,
|
||||||
|
);
|
||||||
|
|
||||||
|
let tokens = test.highlight_file();
|
||||||
|
|
||||||
|
assert_snapshot!(test.to_snapshot(&tokens), @r#"
|
||||||
|
"A" @ 1..2: Variable [definition]
|
||||||
|
"1" @ 5..6: Number
|
||||||
|
"AB" @ 7..9: Variable [definition, readonly]
|
||||||
|
"1" @ 12..13: Number
|
||||||
|
"ABC" @ 14..17: Variable [definition, readonly]
|
||||||
|
"1" @ 20..21: Number
|
||||||
|
"A1" @ 22..24: Variable [definition, readonly]
|
||||||
|
"1" @ 27..28: Number
|
||||||
|
"AB1" @ 29..32: Variable [definition, readonly]
|
||||||
|
"1" @ 35..36: Number
|
||||||
|
"ABC1" @ 37..41: Variable [definition, readonly]
|
||||||
|
"1" @ 44..45: Number
|
||||||
|
"A_B" @ 46..49: Variable [definition, readonly]
|
||||||
|
"1" @ 52..53: Number
|
||||||
|
"A1_B" @ 54..58: Variable [definition, readonly]
|
||||||
|
"1" @ 61..62: Number
|
||||||
|
"A_B1" @ 63..67: Variable [definition, readonly]
|
||||||
|
"1" @ 70..71: Number
|
||||||
|
"A_1" @ 72..75: Variable [definition, readonly]
|
||||||
|
"1" @ 78..79: Number
|
||||||
|
"#);
|
||||||
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_implicitly_concatenated_strings() {
|
fn test_implicitly_concatenated_strings() {
|
||||||
let test = SemanticTokenTest::new(
|
let test = SemanticTokenTest::new(
|
||||||
|
|
|
||||||
|
|
@ -124,6 +124,11 @@ fn get_call_expr(
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
// Find the covering node at the given position that is a function call.
|
// Find the covering node at the given position that is a function call.
|
||||||
|
// Note that we are okay with the range being anywhere within a call
|
||||||
|
// expression, even if it's not in the arguments portion of the call
|
||||||
|
// expression. This is because, e.g., a user can request signature
|
||||||
|
// information at a call site, and this should ideally work anywhere
|
||||||
|
// within the call site, even at the function name.
|
||||||
let call = covering_node(root_node, token.range())
|
let call = covering_node(root_node, token.range())
|
||||||
.find_first(|node| {
|
.find_first(|node| {
|
||||||
if !node.is_expr_call() {
|
if !node.is_expr_call() {
|
||||||
|
|
|
||||||
|
|
@ -10,10 +10,10 @@ use ruff_db::files::File;
|
||||||
use ruff_db::parsed::parsed_module;
|
use ruff_db::parsed::parsed_module;
|
||||||
use ruff_index::{IndexVec, newtype_index};
|
use ruff_index::{IndexVec, newtype_index};
|
||||||
use ruff_python_ast as ast;
|
use ruff_python_ast as ast;
|
||||||
use ruff_python_ast::name::Name;
|
use ruff_python_ast::name::{Name, UnqualifiedName};
|
||||||
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor};
|
use ruff_python_ast::visitor::source_order::{self, SourceOrderVisitor};
|
||||||
use ruff_text_size::{Ranged, TextRange};
|
use ruff_text_size::{Ranged, TextRange};
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::{FxHashMap, FxHashSet};
|
||||||
use ty_project::Db;
|
use ty_project::Db;
|
||||||
use ty_python_semantic::{ModuleName, resolve_module};
|
use ty_python_semantic::{ModuleName, resolve_module};
|
||||||
|
|
||||||
|
|
@ -375,7 +375,11 @@ pub(crate) fn symbols_for_file(db: &dyn Db, file: File) -> FlatSymbols {
|
||||||
/// While callers can convert this into a hierarchical collection of
|
/// While callers can convert this into a hierarchical collection of
|
||||||
/// symbols, it won't result in anything meaningful since the flat list
|
/// symbols, it won't result in anything meaningful since the flat list
|
||||||
/// returned doesn't include children.
|
/// returned doesn't include children.
|
||||||
#[salsa::tracked(returns(ref), heap_size=ruff_memory_usage::heap_size)]
|
#[salsa::tracked(
|
||||||
|
returns(ref),
|
||||||
|
cycle_initial=symbols_for_file_global_only_cycle_initial,
|
||||||
|
heap_size=ruff_memory_usage::heap_size,
|
||||||
|
)]
|
||||||
pub(crate) fn symbols_for_file_global_only(db: &dyn Db, file: File) -> FlatSymbols {
|
pub(crate) fn symbols_for_file_global_only(db: &dyn Db, file: File) -> FlatSymbols {
|
||||||
let parsed = parsed_module(db, file);
|
let parsed = parsed_module(db, file);
|
||||||
let module = parsed.load(db);
|
let module = parsed.load(db);
|
||||||
|
|
@ -394,6 +398,14 @@ pub(crate) fn symbols_for_file_global_only(db: &dyn Db, file: File) -> FlatSymbo
|
||||||
visitor.into_flat_symbols()
|
visitor.into_flat_symbols()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn symbols_for_file_global_only_cycle_initial(
|
||||||
|
_db: &dyn Db,
|
||||||
|
_id: salsa::Id,
|
||||||
|
_file: File,
|
||||||
|
) -> FlatSymbols {
|
||||||
|
FlatSymbols::default()
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
|
#[derive(Debug, Clone, PartialEq, Eq, get_size2::GetSize)]
|
||||||
struct SymbolTree {
|
struct SymbolTree {
|
||||||
parent: Option<SymbolId>,
|
parent: Option<SymbolId>,
|
||||||
|
|
@ -411,6 +423,189 @@ enum ImportKind {
|
||||||
Wildcard,
|
Wildcard,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// An abstraction for managing module scope imports.
|
||||||
|
///
|
||||||
|
/// This is meant to recognize the following idioms for updating
|
||||||
|
/// `__all__` in module scope:
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// __all__ += submodule.__all__
|
||||||
|
/// __all__.extend(submodule.__all__)
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// # Correctness
|
||||||
|
///
|
||||||
|
/// The approach used here is not correct 100% of the time.
|
||||||
|
/// For example, it is somewhat easy to defeat it:
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// from numpy import *
|
||||||
|
/// from importlib import resources
|
||||||
|
/// import numpy as np
|
||||||
|
/// np = resources
|
||||||
|
/// __all__ = []
|
||||||
|
/// __all__ += np.__all__
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// In this example, `np` will still be resolved to the `numpy`
|
||||||
|
/// module instead of the `importlib.resources` module. Namely, this
|
||||||
|
/// abstraction doesn't track all definitions. This would result in a
|
||||||
|
/// silently incorrect `__all__`.
|
||||||
|
///
|
||||||
|
/// This abstraction does handle the case when submodules are imported.
|
||||||
|
/// Namely, we do get this case correct:
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// from importlib.resources import *
|
||||||
|
/// from importlib import resources
|
||||||
|
/// __all__ = []
|
||||||
|
/// __all__ += resources.__all__
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// We do this by treating all imports in a `from ... import ...`
|
||||||
|
/// statement as *possible* modules. Then when we lookup `resources`,
|
||||||
|
/// we attempt to resolve it to an actual module. If that fails, then
|
||||||
|
/// we consider `__all__` invalid.
|
||||||
|
///
|
||||||
|
/// There are likely many, many other cases that we don't handle as
|
||||||
|
/// well, which ty does (it has its own `__all__` parsing using types
|
||||||
|
/// to deal with this case). We can add handling for those as they
|
||||||
|
/// come up in real world examples.
|
||||||
|
///
|
||||||
|
/// # Performance
|
||||||
|
///
|
||||||
|
/// This abstraction recognizes that, compared to all possible imports,
|
||||||
|
/// it is very rare to use one of them to update `__all__`. Therefore,
|
||||||
|
/// we are careful not to do too much work up-front (like eagerly
|
||||||
|
/// manifesting `ModuleName` values).
|
||||||
|
#[derive(Clone, Debug, Default, get_size2::GetSize)]
|
||||||
|
struct Imports<'db> {
|
||||||
|
/// A map from the name that a module is available
|
||||||
|
/// under to its actual module name (and our level
|
||||||
|
/// of certainty that it ought to be treated as a module).
|
||||||
|
module_names: FxHashMap<&'db str, ImportModuleKind<'db>>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'db> Imports<'db> {
|
||||||
|
/// Track the imports from the given `import ...` statement.
|
||||||
|
fn add_import(&mut self, import: &'db ast::StmtImport) {
|
||||||
|
for alias in &import.names {
|
||||||
|
let asname = alias
|
||||||
|
.asname
|
||||||
|
.as_ref()
|
||||||
|
.map(|ident| &ident.id)
|
||||||
|
.unwrap_or(&alias.name.id);
|
||||||
|
let module_name = ImportModuleName::Import(&alias.name.id);
|
||||||
|
self.module_names
|
||||||
|
.insert(asname, ImportModuleKind::Definitive(module_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Track the imports from the given `from ... import ...` statement.
|
||||||
|
fn add_import_from(&mut self, import_from: &'db ast::StmtImportFrom) {
|
||||||
|
for alias in &import_from.names {
|
||||||
|
if &alias.name == "*" {
|
||||||
|
// FIXME: We'd ideally include the names
|
||||||
|
// imported from the module, but we don't
|
||||||
|
// want to do this eagerly. So supporting
|
||||||
|
// this requires more infrastructure in
|
||||||
|
// `Imports`.
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let asname = alias
|
||||||
|
.asname
|
||||||
|
.as_ref()
|
||||||
|
.map(|ident| &ident.id)
|
||||||
|
.unwrap_or(&alias.name.id);
|
||||||
|
let module_name = ImportModuleName::ImportFrom {
|
||||||
|
parent: import_from,
|
||||||
|
child: &alias.name.id,
|
||||||
|
};
|
||||||
|
self.module_names
|
||||||
|
.insert(asname, ImportModuleKind::Possible(module_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the symbols exported by the module referred to by `name`.
|
||||||
|
///
|
||||||
|
/// e.g., This can be used to resolve `__all__ += submodule.__all__`,
|
||||||
|
/// where `name` is `submodule`.
|
||||||
|
fn get_module_symbols(
|
||||||
|
&self,
|
||||||
|
db: &'db dyn Db,
|
||||||
|
importing_file: File,
|
||||||
|
name: &Name,
|
||||||
|
) -> Option<&'db FlatSymbols> {
|
||||||
|
let module_name = match self.module_names.get(name.as_str())? {
|
||||||
|
ImportModuleKind::Definitive(name) | ImportModuleKind::Possible(name) => {
|
||||||
|
name.to_module_name(db, importing_file)?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let module = resolve_module(db, importing_file, &module_name)?;
|
||||||
|
Some(symbols_for_file_global_only(db, module.file(db)?))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Describes the level of certainty that an import is a module.
|
||||||
|
///
|
||||||
|
/// For example, `import foo`, then `foo` is definitively a module.
|
||||||
|
/// But `from quux import foo`, then `quux.foo` is possibly a module.
|
||||||
|
#[derive(Debug, Clone, Copy, get_size2::GetSize)]
|
||||||
|
enum ImportModuleKind<'db> {
|
||||||
|
Definitive(ImportModuleName<'db>),
|
||||||
|
Possible(ImportModuleName<'db>),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A representation of something that can be turned into a
|
||||||
|
/// `ModuleName`.
|
||||||
|
///
|
||||||
|
/// We don't do this eagerly, and instead represent the constituent
|
||||||
|
/// pieces, in order to avoid the work needed to build a `ModuleName`.
|
||||||
|
/// In particular, it is somewhat rare for the visitor to need
|
||||||
|
/// to access the imports found in a module. At time of writing
|
||||||
|
/// (2025-12-10), this only happens when referencing a submodule
|
||||||
|
/// to augment an `__all__` definition. For example, as found in
|
||||||
|
/// `matplotlib`:
|
||||||
|
///
|
||||||
|
/// ```ignore
|
||||||
|
/// import numpy as np
|
||||||
|
/// __all__ = ['rand', 'randn', 'repmat']
|
||||||
|
/// __all__ += np.__all__
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// This construct is somewhat rare and it would be sad to allocate a
|
||||||
|
/// `ModuleName` for every imported item unnecessarily.
|
||||||
|
#[derive(Debug, Clone, Copy, get_size2::GetSize)]
|
||||||
|
enum ImportModuleName<'db> {
|
||||||
|
/// The `foo` in `import quux, foo as blah, baz`.
|
||||||
|
Import(&'db Name),
|
||||||
|
/// A possible module in a `from ... import ...` statement.
|
||||||
|
ImportFrom {
|
||||||
|
/// The `..foo` in `from ..foo import quux`.
|
||||||
|
parent: &'db ast::StmtImportFrom,
|
||||||
|
/// The `foo` in `from quux import foo`.
|
||||||
|
child: &'db Name,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'db> ImportModuleName<'db> {
|
||||||
|
/// Converts the lazy representation of a module name into an
|
||||||
|
/// actual `ModuleName` that can be used for module resolution.
|
||||||
|
fn to_module_name(self, db: &'db dyn Db, importing_file: File) -> Option<ModuleName> {
|
||||||
|
match self {
|
||||||
|
ImportModuleName::Import(name) => ModuleName::new(name),
|
||||||
|
ImportModuleName::ImportFrom { parent, child } => {
|
||||||
|
let mut module_name =
|
||||||
|
ModuleName::from_import_statement(db, importing_file, parent).ok()?;
|
||||||
|
let child_module_name = ModuleName::new(child)?;
|
||||||
|
module_name.extend(&child_module_name);
|
||||||
|
Some(module_name)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
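For orientation, the following is a small illustrative Python module (written for this description, not taken from the change itself) showing the shapes of import that the `Imports` map above records, and how an `__all__` augmentation is resolved through it. Only stdlib modules are used so the sketch actually runs; the distinction between "definitive" and "possible" follows the enum docs above.

```python
# Illustrative module source; stdlib names only so it is runnable as-is.
import os.path              # recorded under "os.path" as a definitive module name
import collections as cs    # recorded under "cs" -> "collections" (also definitive)
from os import path         # recorded under "path" as only *possibly* a module

something_local = 1

__all__ = ["something_local"]
__all__ += cs.__all__            # resolved through the definitive entry for "cs"
__all__.extend(path.__all__)     # resolved through the "possible" entry for "path"
```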
 /// A visitor over all symbols in a single file.
 ///
 /// This guarantees that child symbols have a symbol ID greater

@@ -431,7 +626,11 @@ struct SymbolVisitor<'db> {
     /// This is true even when we're inside a function definition
     /// that is inside a class.
     in_class: bool,
-    global_only: bool,
+    /// When enabled, the visitor should only try to extract
+    /// symbols from a module that we believed form the "exported"
+    /// interface for that module. i.e., `__all__` is only respected
+    /// when this is enabled. It's otherwise ignored.
+    exports_only: bool,
     /// The origin of an `__all__` variable, if found.
     all_origin: Option<DunderAllOrigin>,
     /// A set of names extracted from `__all__`.

@@ -440,6 +639,11 @@ struct SymbolVisitor<'db> {
     /// `__all__` idioms or there are any invalid elements in
     /// `__all__`.
     all_invalid: bool,
+    /// A collection of imports found while visiting the AST.
+    ///
+    /// These are used to help resolve references to modules
+    /// in some limited cases.
+    imports: Imports<'db>,
 }

 impl<'db> SymbolVisitor<'db> {

@@ -451,21 +655,27 @@ impl<'db> SymbolVisitor<'db> {
             symbol_stack: vec![],
             in_function: false,
             in_class: false,
-            global_only: false,
+            exports_only: false,
             all_origin: None,
             all_names: FxHashSet::default(),
             all_invalid: false,
+            imports: Imports::default(),
         }
     }

     fn globals(db: &'db dyn Db, file: File) -> Self {
         Self {
-            global_only: true,
+            exports_only: true,
             ..Self::tree(db, file)
         }
     }

     fn into_flat_symbols(mut self) -> FlatSymbols {
+        // If `__all__` was found but wasn't recognized,
+        // then we emit a diagnostic message indicating as such.
+        if self.all_invalid {
+            tracing::debug!("Invalid `__all__` in `{}`", self.file.path(self.db));
+        }
         // We want to filter out some of the symbols we collected.
         // Specifically, to respect conventions around library
         // interface.

@@ -474,12 +684,28 @@ impl<'db> SymbolVisitor<'db> {
         // their position in a sequence. So when we filter some
         // out, we need to remap the identifiers.
         //
-        // N.B. The remapping could be skipped when `global_only` is
+        // We also want to deduplicate when `exports_only` is
+        // `true`. In particular, dealing with `__all__` can
+        // result in cycles, and we need to make sure our output
+        // is stable for that reason.
+        //
+        // N.B. The remapping could be skipped when `exports_only` is
         // true, since in that case, none of the symbols have a parent
         // ID by construction.
         let mut remap = IndexVec::with_capacity(self.symbols.len());
+        let mut seen = self.exports_only.then(FxHashSet::default);
         let mut new = IndexVec::with_capacity(self.symbols.len());
         for mut symbol in std::mem::take(&mut self.symbols) {
+            // If we're deduplicating and we've already seen
+            // this symbol, then skip it.
+            //
+            // FIXME: We should do this without copying every
+            // symbol name. ---AG
+            if let Some(ref mut seen) = seen {
+                if !seen.insert(symbol.name.clone()) {
+                    continue;
+                }
+            }
             if !self.is_part_of_library_interface(&symbol) {
                 remap.push(None);
                 continue;

@@ -510,7 +736,7 @@ impl<'db> SymbolVisitor<'db> {
         }
     }

-    fn visit_body(&mut self, body: &[ast::Stmt]) {
+    fn visit_body(&mut self, body: &'db [ast::Stmt]) {
         for stmt in body {
             self.visit_stmt(stmt);
         }

@@ -585,6 +811,11 @@ impl<'db> SymbolVisitor<'db> {
     ///
     /// If the assignment isn't for `__all__`, then this is a no-op.
     fn add_all_assignment(&mut self, targets: &[ast::Expr], value: Option<&ast::Expr>) {
+        // We don't care about `__all__` unless we're
+        // specifically looking for exported symbols.
+        if !self.exports_only {
+            return;
+        }
         if self.in_function || self.in_class {
             return;
         }

@@ -635,6 +866,31 @@ impl<'db> SymbolVisitor<'db> {
             ast::Expr::List(ast::ExprList { elts, .. })
             | ast::Expr::Tuple(ast::ExprTuple { elts, .. })
             | ast::Expr::Set(ast::ExprSet { elts, .. }) => self.add_all_names(elts),
+            // `__all__ += module.__all__`
+            // `__all__.extend(module.__all__)`
+            ast::Expr::Attribute(ast::ExprAttribute { .. }) => {
+                let Some(unqualified) = UnqualifiedName::from_expr(expr) else {
+                    return false;
+                };
+                let Some((&attr, rest)) = unqualified.segments().split_last() else {
+                    return false;
+                };
+                if attr != "__all__" {
+                    return false;
+                }
+                let possible_module_name = Name::new(rest.join("."));
+                let Some(symbols) =
+                    self.imports
+                        .get_module_symbols(self.db, self.file, &possible_module_name)
+                else {
+                    return false;
+                };
+                let Some(ref all) = symbols.all_names else {
+                    return false;
+                };
+                self.all_names.extend(all.iter().cloned());
+                true
+            }
             _ => false,
         }
     }

@@ -801,14 +1057,11 @@ impl<'db> SymbolVisitor<'db> {
         // if a name should be part of the exported API of a module
         // or not. When there is `__all__`, we currently follow it
         // strictly.
-        if self.all_origin.is_some() {
-            // If `__all__` is somehow invalid, ignore it and fall
-            // through as-if `__all__` didn't exist.
-            if self.all_invalid {
-                tracing::debug!("Invalid `__all__` in `{}`", self.file.path(self.db));
-            } else {
-                return self.all_names.contains(&*symbol.name);
-            }
+        //
+        // If `__all__` is somehow invalid, ignore it and fall
+        // through as-if `__all__` didn't exist.
+        if self.all_origin.is_some() && !self.all_invalid {
+            return self.all_names.contains(&*symbol.name);
         }

         // "Imported symbols are considered private by default. A fixed

@@ -839,8 +1092,8 @@ impl<'db> SymbolVisitor<'db> {
     }
 }

-impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
-    fn visit_stmt(&mut self, stmt: &ast::Stmt) {
+impl<'db> SourceOrderVisitor<'db> for SymbolVisitor<'db> {
+    fn visit_stmt(&mut self, stmt: &'db ast::Stmt) {
         match stmt {
             ast::Stmt::FunctionDef(func_def) => {
                 let kind = if self

@@ -865,7 +1118,7 @@ impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
                     import_kind: None,
                 };

-                if self.global_only {
+                if self.exports_only {
                     self.add_symbol(symbol);
                     // If global_only, don't walk function bodies
                     return;

@@ -894,7 +1147,7 @@ impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
                     import_kind: None,
                 };

-                if self.global_only {
+                if self.exports_only {
                     self.add_symbol(symbol);
                     // If global_only, don't walk class bodies
                     return;

@@ -943,6 +1196,12 @@ impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
             ast::Stmt::AugAssign(ast::StmtAugAssign {
                 target, op, value, ..
             }) => {
+                // We don't care about `__all__` unless we're
+                // specifically looking for exported symbols.
+                if !self.exports_only {
+                    return;
+                }
+
                 if self.all_origin.is_none() {
                     // We can't update `__all__` if it doesn't already
                     // exist.

@@ -961,6 +1220,12 @@ impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
                 }
             }
             ast::Stmt::Expr(expr) => {
+                // We don't care about `__all__` unless we're
+                // specifically looking for exported symbols.
+                if !self.exports_only {
+                    return;
+                }
+
                 if self.all_origin.is_none() {
                     // We can't update `__all__` if it doesn't already exist.
                     return;

@@ -990,19 +1255,33 @@ impl SourceOrderVisitor<'_> for SymbolVisitor<'_> {
                 source_order::walk_stmt(self, stmt);
             }
             ast::Stmt::Import(import) => {
+                // We ignore any names introduced by imports
+                // unless we're specifically looking for the
+                // set of exported symbols.
+                if !self.exports_only {
+                    return;
+                }
                 // We only consider imports in global scope.
                 if self.in_function {
                     return;
                 }
+                self.imports.add_import(import);
                 for alias in &import.names {
                     self.add_import_alias(stmt, alias);
                 }
             }
             ast::Stmt::ImportFrom(import_from) => {
+                // We ignore any names introduced by imports
+                // unless we're specifically looking for the
+                // set of exported symbols.
+                if !self.exports_only {
+                    return;
+                }
                 // We only consider imports in global scope.
                 if self.in_function {
                     return;
                 }
+                self.imports.add_import_from(import_from);
                 for alias in &import_from.names {
                     if &alias.name == "*" {
                         self.add_exported_from_wildcard(import_from);

@@ -1975,6 +2254,363 @@ class X:
         );
     }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_plus_equals() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import foo
+from foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += foo.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_extend() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import foo
+from foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__.extend(foo.__all__)
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_alias() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import foo as blah
+from foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += blah.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_nested_alias() {
+        let test = PublicTestBuilder::default()
+            .source("parent/__init__.py", "")
+            .source(
+                "parent/foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import parent.foo as blah
+from parent.foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += blah.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_from_statement_plus_equals() {
+        let test = PublicTestBuilder::default()
+            .source("parent/__init__.py", "")
+            .source(
+                "parent/foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "from parent import foo
+from parent.foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += foo.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_from_statement_nested_module_reference() {
+        let test = PublicTestBuilder::default()
+            .source("parent/__init__.py", "")
+            .source(
+                "parent/foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import parent.foo
+from parent.foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += parent.foo.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_from_statement_extend() {
+        let test = PublicTestBuilder::default()
+            .source("parent/__init__.py", "")
+            .source(
+                "parent/foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "import parent.foo
+from parent.foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__.extend(parent.foo.__all__)
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_from_statement_alias() {
+        let test = PublicTestBuilder::default()
+            .source("parent/__init__.py", "")
+            .source(
+                "parent/foo.py",
+                "
+_ZQZQZQ = 1
+__all__ = ['_ZQZQZQ']
+",
+            )
+            .source(
+                "test.py",
+                "from parent import foo as blah
+from parent.foo import *
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__ += blah.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZQZQZQ :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_cycle1() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "a.py",
+                "from b import *
+import b
+_ZAZAZA = 1
+__all__ = ['_ZAZAZA']
+__all__ += b.__all__
+",
+            )
+            .source(
+                "b.py",
+                "
+from a import *
+import a
+_ZBZBZB = 1
+__all__ = ['_ZBZBZB']
+__all__ += a.__all__
+",
+            )
+            .build();
+        insta::assert_snapshot!(
+            test.exports_for("a.py"),
+            @r"
+        _ZBZBZB :: Constant
+        _ZAZAZA :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_failure1() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "foo.py",
+                "
+_ZFZFZF = 1
+__all__ = ['_ZFZFZF']
+",
+            )
+            .source(
+                "bar.py",
+                "
+_ZBZBZB = 1
+__all__ = ['_ZBZBZB']
+",
+            )
+            .source(
+                "test.py",
+                "import foo
+import bar
+from foo import *
+from bar import *
+
+foo = bar
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__.extend(foo.__all__)
+",
+            )
+            .build();
+        // In this test, we resolve `foo.__all__` to the `__all__`
+        // attribute in module `foo` instead of in `bar`. This is
+        // because we don't track redefinitions of imports (as of
+        // 2025-12-11). Handling this correctly would mean exporting
+        // `_ZBZBZB` instead of `_ZFZFZF`.
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZFZFZF :: Constant
+        _ZYZYZY :: Constant
+        ",
+        );
+    }
+
+    #[test]
+    fn reexport_and_extend_from_submodule_import_statement_failure2() {
+        let test = PublicTestBuilder::default()
+            .source(
+                "parent/__init__.py",
+                "import parent.foo as foo
+__all__ = ['foo']
+",
+            )
+            .source(
+                "parent/foo.py",
+                "
+_ZFZFZF = 1
+__all__ = ['_ZFZFZF']
+",
+            )
+            .source(
+                "test.py",
+                "from parent.foo import *
+from parent import *
+
+_ZYZYZY = 1
+__all__ = ['_ZYZYZY']
+__all__.extend(foo.__all__)
+",
+            )
+            .build();
+        // This is not quite right either because we end up
+        // considering the `__all__` in `test.py` to be invalid.
+        // Namely, we don't pick up the `foo` that is in scope
+        // from the `from parent import *` import. The correct
+        // answer should just be `_ZFZFZF` and `_ZYZYZY`.
+        insta::assert_snapshot!(
+            test.exports_for("test.py"),
+            @r"
+        _ZFZFZF :: Constant
+        foo :: Module
+        _ZYZYZY :: Constant
+        __all__ :: Variable
+        ",
+        );
+    }

     fn matches(query: &str, symbol: &str) -> bool {
         super::QueryPattern::fuzzy(query).is_match_symbol_name(symbol)
     }
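To make the two symbol views concrete, here is a small hypothetical Python module (names invented for illustration, not part of the test suite). Based on the tests in this diff, the exports-only view follows `__all__` strictly, while the workspace-symbol view ignores both `__all__` and names introduced by plain imports.

```python
# Hypothetical module for illustration only.
import re                   # imports are ignored by the workspace-symbol view

def public_fn(): ...
def other_fn(): ...

__all__ = ["public_fn"]     # the exports-only view follows this strictly;
                            # the workspace-symbol view still surfaces `other_fn`.
```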
@@ -150,6 +150,62 @@ class Test:
         ");
     }

+    #[test]
+    fn ignore_all() {
+        let test = CursorTest::builder()
+            .source(
+                "utils.py",
+                "
+__all__ = []
+class Test:
+    def from_path(): ...
+<CURSOR>",
+            )
+            .build();
+
+        assert_snapshot!(test.workspace_symbols("from"), @r"
+        info[workspace-symbols]: WorkspaceSymbolInfo
+         --> utils.py:4:9
+          |
+        2 | __all__ = []
+        3 | class Test:
+        4 |     def from_path(): ...
+          |         ^^^^^^^^^
+          |
+        info: Method from_path
+        ");
+    }
+
+    #[test]
+    fn ignore_imports() {
+        let test = CursorTest::builder()
+            .source(
+                "utils.py",
+                "
+import re
+import json as json
+from collections import defaultdict
+foo = 1
+<CURSOR>",
+            )
+            .build();
+
+        assert_snapshot!(test.workspace_symbols("foo"), @r"
+        info[workspace-symbols]: WorkspaceSymbolInfo
+         --> utils.py:5:1
+          |
+        3 | import json as json
+        4 | from collections import defaultdict
+        5 | foo = 1
+          | ^^^
+          |
+        info: Variable foo
+        ");
+        assert_snapshot!(test.workspace_symbols("re"), @"No symbols found");
+        assert_snapshot!(test.workspace_symbols("json"), @"No symbols found");
+        assert_snapshot!(test.workspace_symbols("default"), @"No symbols found");
+    }

 impl CursorTest {
     fn workspace_symbols(&self, query: &str) -> String {
         let symbols = workspace_symbols(&self.db, query);
@@ -27,7 +27,6 @@ use std::iter::FusedIterator;
 use std::panic::{AssertUnwindSafe, UnwindSafe};
 use std::sync::Arc;
 use thiserror::Error;
-use tracing::error;
 use ty_python_semantic::add_inferred_python_version_hint_to_diagnostic;
 use ty_python_semantic::lint::RuleSelection;
 use ty_python_semantic::types::check_types;

@@ -285,22 +285,6 @@ impl Options {
             roots.push(python);
         }

-        // Considering pytest test discovery conventions,
-        // we also include the `tests` directory if it exists and is not a package.
-        let tests_dir = project_root.join("tests");
-        if system.is_directory(&tests_dir)
-            && !system.is_file(&tests_dir.join("__init__.py"))
-            && !system.is_file(&tests_dir.join("__init__.pyi"))
-            && !roots.contains(&tests_dir)
-        {
-            // If the `tests` directory exists and is not a package, include it as a source root.
-            tracing::debug!(
-                "Including `./tests` in `environment.root` because a `./tests` directory exists"
-            );
-
-            roots.push(tests_dir);
-        }
-
         // The project root should always be included, and should always come
         // after any subdirectories such as `./src`, `./tests` and/or `./python`.
         roots.push(project_root.to_path_buf());

@@ -532,7 +516,7 @@ pub struct EnvironmentOptions {
     /// * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
     /// * otherwise, default to `.` (flat layout)
     ///
-    /// Besides, if a `./python` or `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
+    /// Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` or `__init__.pyi` file),
     /// it will also be included in the first party search path.
     #[serde(skip_serializing_if = "Option::is_none")]
     #[option(

@@ -674,7 +658,7 @@ pub struct SrcOptions {
     /// * if a `./<project-name>/<project-name>` directory exists, include `.` and `./<project-name>` in the first party search path
     /// * otherwise, default to `.` (flat layout)
     ///
-    /// Besides, if a `./tests` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
+    /// Additionally, if a `./python` directory exists and is not a package (i.e. it does not contain an `__init__.py` file),
     /// it will also be included in the first party search path.
     #[serde(skip_serializing_if = "Option::is_none")]
     #[option(
@@ -1241,24 +1225,22 @@ pub struct TerminalOptions {
     ///
     /// An override allows you to apply different rule configurations to specific
     /// files or directories. Multiple overrides can match the same file, with
-    /// later overrides take precedence.
+    /// later overrides take precedence. Override rules take precedence over global
+    /// rules for matching files.
     ///
-    /// ### Precedence
-    ///
-    /// - Later overrides in the array take precedence over earlier ones
-    /// - Override rules take precedence over global rules for matching files
-    ///
-    /// ### Examples
+    /// For example, to relax enforcement of rules in test files:
     ///
     /// ```toml
-    /// # Relax rules for test files
     /// [[tool.ty.overrides]]
     /// include = ["tests/**", "**/test_*.py"]
     ///
     /// [tool.ty.overrides.rules]
     /// possibly-unresolved-reference = "warn"
+    /// ```
     ///
-    /// # Ignore generated files but still check important ones
+    /// Or, to ignore a rule in generated files but retain enforcement in an important file:
+    ///
+    /// ```toml
     /// [[tool.ty.overrides]]
     /// include = ["generated/**"]
     /// exclude = ["generated/important.py"]
@@ -0,0 +1,7 @@
+from typing import TypeAlias, TypeVar
+
+T = TypeVar("T", bound="A[0]")
+A: TypeAlias = T
+def _(x: A):
+    if x:
+        pass

@@ -0,0 +1 @@
+def _[T: (T if cond else U)[0], U](): pass

@@ -0,0 +1,3 @@
+def _[T: T[0]](x: T):
+    if x:
+        pass

@@ -0,0 +1,4 @@
+class _[T: (0, T[0])]:
+    def _(x: T):
+        if x:
+            pass
|
@ -169,13 +169,13 @@ def f(x: Any[int]):
|
||||||
`Any` cannot be called (this leads to a `TypeError` at runtime):
|
`Any` cannot be called (this leads to a `TypeError` at runtime):
|
||||||
|
|
||||||
```py
|
```py
|
||||||
Any() # error: [call-non-callable] "Object of type `<special form 'typing.Any'>` is not callable"
|
Any() # error: [call-non-callable] "Object of type `<special-form 'typing.Any'>` is not callable"
|
||||||
```
|
```
|
||||||
|
|
||||||
`Any` also cannot be used as a metaclass (under the hood, this leads to an implicit call to `Any`):
|
`Any` also cannot be used as a metaclass (under the hood, this leads to an implicit call to `Any`):
|
||||||
|
|
||||||
```py
|
```py
|
||||||
class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `<special form 'typing.Any'>` is not callable"
|
class F(metaclass=Any): ... # error: [invalid-metaclass] "Metaclass type `<special-form 'typing.Any'>` is not callable"
|
||||||
```
|
```
|
||||||
|
|
||||||
And `Any` cannot be used in `isinstance()` checks:
|
And `Any` cannot be used in `isinstance()` checks:
|
||||||
|
|
|
||||||
|
|
@ -407,4 +407,22 @@ def f_okay(c: Callable[[], None]):
|
||||||
c.__qualname__ = "my_callable" # okay
|
c.__qualname__ = "my_callable" # okay
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## From a class
|
||||||
|
|
||||||
|
### Subclasses should return themselves, not superclass
|
||||||
|
|
||||||
|
```py
|
||||||
|
from ty_extensions import into_callable
|
||||||
|
|
||||||
|
class Base:
|
||||||
|
def __init__(self) -> None:
|
||||||
|
pass
|
||||||
|
|
||||||
|
class A(Base):
|
||||||
|
pass
|
||||||
|
|
||||||
|
# revealed: () -> A
|
||||||
|
reveal_type(into_callable(A))
|
||||||
|
```
|
||||||
|
|
||||||
[gradual form]: https://typing.python.org/en/latest/spec/glossary.html#term-gradual-form
|
[gradual form]: https://typing.python.org/en/latest/spec/glossary.html#term-gradual-form
|
||||||
|
|
|
||||||
|
|
@ -59,7 +59,7 @@ python-version = "3.11"
|
||||||
```py
|
```py
|
||||||
from typing import Never
|
from typing import Never
|
||||||
|
|
||||||
reveal_type(Never) # revealed: <special form 'typing.Never'>
|
reveal_type(Never) # revealed: <special-form 'typing.Never'>
|
||||||
```
|
```
|
||||||
|
|
||||||
### Python 3.10
|
### Python 3.10
|
||||||
|
|
|
||||||
|
|
@ -146,9 +146,10 @@ Foo = NewType(name, int)
|
||||||
reveal_type(Foo) # revealed: <NewType pseudo-class 'Foo'>
|
reveal_type(Foo) # revealed: <NewType pseudo-class 'Foo'>
|
||||||
```
|
```
|
||||||
|
|
||||||
## The second argument must be a class type or another newtype
|
## The base must be a class type or another newtype
|
||||||
|
|
||||||
Other typing constructs like `Union` are not allowed.
|
Other typing constructs like `Union` are not _generally_ allowed. (However, see the next section for
|
||||||
|
a couple special cases.)
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import NewType
|
from typing_extensions import NewType
|
||||||
|
|
@ -167,6 +168,61 @@ on top of that:
|
||||||
Foo = NewType("Foo", 42)
|
Foo = NewType("Foo", 42)
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## `float` and `complex` special cases
|
||||||
|
|
||||||
|
`float` and `complex` are subject to a special case in the typing spec, which we currently interpret
|
||||||
|
to mean that `float` in type position is `int | float`, and `complex` in type position is
|
||||||
|
`int | float | complex`. This is awkward for `NewType`, because as we just tested above, unions
|
||||||
|
aren't generally valid `NewType` bases. However, `float` and `complex` _are_ valid `NewType` bases,
|
||||||
|
and we accept the unions they expand into.
|
||||||
|
|
||||||
|
```py
|
||||||
|
from typing import NewType
|
||||||
|
|
||||||
|
Foo = NewType("Foo", float)
|
||||||
|
Foo(3.14)
|
||||||
|
Foo(42)
|
||||||
|
Foo("hello") # error: [invalid-argument-type] "Argument is incorrect: Expected `int | float`, found `Literal["hello"]`"
|
||||||
|
|
||||||
|
reveal_type(Foo(3.14).__class__) # revealed: type[int] | type[float]
|
||||||
|
reveal_type(Foo(42).__class__) # revealed: type[int] | type[float]
|
||||||
|
|
||||||
|
Bar = NewType("Bar", complex)
|
||||||
|
Bar(1 + 2j)
|
||||||
|
Bar(3.14)
|
||||||
|
Bar(42)
|
||||||
|
Bar("goodbye") # error: [invalid-argument-type]
|
||||||
|
|
||||||
|
reveal_type(Bar(1 + 2j).__class__) # revealed: type[int] | type[float] | type[complex]
|
||||||
|
reveal_type(Bar(3.14).__class__) # revealed: type[int] | type[float] | type[complex]
|
||||||
|
reveal_type(Bar(42).__class__) # revealed: type[int] | type[float] | type[complex]
|
||||||
|
```
|
||||||
|
|
||||||
|
We don't currently try to distinguish between an implicit union (e.g. `float`) and the equivalent
|
||||||
|
explicit union (e.g. `int | float`), so these two explicit unions are also allowed. But again, most
|
||||||
|
unions are not allowed:
|
||||||
|
|
||||||
|
```py
|
||||||
|
Baz = NewType("Baz", int | float)
|
||||||
|
Baz = NewType("Baz", int | float | complex)
|
||||||
|
Baz = NewType("Baz", int | str) # error: [invalid-newtype] "invalid base for `typing.NewType`"
|
||||||
|
```
|
||||||
|
|
||||||
|
Similarly, a `NewType` of `float` or `complex` is valid as a `Callable` of the corresponding union
|
||||||
|
type:
|
||||||
|
|
||||||
|
```py
|
||||||
|
from collections.abc import Callable
|
||||||
|
|
||||||
|
def f(_: Callable[[int | float], Foo]): ...
|
||||||
|
|
||||||
|
f(Foo)
|
||||||
|
|
||||||
|
def g(_: Callable[[int | float | complex], Bar]): ...
|
||||||
|
|
||||||
|
g(Bar)
|
||||||
|
```
|
||||||
|
|
||||||
## A `NewType` definition must be a simple variable assignment
|
## A `NewType` definition must be a simple variable assignment
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
|
@ -179,7 +235,7 @@ N: NewType = NewType("N", int) # error: [invalid-newtype] "A `NewType` definiti
|
||||||
|
|
||||||
Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's
|
Cyclic newtypes are kind of silly, but it's possible for the user to express them, and it's
|
||||||
important that we don't go into infinite recursive loops and crash with a stack overflow. In fact,
|
important that we don't go into infinite recursive loops and crash with a stack overflow. In fact,
|
||||||
this is *why* base type evaluation is deferred; otherwise Salsa itself would crash.
|
this is _why_ base type evaluation is deferred; otherwise Salsa itself would crash.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
from typing_extensions import NewType, reveal_type, cast
|
from typing_extensions import NewType, reveal_type, cast
|
||||||
|
|
|
||||||
|
|
@ -194,7 +194,7 @@ reveal_type(B().name_does_not_matter()) # revealed: B
|
||||||
reveal_type(B().positional_only(1)) # revealed: B
|
reveal_type(B().positional_only(1)) # revealed: B
|
||||||
reveal_type(B().keyword_only(x=1)) # revealed: B
|
reveal_type(B().keyword_only(x=1)) # revealed: B
|
||||||
# TODO: This should ideally be `B`
|
||||||
reveal_type(B().decorated_method()) # revealed: Unknown
|
reveal_type(B().decorated_method()) # revealed: Self@decorated_method
|
||||||
|
|
||||||
reveal_type(B().a_property) # revealed: B
|
reveal_type(B().a_property) # revealed: B
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -152,6 +152,20 @@ The expressions in these string annotations aren't valid expressions in this con
|
||||||
shouldn't panic.
|
shouldn't panic.
|
||||||
|
|
||||||
```py
|
```py
|
||||||
|
# Regression test for https://github.com/astral-sh/ty/issues/1865
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_conditional: "f'{1 if 1 else 1}'"
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_boolean_expression: "f'{1 or 2}'"
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_generator_expression: "f'{(i for i in range(5))}'"
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_list_comprehension: "f'{[i for i in range(5)]}'"
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_dict_comprehension: "f'{ {i: i for i in range(5)} }'"
|
||||||
|
# error: [fstring-type-annotation]
|
||||||
|
stringified_fstring_with_set_comprehension: "f'{ {i for i in range(5)} }'"
|
||||||
|
|
||||||
a: "1 or 2"
|
a: "1 or 2"
|
||||||
b: "(x := 1)"
|
b: "(x := 1)"
|
||||||
# error: [invalid-type-form]
|
# error: [invalid-type-form]
|
||||||
|
|
|
||||||
|
|
@ -38,6 +38,8 @@ reveal_type(x) # revealed: int
|
||||||
|
|
||||||
## Unsupported types
|
## Unsupported types
|
||||||
|
|
||||||
|
<!-- snapshot-diagnostics -->
|
||||||
|
|
||||||
```py
|
```py
|
||||||
class C:
|
class C:
|
||||||
def __isub__(self, other: str) -> int:
|
def __isub__(self, other: str) -> int:
|
||||||
|
|
|
||||||
|
|
@ -43,9 +43,7 @@ async def main():
|
||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
with concurrent.futures.ThreadPoolExecutor() as pool:
|
with concurrent.futures.ThreadPoolExecutor() as pool:
|
||||||
result = await loop.run_in_executor(pool, blocking_function)
|
result = await loop.run_in_executor(pool, blocking_function)
|
||||||
|
reveal_type(result) # revealed: int
|
||||||
# TODO: should be `int`
|
|
||||||
reveal_type(result) # revealed: Unknown
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### `asyncio.Task`
|
### `asyncio.Task`
|
||||||
|
|
|
||||||
|
|
@ -1208,7 +1208,7 @@ def _(flag: bool):
|
||||||
reveal_type(C1.y) # revealed: int | str
|
reveal_type(C1.y) # revealed: int | str
|
||||||
|
|
||||||
C1.y = 100
|
C1.y = 100
|
||||||
# error: [invalid-assignment] "Object of type `Literal["problematic"]` is not assignable to attribute `y` on type `<class 'C1'> | <class 'C1'>`"
|
# error: [invalid-assignment] "Object of type `Literal["problematic"]` is not assignable to attribute `y` on type `<class 'mdtest_snippet.<locals of function '_'>.C1 @ src/mdtest_snippet.py:3'> | <class 'mdtest_snippet.<locals of function '_'>.C1 @ src/mdtest_snippet.py:8'>`"
|
||||||
C1.y = "problematic"
|
C1.y = "problematic"
|
||||||
|
|
||||||
class C2:
|
class C2:
|
||||||
|
|
@ -2162,8 +2162,8 @@ Some attributes are special-cased, however:
|
||||||
import types
|
import types
|
||||||
from ty_extensions import static_assert, TypeOf, is_subtype_of
|
from ty_extensions import static_assert, TypeOf, is_subtype_of
|
||||||
|
|
||||||
reveal_type(f.__get__) # revealed: <method-wrapper `__get__` of `f`>
|
reveal_type(f.__get__) # revealed: <method-wrapper '__get__' of function 'f'>
|
||||||
reveal_type(f.__call__) # revealed: <method-wrapper `__call__` of `f`>
|
reveal_type(f.__call__) # revealed: <method-wrapper '__call__' of function 'f'>
|
||||||
static_assert(is_subtype_of(TypeOf[f.__get__], types.MethodWrapperType))
|
static_assert(is_subtype_of(TypeOf[f.__get__], types.MethodWrapperType))
|
||||||
static_assert(is_subtype_of(TypeOf[f.__call__], types.MethodWrapperType))
|
static_assert(is_subtype_of(TypeOf[f.__call__], types.MethodWrapperType))
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -13,7 +13,7 @@ python-version = "3.10"
|
||||||
class A: ...
|
class A: ...
|
||||||
class B: ...
|
class B: ...
|
||||||
|
|
||||||
reveal_type(A | B) # revealed: <types.UnionType special form 'A | B'>
|
reveal_type(A | B) # revealed: <types.UnionType special-form 'A | B'>
|
||||||
```
|
```
|
||||||
|
|
||||||
## Union of two classes (prior to 3.10)
|
## Union of two classes (prior to 3.10)
|
||||||
|
|
@ -43,14 +43,14 @@ class A: ...
|
||||||
class B: ...
|
class B: ...
|
||||||
|
|
||||||
def _(sub_a: type[A], sub_b: type[B]):
|
def _(sub_a: type[A], sub_b: type[B]):
|
||||||
reveal_type(A | sub_b) # revealed: <types.UnionType special form>
|
reveal_type(A | sub_b) # revealed: <types.UnionType special-form>
|
||||||
reveal_type(sub_a | B) # revealed: <types.UnionType special form>
|
reveal_type(sub_a | B) # revealed: <types.UnionType special-form>
|
||||||
reveal_type(sub_a | sub_b) # revealed: <types.UnionType special form>
|
reveal_type(sub_a | sub_b) # revealed: <types.UnionType special-form>
|
||||||
|
|
||||||
class C[T]: ...
|
class C[T]: ...
|
||||||
class D[T]: ...
|
class D[T]: ...
|
||||||
|
|
||||||
reveal_type(C | D) # revealed: <types.UnionType special form 'C[Unknown] | D[Unknown]'>
|
reveal_type(C | D) # revealed: <types.UnionType special-form 'C[Unknown] | D[Unknown]'>
|
||||||
|
|
||||||
reveal_type(C[int] | D[str]) # revealed: <types.UnionType special form 'C[int] | D[str]'>
|
reveal_type(C[int] | D[str]) # revealed: <types.UnionType special-form 'C[int] | D[str]'>
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -79,31 +79,31 @@ reveal_type(Sub() & Sub()) # revealed: Literal["&"]
|
||||||
reveal_type(Sub() // Sub()) # revealed: Literal["//"]
|
reveal_type(Sub() // Sub()) # revealed: Literal["//"]
|
||||||
|
|
||||||
# No does not implement any of the dunder methods.
|
# No does not implement any of the dunder methods.
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() + No()) # revealed: Unknown
|
reveal_type(No() + No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `-` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `-` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() - No()) # revealed: Unknown
|
reveal_type(No() - No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `*` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `*` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() * No()) # revealed: Unknown
|
reveal_type(No() * No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `@` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `@` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() @ No()) # revealed: Unknown
|
reveal_type(No() @ No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `/` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `/` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() / No()) # revealed: Unknown
|
reveal_type(No() / No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `%` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `%` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() % No()) # revealed: Unknown
|
reveal_type(No() % No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `**` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `**` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() ** No()) # revealed: Unknown
|
reveal_type(No() ** No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `<<` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() << No()) # revealed: Unknown
|
reveal_type(No() << No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `>>` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() >> No()) # revealed: Unknown
|
reveal_type(No() >> No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `|` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `|` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() | No()) # revealed: Unknown
|
reveal_type(No() | No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `^` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `^` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() ^ No()) # revealed: Unknown
|
reveal_type(No() ^ No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `&` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `&` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() & No()) # revealed: Unknown
|
reveal_type(No() & No()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `//` is not supported between objects of type `No` and `No`"
|
# error: [unsupported-operator] "Operator `//` is not supported between two objects of type `No`"
|
||||||
reveal_type(No() // No()) # revealed: Unknown
|
reveal_type(No() // No()) # revealed: Unknown
|
||||||
|
|
||||||
# Yes does not implement any of the reflected dunder methods.
|
# Yes does not implement any of the reflected dunder methods.
|
||||||
|
|
@ -293,6 +293,8 @@ reveal_type(Yes() // No()) # revealed: Literal["//"]
|
||||||
|
|
||||||
## Classes
|
## Classes
|
||||||
|
|
||||||
|
<!-- snapshot-diagnostics -->
|
||||||
|
|
||||||
Dunder methods defined in a class are available to instances of that class, but not to the class
|
Dunder methods defined in a class are available to instances of that class, but not to the class
|
||||||
itself. (For these operators to work on the class itself, they would have to be defined on the
|
itself. (For these operators to work on the class itself, they would have to be defined on the
|
||||||
class's type, i.e. `type`.)
|
class's type, i.e. `type`.)
|
||||||
|
|
@ -307,11 +309,11 @@ class Yes:
|
||||||
class Sub(Yes): ...
|
class Sub(Yes): ...
|
||||||
class No: ...
|
class No: ...
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'Yes'>` and `<class 'Yes'>`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'Yes'>`"
|
||||||
reveal_type(Yes + Yes) # revealed: Unknown
|
reveal_type(Yes + Yes) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'Sub'>` and `<class 'Sub'>`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'Sub'>`"
|
||||||
reveal_type(Sub + Sub) # revealed: Unknown
|
reveal_type(Sub + Sub) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `<class 'No'>` and `<class 'No'>`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `<class 'No'>`"
|
||||||
reveal_type(No + No) # revealed: Unknown
|
reveal_type(No + No) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -336,11 +338,11 @@ def sub() -> type[Sub]:
|
||||||
def no() -> type[No]:
|
def no() -> type[No]:
|
||||||
return No
|
return No
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Yes]` and `type[Yes]`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[Yes]`"
|
||||||
reveal_type(yes() + yes()) # revealed: Unknown
|
reveal_type(yes() + yes()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[Sub]` and `type[Sub]`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[Sub]`"
|
||||||
reveal_type(sub() + sub()) # revealed: Unknown
|
reveal_type(sub() + sub()) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `type[No]` and `type[No]`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `type[No]`"
|
||||||
reveal_type(no() + no()) # revealed: Unknown
|
reveal_type(no() + no()) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -350,30 +352,54 @@ reveal_type(no() + no()) # revealed: Unknown
|
||||||
def f():
|
def f():
|
||||||
pass
|
pass
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f + f) # revealed: Unknown
|
reveal_type(f + f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `-` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `-` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f - f) # revealed: Unknown
|
reveal_type(f - f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `*` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `*` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f * f) # revealed: Unknown
|
reveal_type(f * f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `@` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `@` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f @ f) # revealed: Unknown
|
reveal_type(f @ f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `/` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `/` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f / f) # revealed: Unknown
|
reveal_type(f / f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `%` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `%` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f % f) # revealed: Unknown
|
reveal_type(f % f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `**` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `**` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f**f) # revealed: Unknown
|
reveal_type(f**f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `<<` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `<<` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f << f) # revealed: Unknown
|
reveal_type(f << f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `>>` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `>>` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f >> f) # revealed: Unknown
|
reveal_type(f >> f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `|` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `|` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f | f) # revealed: Unknown
|
reveal_type(f | f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `^` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `^` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f ^ f) # revealed: Unknown
|
reveal_type(f ^ f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `&` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `&` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f & f) # revealed: Unknown
|
reveal_type(f & f) # revealed: Unknown
|
||||||
# error: [unsupported-operator] "Operator `//` is not supported between objects of type `def f() -> Unknown` and `def f() -> Unknown`"
|
# error: [unsupported-operator] "Operator `//` is not supported between two objects of type `def f() -> Unknown`"
|
||||||
reveal_type(f // f) # revealed: Unknown
|
reveal_type(f // f) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Classes from different modules with the same name
|
||||||
|
|
||||||
|
We use the fully qualified names in diagnostics if the two classes have the same unqualified name,
|
||||||
|
but are nonetheless different.
|
||||||
|
|
||||||
|
<!-- snapshot-diagnostics -->
|
||||||
|
|
||||||
|
`mod1.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
class A: ...
|
||||||
|
```
|
||||||
|
|
||||||
|
`mod2.py`:
|
||||||
|
|
||||||
|
```py
|
||||||
|
import mod1
|
||||||
|
|
||||||
|
class A: ...
|
||||||
|
|
||||||
|
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `mod2.A` and `mod1.A`"
|
||||||
|
A() + mod1.A()
|
||||||
|
```
|
||||||
|
|
|
||||||
|
|
@ -412,7 +412,7 @@ class A:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.__add__ = add_impl
|
self.__add__ = add_impl
|
||||||
|
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `A` and `A`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `A`"
|
||||||
# revealed: Unknown
|
# revealed: Unknown
|
||||||
reveal_type(A() + A())
|
reveal_type(A() + A())
|
||||||
```
|
```
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,7 @@ cannot be added, because that would require addition of `int` and `str` or vice
|
||||||
def f2(i: int, s: str, int_or_str: int | str):
|
def f2(i: int, s: str, int_or_str: int | str):
|
||||||
i + i
|
i + i
|
||||||
s + s
|
s + s
|
||||||
# error: [unsupported-operator] "Operator `+` is not supported between objects of type `int | str` and `int | str`"
|
# error: [unsupported-operator] "Operator `+` is not supported between two objects of type `int | str`"
|
||||||
reveal_type(int_or_str + int_or_str) # revealed: Unknown
|
reveal_type(int_or_str + int_or_str) # revealed: Unknown
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
||||||