mirror of https://github.com/astral-sh/ruff
Merge remote-tracking branch 'origin/main' into PYI050

commit fd02805fa1
@@ -1,6 +1,6 @@
 [alias]
 dev = "run --package ruff_dev --bin ruff_dev"
-benchmark = "bench -p ruff_benchmark --"
+benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"

 [target.'cfg(all())']
 rustflags = [
@@ -32,6 +32,12 @@ dependencies = [
 "memchr",
 ]

+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
 [[package]]
 name = "android_system_properties"
 version = "0.1.5"

@@ -188,9 +194,9 @@ checksum = "6776fc96284a0bb647b615056fc496d1fe1644a7ab01829818a6d91cae888b84"

 [[package]]
 name = "bstr"
-version = "1.4.0"
+version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09"
+checksum = "a246e68bb43f6cd9db24bea052a53e40405417c5fb372e3d1a8a7f770a564ef5"
 dependencies = [
 "memchr",
 "once_cell",

@@ -200,9 +206,9 @@ dependencies = [

 [[package]]
 name = "bumpalo"
-version = "3.12.2"
+version = "3.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3c6ed94e98ecff0c12dd1b04c15ec0d7d9458ca8fe806cea6f12954efe74c63b"
+checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"

 [[package]]
 name = "cachedir"

@@ -242,13 +248,13 @@ dependencies = [

 [[package]]
 name = "chrono"
-version = "0.4.24"
+version = "0.4.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4e3c5919066adf22df73762e50cffcde3a758f2a848b113b586d1f86728b673b"
+checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5"
 dependencies = [
+"android-tzdata",
 "iana-time-zone",
 "js-sys",
-"num-integer",
 "num-traits",
 "time",
 "wasm-bindgen",

@@ -284,21 +290,9 @@ dependencies = [

 [[package]]
 name = "clap"
-version = "3.2.25"
+version = "4.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
+checksum = "b4ed2379f8603fa2b7509891660e802b88c70a79a6427a70abb5968054de2c28"
-dependencies = [
-"bitflags 1.3.2",
-"clap_lex 0.2.4",
-"indexmap",
-"textwrap",
-]
-
-[[package]]
-name = "clap"
-version = "4.2.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34d21f9bf1b425d2968943631ec91202fe5e837264063503708b83013f8fc938"
 dependencies = [
 "clap_builder",
 "clap_derive",
@@ -307,24 +301,24 @@ dependencies = [

 [[package]]
 name = "clap_builder"
-version = "4.2.7"
+version = "4.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "914c8c79fb560f238ef6429439a30023c862f7a28e688c58f7203f12b29970bd"
+checksum = "72394f3339a76daf211e57d4bcb374410f3965dcc606dd0e03738c7888766980"
 dependencies = [
 "anstream",
 "anstyle",
 "bitflags 1.3.2",
-"clap_lex 0.4.1",
+"clap_lex",
 "strsim",
 ]

 [[package]]
 name = "clap_complete"
-version = "4.2.3"
+version = "4.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1594fe2312ec4abf402076e407628f5c313e54c32ade058521df4ee34ecac8a8"
+checksum = "7f6b5c519bab3ea61843a7923d074b04245624bb84a64a8c150f5deb014e388b"
 dependencies = [
-"clap 4.2.7",
+"clap",
 ]

 [[package]]

@@ -333,7 +327,7 @@ version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d"
 dependencies = [
-"clap 4.2.7",
+"clap",
 "clap_complete",
 "clap_complete_fig",
 "clap_complete_nushell",

@@ -341,50 +335,41 @@ dependencies = [

 [[package]]
 name = "clap_complete_fig"
-version = "4.2.0"
+version = "4.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3af28956330989baa428ed4d3471b853715d445c62de21b67292e22cf8a41fa"
+checksum = "99fee1d30a51305a6c2ed3fc5709be3c8af626c9c958e04dd9ae94e27bcbce9f"
 dependencies = [
-"clap 4.2.7",
+"clap",
 "clap_complete",
 ]

 [[package]]
 name = "clap_complete_nushell"
-version = "0.1.10"
+version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c7fa41f5e6aa83bd151b70fd0ceaee703d68cd669522795dc812df9edad1252c"
+checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e"
 dependencies = [
-"clap 4.2.7",
+"clap",
 "clap_complete",
 ]

 [[package]]
 name = "clap_derive"
-version = "4.2.0"
+version = "4.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4"
+checksum = "59e9ef9a08ee1c0e1f2e162121665ac45ac3783b0f897db7244ae75ad9a8f65b"
 dependencies = [
 "heck",
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]
 name = "clap_lex"
-version = "0.2.4"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
+checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
-dependencies = [
-"os_str_bytes",
-]
-
-[[package]]
-name = "clap_lex"
-version = "0.4.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1"

 [[package]]
 name = "clearscreen"
@@ -424,14 +409,14 @@ checksum = "5458d9d1a587efaf5091602c59d299696a3877a439c8f6d461a2d3cce11df87a"

 [[package]]
 name = "console"
-version = "0.15.5"
+version = "0.15.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3d79fbe8970a77e3e34151cc13d3b3e248aa0faaecb9f6091fa07ebefe5ad60"
+checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8"
 dependencies = [
 "encode_unicode",
 "lazy_static",
 "libc",
-"windows-sys 0.42.0",
+"windows-sys 0.45.0",
 ]

 [[package]]

@@ -477,19 +462,19 @@ dependencies = [

 [[package]]
 name = "criterion"
-version = "0.4.0"
+version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7c76e09c1aae2bc52b3d2f29e13c6572553b30c4aa1b8a49fd70de6412654cb"
+checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
 dependencies = [
 "anes",
-"atty",
 "cast",
 "ciborium",
-"clap 3.2.25",
+"clap",
 "criterion-plot",
+"is-terminal",
 "itertools",
-"lazy_static",
 "num-traits",
+"once_cell",
 "oorandom",
 "plotters",
 "rayon",

@@ -709,7 +694,7 @@ name = "flake8-to-ruff"
 version = "0.0.270"
 dependencies = [
 "anyhow",
-"clap 4.2.7",
+"clap",
 "colored",
 "configparser",
 "once_cell",

@@ -950,9 +935,9 @@ dependencies = [

 [[package]]
 name = "io-lifetimes"
-version = "1.0.10"
+version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220"
+checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2"
 dependencies = [
 "hermit-abi 0.3.1",
 "libc",

@@ -1001,9 +986,9 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6"

 [[package]]
 name = "js-sys"
-version = "0.3.62"
+version = "0.3.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68c16e1bfd491478ab155fd8b4896b86f9ede344949b641e61501e07c2b8b4d5"
+checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790"
 dependencies = [
 "wasm-bindgen",
 ]

@@ -1118,18 +1103,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"

 [[package]]
 name = "linux-raw-sys"
-version = "0.3.7"
+version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f"
+checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"

 [[package]]
 name = "log"
-version = "0.4.17"
+version = "0.4.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+checksum = "518ef76f2f87365916b142844c16d8fefd85039bc5699050210a7778ee1cd1de"
-dependencies = [
-"cfg-if",
-]

 [[package]]
 name = "matches"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mio"
|
name = "mio"
|
||||||
version = "0.8.6"
|
version = "0.8.8"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9"
|
checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"log",
|
"log",
|
||||||
"wasi 0.11.0+wasi-snapshot-preview1",
|
"wasi 0.11.0+wasi-snapshot-preview1",
|
||||||
"windows-sys 0.45.0",
|
"windows-sys 0.48.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -1230,9 +1212,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "notify"
|
name = "notify"
|
||||||
version = "5.1.0"
|
version = "5.2.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "58ea850aa68a06e48fdb069c0ec44d0d64c8dbffa49bf3b6f7f0a901fdea1ba9"
|
checksum = "729f63e1ca555a43fe3efa4f3efdf4801c479da85b432242a7b726f353c88486"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"bitflags 1.3.2",
|
"bitflags 1.3.2",
|
||||||
"crossbeam-channel",
|
"crossbeam-channel",
|
||||||
|
|
@ -1243,7 +1225,7 @@ dependencies = [
|
||||||
"libc",
|
"libc",
|
||||||
"mio",
|
"mio",
|
||||||
"walkdir",
|
"walkdir",
|
||||||
"windows-sys 0.42.0",
|
"windows-sys 0.45.0",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
|
@ -1288,9 +1270,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "once_cell"
|
name = "once_cell"
|
||||||
version = "1.17.1"
|
version = "1.17.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3"
|
checksum = "9670a07f94779e00908f3e686eab508878ebb390ba6e604d3a284c00e8d0487b"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "oorandom"
|
name = "oorandom"
|
||||||
|
|
@ -1563,9 +1545,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "proc-macro2"
|
name = "proc-macro2"
|
||||||
version = "1.0.56"
|
version = "1.0.59"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
|
checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
|
|
@ -1608,9 +1590,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "quote"
|
name = "quote"
|
||||||
version = "1.0.27"
|
version = "1.0.28"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "8f4f29d145265ec1c483c7c654450edde0bfe043d3938d6972630663356d9500"
|
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
]
|
]
|
||||||
|
|
@ -1683,9 +1665,9 @@ dependencies = [
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex"
|
name = "regex"
|
||||||
version = "1.8.1"
|
version = "1.8.3"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "af83e617f331cc6ae2da5443c602dfa5af81e517212d9d611a5b3ba1777b5370"
|
checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aho-corasick 1.0.1",
|
"aho-corasick 1.0.1",
|
||||||
"memchr",
|
"memchr",
|
||||||
|
|
@ -1700,9 +1682,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "regex-syntax"
|
name = "regex-syntax"
|
||||||
version = "0.7.1"
|
version = "0.7.2"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a5996294f19bd3aae0453a862ad728f60e6600695733dd5df01da90c54363a3c"
|
checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "result-like"
|
name = "result-like"
|
||||||
|
|
@@ -1749,7 +1731,7 @@ dependencies = [
 "anyhow",
 "bitflags 2.3.1",
 "chrono",
-"clap 4.2.7",
+"clap",
 "colored",
 "dirs 5.0.1",
 "fern",

@@ -1814,6 +1796,7 @@ dependencies = [
 "once_cell",
 "ruff",
 "ruff_python_ast",
+"ruff_python_formatter",
 "rustpython-parser",
 "serde",
 "serde_json",

@@ -1846,7 +1829,7 @@ dependencies = [
 "bitflags 2.3.1",
 "cachedir",
 "chrono",
-"clap 4.2.7",
+"clap",
 "clap_complete_command",
 "clearscreen",
 "colored",

@@ -1885,7 +1868,7 @@ name = "ruff_dev"
 version = "0.0.0"
 dependencies = [
 "anyhow",
-"clap 4.2.7",
+"clap",
 "itertools",
 "libcst",
 "once_cell",

@@ -1944,7 +1927,7 @@ dependencies = [
 "proc-macro2",
 "quote",
 "ruff_textwrap",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -1984,7 +1967,7 @@ name = "ruff_python_formatter"
 version = "0.0.0"
 dependencies = [
 "anyhow",
-"clap 4.2.7",
+"clap",
 "countme",
 "insta",
 "is-macro",

@@ -2041,7 +2024,7 @@ dependencies = [
 "glob",
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -2290,7 +2273,7 @@ checksum = "8c805777e3930c8883389c602315a24224bcc738b63905ef87cd1420353ea93e"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -2318,9 +2301,9 @@ dependencies = [

 [[package]]
 name = "serde_spanned"
-version = "0.6.1"
+version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4"
+checksum = "93107647184f6027e3b7dcb2e11034cf95ffa1e3a682c67951963ac69c1c007d"
 dependencies = [
 "serde",
 ]

@@ -2405,9 +2388,9 @@ dependencies = [

 [[package]]
 name = "syn"
-version = "2.0.15"
+version = "2.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822"
+checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -2490,12 +2473,6 @@ dependencies = [
 "test-case-core",
 ]

-[[package]]
-name = "textwrap"
-version = "0.16.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d"
-
 [[package]]
 name = "thiserror"
 version = "1.0.40"

@@ -2513,7 +2490,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -2593,9 +2570,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"

 [[package]]
 name = "toml"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21"
+checksum = "d6135d499e69981f9ff0ef2167955a5333c35e36f6937d382974566b3d5b94ec"
 dependencies = [
 "serde",
 "serde_spanned",

@@ -2605,18 +2582,18 @@ dependencies = [

 [[package]]
 name = "toml_datetime"
-version = "0.6.1"
+version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622"
+checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f"
 dependencies = [
 "serde",
 ]

 [[package]]
 name = "toml_edit"
-version = "0.19.8"
+version = "0.19.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13"
+checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739"
 dependencies = [
 "indexmap",
 "serde",

@@ -2646,7 +2623,7 @@ checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 ]

 [[package]]

@@ -2736,9 +2713,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"

 [[package]]
 name = "unicode-ident"
-version = "1.0.8"
+version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4"
+checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0"

 [[package]]
 name = "unicode-normalization"

@@ -2805,9 +2782,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"

 [[package]]
 name = "uuid"
-version = "1.3.2"
+version = "1.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"
+checksum = "345444e32442451b267fc254ae85a209c64be56d2890e601a0c37ff0c3c5ecd2"

 [[package]]
 name = "version_check"

@@ -2848,9 +2825,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"

 [[package]]
 name = "wasm-bindgen"
-version = "0.2.85"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5b6cb788c4e39112fbe1822277ef6fb3c55cd86b95cb3d3c4c1c9597e4ac74b4"
+checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73"
 dependencies = [
 "cfg-if",
 "wasm-bindgen-macro",
@@ -2858,24 +2835,24 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.85"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35e522ed4105a9d626d885b35d62501b30d9666283a5c8be12c14a8bdafe7822"
+checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb"
 dependencies = [
 "bumpalo",
 "log",
 "once_cell",
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 "wasm-bindgen-shared",
 ]

 [[package]]
 name = "wasm-bindgen-futures"
-version = "0.4.35"
+version = "0.4.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "083abe15c5d88556b77bdf7aef403625be9e327ad37c62c4e4129af740168163"
+checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e"
 dependencies = [
 "cfg-if",
 "js-sys",

@@ -2885,9 +2862,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.85"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "358a79a0cb89d21db8120cbfb91392335913e4890665b1a7981d9e956903b434"
+checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258"
 dependencies = [
 "quote",
 "wasm-bindgen-macro-support",

@@ -2895,28 +2872,28 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.85"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4783ce29f09b9d93134d41297aded3a712b7b979e9c6f28c32cb88c973a94869"
+checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8"
 dependencies = [
 "proc-macro2",
 "quote",
-"syn 2.0.15",
+"syn 2.0.18",
 "wasm-bindgen-backend",
 "wasm-bindgen-shared",
 ]

 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.85"
+version = "0.2.86"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a901d592cafaa4d711bc324edfaff879ac700b19c3dfd60058d2b445be2691eb"
+checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93"

 [[package]]
 name = "wasm-bindgen-test"
-version = "0.3.35"
+version = "0.3.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b27e15b4a3030b9944370ba1d8cec6f21f66a1ad4fd14725c5685600460713ec"
+checksum = "c9e636f3a428ff62b3742ebc3c70e254dfe12b8c2b469d688ea59cdd4abcf502"
 dependencies = [
 "console_error_panic_hook",
 "js-sys",

@@ -2928,9 +2905,9 @@ dependencies = [

 [[package]]
 name = "wasm-bindgen-test-macro"
-version = "0.3.35"
+version = "0.3.36"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1dbaa9b9a574eac00c4f3a9c4941ac051f07632ecd0484a8588abd95af6b99d2"
+checksum = "f18c1fad2f7c4958e7bcce014fa212f59a65d5e3721d0f77e6c0b27ede936ba3"
 dependencies = [
 "proc-macro2",
 "quote",

@@ -2938,9 +2915,9 @@ dependencies = [

 [[package]]
 name = "web-sys"
-version = "0.3.62"
+version = "0.3.63"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "16b5f940c7edfdc6d12126d98c9ef4d1b3d470011c47c76a6581df47ad9ba721"
+checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2"
 dependencies = [
 "js-sys",
 "wasm-bindgen",

@@ -3025,21 +3002,6 @@ dependencies = [
 "windows-targets 0.48.0",
 ]

-[[package]]
-name = "windows-sys"
-version = "0.42.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
-dependencies = [
-"windows_aarch64_gnullvm 0.42.2",
-"windows_aarch64_msvc 0.42.2",
-"windows_i686_gnu 0.42.2",
-"windows_i686_msvc 0.42.2",
-"windows_x86_64_gnu 0.42.2",
-"windows_x86_64_gnullvm 0.42.2",
-"windows_x86_64_msvc 0.42.2",
-]
-
 [[package]]
 name = "windows-sys"
 version = "0.45.0"
@@ -3,7 +3,7 @@ members = ["crates/*"]

 [workspace.package]
 edition = "2021"
-rust-version = "1.69"
+rust-version = "1.70"
 homepage = "https://beta.ruff.rs/docs/"
 documentation = "https://beta.ruff.rs/docs/"
 repository = "https://github.com/charliermarsh/ruff"
@@ -57,12 +57,16 @@ dict.fromkeys(("world",), True)
 {}.deploy(True, False)
 getattr(someobj, attrname, False)
 mylist.index(True)
+bool(False)
 int(True)
 str(int(False))
 cfg.get("hello", True)
 cfg.getint("hello", True)
 cfg.getfloat("hello", True)
 cfg.getboolean("hello", True)
+os.set_blocking(0, False)
+g_action.set_enabled(True)
+settings.set_enable_developer_extras(True)


 class Registry:
@@ -0,0 +1,57 @@
+import builtins
+from abc import abstractmethod
+
+
+def __repr__(self) -> str:
+    ...
+
+
+def __str__(self) -> builtins.str:
+    ...
+
+
+def __repr__(self, /, foo) -> str:
+    ...
+
+
+def __repr__(self, *, foo) -> str:
+    ...
+
+
+class ShouldRemoveSingle:
+    def __str__(self) -> builtins.str:
+        ...
+
+
+class ShouldRemove:
+    def __repr__(self) -> str:
+        ...
+
+    def __str__(self) -> builtins.str:
+        ...
+
+
+class NoReturnSpecified:
+    def __str__(self):
+        ...
+
+    def __repr__(self):
+        ...
+
+
+class NonMatchingArgs:
+    def __str__(self, *, extra) -> builtins.str:
+        ...
+
+    def __repr__(self, /, extra) -> str:
+        ...
+
+
+class MatchingArgsButAbstract:
+    @abstractmethod
+    def __str__(self) -> builtins.str:
+        ...
+
+    @abstractmethod
+    def __repr__(self) -> str:
+        ...
@@ -0,0 +1,28 @@
+import builtins
+from abc import abstractmethod
+
+def __repr__(self) -> str: ...
+def __str__(self) -> builtins.str: ...
+def __repr__(self, /, foo) -> str: ...
+def __repr__(self, *, foo) -> str: ...
+
+class ShouldRemoveSingle:
+    def __str__(self) -> builtins.str: ... # Error: PYI029
+
+class ShouldRemove:
+    def __repr__(self) -> str: ... # Error: PYI029
+    def __str__(self) -> builtins.str: ... # Error: PYI029
+
+class NoReturnSpecified:
+    def __str__(self): ...
+    def __repr__(self): ...
+
+class NonMatchingArgs:
+    def __str__(self, *, extra) -> builtins.str: ...
+    def __repr__(self, /, extra) -> str: ...
+
+class MatchingArgsButAbstract:
+    @abstractmethod
+    def __str__(self) -> builtins.str: ...
+    @abstractmethod
+    def __repr__(self) -> str: ...
@@ -272,3 +272,34 @@ def str_to_bool(val):
     if isinstance(val, bool):
         return some_obj
     return val
+
+
+# Mixed assignments
+def function_assignment(x):
+    def f(): ...
+
+    return f
+
+
+def class_assignment(x):
+    class Foo: ...
+
+    return Foo
+
+
+def mixed_function_assignment(x):
+    if x:
+        def f(): ...
+    else:
+        f = 42
+
+    return f
+
+
+def mixed_class_assignment(x):
+    if x:
+        class Foo: ...
+    else:
+        Foo = 42
+
+    return Foo
@@ -150,3 +150,17 @@ def f():

 def f():
     import pandas as pd
+
+
+def f():
+    from pandas import DataFrame # noqa: TCH002
+
+    x: DataFrame = 2
+
+
+def f():
+    from pandas import ( # noqa: TCH002
+        DataFrame,
+    )
+
+    x: DataFrame = 2
@@ -2,7 +2,7 @@ from __future__ import annotations


 def f():
-    # Even in strict mode, this shouldn't rase an error, since `pkg` is used at runtime,
+    # Even in strict mode, this shouldn't raise an error, since `pkg` is used at runtime,
     # and implicitly imports `pkg.bar`.
     import pkg
     import pkg.bar

@@ -12,7 +12,7 @@ def f():


 def f():
-    # Even in strict mode, this shouldn't rase an error, since `pkg.bar` is used at
+    # Even in strict mode, this shouldn't raise an error, since `pkg.bar` is used at
     # runtime, and implicitly imports `pkg`.
     import pkg
     import pkg.bar

@@ -22,7 +22,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
     import pkg
     from pkg import A


@@ -31,7 +31,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
     from pkg import A, B

     def test(value: A):

@@ -39,7 +39,7 @@ def f():


 def f():
-    # Even in strict mode, this shouldn't rase an error, since `pkg.baz` is used at
+    # Even in strict mode, this shouldn't raise an error, since `pkg.baz` is used at
     # runtime, and implicitly imports `pkg.bar`.
     import pkg.bar
     import pkg.baz

@@ -49,7 +49,7 @@ def f():


 def f():
-    # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
+    # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
     import pkg
     from pkg.bar import A


@@ -58,7 +58,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't rase an error, since `pkg.bar` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
     import pkg
     import pkg.bar as B


@@ -67,7 +67,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
     import pkg.foo as F
     import pkg.foo.bar as B


@@ -76,7 +76,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
     import pkg
     import pkg.foo.bar as B


@@ -85,7 +85,7 @@ def f():


 def f():
-    # In un-strict mode, this _should_ rase an error, since `pkgfoo.bar` is used at runtime.
+    # In un-strict mode, this _should_ raise an error, since `pkg` isn't used at runtime.
     # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
     # testing the implementation.
     import pkg

@@ -96,7 +96,7 @@ def f():


 def f():
-    # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
+    # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
     import pkg.bar as B
     import pkg.foo as F

@@ -0,0 +1,15 @@
+"""Test that `__all__` exports are respected even with multiple declarations."""
+
+import random
+
+
+def some_dependency_check():
+    return random.uniform(0.0, 1.0) > 0.49999
+
+
+if some_dependency_check():
+    import math
+
+    __all__ = ["math"]
+else:
+    __all__ = []
@@ -17,3 +17,17 @@
 "{0}{1}".format(1, *args) # No issues
 "{0}{1}".format(1, 2, *args) # No issues
 "{0}{1}".format(1, 2, 3, *args) # F523
+
+# With nested quotes
+"''1{0}".format(1, 2, 3) # F523
+"\"\"{1}{0}".format(1, 2, 3) # F523
+'""{1}{0}'.format(1, 2, 3) # F523
+
+# With modified indexes
+"{1}{2}".format(1, 2, 3) # F523, # F524
+"{1}{3}".format(1, 2, 3, 4) # F523, # F524
+"{1} {8}".format(0, 1) # F523, # F524
+
+# Not fixable
+(''
+.format(2))
@@ -4,3 +4,4 @@
 "{0} {bar}".format(1) # F524
 "{0} {bar}".format() # F524
 "{bar} {0}".format() # F524
+"{1} {8}".format(0, 1)
@@ -0,0 +1,28 @@
+class Str:
+    def __str__(self):
+        return 1
+
+class Float:
+    def __str__(self):
+        return 3.05
+
+class Int:
+    def __str__(self):
+        return 0
+
+class Bool:
+    def __str__(self):
+        return False
+
+class Str2:
+    def __str__(self):
+        x = "ruff"
+        return x
+
+# TODO fixme once Ruff has better type checking
+def return_int():
+    return 3
+
+class ComplexReturn:
+    def __str__(self):
+        return return_int()
@@ -12,6 +12,8 @@ f"{str(d['a'])}, {repr(d['b'])}, {ascii(d['c'])}" # RUF010

 f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010

+f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
+
 f"{foo(bla)}" # OK

 f"{str(bla, 'ascii')}, {str(bla, encoding='cp1255')}" # OK
@@ -11,6 +11,23 @@ use ruff_python_ast::source_code::{Locator, Stylist};
 use crate::cst::helpers::compose_module_path;
 use crate::cst::matchers::match_statement;

+/// Glue code to make libcst codegen work with ruff's Stylist
+pub(crate) trait CodegenStylist<'a>: Codegen<'a> {
+    fn codegen_stylist(&self, stylist: &'a Stylist) -> String;
+}
+
+impl<'a, T: Codegen<'a>> CodegenStylist<'a> for T {
+    fn codegen_stylist(&self, stylist: &'a Stylist) -> String {
+        let mut state = CodegenState {
+            default_newline: stylist.line_ending().as_str(),
+            default_indent: stylist.indentation(),
+            ..Default::default()
+        };
+        self.codegen(&mut state);
+        state.to_string()
+    }
+}
+
 /// Given an import statement, remove any imports that are specified in the `imports` iterator.
 ///
 /// Returns `Ok(None)` if the statement is empty after removing the imports.
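The `CodegenStylist` glue added above is what lets the later hunks in this commit drop the hand-rolled `CodegenState` setup at each call site. A minimal sketch of that call-site change, reusing only the `tree` and `stylist` values already in scope in `remove_imports`/`retain_imports` (names taken from the diff, not a new API; `rendered` is a hypothetical local):

```rust
// Before this commit: build CodegenState by hand, then render.
let mut state = CodegenState {
    default_newline: &stylist.line_ending(),
    default_indent: stylist.indentation(),
    ..CodegenState::default()
};
tree.codegen(&mut state);
let rendered = state.to_string();

// After this commit: the blanket impl renders any `Codegen` node with the
// Stylist's line ending and indentation in a single call.
let rendered = tree.codegen_stylist(stylist);
```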
@@ -40,11 +57,11 @@ pub(crate) fn remove_imports<'a>(
 // entire statement.
 let mut found_star = false;
 for import in imports {
-    let full_name = match import_body.module.as_ref() {
+    let qualified_name = match import_body.module.as_ref() {
         Some(module_name) => format!("{}.*", compose_module_path(module_name)),
         None => "*".to_string(),
     };
-    if import == full_name {
+    if import == qualified_name {
         found_star = true;
     } else {
         bail!("Expected \"*\" for unused import (got: \"{}\")", import);

@@ -66,26 +83,26 @@ pub(crate) fn remove_imports<'a>(

 for import in imports {
     let alias_index = aliases.iter().position(|alias| {
-        let full_name = match import_module {
+        let qualified_name = match import_module {
             Some((relative, module)) => {
                 let module = module.map(compose_module_path);
                 let member = compose_module_path(&alias.name);
-                let mut full_name = String::with_capacity(
+                let mut qualified_name = String::with_capacity(
                     relative.len() + module.as_ref().map_or(0, String::len) + member.len() + 1,
                 );
                 for _ in 0..relative.len() {
-                    full_name.push('.');
+                    qualified_name.push('.');
                 }
                 if let Some(module) = module {
-                    full_name.push_str(&module);
-                    full_name.push('.');
+                    qualified_name.push_str(&module);
+                    qualified_name.push('.');
                 }
-                full_name.push_str(&member);
-                full_name
+                qualified_name.push_str(&member);
+                qualified_name
             }
             None => compose_module_path(&alias.name),
         };
-        full_name == import
+        qualified_name == import
     });

 if let Some(index) = alias_index {
@@ -114,14 +131,7 @@ pub(crate) fn remove_imports<'a>(
         return Ok(None);
     }

-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Some(state.to_string()))
+    Ok(Some(tree.codegen_stylist(stylist)))
 }

 /// Given an import statement, remove any imports that are not specified in the `imports` slice.
@@ -160,26 +170,26 @@ pub(crate) fn retain_imports(

 aliases.retain(|alias| {
     imports.iter().any(|import| {
-        let full_name = match import_module {
+        let qualified_name = match import_module {
             Some((relative, module)) => {
                 let module = module.map(compose_module_path);
                 let member = compose_module_path(&alias.name);
-                let mut full_name = String::with_capacity(
+                let mut qualified_name = String::with_capacity(
                     relative.len() + module.as_ref().map_or(0, String::len) + member.len() + 1,
                 );
                 for _ in 0..relative.len() {
-                    full_name.push('.');
+                    qualified_name.push('.');
                 }
                 if let Some(module) = module {
-                    full_name.push_str(&module);
-                    full_name.push('.');
+                    qualified_name.push_str(&module);
+                    qualified_name.push('.');
                 }
-                full_name.push_str(&member);
-                full_name
+                qualified_name.push_str(&member);
+                qualified_name
             }
             None => compose_module_path(&alias.name),
         };
-        full_name == *import
+        qualified_name == *import
     })
 });


@@ -200,11 +210,5 @@ pub(crate) fn retain_imports(
         }
     }

-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-    Ok(state.to_string())
+    Ok(tree.codegen_stylist(stylist))
 }
||||||
|
|
@ -257,21 +257,14 @@ where
|
||||||
Stmt::Global(ast::StmtGlobal { names, range: _ }) => {
|
Stmt::Global(ast::StmtGlobal { names, range: _ }) => {
|
||||||
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
|
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
|
||||||
if !self.semantic_model.scope_id.is_global() {
|
if !self.semantic_model.scope_id.is_global() {
|
||||||
// Add the binding to the current scope.
|
|
||||||
let context = self.semantic_model.execution_context();
|
|
||||||
let exceptions = self.semantic_model.exceptions();
|
|
||||||
let scope = &mut self.semantic_model.scopes[self.semantic_model.scope_id];
|
|
||||||
for (name, range) in names.iter().zip(ranges.iter()) {
|
for (name, range) in names.iter().zip(ranges.iter()) {
|
||||||
// Add a binding to the current scope.
|
// Add a binding to the current scope.
|
||||||
let binding_id = self.semantic_model.bindings.push(Binding {
|
let binding_id = self.semantic_model.push_binding(
|
||||||
kind: BindingKind::Global,
|
*range,
|
||||||
range: *range,
|
BindingKind::Global,
|
||||||
references: Vec::new(),
|
BindingFlags::empty(),
|
||||||
source: self.semantic_model.stmt_id,
|
);
|
||||||
context,
|
let scope = self.semantic_model.scope_mut();
|
||||||
exceptions,
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
});
|
|
||||||
scope.add(name, binding_id);
|
scope.add(name, binding_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -286,20 +279,14 @@ where
|
||||||
Stmt::Nonlocal(ast::StmtNonlocal { names, range: _ }) => {
|
Stmt::Nonlocal(ast::StmtNonlocal { names, range: _ }) => {
|
||||||
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
|
let ranges: Vec<TextRange> = helpers::find_names(stmt, self.locator).collect();
|
||||||
if !self.semantic_model.scope_id.is_global() {
|
if !self.semantic_model.scope_id.is_global() {
|
||||||
let context = self.semantic_model.execution_context();
|
|
||||||
let exceptions = self.semantic_model.exceptions();
|
|
||||||
let scope = &mut self.semantic_model.scopes[self.semantic_model.scope_id];
|
|
||||||
for (name, range) in names.iter().zip(ranges.iter()) {
|
for (name, range) in names.iter().zip(ranges.iter()) {
|
||||||
// Add a binding to the current scope.
|
// Add a binding to the current scope.
|
||||||
let binding_id = self.semantic_model.bindings.push(Binding {
|
let binding_id = self.semantic_model.push_binding(
|
||||||
kind: BindingKind::Nonlocal,
|
*range,
|
||||||
range: *range,
|
BindingKind::Nonlocal,
|
||||||
references: Vec::new(),
|
BindingFlags::empty(),
|
||||||
source: self.semantic_model.stmt_id,
|
);
|
||||||
context,
|
let scope = self.semantic_model.scope_mut();
|
||||||
exceptions,
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
});
|
|
||||||
scope.add(name, binding_id);
|
scope.add(name, binding_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
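Both hunks above replace manual `Binding` construction with the semantic model's `push_binding` helper. A hedged sketch of the pattern, inferred only from the removed and added lines (the assumption being that `push_binding` now fills in the references, source statement, execution context, and exception state internally):

```rust
// Old shape: every Binding field spelled out at each call site.
let binding_id = self.semantic_model.bindings.push(Binding {
    kind: BindingKind::Global,
    range: *range,
    references: Vec::new(),
    source: self.semantic_model.stmt_id,
    context: self.semantic_model.execution_context(),
    exceptions: self.semantic_model.exceptions(),
    flags: BindingFlags::empty(),
});

// New shape: range, kind, and flags only; the model owns the rest of the
// bookkeeping, and `scope_mut()` replaces indexing `scopes` by `scope_id`.
let binding_id = self
    .semantic_model
    .push_binding(*range, BindingKind::Global, BindingFlags::empty());
self.semantic_model.scope_mut().add(name, binding_id);
```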
@@ -395,6 +382,10 @@ where
     }
 }

+if self.enabled(Rule::InvalidStrReturnType) {
+    pylint::rules::invalid_str_return(self, name, body);
+}
+
 if self.enabled(Rule::InvalidFunctionName) {
     if let Some(diagnostic) = pep8_naming::rules::invalid_function_name(
         stmt,

@@ -460,6 +451,9 @@ where
         stmt.is_async_function_def_stmt(),
     );
 }
+if self.enabled(Rule::StrOrReprDefinedInStub) {
+    flake8_pyi::rules::str_or_repr_defined_in_stub(self, stmt);
+}
 if self.enabled(Rule::NoReturnArgumentAnnotationInStub) {
     flake8_pyi::rules::no_return_argument_annotation(self, args);
 }
|
||||||
for alias in names {
|
for alias in names {
|
||||||
if &alias.name == "__future__" {
|
if &alias.name == "__future__" {
|
||||||
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
||||||
|
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
alias.range(),
|
||||||
kind: BindingKind::FutureImportation,
|
BindingKind::FutureImportation,
|
||||||
range: alias.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
if self.enabled(Rule::LateFutureImport) {
|
if self.enabled(Rule::LateFutureImport) {
|
||||||
|
|
@ -867,45 +854,33 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if alias.name.contains('.') && alias.asname.is_none() {
|
} else if alias.name.contains('.') && alias.asname.is_none() {
|
||||||
// Given `import foo.bar`, `name` would be "foo", and `full_name` would be
|
// Given `import foo.bar`, `name` would be "foo", and `qualified_name` would be
|
||||||
// "foo.bar".
|
// "foo.bar".
|
||||||
let name = alias.name.split('.').next().unwrap();
|
let name = alias.name.split('.').next().unwrap();
|
||||||
let full_name = &alias.name;
|
let qualified_name = &alias.name;
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
alias.range(),
|
||||||
kind: BindingKind::SubmoduleImportation(SubmoduleImportation {
|
BindingKind::SubmoduleImportation(SubmoduleImportation {
|
||||||
full_name,
|
qualified_name,
|
||||||
}),
|
}),
|
||||||
range: alias.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
||||||
let full_name = &alias.name;
|
let qualified_name = &alias.name;
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
alias.range(),
|
||||||
kind: BindingKind::Importation(Importation { full_name }),
|
BindingKind::Importation(Importation { qualified_name }),
|
||||||
range: alias.range(),
|
if alias
|
||||||
references: Vec::new(),
|
.asname
|
||||||
source: self.semantic_model.stmt_id,
|
.as_ref()
|
||||||
context: self.semantic_model.execution_context(),
|
.map_or(false, |asname| asname == &alias.name)
|
||||||
exceptions: self.semantic_model.exceptions(),
|
{
|
||||||
flags: if alias
|
BindingFlags::EXPLICIT_EXPORT
|
||||||
.asname
|
} else {
|
||||||
.as_ref()
|
BindingFlags::empty()
|
||||||
.map_or(false, |asname| asname == &alias.name)
|
|
||||||
{
|
|
||||||
BindingFlags::EXPLICIT_EXPORT
|
|
||||||
} else {
|
|
||||||
BindingFlags::empty()
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
|
|
@ -1130,15 +1105,9 @@ where
|
||||||
|
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
alias.range(),
|
||||||
kind: BindingKind::FutureImportation,
|
BindingKind::FutureImportation,
|
||||||
range: alias.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
if self.enabled(Rule::FutureFeatureNotDefined) {
|
if self.enabled(Rule::FutureFeatureNotDefined) {
|
||||||
|
|
@ -1189,30 +1158,24 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Given `from foo import bar`, `name` would be "bar" and `full_name` would
|
// Given `from foo import bar`, `name` would be "bar" and `qualified_name` would
|
||||||
// be "foo.bar". Given `from foo import bar as baz`, `name` would be "baz"
|
// be "foo.bar". Given `from foo import bar as baz`, `name` would be "baz"
|
||||||
// and `full_name` would be "foo.bar".
|
// and `qualified_name` would be "foo.bar".
|
||||||
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
let name = alias.asname.as_ref().unwrap_or(&alias.name);
|
||||||
let full_name =
|
let qualified_name =
|
||||||
helpers::format_import_from_member(level, module, &alias.name);
|
helpers::format_import_from_member(level, module, &alias.name);
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
alias.range(),
|
||||||
kind: BindingKind::FromImportation(FromImportation { full_name }),
|
BindingKind::FromImportation(FromImportation { qualified_name }),
|
||||||
range: alias.range(),
|
if alias
|
||||||
references: Vec::new(),
|
.asname
|
||||||
source: self.semantic_model.stmt_id,
|
.as_ref()
|
||||||
context: self.semantic_model.execution_context(),
|
.map_or(false, |asname| asname == &alias.name)
|
||||||
exceptions: self.semantic_model.exceptions(),
|
{
|
||||||
flags: if alias
|
BindingFlags::EXPLICIT_EXPORT
|
||||||
.asname
|
} else {
|
||||||
.as_ref()
|
BindingFlags::empty()
|
||||||
.map_or(false, |asname| asname == &alias.name)
|
|
||||||
{
|
|
||||||
BindingFlags::EXPLICIT_EXPORT
|
|
||||||
} else {
|
|
||||||
BindingFlags::empty()
|
|
||||||
},
|
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
@ -1240,12 +1203,12 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
if self.enabled(Rule::UnconventionalImportAlias) {
|
if self.enabled(Rule::UnconventionalImportAlias) {
|
||||||
let full_name =
|
let qualified_name =
|
||||||
helpers::format_import_from_member(level, module, &alias.name);
|
helpers::format_import_from_member(level, module, &alias.name);
|
||||||
if let Some(diagnostic) =
|
if let Some(diagnostic) =
|
||||||
flake8_import_conventions::rules::conventional_import_alias(
|
flake8_import_conventions::rules::conventional_import_alias(
|
||||||
stmt,
|
stmt,
|
||||||
&full_name,
|
&qualified_name,
|
||||||
alias.asname.as_deref(),
|
alias.asname.as_deref(),
|
||||||
&self.settings.flake8_import_conventions.aliases,
|
&self.settings.flake8_import_conventions.aliases,
|
||||||
)
|
)
|
||||||
|
|
@ -1256,12 +1219,12 @@ where
|
||||||
|
|
||||||
if self.enabled(Rule::BannedImportAlias) {
|
if self.enabled(Rule::BannedImportAlias) {
|
||||||
if let Some(asname) = &alias.asname {
|
if let Some(asname) = &alias.asname {
|
||||||
let full_name =
|
let qualified_name =
|
||||||
helpers::format_import_from_member(level, module, &alias.name);
|
helpers::format_import_from_member(level, module, &alias.name);
|
||||||
if let Some(diagnostic) =
|
if let Some(diagnostic) =
|
||||||
flake8_import_conventions::rules::banned_import_alias(
|
flake8_import_conventions::rules::banned_import_alias(
|
||||||
stmt,
|
stmt,
|
||||||
&full_name,
|
&qualified_name,
|
||||||
asname,
|
asname,
|
||||||
&self.settings.flake8_import_conventions.banned_aliases,
|
&self.settings.flake8_import_conventions.banned_aliases,
|
||||||
)
|
)
|
||||||
|
|
@ -1930,15 +1893,9 @@ where
|
||||||
|
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
stmt.range(),
|
||||||
kind: BindingKind::FunctionDefinition,
|
BindingKind::FunctionDefinition,
|
||||||
range: stmt.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
let definition = docstrings::extraction::extract_definition(
|
let definition = docstrings::extraction::extract_definition(
|
||||||
|
|
@ -2166,15 +2123,9 @@ where
|
||||||
self.semantic_model.pop_definition();
|
self.semantic_model.pop_definition();
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
stmt.range(),
|
||||||
kind: BindingKind::ClassDefinition,
|
BindingKind::ClassDefinition,
|
||||||
range: stmt.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
|
|
@@ -3112,7 +3063,7 @@ where
             }
             Expr::Set(ast::ExprSet { elts, range: _ }) => {
                 if self.enabled(Rule::DuplicateValue) {
-                    pylint::rules::duplicate_value(self, elts);
+                    flake8_bugbear::rules::duplicate_value(self, elts);
                 }
             }
             Expr::Yield(_) => {
@ -4203,15 +4154,9 @@ where
|
||||||
// upstream.
|
// upstream.
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
&arg.arg,
|
&arg.arg,
|
||||||
Binding {
|
arg.range(),
|
||||||
kind: BindingKind::Argument,
|
BindingKind::Argument,
|
||||||
range: arg.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
|
|
||||||
if self.enabled(Rule::AmbiguousVariableName) {
|
if self.enabled(Rule::AmbiguousVariableName) {
|
||||||
|
|
@ -4251,15 +4196,9 @@ where
|
||||||
{
|
{
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
name,
|
name,
|
||||||
Binding {
|
pattern.range(),
|
||||||
kind: BindingKind::Assignment,
|
BindingKind::Assignment,
|
||||||
range: pattern.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -4383,9 +4322,33 @@ impl<'a> Checker<'a> {
     }
 
     /// Add a [`Binding`] to the current scope, bound to the given name.
-    fn add_binding(&mut self, name: &'a str, binding: Binding<'a>) -> BindingId {
-        let binding_id = self.semantic_model.bindings.next_id();
-        if let Some((stack_index, existing_binding_id)) = self
+    fn add_binding(
+        &mut self,
+        name: &'a str,
+        range: TextRange,
+        kind: BindingKind<'a>,
+        flags: BindingFlags,
+    ) -> BindingId {
+        // Determine the scope to which the binding belongs.
+        // Per [PEP 572](https://peps.python.org/pep-0572/#scope-of-the-target), named
+        // expressions in generators and comprehensions bind to the scope that contains the
+        // outermost comprehension.
+        let scope_id = if kind.is_named_expr_assignment() {
+            self.semantic_model
+                .scopes
+                .ancestor_ids(self.semantic_model.scope_id)
+                .find_or_last(|scope_id| !self.semantic_model.scopes[*scope_id].kind.is_generator())
+                .unwrap_or(self.semantic_model.scope_id)
+        } else {
+            self.semantic_model.scope_id
+        };
+
+        // Create the `Binding`.
+        let binding_id = self.semantic_model.push_binding(range, kind, flags);
+        let binding = &self.semantic_model.bindings[binding_id];
+
+        // Determine whether the binding shadows any existing bindings.
+        if let Some((stack_index, shadowed_id)) = self
             .semantic_model
             .scopes
             .ancestors(self.semantic_model.scope_id)
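Note on the new signature above: `add_binding` no longer accepts a pre-built `Binding`; callers pass the name, range, kind, and flags, and the target scope (including the PEP 572 comprehension case) is resolved internally. A hedged sketch of a call site under that assumption, mirroring the shape seen in the later hunks of this diff:

    // Sketch only: not a verbatim call site from the crate.
    self.add_binding(
        name,
        stmt.range(),
        BindingKind::Assignment,
        BindingFlags::empty(),
    );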
@ -4394,26 +4357,26 @@ impl<'a> Checker<'a> {
|
||||||
scope.get(name).map(|binding_id| (stack_index, binding_id))
|
scope.get(name).map(|binding_id| (stack_index, binding_id))
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
let existing = &self.semantic_model.bindings[existing_binding_id];
|
let shadowed = &self.semantic_model.bindings[shadowed_id];
|
||||||
let in_current_scope = stack_index == 0;
|
let in_current_scope = stack_index == 0;
|
||||||
if !existing.kind.is_builtin()
|
if !shadowed.kind.is_builtin()
|
||||||
&& existing.source.map_or(true, |left| {
|
&& shadowed.source.map_or(true, |left| {
|
||||||
binding.source.map_or(true, |right| {
|
binding.source.map_or(true, |right| {
|
||||||
!branch_detection::different_forks(left, right, &self.semantic_model.stmts)
|
!branch_detection::different_forks(left, right, &self.semantic_model.stmts)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
{
|
{
|
||||||
let existing_is_import = matches!(
|
let shadows_import = matches!(
|
||||||
existing.kind,
|
shadowed.kind,
|
||||||
BindingKind::Importation(..)
|
BindingKind::Importation(..)
|
||||||
| BindingKind::FromImportation(..)
|
| BindingKind::FromImportation(..)
|
||||||
| BindingKind::SubmoduleImportation(..)
|
| BindingKind::SubmoduleImportation(..)
|
||||||
| BindingKind::FutureImportation
|
| BindingKind::FutureImportation
|
||||||
);
|
);
|
||||||
if binding.kind.is_loop_var() && existing_is_import {
|
if binding.kind.is_loop_var() && shadows_import {
|
||||||
if self.enabled(Rule::ImportShadowedByLoopVar) {
|
if self.enabled(Rule::ImportShadowedByLoopVar) {
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let line = self.locator.compute_line_index(existing.range.start());
|
let line = self.locator.compute_line_index(shadowed.range.start());
|
||||||
|
|
||||||
self.diagnostics.push(Diagnostic::new(
|
self.diagnostics.push(Diagnostic::new(
|
||||||
pyflakes::rules::ImportShadowedByLoopVar {
|
pyflakes::rules::ImportShadowedByLoopVar {
|
||||||
|
|
@ -4424,21 +4387,21 @@ impl<'a> Checker<'a> {
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
} else if in_current_scope {
|
} else if in_current_scope {
|
||||||
if !existing.is_used()
|
if !shadowed.is_used()
|
||||||
&& binding.redefines(existing)
|
&& binding.redefines(shadowed)
|
||||||
&& (!self.settings.dummy_variable_rgx.is_match(name) || existing_is_import)
|
&& (!self.settings.dummy_variable_rgx.is_match(name) || shadows_import)
|
||||||
&& !(existing.kind.is_function_definition()
|
&& !(shadowed.kind.is_function_definition()
|
||||||
&& analyze::visibility::is_overload(
|
&& analyze::visibility::is_overload(
|
||||||
&self.semantic_model,
|
&self.semantic_model,
|
||||||
cast::decorator_list(
|
cast::decorator_list(
|
||||||
self.semantic_model.stmts[existing.source.unwrap()],
|
self.semantic_model.stmts[shadowed.source.unwrap()],
|
||||||
),
|
),
|
||||||
))
|
))
|
||||||
{
|
{
|
||||||
if self.enabled(Rule::RedefinedWhileUnused) {
|
if self.enabled(Rule::RedefinedWhileUnused) {
|
||||||
#[allow(deprecated)]
|
#[allow(deprecated)]
|
||||||
let line = self.locator.compute_line_index(
|
let line = self.locator.compute_line_index(
|
||||||
existing
|
shadowed
|
||||||
.trimmed_range(&self.semantic_model, self.locator)
|
.trimmed_range(&self.semantic_model, self.locator)
|
||||||
.start(),
|
.start(),
|
||||||
);
|
);
|
||||||
|
|
@ -4450,81 +4413,60 @@ impl<'a> Checker<'a> {
|
||||||
},
|
},
|
||||||
binding.trimmed_range(&self.semantic_model, self.locator),
|
binding.trimmed_range(&self.semantic_model, self.locator),
|
||||||
);
|
);
|
||||||
if let Some(parent) = binding.source {
|
if let Some(range) = binding.parent_range(&self.semantic_model) {
|
||||||
let parent = self.semantic_model.stmts[parent];
|
diagnostic.set_parent(range.start());
|
||||||
if matches!(parent, Stmt::ImportFrom(_))
|
|
||||||
&& parent.range().contains_range(binding.range)
|
|
||||||
{
|
|
||||||
diagnostic.set_parent(parent.start());
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
self.diagnostics.push(diagnostic);
|
self.diagnostics.push(diagnostic);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if existing_is_import && binding.redefines(existing) {
|
} else if shadows_import && binding.redefines(shadowed) {
|
||||||
self.semantic_model
|
self.semantic_model
|
||||||
.shadowed_bindings
|
.shadowed_bindings
|
||||||
.entry(existing_binding_id)
|
.insert(binding_id, shadowed_id);
|
||||||
.or_insert_with(Vec::new)
|
|
||||||
.push(binding_id);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Per [PEP 572](https://peps.python.org/pep-0572/#scope-of-the-target), named
|
// If there's an existing binding in this scope, copy its references.
|
||||||
// expressions in generators and comprehensions bind to the scope that contains the
|
if let Some(shadowed) = self.semantic_model.scopes[scope_id]
|
||||||
// outermost comprehension.
|
.get(name)
|
||||||
let scope_id = if binding.kind.is_named_expr_assignment() {
|
.map(|binding_id| &self.semantic_model.bindings[binding_id])
|
||||||
self.semantic_model
|
{
|
||||||
.scopes
|
match &shadowed.kind {
|
||||||
.ancestor_ids(self.semantic_model.scope_id)
|
|
||||||
.find_or_last(|scope_id| !self.semantic_model.scopes[*scope_id].kind.is_generator())
|
|
||||||
.unwrap_or(self.semantic_model.scope_id)
|
|
||||||
} else {
|
|
||||||
self.semantic_model.scope_id
|
|
||||||
};
|
|
||||||
let scope = &mut self.semantic_model.scopes[scope_id];
|
|
||||||
|
|
||||||
let binding = if let Some(binding_id) = scope.get(name) {
|
|
||||||
let existing = &self.semantic_model.bindings[binding_id];
|
|
||||||
match &existing.kind {
|
|
||||||
BindingKind::Builtin => {
|
BindingKind::Builtin => {
|
||||||
// Avoid overriding builtins.
|
// Avoid overriding builtins.
|
||||||
binding
|
|
||||||
}
|
}
|
||||||
kind @ (BindingKind::Global | BindingKind::Nonlocal) => {
|
kind @ (BindingKind::Global | BindingKind::Nonlocal) => {
|
||||||
// If the original binding was a global or nonlocal, and the new binding conflicts within
|
// If the original binding was a global or nonlocal, then the new binding is
|
||||||
// the current scope, then the new binding is also as the same.
|
// too.
|
||||||
Binding {
|
let references = shadowed.references.clone();
|
||||||
references: existing.references.clone(),
|
self.semantic_model.bindings[binding_id].kind = kind.clone();
|
||||||
kind: kind.clone(),
|
self.semantic_model.bindings[binding_id].references = references;
|
||||||
..binding
|
}
|
||||||
}
|
_ => {
|
||||||
|
let references = shadowed.references.clone();
|
||||||
|
self.semantic_model.bindings[binding_id].references = references;
|
||||||
}
|
}
|
||||||
_ => Binding {
|
|
||||||
references: existing.references.clone(),
|
|
||||||
..binding
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
binding
|
|
||||||
};
|
|
||||||
|
|
||||||
// Don't treat annotations as assignments if there is an existing value
|
// If this is an annotation, and we already have an existing value in the same scope,
|
||||||
// in scope.
|
// don't treat it as an assignment (i.e., avoid adding it to the scope).
|
||||||
if binding.kind.is_annotation() && scope.defines(name) {
|
if self.semantic_model.bindings[binding_id]
|
||||||
return self.semantic_model.bindings.push(binding);
|
.kind
|
||||||
|
.is_annotation()
|
||||||
|
{
|
||||||
|
return binding_id;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add the binding to the scope.
|
// Add the binding to the scope.
|
||||||
|
let scope = &mut self.semantic_model.scopes[scope_id];
|
||||||
scope.add(name, binding_id);
|
scope.add(name, binding_id);
|
||||||
|
|
||||||
// Add the binding to the arena.
|
binding_id
|
||||||
self.semantic_model.bindings.push(binding)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn bind_builtins(&mut self) {
|
fn bind_builtins(&mut self) {
|
||||||
let scope = &mut self.semantic_model.scopes[self.semantic_model.scope_id];
|
|
||||||
for builtin in BUILTINS
|
for builtin in BUILTINS
|
||||||
.iter()
|
.iter()
|
||||||
.chain(MAGIC_GLOBALS.iter())
|
.chain(MAGIC_GLOBALS.iter())
|
||||||
|
|
@@ -4532,15 +4474,8 @@ impl<'a> Checker<'a> {
             .chain(self.settings.builtins.iter().map(String::as_str))
         {
             // Add the builtin to the scope.
-            let binding_id = self.semantic_model.bindings.push(Binding {
-                kind: BindingKind::Builtin,
-                range: TextRange::default(),
-                source: None,
-                references: Vec::new(),
-                context: ExecutionContext::Runtime,
-                exceptions: Exceptions::empty(),
-                flags: BindingFlags::empty(),
-            });
+            let binding_id = self.semantic_model.push_builtin();
+            let scope = self.semantic_model.scope_mut();
             scope.add(builtin, binding_id);
         }
     }
@ -4650,15 +4585,9 @@ impl<'a> Checker<'a> {
|
||||||
) {
|
) {
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
id,
|
id,
|
||||||
Binding {
|
expr.range(),
|
||||||
kind: BindingKind::Annotation,
|
BindingKind::Annotation,
|
||||||
range: expr.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
@ -4666,15 +4595,9 @@ impl<'a> Checker<'a> {
|
||||||
if matches!(parent, Stmt::For(_) | Stmt::AsyncFor(_)) {
|
if matches!(parent, Stmt::For(_) | Stmt::AsyncFor(_)) {
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
id,
|
id,
|
||||||
Binding {
|
expr.range(),
|
||||||
kind: BindingKind::LoopVar,
|
BindingKind::LoopVar,
|
||||||
range: expr.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
@ -4682,29 +4605,17 @@ impl<'a> Checker<'a> {
|
||||||
if helpers::is_unpacking_assignment(parent, expr) {
|
if helpers::is_unpacking_assignment(parent, expr) {
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
id,
|
id,
|
||||||
Binding {
|
expr.range(),
|
||||||
kind: BindingKind::Binding,
|
BindingKind::Binding,
|
||||||
range: expr.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let scope = self.semantic_model.scope();
|
let scope = self.semantic_model.scope();
|
||||||
|
|
||||||
if id == "__all__"
|
if scope.kind.is_module()
|
||||||
&& scope.kind.is_module()
|
&& match parent {
|
||||||
&& matches!(
|
|
||||||
parent,
|
|
||||||
Stmt::Assign(_) | Stmt::AugAssign(_) | Stmt::AnnAssign(_)
|
|
||||||
)
|
|
||||||
{
|
|
||||||
if match parent {
|
|
||||||
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
|
Stmt::Assign(ast::StmtAssign { targets, .. }) => {
|
||||||
if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
|
if let Some(Expr::Name(ast::ExprName { id, .. })) = targets.first() {
|
||||||
id == "__all__"
|
id == "__all__"
|
||||||
|
|
@ -4727,53 +4638,32 @@ impl<'a> Checker<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_ => false,
|
_ => false,
|
||||||
} {
|
|
||||||
let (all_names, all_names_flags) = {
|
|
||||||
let (mut names, flags) =
|
|
||||||
extract_all_names(parent, |name| self.semantic_model.is_builtin(name));
|
|
||||||
|
|
||||||
// Grab the existing bound __all__ values.
|
|
||||||
if let Stmt::AugAssign(_) = parent {
|
|
||||||
if let Some(binding_id) = scope.get("__all__") {
|
|
||||||
if let BindingKind::Export(Export { names: existing }) =
|
|
||||||
&self.semantic_model.bindings[binding_id].kind
|
|
||||||
{
|
|
||||||
names.extend_from_slice(existing);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
(names, flags)
|
|
||||||
};
|
|
||||||
|
|
||||||
if self.enabled(Rule::InvalidAllFormat) {
|
|
||||||
if matches!(all_names_flags, AllNamesFlags::INVALID_FORMAT) {
|
|
||||||
self.diagnostics
|
|
||||||
.push(pylint::rules::invalid_all_format(expr));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.enabled(Rule::InvalidAllObject) {
|
|
||||||
if matches!(all_names_flags, AllNamesFlags::INVALID_OBJECT) {
|
|
||||||
self.diagnostics
|
|
||||||
.push(pylint::rules::invalid_all_object(expr));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
self.add_binding(
|
|
||||||
id,
|
|
||||||
Binding {
|
|
||||||
kind: BindingKind::Export(Export { names: all_names }),
|
|
||||||
range: expr.range(),
|
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
|
||||||
return;
|
|
||||||
}
|
}
|
||||||
|
{
|
||||||
|
let (names, flags) =
|
||||||
|
extract_all_names(parent, |name| self.semantic_model.is_builtin(name));
|
||||||
|
|
||||||
|
if self.enabled(Rule::InvalidAllFormat) {
|
||||||
|
if matches!(flags, AllNamesFlags::INVALID_FORMAT) {
|
||||||
|
self.diagnostics
|
||||||
|
.push(pylint::rules::invalid_all_format(expr));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if self.enabled(Rule::InvalidAllObject) {
|
||||||
|
if matches!(flags, AllNamesFlags::INVALID_OBJECT) {
|
||||||
|
self.diagnostics
|
||||||
|
.push(pylint::rules::invalid_all_object(expr));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.add_binding(
|
||||||
|
id,
|
||||||
|
expr.range(),
|
||||||
|
BindingKind::Export(Export { names }),
|
||||||
|
BindingFlags::empty(),
|
||||||
|
);
|
||||||
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
if self
|
if self
|
||||||
|
|
@ -4783,30 +4673,18 @@ impl<'a> Checker<'a> {
|
||||||
{
|
{
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
id,
|
id,
|
||||||
Binding {
|
expr.range(),
|
||||||
kind: BindingKind::NamedExprAssignment,
|
BindingKind::NamedExprAssignment,
|
||||||
range: expr.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
self.add_binding(
|
self.add_binding(
|
||||||
id,
|
id,
|
||||||
Binding {
|
expr.range(),
|
||||||
kind: BindingKind::Assignment,
|
BindingKind::Assignment,
|
||||||
range: expr.range(),
|
BindingFlags::empty(),
|
||||||
references: Vec::new(),
|
|
||||||
source: self.semantic_model.stmt_id,
|
|
||||||
context: self.semantic_model.execution_context(),
|
|
||||||
exceptions: self.semantic_model.exceptions(),
|
|
||||||
flags: BindingFlags::empty(),
|
|
||||||
},
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -5019,50 +4897,31 @@ impl<'a> Checker<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Mark anything referenced in `__all__` as used.
|
// Mark anything referenced in `__all__` as used.
|
||||||
let all_bindings: Option<(Vec<BindingId>, TextRange)> = {
|
let exports: Vec<(&str, TextRange)> = {
|
||||||
let global_scope = self.semantic_model.global_scope();
|
let global_scope = self.semantic_model.global_scope();
|
||||||
let all_names: Option<(&[&str], TextRange)> = global_scope
|
global_scope
|
||||||
.get("__all__")
|
.bindings_for_name("__all__")
|
||||||
.map(|binding_id| &self.semantic_model.bindings[binding_id])
|
.map(|binding_id| &self.semantic_model.bindings[binding_id])
|
||||||
.and_then(|binding| match &binding.kind {
|
.filter_map(|binding| match &binding.kind {
|
||||||
BindingKind::Export(Export { names }) => {
|
BindingKind::Export(Export { names }) => {
|
||||||
Some((names.as_slice(), binding.range))
|
Some(names.iter().map(|name| (*name, binding.range)))
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
});
|
})
|
||||||
|
.flatten()
|
||||||
all_names.map(|(names, range)| {
|
.collect()
|
||||||
(
|
|
||||||
names
|
|
||||||
.iter()
|
|
||||||
.filter_map(|name| global_scope.get(name))
|
|
||||||
.collect(),
|
|
||||||
range,
|
|
||||||
)
|
|
||||||
})
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some((bindings, range)) = all_bindings {
|
for (name, range) in &exports {
|
||||||
for binding_id in bindings {
|
if let Some(binding_id) = self.semantic_model.global_scope().get(name) {
|
||||||
self.semantic_model.add_global_reference(
|
self.semantic_model.add_global_reference(
|
||||||
binding_id,
|
binding_id,
|
||||||
range,
|
*range,
|
||||||
ExecutionContext::Runtime,
|
ExecutionContext::Runtime,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Extract `__all__` names from the global scope.
|
|
||||||
let all_names: Option<(&[&str], TextRange)> = self
|
|
||||||
.semantic_model
|
|
||||||
.global_scope()
|
|
||||||
.get("__all__")
|
|
||||||
.map(|binding_id| &self.semantic_model.bindings[binding_id])
|
|
||||||
.and_then(|binding| match &binding.kind {
|
|
||||||
BindingKind::Export(Export { names }) => Some((names.as_slice(), binding.range)),
|
|
||||||
_ => None,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Identify any valid runtime imports. If a module is imported at runtime, and
|
// Identify any valid runtime imports. If a module is imported at runtime, and
|
||||||
// used at runtime, then by default, we avoid flagging any other
|
// used at runtime, then by default, we avoid flagging any other
|
||||||
// imports from that model as typing-only.
|
// imports from that model as typing-only.
|
||||||
|
|
@ -5099,35 +4958,33 @@ impl<'a> Checker<'a> {
|
||||||
// F822
|
// F822
|
||||||
if self.enabled(Rule::UndefinedExport) {
|
if self.enabled(Rule::UndefinedExport) {
|
||||||
if !self.path.ends_with("__init__.py") {
|
if !self.path.ends_with("__init__.py") {
|
||||||
if let Some((names, range)) = all_names {
|
for (name, range) in &exports {
|
||||||
diagnostics
|
diagnostics
|
||||||
.extend(pyflakes::rules::undefined_export(names, range, scope));
|
.extend(pyflakes::rules::undefined_export(name, *range, scope));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// F405
|
// F405
|
||||||
if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
|
if self.enabled(Rule::UndefinedLocalWithImportStarUsage) {
|
||||||
if let Some((names, range)) = &all_names {
|
let sources: Vec<String> = scope
|
||||||
let sources: Vec<String> = scope
|
.star_imports()
|
||||||
.star_imports()
|
.map(|StarImportation { level, module }| {
|
||||||
.map(|StarImportation { level, module }| {
|
helpers::format_import_from(*level, *module)
|
||||||
helpers::format_import_from(*level, *module)
|
})
|
||||||
})
|
.sorted()
|
||||||
.sorted()
|
.dedup()
|
||||||
.dedup()
|
.collect();
|
||||||
.collect();
|
if !sources.is_empty() {
|
||||||
if !sources.is_empty() {
|
for (name, range) in &exports {
|
||||||
for name in names.iter() {
|
if !scope.defines(name) {
|
||||||
if !scope.defines(name) {
|
diagnostics.push(Diagnostic::new(
|
||||||
diagnostics.push(Diagnostic::new(
|
pyflakes::rules::UndefinedLocalWithImportStarUsage {
|
||||||
pyflakes::rules::UndefinedLocalWithImportStarUsage {
|
name: (*name).to_string(),
|
||||||
name: (*name).to_string(),
|
sources: sources.clone(),
|
||||||
sources: sources.clone(),
|
},
|
||||||
},
|
*range,
|
||||||
*range,
|
));
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -5160,52 +5017,35 @@ impl<'a> Checker<'a> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look for any bindings that were redefined in another scope, and remain
|
// Look for any bindings that were redefined in another scope, and remain
|
||||||
// unused. Note that we only store references in `redefinitions` if
|
// unused. Note that we only store references in `shadowed_bindings` if
|
||||||
// the bindings are in different scopes.
|
// the bindings are in different scopes.
|
||||||
if self.enabled(Rule::RedefinedWhileUnused) {
|
if self.enabled(Rule::RedefinedWhileUnused) {
|
||||||
for (name, binding_id) in scope.bindings() {
|
for (name, binding_id) in scope.bindings() {
|
||||||
let binding = &self.semantic_model.bindings[binding_id];
|
if let Some(shadowed) = self.semantic_model.shadowed_binding(binding_id) {
|
||||||
|
if shadowed.is_used() {
|
||||||
if matches!(
|
|
||||||
binding.kind,
|
|
||||||
BindingKind::Importation(..)
|
|
||||||
| BindingKind::FromImportation(..)
|
|
||||||
| BindingKind::SubmoduleImportation(..)
|
|
||||||
) {
|
|
||||||
if binding.is_used() {
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(shadowed_ids) =
|
let binding = &self.semantic_model.bindings[binding_id];
|
||||||
self.semantic_model.shadowed_bindings.get(&binding_id)
|
|
||||||
{
|
|
||||||
for binding_id in shadowed_ids.iter().copied() {
|
|
||||||
let rebound = &self.semantic_model.bindings[binding_id];
|
|
||||||
#[allow(deprecated)]
|
|
||||||
let line = self.locator.compute_line_index(
|
|
||||||
binding
|
|
||||||
.trimmed_range(&self.semantic_model, self.locator)
|
|
||||||
.start(),
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut diagnostic = Diagnostic::new(
|
#[allow(deprecated)]
|
||||||
pyflakes::rules::RedefinedWhileUnused {
|
let line = self.locator.compute_line_index(
|
||||||
name: (*name).to_string(),
|
shadowed
|
||||||
line,
|
.trimmed_range(&self.semantic_model, self.locator)
|
||||||
},
|
.start(),
|
||||||
rebound.trimmed_range(&self.semantic_model, self.locator),
|
);
|
||||||
);
|
|
||||||
if let Some(source) = rebound.source {
|
let mut diagnostic = Diagnostic::new(
|
||||||
let parent = &self.semantic_model.stmts[source];
|
pyflakes::rules::RedefinedWhileUnused {
|
||||||
if matches!(parent, Stmt::ImportFrom(_))
|
name: (*name).to_string(),
|
||||||
&& parent.range().contains_range(rebound.range)
|
line,
|
||||||
{
|
},
|
||||||
diagnostic.set_parent(parent.start());
|
binding.trimmed_range(&self.semantic_model, self.locator),
|
||||||
}
|
);
|
||||||
};
|
if let Some(range) = binding.parent_range(&self.semantic_model) {
|
||||||
diagnostics.push(diagnostic);
|
diagnostic.set_parent(range.start());
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
diagnostics.push(diagnostic);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@@ -18,10 +18,9 @@ pub(crate) fn check_noqa(
     locator: &Locator,
     comment_ranges: &[TextRange],
     noqa_line_for: &NoqaMapping,
+    analyze_directives: bool,
     settings: &Settings,
 ) -> Vec<usize> {
-    let enforce_noqa = settings.rules.enabled(Rule::UnusedNOQA);
-
     // Identify any codes that are globally exempted (within the current file).
     let exemption = noqa::file_exemption(locator.contents(), comment_ranges);
 
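The signature above gains an `analyze_directives: bool` ahead of `settings`, and the RUF100 pass is gated on it in the next hunk. A sketch of the updated call shape, with the leading arguments elided; `error.is_none()` matches the `check_path` hunk later in this diff:

    // Sketch only: argument order per the signature above.
    check_noqa(
        /* ... */
        locator,
        indexer.comment_ranges(),
        &directives.noqa_line_for,
        error.is_none(), // new `analyze_directives` flag
        settings,
    );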
@@ -93,7 +92,7 @@ pub(crate) fn check_noqa(
     }
 
     // Enforce that the noqa directive was actually used (RUF100).
-    if enforce_noqa {
+    if analyze_directives && settings.rules.enabled(Rule::UnusedNOQA) {
         for line in noqa_directives.lines() {
             match &line.directive {
                 Directive::All(leading_spaces, noqa_range, trailing_spaces) => {
@@ -167,6 +167,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Pylint, "E0118") => (RuleGroup::Unspecified, rules::pylint::rules::LoadBeforeGlobalDeclaration),
         (Pylint, "E0241") => (RuleGroup::Unspecified, rules::pylint::rules::DuplicateBases),
         (Pylint, "E0302") => (RuleGroup::Unspecified, rules::pylint::rules::UnexpectedSpecialMethodSignature),
+        (Pylint, "E0307") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidStrReturnType),
         (Pylint, "E0604") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidAllObject),
         (Pylint, "E0605") => (RuleGroup::Unspecified, rules::pylint::rules::InvalidAllFormat),
         (Pylint, "E1142") => (RuleGroup::Unspecified, rules::pylint::rules::AwaitOutsideAsync),
@@ -196,7 +197,6 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Pylint, "R5501") => (RuleGroup::Unspecified, rules::pylint::rules::CollapsibleElseIf),
         (Pylint, "W0120") => (RuleGroup::Unspecified, rules::pylint::rules::UselessElseOnLoop),
         (Pylint, "W0129") => (RuleGroup::Unspecified, rules::pylint::rules::AssertOnStringLiteral),
-        (Pylint, "W0130") => (RuleGroup::Unspecified, rules::pylint::rules::DuplicateValue),
         (Pylint, "W0131") => (RuleGroup::Unspecified, rules::pylint::rules::NamedExprWithoutContext),
         (Pylint, "W0406") => (RuleGroup::Unspecified, rules::pylint::rules::ImportSelf),
         (Pylint, "W0602") => (RuleGroup::Unspecified, rules::pylint::rules::GlobalVariableNotAssigned),
@@ -248,6 +248,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Flake8Bugbear, "030") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ExceptWithNonExceptionClasses),
         (Flake8Bugbear, "031") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ReuseOfGroupbyGenerator),
         (Flake8Bugbear, "032") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::UnintentionalTypeAnnotation),
+        (Flake8Bugbear, "033") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::DuplicateValue),
         (Flake8Bugbear, "904") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::RaiseWithoutFromInsideExcept),
         (Flake8Bugbear, "905") => (RuleGroup::Unspecified, rules::flake8_bugbear::rules::ZipWithoutExplicitStrict),
@@ -604,6 +605,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
         (Flake8Pyi, "021") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::DocstringInStub),
         (Flake8Pyi, "024") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::CollectionsNamedTuple),
         (Flake8Pyi, "025") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::UnaliasedCollectionsAbcSetImport),
+        (Flake8Pyi, "029") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::StrOrReprDefinedInStub),
         (Flake8Pyi, "032") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::AnyEqNeAnnotation),
         (Flake8Pyi, "033") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::TypeCommentInStub),
         (Flake8Pyi, "034") => (RuleGroup::Unspecified, rules::flake8_pyi::rules::NonSelfReturnType),
@@ -3,7 +3,7 @@ use libcst_native::{
     Arg, Attribute, Call, Comparison, CompoundStatement, Dict, Expression, FormattedString,
     FormattedStringContent, FormattedStringExpression, FunctionDef, GeneratorExp, If, Import,
     ImportAlias, ImportFrom, ImportNames, IndentedBlock, Lambda, ListComp, Module, Name,
-    SimpleString, SmallStatement, Statement, Suite, Tuple, With,
+    SmallStatement, Statement, Suite, Tuple, With,
 };
 
 pub(crate) fn match_module(module_text: &str) -> Result<Module> {
@@ -109,16 +109,6 @@ pub(crate) fn match_attribute<'a, 'b>(
     }
 }
 
-pub(crate) fn match_simple_string<'a, 'b>(
-    expression: &'a mut Expression<'b>,
-) -> Result<&'a mut SimpleString<'b>> {
-    if let Expression::SimpleString(simple_string) = expression {
-        Ok(simple_string)
-    } else {
-        bail!("Expected Expression::SimpleString")
-    }
-}
-
 pub(crate) fn match_formatted_string<'a, 'b>(
     expression: &'a mut Expression<'b>,
 ) -> Result<&'a mut FormattedString<'b>> {
@@ -3,11 +3,12 @@
 use std::error::Error;
 
 use anyhow::Result;
-use libcst_native::{Codegen, CodegenState, ImportAlias, Name, NameOrAttribute};
+use libcst_native::{ImportAlias, Name, NameOrAttribute};
 use ruff_text_size::TextSize;
 use rustpython_parser::ast::{self, Ranged, Stmt, Suite};
 
 use crate::autofix;
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::Edit;
 use ruff_python_ast::imports::{AnyImport, Import, ImportFrom};
 use ruff_python_ast::source_code::{Locator, Stylist};
@@ -87,7 +88,7 @@ impl<'a> Importer<'a> {
     ) -> Result<RuntimeImportEdit> {
         // Generate the modified import statement.
         let content = autofix::codemods::retain_imports(
-            &[import.full_name],
+            &[import.qualified_name],
             import.stmt,
             self.locator,
             self.stylist,
@@ -119,7 +120,7 @@ impl<'a> Importer<'a> {
     ) -> Result<TypingImportEdit> {
         // Generate the modified import statement.
         let content = autofix::codemods::retain_imports(
-            &[import.full_name],
+            &[import.qualified_name],
             import.stmt,
             self.locator,
             self.stylist,
@@ -324,13 +325,10 @@ impl<'a> Importer<'a> {
             asname: None,
             comma: aliases.last().and_then(|alias| alias.comma.clone()),
         });
-        let mut state = CodegenState {
-            default_newline: &self.stylist.line_ending(),
-            default_indent: self.stylist.indentation(),
-            ..CodegenState::default()
-        };
-        statement.codegen(&mut state);
-        Ok(Edit::range_replacement(state.to_string(), stmt.range()))
+        Ok(Edit::range_replacement(
+            statement.codegen_stylist(self.stylist),
+            stmt.range(),
+        ))
     }
 
     /// Add a `TYPE_CHECKING` block to the given module.
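The `CodegenStylist` helper imported earlier in this diff replaces the manual `CodegenState` setup above, so the edit body is produced in a single expression. A hedged sketch of the pattern, assuming a `libcst_native` statement and a `Stylist` as in the hunk:

    // Sketch only: `codegen_stylist` comes from crate::autofix::codemods::CodegenStylist.
    let edit = Edit::range_replacement(statement.codegen_stylist(self.stylist), stmt.range());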
@@ -449,7 +447,7 @@ pub(crate) struct StmtImport<'a> {
     /// The import statement.
     pub(crate) stmt: &'a Stmt,
     /// The "full name" of the imported module or member.
-    pub(crate) full_name: &'a str,
+    pub(crate) qualified_name: &'a str,
 }
 
 /// The result of an [`Importer::get_or_import_symbol`] call.
@@ -214,6 +214,7 @@ pub fn check_path(
             locator,
             indexer.comment_ranges(),
             &directives.noqa_line_for,
+            error.is_none(),
             settings,
         );
         if noqa.into() {
@@ -93,5 +93,6 @@ static REDIRECTS: Lazy<HashMap<&'static str, &'static str>> = Lazy::new(|| {
         // TODO(charlie): Remove by 2023-06-01.
         ("RUF004", "B026"),
         ("PIE802", "C419"),
+        ("PLW0130", "B033"),
     ])
 });
@@ -12,8 +12,10 @@ use crate::rule_redirects::get_redirect;
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum RuleSelector {
-    /// Select all rules.
+    /// Select all stable rules.
     All,
+    /// Select all nursery rules.
+    Nursery,
     /// Legacy category to select both the `mccabe` and `flake8-comprehensions` linters
     /// via a single selector.
     C,
@@ -39,30 +41,30 @@ impl FromStr for RuleSelector {
     type Err = ParseError;
 
     fn from_str(s: &str) -> Result<Self, Self::Err> {
-        if s == "ALL" {
-            Ok(Self::All)
-        } else if s == "C" {
-            Ok(Self::C)
-        } else if s == "T" {
-            Ok(Self::T)
-        } else {
-            let (s, redirected_from) = match get_redirect(s) {
-                Some((from, target)) => (target, Some(from)),
-                None => (s, None),
-            };
-
-            let (linter, code) =
-                Linter::parse_code(s).ok_or_else(|| ParseError::Unknown(s.to_string()))?;
-
-            if code.is_empty() {
-                return Ok(Self::Linter(linter));
-            }
-
-            Ok(Self::Prefix {
-                prefix: RuleCodePrefix::parse(&linter, code)
-                    .map_err(|_| ParseError::Unknown(s.to_string()))?,
-                redirected_from,
-            })
-        }
+        match s {
+            "ALL" => Ok(Self::All),
+            "NURSERY" => Ok(Self::Nursery),
+            "C" => Ok(Self::C),
+            "T" => Ok(Self::T),
+            _ => {
+                let (s, redirected_from) = match get_redirect(s) {
+                    Some((from, target)) => (target, Some(from)),
+                    None => (s, None),
+                };
+
+                let (linter, code) =
+                    Linter::parse_code(s).ok_or_else(|| ParseError::Unknown(s.to_string()))?;
+
+                if code.is_empty() {
+                    return Ok(Self::Linter(linter));
+                }
+
+                Ok(Self::Prefix {
+                    prefix: RuleCodePrefix::parse(&linter, code)
+                        .map_err(|_| ParseError::Unknown(s.to_string()))?,
+                    redirected_from,
+                })
+            }
+        }
     }
 }
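With the new arm above, "NURSERY" parses to its own selector instead of failing as an unknown code. A small usage sketch, assuming the crate's `RuleSelector` type is in scope (import path omitted as an assumption):

    use std::str::FromStr;

    // Sketch only: selector strings per the match arms above.
    let all = RuleSelector::from_str("ALL").unwrap();         // stable rules only
    let nursery = RuleSelector::from_str("NURSERY").unwrap(); // nursery rules only
    assert_eq!(nursery.prefix_and_code(), ("", "NURSERY"));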
@@ -79,6 +81,7 @@ impl RuleSelector {
     pub fn prefix_and_code(&self) -> (&'static str, &'static str) {
         match self {
             RuleSelector::All => ("", "ALL"),
+            RuleSelector::Nursery => ("", "NURSERY"),
             RuleSelector::C => ("", "C"),
             RuleSelector::T => ("", "T"),
             RuleSelector::Prefix { prefix, .. } => {
@@ -141,13 +144,6 @@ impl From<RuleCodePrefix> for RuleSelector {
     }
 }
 
-/// Returns `true` if the given rule should be selected by the `RuleSelector::All` selector.
-fn select_all(rule: Rule) -> bool {
-    // Nursery rules have to be explicitly selected, so we ignore them when looking at
-    // prefixes.
-    !rule.is_nursery()
-}
-
 impl IntoIterator for &RuleSelector {
     type Item = Rule;
     type IntoIter = RuleSelectorIter;
@@ -155,7 +151,10 @@ impl IntoIterator for &RuleSelector {
     fn into_iter(self) -> Self::IntoIter {
         match self {
             RuleSelector::All => {
-                RuleSelectorIter::All(Rule::iter().filter(|rule| select_all(*rule)))
+                RuleSelectorIter::All(Rule::iter().filter(|rule| !rule.is_nursery()))
+            }
+            RuleSelector::Nursery => {
+                RuleSelectorIter::Nursery(Rule::iter().filter(Rule::is_nursery))
             }
             RuleSelector::C => RuleSelectorIter::Chain(
                 Linter::Flake8Comprehensions
@@ -175,6 +174,7 @@ impl IntoIterator for &RuleSelector {
 
 pub enum RuleSelectorIter {
     All(std::iter::Filter<RuleIter, fn(&Rule) -> bool>),
+    Nursery(std::iter::Filter<RuleIter, fn(&Rule) -> bool>),
     Chain(std::iter::Chain<std::vec::IntoIter<Rule>, std::vec::IntoIter<Rule>>),
     Vec(std::vec::IntoIter<Rule>),
 }
@@ -185,6 +185,7 @@ impl Iterator for RuleSelectorIter {
     fn next(&mut self) -> Option<Self::Item> {
         match self {
             RuleSelectorIter::All(iter) => iter.next(),
+            RuleSelectorIter::Nursery(iter) => iter.next(),
             RuleSelectorIter::Chain(iter) => iter.next(),
             RuleSelectorIter::Vec(iter) => iter.next(),
         }
@@ -262,6 +263,7 @@ impl RuleSelector {
     pub(crate) fn specificity(&self) -> Specificity {
         match self {
             RuleSelector::All => Specificity::All,
+            RuleSelector::Nursery => Specificity::All,
             RuleSelector::T => Specificity::LinterGroup,
             RuleSelector::C => Specificity::LinterGroup,
             RuleSelector::Linter(..) => Specificity::Linter,
@@ -418,12 +418,14 @@ impl Violation for AnyType {
 fn is_none_returning(body: &[Stmt]) -> bool {
     let mut visitor = ReturnStatementVisitor::default();
     visitor.visit_body(body);
-    for expr in visitor.returns.into_iter().flatten() {
-        if !matches!(
-            expr,
-            Expr::Constant(ref constant) if constant.value.is_none()
-        ) {
-            return false;
+    for stmt in visitor.returns {
+        if let Some(value) = stmt.value.as_deref() {
+            if !matches!(
+                value,
+                Expr::Constant(constant) if constant.value.is_none()
+            ) {
+                return false;
+            }
         }
     }
     true
@@ -6,7 +6,7 @@ use ruff_macros::{CacheKey, CombineOptions, ConfigurationOptions};
 
 fn default_tmp_dirs() -> Vec<String> {
     ["/tmp", "/var/tmp", "/dev/shm"]
-        .map(std::string::ToString::to_string)
+        .map(ToString::to_string)
         .to_vec()
 }
 
@@ -10,6 +10,7 @@ pub(super) const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
     "assertEquals",
     "assertNotEqual",
     "assertNotEquals",
+    "bool",
     "bytes",
     "count",
     "failIfEqual",
@@ -27,6 +28,8 @@ pub(super) const FUNC_CALL_NAME_ALLOWLIST: &[&str] = &[
     "param",
     "pop",
     "remove",
+    "set_blocking",
+    "set_enabled",
     "setattr",
     "__setattr__",
     "setdefault",
@ -81,12 +81,12 @@ FBT.py:19:5: FBT001 Boolean positional arg in function definition
|
||||||
23 | kwonly_nonvalued_nohint,
|
23 | kwonly_nonvalued_nohint,
|
||||||
|
|
|
|
||||||
|
|
||||||
FBT.py:81:19: FBT001 Boolean positional arg in function definition
|
FBT.py:85:19: FBT001 Boolean positional arg in function definition
|
||||||
|
|
|
|
||||||
81 | # FBT001: Boolean positional arg in function definition
|
85 | # FBT001: Boolean positional arg in function definition
|
||||||
82 | def foo(self, value: bool) -> None:
|
86 | def foo(self, value: bool) -> None:
|
||||||
| ^^^^^^^^^^^ FBT001
|
| ^^^^^^^^^^^ FBT001
|
||||||
83 | pass
|
87 | pass
|
||||||
|
|
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -28,4 +28,12 @@ FBT.py:57:17: FBT003 Boolean positional value in function call
|
||||||
61 | mylist.index(True)
|
61 | mylist.index(True)
|
||||||
|
|
|
|
||||||
|
|
||||||
|
FBT.py:69:38: FBT003 Boolean positional value in function call
|
||||||
|
|
|
||||||
|
69 | os.set_blocking(0, False)
|
||||||
|
70 | g_action.set_enabled(True)
|
||||||
|
71 | settings.set_enable_developer_extras(True)
|
||||||
|
| ^^^^ FBT003
|
||||||
|
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -14,39 +14,40 @@ mod tests {
     use crate::settings::Settings;
     use crate::test::test_path;
 
-    #[test_case(Rule::UnaryPrefixIncrement, Path::new("B002.py"))]
-    #[test_case(Rule::AssignmentToOsEnviron, Path::new("B003.py"))]
-    #[test_case(Rule::UnreliableCallableCheck, Path::new("B004.py"))]
-    #[test_case(Rule::StripWithMultiCharacters, Path::new("B005.py"))]
-    #[test_case(Rule::MutableArgumentDefault, Path::new("B006_B008.py"))]
-    #[test_case(Rule::UnusedLoopControlVariable, Path::new("B007.py"))]
-    #[test_case(Rule::FunctionCallInDefaultArgument, Path::new("B006_B008.py"))]
-    #[test_case(Rule::GetAttrWithConstant, Path::new("B009_B010.py"))]
-    #[test_case(Rule::SetAttrWithConstant, Path::new("B009_B010.py"))]
-    #[test_case(Rule::AssertFalse, Path::new("B011.py"))]
-    #[test_case(Rule::JumpStatementInFinally, Path::new("B012.py"))]
-    #[test_case(Rule::RedundantTupleInExceptionHandler, Path::new("B013.py"))]
-    #[test_case(Rule::DuplicateHandlerException, Path::new("B014.py"))]
-    #[test_case(Rule::UselessComparison, Path::new("B015.py"))]
-    #[test_case(Rule::CannotRaiseLiteral, Path::new("B016.py"))]
-    #[test_case(Rule::AssertRaisesException, Path::new("B017.py"))]
-    #[test_case(Rule::UselessExpression, Path::new("B018.py"))]
-    #[test_case(Rule::CachedInstanceMethod, Path::new("B019.py"))]
-    #[test_case(Rule::LoopVariableOverridesIterator, Path::new("B020.py"))]
-    #[test_case(Rule::FStringDocstring, Path::new("B021.py"))]
-    #[test_case(Rule::UselessContextlibSuppress, Path::new("B022.py"))]
-    #[test_case(Rule::FunctionUsesLoopVariable, Path::new("B023.py"))]
     #[test_case(Rule::AbstractBaseClassWithoutAbstractMethod, Path::new("B024.py"))]
+    #[test_case(Rule::AssertFalse, Path::new("B011.py"))]
+    #[test_case(Rule::AssertRaisesException, Path::new("B017.py"))]
+    #[test_case(Rule::AssignmentToOsEnviron, Path::new("B003.py"))]
+    #[test_case(Rule::CachedInstanceMethod, Path::new("B019.py"))]
+    #[test_case(Rule::CannotRaiseLiteral, Path::new("B016.py"))]
+    #[test_case(Rule::DuplicateHandlerException, Path::new("B014.py"))]
     #[test_case(Rule::DuplicateTryBlockException, Path::new("B025.py"))]
-    #[test_case(Rule::StarArgUnpackingAfterKeywordArg, Path::new("B026.py"))]
+    #[test_case(Rule::DuplicateValue, Path::new("B033.py"))]
     #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.py"))]
     #[test_case(Rule::EmptyMethodWithoutAbstractDecorator, Path::new("B027.pyi"))]
-    #[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"))]
     #[test_case(Rule::ExceptWithEmptyTuple, Path::new("B029.py"))]
     #[test_case(Rule::ExceptWithNonExceptionClasses, Path::new("B030.py"))]
-    #[test_case(Rule::ReuseOfGroupbyGenerator, Path::new("B031.py"))]
-    #[test_case(Rule::UnintentionalTypeAnnotation, Path::new("B032.py"))]
+    #[test_case(Rule::FStringDocstring, Path::new("B021.py"))]
+    #[test_case(Rule::FunctionCallInDefaultArgument, Path::new("B006_B008.py"))]
+    #[test_case(Rule::FunctionUsesLoopVariable, Path::new("B023.py"))]
+    #[test_case(Rule::GetAttrWithConstant, Path::new("B009_B010.py"))]
+    #[test_case(Rule::JumpStatementInFinally, Path::new("B012.py"))]
+    #[test_case(Rule::LoopVariableOverridesIterator, Path::new("B020.py"))]
+    #[test_case(Rule::MutableArgumentDefault, Path::new("B006_B008.py"))]
+    #[test_case(Rule::NoExplicitStacklevel, Path::new("B028.py"))]
     #[test_case(Rule::RaiseWithoutFromInsideExcept, Path::new("B904.py"))]
+    #[test_case(Rule::RedundantTupleInExceptionHandler, Path::new("B013.py"))]
+    #[test_case(Rule::ReuseOfGroupbyGenerator, Path::new("B031.py"))]
+    #[test_case(Rule::SetAttrWithConstant, Path::new("B009_B010.py"))]
+    #[test_case(Rule::StarArgUnpackingAfterKeywordArg, Path::new("B026.py"))]
+    #[test_case(Rule::StripWithMultiCharacters, Path::new("B005.py"))]
+    #[test_case(Rule::UnaryPrefixIncrement, Path::new("B002.py"))]
+    #[test_case(Rule::UnintentionalTypeAnnotation, Path::new("B032.py"))]
+    #[test_case(Rule::UnreliableCallableCheck, Path::new("B004.py"))]
+    #[test_case(Rule::UnusedLoopControlVariable, Path::new("B007.py"))]
+    #[test_case(Rule::UselessComparison, Path::new("B015.py"))]
+    #[test_case(Rule::UselessContextlibSuppress, Path::new("B022.py"))]
+    #[test_case(Rule::UselessExpression, Path::new("B018.py"))]
     #[test_case(Rule::ZipWithoutExplicitStrict, Path::new("B905.py"))]
     fn rules(rule_code: Rule, path: &Path) -> Result<()> {
         let snapshot = format!("{}_{}", rule_code.noqa_code(), path.to_string_lossy());
@@ -0,0 +1,57 @@
+use rustc_hash::FxHashSet;
+use rustpython_parser::ast::{self, Expr, Ranged};
+
+use ruff_diagnostics::{Diagnostic, Violation};
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::comparable::ComparableExpr;
+
+use crate::checkers::ast::Checker;
+
+/// ## What it does
+/// Checks for set literals that contain duplicate items.
+///
+/// ## Why is this bad?
+/// In Python, sets are unordered collections of unique elements. Including a
+/// duplicate item in a set literal is redundant, as the duplicate item will be
+/// replaced with a single item at runtime.
+///
+/// ## Example
+/// ```python
+/// {1, 2, 3, 1}
+/// ```
+///
+/// Use instead:
+/// ```python
+/// {1, 2, 3}
+/// ```
+#[violation]
+pub struct DuplicateValue {
+    value: String,
+}
+
+impl Violation for DuplicateValue {
+    #[derive_message_formats]
+    fn message(&self) -> String {
+        let DuplicateValue { value } = self;
+        format!("Sets should not contain duplicate item `{value}`")
+    }
+}
+
+/// B033
+pub(crate) fn duplicate_value(checker: &mut Checker, elts: &Vec<Expr>) {
+    let mut seen_values: FxHashSet<ComparableExpr> = FxHashSet::default();
+    for elt in elts {
+        if let Expr::Constant(ast::ExprConstant { value, .. }) = elt {
+            let comparable_value: ComparableExpr = elt.into();
+
+            if !seen_values.insert(comparable_value) {
+                checker.diagnostics.push(Diagnostic::new(
+                    DuplicateValue {
+                        value: checker.generator().constant(value),
+                    },
+                    elt.range(),
+                ));
+            }
+        };
+    }
+}
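A hedged reading of the new rule above: because the duplicate check is guarded by `if let Expr::Constant(...)`, only repeated constant items are reported; repeated names or calls fall through. A minimal Python sketch of that behaviour, inferred solely from the code in this hunk:

```python
{1, 2, 3, 1}      # B033: duplicate constant `1`
{"a", "b", "a"}   # B033: duplicate constant `"a"`

x = 1
{x, x}            # not reported by this check: the elements are names, not constants
```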
@@ -10,6 +10,7 @@ pub(crate) use cannot_raise_literal::{cannot_raise_literal, CannotRaiseLiteral};
 pub(crate) use duplicate_exceptions::{
     duplicate_exceptions, DuplicateHandlerException, DuplicateTryBlockException,
 };
+pub(crate) use duplicate_value::{duplicate_value, DuplicateValue};
 pub(crate) use except_with_empty_tuple::{except_with_empty_tuple, ExceptWithEmptyTuple};
 pub(crate) use except_with_non_exception_classes::{
     except_with_non_exception_classes, ExceptWithNonExceptionClasses,
@@ -66,6 +67,7 @@ mod assignment_to_os_environ;
 mod cached_instance_method;
 mod cannot_raise_literal;
 mod duplicate_exceptions;
+mod duplicate_value;
 mod except_with_empty_tuple;
 mod except_with_non_exception_classes;
 mod f_string_docstring;
@@ -0,0 +1,23 @@
+---
+source: crates/ruff/src/rules/flake8_bugbear/mod.rs
+---
+B033.py:4:35: B033 Sets should not contain duplicate item `"value1"`
+   |
+4 | # Errors.
+5 | ###
+6 | incorrect_set = {"value1", 23, 5, "value1"}
+   | ^^^^^^^^ B033
+7 | incorrect_set = {1, 1}
+   |
+
+B033.py:5:21: B033 Sets should not contain duplicate item `1`
+   |
+5 | ###
+6 | incorrect_set = {"value1", 23, 5, "value1"}
+7 | incorrect_set = {1, 1}
+   | ^ B033
+8 |
+9 | ###
+   |
+
+
@@ -1,14 +1,15 @@
 use anyhow::{bail, Result};
 use itertools::Itertools;
 use libcst_native::{
-    Arg, AssignEqual, AssignTargetExpression, Call, Codegen, CodegenState, Comment, CompFor, Dict,
-    DictComp, DictElement, Element, EmptyLine, Expression, GeneratorExp, LeftCurlyBrace, LeftParen,
-    LeftSquareBracket, List, ListComp, Name, ParenthesizableWhitespace, ParenthesizedWhitespace,
-    RightCurlyBrace, RightParen, RightSquareBracket, Set, SetComp, SimpleString, SimpleWhitespace,
+    Arg, AssignEqual, AssignTargetExpression, Call, Comment, CompFor, Dict, DictComp, DictElement,
+    Element, EmptyLine, Expression, GeneratorExp, LeftCurlyBrace, LeftParen, LeftSquareBracket,
+    List, ListComp, Name, ParenthesizableWhitespace, ParenthesizedWhitespace, RightCurlyBrace,
+    RightParen, RightSquareBracket, Set, SetComp, SimpleString, SimpleWhitespace,
     TrailingWhitespace, Tuple,
 };
 use rustpython_parser::ast::Ranged;
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::{Edit, Fix};
 use ruff_python_ast::source_code::{Locator, Stylist};
 
@@ -44,14 +45,10 @@ pub(crate) fn fix_unnecessary_generator_list(
         rpar: generator_exp.rpar.clone(),
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C401) Convert `set(x for x in y)` to `{x for x in y}`.
@@ -82,14 +79,7 @@ pub(crate) fn fix_unnecessary_generator_set(
         rpar: generator_exp.rpar.clone(),
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    let mut content = state.to_string();
+    let mut content = tree.codegen_stylist(stylist);
 
     // If the expression is embedded in an f-string, surround it with spaces to avoid
     // syntax errors.
@@ -136,14 +126,7 @@ pub(crate) fn fix_unnecessary_generator_dict(
         whitespace_after_colon: ParenthesizableWhitespace::SimpleWhitespace(SimpleWhitespace(" ")),
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    let mut content = state.to_string();
+    let mut content = tree.codegen_stylist(stylist);
 
     // If the expression is embedded in an f-string, surround it with spaces to avoid
     // syntax errors.
@@ -182,14 +165,10 @@ pub(crate) fn fix_unnecessary_list_comprehension_set(
         rpar: list_comp.rpar.clone(),
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C404) Convert `dict([(i, i) for i in range(3)])` to `{i: i for i in
@@ -229,14 +208,10 @@ pub(crate) fn fix_unnecessary_list_comprehension_dict(
         rpar: list_comp.rpar.clone(),
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// Drop a trailing comma from a list of tuple elements.
@@ -291,7 +266,7 @@ pub(crate) fn fix_unnecessary_literal_set(
     // Expr(Call(List|Tuple)))) -> Expr(Set)))
     let module_text = locator.slice(expr.range());
     let mut tree = match_expression(module_text)?;
-    let mut call = match_call_mut(&mut tree)?;
+    let call = match_call_mut(&mut tree)?;
     let arg = match_arg(call)?;
 
     let (elements, whitespace_after, whitespace_before) = match &arg.value {
@@ -318,14 +293,10 @@ pub(crate) fn fix_unnecessary_literal_set(
         }));
     }
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C406) Convert `dict([(1, 2)])` to `{1: 2}`.
@@ -386,14 +357,10 @@ pub(crate) fn fix_unnecessary_literal_dict(
         rpar: vec![],
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C408)
@@ -495,14 +462,10 @@ pub(crate) fn fix_unnecessary_collection_call(
         }
     };
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C409) Convert `tuple([1, 2])` to `tuple(1, 2)`
@@ -549,14 +512,10 @@ pub(crate) fn fix_unnecessary_literal_within_tuple_call(
         }],
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C410) Convert `list([1, 2])` to `[1, 2]`
@@ -605,14 +564,10 @@ pub(crate) fn fix_unnecessary_literal_within_list_call(
         rpar: vec![],
     }));
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C411) Convert `list([i * i for i in x])` to `[i * i for i in x]`.
@@ -629,14 +584,10 @@ pub(crate) fn fix_unnecessary_list_call(
 
     tree = arg.value.clone();
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C413) Convert `list(sorted([2, 3, 1]))` to `sorted([2, 3, 1])`.
@@ -747,14 +698,10 @@ pub(crate) fn fix_unnecessary_call_around_sorted(
         }
     }
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C414) Convert `sorted(list(foo))` to `sorted(foo)`
@@ -765,7 +712,7 @@ pub(crate) fn fix_unnecessary_double_cast_or_process(
 ) -> Result<Edit> {
     let module_text = locator.slice(expr.range());
     let mut tree = match_expression(module_text)?;
-    let mut outer_call = match_call_mut(&mut tree)?;
+    let outer_call = match_call_mut(&mut tree)?;
 
     outer_call.args = match outer_call.args.split_first() {
         Some((first, rest)) => {
@@ -781,14 +728,10 @@ pub(crate) fn fix_unnecessary_double_cast_or_process(
         None => bail!("Expected at least one argument in outer function call"),
     };
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C416) Convert `[i for i in x]` to `list(x)`.
@@ -872,14 +815,10 @@ pub(crate) fn fix_unnecessary_comprehension(
         }
     }
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C417) Convert `map(lambda x: x * 2, bar)` to `(x * 2 for x in bar)`.
@@ -1018,14 +957,7 @@ pub(crate) fn fix_unnecessary_map(
         }
     }
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    let mut content = state.to_string();
+    let mut content = tree.codegen_stylist(stylist);
 
     // If the expression is embedded in an f-string, surround it with spaces to avoid
     // syntax errors.
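For orientation, the `/// (C4xx)` doc comments in this file already spell out the rewrites these fixers produce; the refactor above only changes how the fixed CST is rendered back to source. A few of the documented transformations, restated as plain Python (taken from the comments in the surrounding hunks, not from new behaviour):

```python
y = [1, 2, 3]
bar = [1, 2, 3]

# C401: set(x for x in y)  ->  {x for x in y}
s = {x for x in y}

# C404: dict([(i, i) for i in range(3)])  ->  {i: i for i in range(3)}
d = {i: i for i in range(3)}

# C417: map(lambda x: x * 2, bar)  ->  (x * 2 for x in bar)
doubled = (x * 2 for x in bar)
```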
@@ -1054,14 +986,10 @@ pub(crate) fn fix_unnecessary_literal_within_dict_call(
 
     tree = arg.value.clone();
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
-    Ok(Edit::range_replacement(state.to_string(), expr.range()))
+    Ok(Edit::range_replacement(
+        tree.codegen_stylist(stylist),
+        expr.range(),
+    ))
 }
 
 /// (C419) Convert `[i for i in a]` into `i for i in a`
@@ -1231,15 +1159,8 @@ pub(crate) fn fix_unnecessary_comprehension_any_all(
         _ => whitespace_after_arg,
     };
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
     Ok(Fix::suggested(Edit::range_replacement(
-        state.to_string(),
+        tree.codegen_stylist(stylist),
         expr.range(),
     )))
 }
@@ -48,6 +48,8 @@ mod tests {
     #[test_case(Rule::SnakeCaseTypeAlias, Path::new("PYI042.pyi"))]
     #[test_case(Rule::UnassignedSpecialVariableInStub, Path::new("PYI035.py"))]
     #[test_case(Rule::UnassignedSpecialVariableInStub, Path::new("PYI035.pyi"))]
+    #[test_case(Rule::StrOrReprDefinedInStub, Path::new("PYI029.py"))]
+    #[test_case(Rule::StrOrReprDefinedInStub, Path::new("PYI029.pyi"))]
     #[test_case(Rule::StubBodyMultipleStatements, Path::new("PYI048.py"))]
     #[test_case(Rule::StubBodyMultipleStatements, Path::new("PYI048.pyi"))]
     #[test_case(Rule::TSuffixedTypeAlias, Path::new("PYI043.py"))]
@@ -27,6 +27,7 @@ pub(crate) use simple_defaults::{
     unassigned_special_variable_in_stub, ArgumentDefaultInStub, AssignmentDefaultInStub,
     TypedArgumentDefaultInStub, UnannotatedAssignmentInStub, UnassignedSpecialVariableInStub,
 };
+pub(crate) use str_or_repr_defined_in_stub::{str_or_repr_defined_in_stub, StrOrReprDefinedInStub};
 pub(crate) use string_or_bytes_too_long::{string_or_bytes_too_long, StringOrBytesTooLong};
 pub(crate) use stub_body_multiple_statements::{
     stub_body_multiple_statements, StubBodyMultipleStatements,
@@ -58,6 +59,7 @@ mod pass_statement_stub_body;
 mod prefix_type_params;
 mod quoted_annotation_in_stub;
 mod simple_defaults;
+mod str_or_repr_defined_in_stub;
 mod string_or_bytes_too_long;
 mod stub_body_multiple_statements;
 mod type_alias_naming;
@@ -0,0 +1,110 @@
+use rustpython_parser::ast;
+use rustpython_parser::ast::Stmt;
+
+use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix};
+use ruff_macros::{derive_message_formats, violation};
+use ruff_python_ast::helpers::identifier_range;
+use ruff_python_semantic::analyze::visibility::is_abstract;
+
+use crate::autofix::edits::delete_stmt;
+use crate::checkers::ast::Checker;
+use crate::registry::AsRule;
+
+/// ## What it does
+/// Checks for redundant definitions of `__str__` or `__repr__` in stubs.
+///
+/// ## Why is this bad?
+/// Defining `__str__` or `__repr__` in a stub is almost always redundant,
+/// as the signatures are almost always identical to those of the default
+/// equivalent, `object.__str__` and `object.__repr__`, respectively.
+///
+/// ## Example
+/// ```python
+/// class Foo:
+///     def __repr__(self) -> str:
+///         ...
+/// ```
+#[violation]
+pub struct StrOrReprDefinedInStub {
+    name: String,
+}
+
+impl AlwaysAutofixableViolation for StrOrReprDefinedInStub {
+    #[derive_message_formats]
+    fn message(&self) -> String {
+        let StrOrReprDefinedInStub { name } = self;
+        format!("Defining `{name}` in a stub is almost always redundant")
+    }
+
+    fn autofix_title(&self) -> String {
+        let StrOrReprDefinedInStub { name } = self;
+        format!("Remove definition of `{name}`")
+    }
+}
+
+/// PYI029
+pub(crate) fn str_or_repr_defined_in_stub(checker: &mut Checker, stmt: &Stmt) {
+    let Stmt::FunctionDef(ast::StmtFunctionDef {
+        name,
+        decorator_list,
+        returns,
+        args,
+        ..
+    }) = stmt else {
+        return
+    };
+
+    let Some(returns) = returns else {
+        return;
+    };
+
+    if !matches!(name.as_str(), "__str__" | "__repr__") {
+        return;
+    }
+
+    if !checker.semantic_model().scope().kind.is_class() {
+        return;
+    }
+
+    // It is a violation only if the method signature matches that of `object.__str__`
+    // or `object.__repr__` exactly and the method is not decorated as abstract.
+    if !args.kwonlyargs.is_empty() || (args.args.len() + args.posonlyargs.len()) > 1 {
+        return;
+    }
+
+    if is_abstract(checker.semantic_model(), decorator_list) {
+        return;
+    }
+
+    if checker
+        .semantic_model()
+        .resolve_call_path(returns)
+        .map_or(true, |call_path| {
+            !matches!(call_path.as_slice(), ["" | "builtins", "str"])
+        })
+    {
+        return;
+    }
+
+    let mut diagnostic = Diagnostic::new(
+        StrOrReprDefinedInStub {
+            name: name.to_string(),
+        },
+        identifier_range(stmt, checker.locator),
+    );
+    if checker.patch(diagnostic.kind.rule()) {
+        let stmt = checker.semantic_model().stmt();
+        let parent = checker.semantic_model().stmt_parent();
+        let edit = delete_stmt(
+            stmt,
+            parent,
+            checker.locator,
+            checker.indexer,
+            checker.stylist,
+        );
+        diagnostic.set_fix(
+            Fix::automatic(edit).isolate(checker.isolation(checker.semantic_model().stmt_parent())),
+        );
+    }
+    checker.diagnostics.push(diagnostic);
+}
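Putting the checks above together, a stub roughly like the following would be handled as sketched in the comments. This is an assumed illustration built from the rule's guards (return annotation required, at most one positional argument, not abstract, return type resolving to `builtins.str`); it is not the PYI029.pyi fixture itself:

```python
import abc
import builtins

class Flagged:
    def __str__(self) -> builtins.str: ...   # PYI029: signature matches object.__str__
    def __repr__(self) -> str: ...           # PYI029: signature matches object.__repr__

class Skipped:
    def __str__(self): ...                                    # no return annotation
    def __repr__(self, *, verbose: bool = False) -> str: ...  # extra keyword-only argument

class AlsoSkipped(abc.ABC):
    @abc.abstractmethod
    def __str__(self) -> str: ...            # decorated as abstract
```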
@@ -0,0 +1,4 @@
+---
+source: crates/ruff/src/rules/flake8_pyi/mod.rs
+---
+
@@ -0,0 +1,62 @@
+---
+source: crates/ruff/src/rules/flake8_pyi/mod.rs
+---
+PYI029.pyi:10:9: PYI029 [*] Defining `__str__` in a stub is almost always redundant
+   |
+10 | class ShouldRemoveSingle:
+11 |     def __str__(self) -> builtins.str: ...  # Error: PYI029
+   |         ^^^^^^^ PYI029
+12 |
+13 | class ShouldRemove:
+   |
+   = help: Remove definition of `__str__`
+
+ℹ Fix
+7 7 |     def __repr__(self, *, foo) -> str: ...
+8 8 |
+9 9 | class ShouldRemoveSingle:
+10   |-    def __str__(self) -> builtins.str: ...  # Error: PYI029
+   10 |+    pass  # Error: PYI029
+11 11 |
+12 12 | class ShouldRemove:
+13 13 |     def __repr__(self) -> str: ...  # Error: PYI029
+
+PYI029.pyi:13:9: PYI029 [*] Defining `__repr__` in a stub is almost always redundant
+   |
+13 | class ShouldRemove:
+14 |     def __repr__(self) -> str: ...  # Error: PYI029
+   |         ^^^^^^^^ PYI029
+15 |     def __str__(self) -> builtins.str: ...  # Error: PYI029
+   |
+   = help: Remove definition of `__repr__`
+
+ℹ Fix
+10 10 |     def __str__(self) -> builtins.str: ...  # Error: PYI029
+11 11 |
+12 12 | class ShouldRemove:
+13    |-    def __repr__(self) -> str: ...  # Error: PYI029
+14 13 |     def __str__(self) -> builtins.str: ...  # Error: PYI029
+15 14 |
+16 15 | class NoReturnSpecified:
+
+PYI029.pyi:14:9: PYI029 [*] Defining `__str__` in a stub is almost always redundant
+   |
+14 | class ShouldRemove:
+15 |     def __repr__(self) -> str: ...  # Error: PYI029
+16 |     def __str__(self) -> builtins.str: ...  # Error: PYI029
+   |         ^^^^^^^ PYI029
+17 |
+18 | class NoReturnSpecified:
+   |
+   = help: Remove definition of `__str__`
+
+ℹ Fix
+11 11 |
+12 12 | class ShouldRemove:
+13 13 |     def __repr__(self) -> str: ...  # Error: PYI029
+14    |-    def __str__(self) -> builtins.str: ...  # Error: PYI029
+15 14 |
+16 15 | class NoReturnSpecified:
+17 16 |     def __str__(self): ...
+
+
@@ -3,12 +3,13 @@ use std::borrow::Cow;
 use anyhow::bail;
 use anyhow::Result;
 use libcst_native::{
-    Assert, BooleanOp, Codegen, CodegenState, CompoundStatement, Expression,
-    ParenthesizableWhitespace, ParenthesizedNode, SimpleStatementLine, SimpleWhitespace,
-    SmallStatement, Statement, TrailingWhitespace, UnaryOp, UnaryOperation,
+    Assert, BooleanOp, CompoundStatement, Expression, ParenthesizableWhitespace, ParenthesizedNode,
+    SimpleStatementLine, SimpleWhitespace, SmallStatement, Statement, TrailingWhitespace, UnaryOp,
+    UnaryOperation,
 };
 use rustpython_parser::ast::{self, Boolop, Excepthandler, Expr, Keyword, Ranged, Stmt, Unaryop};
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
 use ruff_macros::{derive_message_formats, violation};
 use ruff_python_ast::helpers::{has_comments_in, Truthiness};
@@ -410,15 +411,8 @@ fn fix_composite_condition(stmt: &Stmt, locator: &Locator, stylist: &Stylist) ->
         }));
     }
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
     // Reconstruct and reformat the code.
-    let module_text = state.to_string();
+    let module_text = tree.codegen_stylist(stylist);
     let contents = if outer_indent.is_empty() {
         module_text
     } else {
@@ -16,7 +16,7 @@ fn default_broad_exceptions() -> Vec<String> {
     "EnvironmentError",
     "socket.error",
     ]
-    .map(std::string::ToString::to_string)
+    .map(ToString::to_string)
     .to_vec()
 }
 
@@ -506,34 +506,36 @@ fn implicit_return(checker: &mut Checker, stmt: &Stmt) {
     }
 }
 
-/// Return `true` if the `id` has multiple assignments within the function.
-fn has_multiple_assigns(id: &str, stack: &Stack) -> bool {
-    if let Some(assigns) = stack.assignments.get(&id) {
-        if assigns.len() > 1 {
-            return true;
-        }
-    }
-    false
+/// Return `true` if the `id` has multiple declarations within the function.
+fn has_multiple_declarations(id: &str, stack: &Stack) -> bool {
+    stack
+        .declarations
+        .get(&id)
+        .map_or(false, |declarations| declarations.len() > 1)
 }
 
 /// Return `true` if the `id` has a (read) reference between the `return_location` and its
-/// preceding assignment.
-fn has_refs_before_next_assign(id: &str, return_range: TextRange, stack: &Stack) -> bool {
-    let mut assignment_before_return: Option<TextSize> = None;
-    let mut assignment_after_return: Option<TextSize> = None;
-    if let Some(assignments) = stack.assignments.get(&id) {
+/// preceding declaration.
+fn has_references_before_next_declaration(
+    id: &str,
+    return_range: TextRange,
+    stack: &Stack,
+) -> bool {
+    let mut declaration_before_return: Option<TextSize> = None;
+    let mut declaration_after_return: Option<TextSize> = None;
+    if let Some(assignments) = stack.declarations.get(&id) {
         for location in assignments.iter().sorted() {
             if *location > return_range.start() {
-                assignment_after_return = Some(*location);
+                declaration_after_return = Some(*location);
                 break;
             }
-            assignment_before_return = Some(*location);
+            declaration_before_return = Some(*location);
         }
     }
 
-    // If there is no assignment before the return, then the variable must be defined in
+    // If there is no declaration before the return, then the variable must be declared in
     // some other way (e.g., a function argument). No need to check for references.
-    let Some(assignment_before_return) = assignment_before_return else {
+    let Some(declaration_before_return) = declaration_before_return else {
         return true;
     };
 
@@ -543,9 +545,9 @@ fn has_refs_before_next_assign(id: &str, return_range: TextRange, stack: &Stack)
             continue;
         }
 
-        if assignment_before_return < *location {
-            if let Some(assignment_after_return) = assignment_after_return {
-                if *location <= assignment_after_return {
+        if declaration_before_return < *location {
+            if let Some(declaration_after_return) = declaration_after_return {
+                if *location <= declaration_after_return {
                     return true;
                 }
             } else {
@@ -559,7 +561,7 @@ fn has_refs_before_next_assign(id: &str, return_range: TextRange, stack: &Stack)
 }
 
 /// Return `true` if the `id` has a read or write reference within a `try` or loop body.
-fn has_refs_or_assigns_within_try_or_loop(id: &str, stack: &Stack) -> bool {
+fn has_references_or_declarations_within_try_or_loop(id: &str, stack: &Stack) -> bool {
     if let Some(references) = stack.references.get(&id) {
         for location in references {
             for try_range in &stack.tries {
@@ -574,7 +576,7 @@ fn has_refs_or_assigns_within_try_or_loop(id: &str, stack: &Stack) -> bool {
             }
         }
     }
-    if let Some(references) = stack.assignments.get(&id) {
+    if let Some(references) = stack.declarations.get(&id) {
         for location in references {
             for try_range in &stack.tries {
                 if try_range.contains(*location) {
@@ -594,7 +596,7 @@ fn has_refs_or_assigns_within_try_or_loop(id: &str, stack: &Stack) -> bool {
 /// RET504
 fn unnecessary_assign(checker: &mut Checker, stack: &Stack, expr: &Expr) {
     if let Expr::Name(ast::ExprName { id, .. }) = expr {
-        if !stack.assignments.contains_key(id.as_str()) {
+        if !stack.assigned_names.contains(id.as_str()) {
             return;
         }
 
@@ -605,9 +607,9 @@ fn unnecessary_assign(checker: &mut Checker, stack: &Stack, expr: &Expr) {
             return;
         }
 
-        if has_multiple_assigns(id, stack)
-            || has_refs_before_next_assign(id, expr.range(), stack)
-            || has_refs_or_assigns_within_try_or_loop(id, stack)
+        if has_multiple_declarations(id, stack)
+            || has_references_before_next_declaration(id, expr.range(), stack)
+            || has_references_or_declarations_within_try_or_loop(id, stack)
         {
             return;
         }
@@ -11,9 +11,14 @@ pub(crate) struct Stack<'a> {
     pub(crate) yields: Vec<&'a Expr>,
     pub(crate) elses: Vec<&'a Stmt>,
     pub(crate) elifs: Vec<&'a Stmt>,
+    /// The names that are assigned to in the current scope (e.g., anything on the left-hand side of
+    /// an assignment).
+    pub(crate) assigned_names: FxHashSet<&'a str>,
+    /// The names that are declared in the current scope, and the ranges of those declarations
+    /// (e.g., assignments, but also function and class definitions).
+    pub(crate) declarations: FxHashMap<&'a str, Vec<TextSize>>,
     pub(crate) references: FxHashMap<&'a str, Vec<TextSize>>,
     pub(crate) non_locals: FxHashSet<&'a str>,
-    pub(crate) assignments: FxHashMap<&'a str, Vec<TextSize>>,
     pub(crate) loops: Vec<TextRange>,
     pub(crate) tries: Vec<TextRange>,
 }
@@ -34,8 +39,9 @@ impl<'a> ReturnVisitor<'a> {
                 return;
             }
             Expr::Name(ast::ExprName { id, .. }) => {
+                self.stack.assigned_names.insert(id.as_str());
                 self.stack
-                    .assignments
+                    .declarations
                     .entry(id)
                     .or_insert_with(Vec::new)
                     .push(expr.start());
@@ -45,7 +51,7 @@ impl<'a> ReturnVisitor<'a> {
                 // Attribute assignments are often side-effects (e.g., `self.property = value`),
                 // so we conservatively treat them as references to every known
                 // variable.
-                for name in self.stack.assignments.keys() {
+                for name in self.stack.declarations.keys() {
                     self.stack
                         .references
                         .entry(name)
@@ -68,18 +74,44 @@ impl<'a> Visitor<'a> for ReturnVisitor<'a> {
                     .non_locals
                     .extend(names.iter().map(Identifier::as_str));
             }
-            Stmt::FunctionDef(ast::StmtFunctionDef {
+            Stmt::ClassDef(ast::StmtClassDef {
                 decorator_list,
+                name,
+                ..
+            }) => {
+                // Mark a declaration.
+                self.stack
+                    .declarations
+                    .entry(name.as_str())
+                    .or_insert_with(Vec::new)
+                    .push(stmt.start());
+
+                // Don't recurse into the body, but visit the decorators, etc.
+                for expr in decorator_list {
+                    visitor::walk_expr(self, expr);
+                }
+            }
+            Stmt::FunctionDef(ast::StmtFunctionDef {
+                name,
                 args,
+                decorator_list,
                 returns,
                 ..
             })
             | Stmt::AsyncFunctionDef(ast::StmtAsyncFunctionDef {
-                decorator_list,
+                name,
                 args,
+                decorator_list,
                 returns,
                 ..
             }) => {
+                // Mark a declaration.
+                self.stack
+                    .declarations
+                    .entry(name.as_str())
+                    .or_insert_with(Vec::new)
+                    .push(stmt.start());
+
                 // Don't recurse into the body, but visit the decorators, etc.
                 for expr in decorator_list {
                     visitor::walk_expr(self, expr);
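The visitor change above records class and function definitions as declarations, alongside plain assignments (which now also populate `assigned_names`). A hedged Python sketch of the kind of case this distinction feeds into; the exact RET504 behaviour here is inferred from `has_multiple_declarations`, not from a test in this diff:

```python
def single_declaration():
    value = 1 + 1
    return value       # RET504 territory: `value` has exactly one declaration


def shadowed_declaration():
    def result():
        return 1

    result = result()  # second declaration of `result` (the `def` above is the first)
    return result      # with multiple declarations the check bails out early
```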
@@ -138,7 +170,7 @@ impl<'a> Visitor<'a> for ReturnVisitor<'a> {
 
         if let Some(target) = targets.first() {
             // Skip unpacking assignments, like `x, y = my_object`.
-            if matches!(target, Expr::Tuple(_)) && !value.is_tuple_expr() {
+            if target.is_tuple_expr() && !value.is_tuple_expr() {
                 return;
             }
 
@@ -172,7 +204,7 @@ impl<'a> Visitor<'a> for ReturnVisitor<'a> {
             Expr::Call(_) => {
                 // Arbitrary function calls can have side effects, so we conservatively treat
                 // every function call as a reference to every known variable.
-                for name in self.stack.assignments.keys() {
+                for name in self.stack.declarations.keys() {
                     self.stack
                         .references
                         .entry(name)
@@ -2,12 +2,12 @@ use std::borrow::Cow;
 
 use anyhow::{bail, Result};
 use libcst_native::{
-    BooleanOp, BooleanOperation, Codegen, CodegenState, CompoundStatement, Expression, If,
-    LeftParen, ParenthesizableWhitespace, ParenthesizedNode, RightParen, SimpleWhitespace,
-    Statement, Suite,
+    BooleanOp, BooleanOperation, CompoundStatement, Expression, If, LeftParen,
+    ParenthesizableWhitespace, ParenthesizedNode, RightParen, SimpleWhitespace, Statement, Suite,
 };
 use rustpython_parser::ast::Ranged;
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::Edit;
 use ruff_python_ast::source_code::{Locator, Stylist};
 use ruff_python_ast::whitespace;
@@ -111,15 +111,8 @@ pub(crate) fn fix_nested_if_statements(
     }));
     outer_if.body = inner_if.body.clone();
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..Default::default()
-    };
-    tree.codegen(&mut state);
-
     // Reconstruct and reformat the code.
-    let module_text = state.to_string();
+    let module_text = tree.codegen_stylist(stylist);
     let module_text = if outer_indent.is_empty() {
         &module_text
     } else {
@@ -1,7 +1,8 @@
 use anyhow::{bail, Result};
-use libcst_native::{Codegen, CodegenState, CompoundStatement, Statement, Suite, With};
+use libcst_native::{CompoundStatement, Statement, Suite, With};
 use rustpython_parser::ast::Ranged;
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::Edit;
 use ruff_python_ast::source_code::{Locator, Stylist};
 use ruff_python_ast::whitespace;
@@ -70,15 +71,8 @@ pub(crate) fn fix_multiple_with_statements(
     }
     outer_with.body = inner_with.body.clone();
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    tree.codegen(&mut state);
-
     // Reconstruct and reformat the code.
-    let module_text = state.to_string();
+    let module_text = tree.codegen_stylist(stylist);
     let contents = if outer_indent.is_empty() {
         module_text
     } else {
@@ -1,9 +1,10 @@
 use anyhow::Result;
-use libcst_native::{Codegen, CodegenState};
 use log::error;
 use ruff_text_size::TextRange;
 use rustpython_parser::ast::{self, Cmpop, Expr, Ranged};
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::Edit;
 use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Fix};
 use ruff_macros::{derive_message_formats, violation};
@@ -42,14 +43,7 @@ fn get_value_content_for_key_in_dict(
     let call = match_call_mut(&mut expression)?;
     let attribute = match_attribute(&mut call.func)?;
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    attribute.value.codegen(&mut state);
-
-    Ok(state.to_string())
+    Ok(attribute.value.codegen_stylist(stylist))
 }
 
 /// SIM118
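For context on the helper being simplified here: SIM118 rewrites dictionary membership tests that go through `.keys()`, and the change above only affects how the matched `dict` expression is re-rendered. A short sketch of the rule itself (the before/after shape is standard SIM118 behaviour, assumed rather than shown in this diff):

```python
ages = {"alice": 30, "bob": 31}

if "alice" in ages.keys():  # SIM118
    ...

if "alice" in ages:         # preferred form
    ...
```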
@@ -1,7 +1,8 @@
 use anyhow::Result;
-use libcst_native::{Codegen, CodegenState, CompOp};
+use libcst_native::CompOp;
 use rustpython_parser::ast::{self, Cmpop, Expr, Ranged, Unaryop};
 
+use crate::autofix::codemods::CodegenStylist;
 use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
 use ruff_macros::{derive_message_formats, violation};
 use ruff_python_ast::source_code::{Locator, Stylist};
@@ -59,7 +60,7 @@ fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Resu
     let contents = locator.slice(range);
 
     let mut expression = match_expression(contents)?;
-    let mut comparison = match_comparison(&mut expression)?;
+    let comparison = match_comparison(&mut expression)?;
 
     let left = (*comparison.left).clone();
 
@@ -117,13 +118,7 @@ fn reverse_comparison(expr: &Expr, locator: &Locator, stylist: &Stylist) -> Resu
         _ => panic!("Expected comparison operator"),
     };
 
-    let mut state = CodegenState {
-        default_newline: &stylist.line_ending(),
-        default_indent: stylist.indentation(),
-        ..CodegenState::default()
-    };
-    expression.codegen(&mut state);
-    Ok(state.to_string())
+    Ok(expression.codegen_stylist(stylist))
 }
 
 /// SIM300
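`reverse_comparison` backs the SIM300 fix, which flips Yoda-style comparisons so the variable comes first; again, only the rendering of the rewritten expression changes above. A brief illustration (standard SIM300 behaviour, assumed rather than taken from this diff):

```python
age = 42

if 42 == age:   # SIM300: constant on the left
    ...

if age == 42:   # preferred form after the fix
    ...
```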
@@ -1,8 +1,6 @@
 use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
 use ruff_macros::{derive_message_formats, violation};
-use ruff_python_semantic::binding::{
-    Binding, BindingKind, FromImportation, Importation, SubmoduleImportation,
-};
+use ruff_python_semantic::binding::Binding;
 
 use crate::autofix;
 use crate::checkers::ast::Checker;
@@ -41,7 +39,7 @@ use crate::registry::AsRule;
 /// - [PEP 535](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
 #[violation]
 pub struct RuntimeImportInTypeCheckingBlock {
-    full_name: String,
+    qualified_name: String,
 }
 
 impl Violation for RuntimeImportInTypeCheckingBlock {
@@ -49,9 +47,9 @@ impl Violation for RuntimeImportInTypeCheckingBlock {
 
     #[derive_message_formats]
     fn message(&self) -> String {
-        let RuntimeImportInTypeCheckingBlock { full_name } = self;
+        let RuntimeImportInTypeCheckingBlock { qualified_name } = self;
         format!(
-            "Move import `{full_name}` out of type-checking block. Import is used for more than type hinting."
+            "Move import `{qualified_name}` out of type-checking block. Import is used for more than type hinting."
         )
     }
 
@@ -66,11 +64,8 @@ pub(crate) fn runtime_import_in_type_checking_block(
     binding: &Binding,
     diagnostics: &mut Vec<Diagnostic>,
 ) {
-    let full_name = match &binding.kind {
-        BindingKind::Importation(Importation { full_name }) => full_name,
-        BindingKind::FromImportation(FromImportation { full_name }) => full_name.as_str(),
-        BindingKind::SubmoduleImportation(SubmoduleImportation { full_name }) => full_name,
-        _ => return,
+    let Some(qualified_name) = binding.qualified_name() else {
+        return;
     };
 
     let Some(reference_id) = binding.references.first() else {
@@ -89,10 +84,13 @@ pub(crate) fn runtime_import_in_type_checking_block(
     {
         let mut diagnostic = Diagnostic::new(
             RuntimeImportInTypeCheckingBlock {
-                full_name: full_name.to_string(),
+                qualified_name: qualified_name.to_string(),
             },
-            binding.range,
+            binding.trimmed_range(checker.semantic_model(), checker.locator),
         );
+        if let Some(range) = binding.parent_range(checker.semantic_model()) {
+            diagnostic.set_parent(range.start());
+        }
 
         if checker.patch(diagnostic.kind.rule()) {
             diagnostic.try_set_fix(|| {
@@ -102,7 +100,7 @@ pub(crate) fn runtime_import_in_type_checking_block(
                 let stmt = checker.semantic_model().stmts[source];
                 let parent = checker.semantic_model().stmts.parent(stmt);
                 let remove_import_edit = autofix::edits::remove_unused_imports(
-                    std::iter::once(full_name),
+                    std::iter::once(qualified_name),
                     stmt,
                     parent,
                     checker.locator,
@ -113,7 +111,10 @@ pub(crate) fn runtime_import_in_type_checking_block(
|
||||||
// Step 2) Add the import to the top-level.
|
// Step 2) Add the import to the top-level.
|
||||||
let reference = checker.semantic_model().references.resolve(*reference_id);
|
let reference = checker.semantic_model().references.resolve(*reference_id);
|
||||||
let add_import_edit = checker.importer.runtime_import_edit(
|
let add_import_edit = checker.importer.runtime_import_edit(
|
||||||
&StmtImport { stmt, full_name },
|
&StmtImport {
|
||||||
|
stmt,
|
||||||
|
qualified_name,
|
||||||
|
},
|
||||||
reference.range().start(),
|
reference.range().start(),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
|
|
||||||
|
|
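The hunk above replaces a three-armed match over import binding kinds with a single `Option`-returning accessor plus a `let`-`else` early return. A minimal standalone sketch of that shape, using a hypothetical `Binding` stand-in rather than ruff's real semantic model:

```rust
/// Stand-in for a semantic-model binding; the real type in ruff carries far
/// more state. `qualified_name` is `Some(..)` only for import bindings.
struct Binding {
    qualified_name: Option<String>,
}

impl Binding {
    /// Returns the dotted name an import binding refers to, if any.
    fn qualified_name(&self) -> Option<&str> {
        self.qualified_name.as_deref()
    }
}

/// Mirrors the `let .. else { return }` guard used in the rule: bail out
/// early for non-import bindings instead of matching every import variant.
fn report(binding: &Binding) {
    let Some(qualified_name) = binding.qualified_name() else {
        return;
    };
    println!("would flag import `{qualified_name}`");
}

fn main() {
    report(&Binding { qualified_name: Some("collections.abc".to_string()) });
    report(&Binding { qualified_name: None }); // silently skipped
}
```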
@ -1,8 +1,6 @@
|
||||||
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, Violation};
|
use ruff_diagnostics::{AutofixKind, Diagnostic, DiagnosticKind, Fix, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_semantic::binding::{
|
use ruff_python_semantic::binding::Binding;
|
||||||
Binding, BindingKind, FromImportation, Importation, SubmoduleImportation,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::autofix;
|
use crate::autofix;
|
||||||
use crate::checkers::ast::Checker;
|
use crate::checkers::ast::Checker;
|
||||||
|
|
@ -47,7 +45,7 @@ use crate::rules::isort::{categorize, ImportSection, ImportType};
|
||||||
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||||
#[violation]
|
#[violation]
|
||||||
pub struct TypingOnlyFirstPartyImport {
|
pub struct TypingOnlyFirstPartyImport {
|
||||||
full_name: String,
|
qualified_name: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Violation for TypingOnlyFirstPartyImport {
|
impl Violation for TypingOnlyFirstPartyImport {
|
||||||
|
|
@ -57,7 +55,7 @@ impl Violation for TypingOnlyFirstPartyImport {
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
format!(
|
format!(
|
||||||
"Move application import `{}` into a type-checking block",
|
"Move application import `{}` into a type-checking block",
|
||||||
self.full_name
|
self.qualified_name
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -103,7 +101,7 @@ impl Violation for TypingOnlyFirstPartyImport {
|
||||||
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||||
#[violation]
|
#[violation]
|
||||||
pub struct TypingOnlyThirdPartyImport {
|
pub struct TypingOnlyThirdPartyImport {
|
||||||
full_name: String,
|
qualified_name: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Violation for TypingOnlyThirdPartyImport {
|
impl Violation for TypingOnlyThirdPartyImport {
|
||||||
|
|
@ -113,7 +111,7 @@ impl Violation for TypingOnlyThirdPartyImport {
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
format!(
|
format!(
|
||||||
"Move third-party import `{}` into a type-checking block",
|
"Move third-party import `{}` into a type-checking block",
|
||||||
self.full_name
|
self.qualified_name
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -159,7 +157,7 @@ impl Violation for TypingOnlyThirdPartyImport {
|
||||||
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
/// - [PEP 536](https://peps.python.org/pep-0563/#runtime-annotation-resolution-and-type-checking)
|
||||||
#[violation]
|
#[violation]
|
||||||
pub struct TypingOnlyStandardLibraryImport {
|
pub struct TypingOnlyStandardLibraryImport {
|
||||||
full_name: String,
|
qualified_name: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Violation for TypingOnlyStandardLibraryImport {
|
impl Violation for TypingOnlyStandardLibraryImport {
|
||||||
|
|
@ -169,7 +167,7 @@ impl Violation for TypingOnlyStandardLibraryImport {
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
format!(
|
format!(
|
||||||
"Move standard library import `{}` into a type-checking block",
|
"Move standard library import `{}` into a type-checking block",
|
||||||
self.full_name
|
self.qualified_name
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -180,65 +178,13 @@ impl Violation for TypingOnlyStandardLibraryImport {
|
||||||
|
|
||||||
/// Return `true` if `this` is implicitly loaded via importing `that`.
|
/// Return `true` if `this` is implicitly loaded via importing `that`.
|
||||||
fn is_implicit_import(this: &Binding, that: &Binding) -> bool {
|
fn is_implicit_import(this: &Binding, that: &Binding) -> bool {
|
||||||
match &this.kind {
|
let Some(this_module) = this.module_name() else {
|
||||||
BindingKind::Importation(Importation {
|
return false;
|
||||||
full_name: this_name,
|
};
|
||||||
})
|
let Some(that_module) = that.module_name() else {
|
||||||
| BindingKind::SubmoduleImportation(SubmoduleImportation {
|
return false;
|
||||||
full_name: this_name,
|
};
|
||||||
}) => match &that.kind {
|
this_module == that_module
|
||||||
BindingKind::FromImportation(FromImportation {
|
|
||||||
full_name: that_name,
|
|
||||||
}) => {
|
|
||||||
// Ex) `pkg.A` vs. `pkg`
|
|
||||||
let this_name = this_name.split('.').next().unwrap_or(this_name);
|
|
||||||
that_name
|
|
||||||
.rfind('.')
|
|
||||||
.map_or(false, |i| that_name[..i] == *this_name)
|
|
||||||
}
|
|
||||||
BindingKind::Importation(Importation {
|
|
||||||
full_name: that_name,
|
|
||||||
})
|
|
||||||
| BindingKind::SubmoduleImportation(SubmoduleImportation {
|
|
||||||
full_name: that_name,
|
|
||||||
}) => {
|
|
||||||
// Submodule importation with an alias (`import pkg.A as B`)
|
|
||||||
// are represented as `Importation`.
|
|
||||||
let this_name = this_name.split('.').next().unwrap_or(this_name);
|
|
||||||
let that_name = that_name.split('.').next().unwrap_or(that_name);
|
|
||||||
this_name == that_name
|
|
||||||
}
|
|
||||||
_ => false,
|
|
||||||
},
|
|
||||||
BindingKind::FromImportation(FromImportation {
|
|
||||||
full_name: this_name,
|
|
||||||
}) => match &that.kind {
|
|
||||||
BindingKind::Importation(Importation {
|
|
||||||
full_name: that_name,
|
|
||||||
})
|
|
||||||
| BindingKind::SubmoduleImportation(SubmoduleImportation {
|
|
||||||
full_name: that_name,
|
|
||||||
}) => {
|
|
||||||
// Ex) `pkg.A` vs. `pkg`
|
|
||||||
let that_name = that_name.split('.').next().unwrap_or(that_name);
|
|
||||||
this_name
|
|
||||||
.rfind('.')
|
|
||||||
.map_or(false, |i| &this_name[..i] == that_name)
|
|
||||||
}
|
|
||||||
BindingKind::FromImportation(FromImportation {
|
|
||||||
full_name: that_name,
|
|
||||||
}) => {
|
|
||||||
// Ex) `pkg.A` vs. `pkg.B`
|
|
||||||
this_name.rfind('.').map_or(false, |i| {
|
|
||||||
that_name
|
|
||||||
.rfind('.')
|
|
||||||
.map_or(false, |j| this_name[..i] == that_name[..j])
|
|
||||||
})
|
|
||||||
}
|
|
||||||
_ => false,
|
|
||||||
},
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
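The rewritten `is_implicit_import` above delegates to a `module_name()` comparison; the deleted arms all reduced to comparing the leading segment of dotted import paths. A small sketch of that comparison, independent of ruff's binding types (names here are illustrative):

```rust
/// Returns the top-level package of a dotted import path,
/// e.g. "pkg.foo.bar" -> "pkg".
fn top_level(name: &str) -> &str {
    name.split('.').next().unwrap_or(name)
}

/// Importing `this_name` also loads `that_name`'s package when both resolve
/// to the same top-level module -- the gist of the removed match arms.
fn shares_top_level(this_name: &str, that_name: &str) -> bool {
    top_level(this_name) == top_level(that_name)
}

fn main() {
    assert!(shares_top_level("pkg.foo", "pkg.bar"));
    assert!(shares_top_level("pkg", "pkg.foo.bar"));
    // A string prefix is not a module prefix: `pkg` vs. `pkgfoo` differ.
    assert!(!shares_top_level("pkg", "pkgfoo.bar"));
    println!("ok");
}
```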
|
|
||||||
/// Return `true` if `name` is exempt from typing-only enforcement.
|
/// Return `true` if `name` is exempt from typing-only enforcement.
|
||||||
|
|
@ -274,15 +220,12 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
let full_name = match &binding.kind {
|
let Some(qualified_name) = binding.qualified_name() else {
|
||||||
BindingKind::Importation(Importation { full_name }) => full_name,
|
return;
|
||||||
BindingKind::FromImportation(FromImportation { full_name }) => full_name.as_str(),
|
|
||||||
BindingKind::SubmoduleImportation(SubmoduleImportation { full_name }) => full_name,
|
|
||||||
_ => return,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if is_exempt(
|
if is_exempt(
|
||||||
full_name,
|
qualified_name,
|
||||||
&checker
|
&checker
|
||||||
.settings
|
.settings
|
||||||
.flake8_type_checking
|
.flake8_type_checking
|
||||||
|
|
@ -312,7 +255,7 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
// Extract the module base and level from the full name.
|
// Extract the module base and level from the full name.
|
||||||
// Ex) `foo.bar.baz` -> `foo`, `0`
|
// Ex) `foo.bar.baz` -> `foo`, `0`
|
||||||
// Ex) `.foo.bar.baz` -> `foo`, `1`
|
// Ex) `.foo.bar.baz` -> `foo`, `1`
|
||||||
let level = full_name
|
let level = qualified_name
|
||||||
.chars()
|
.chars()
|
||||||
.take_while(|c| *c == '.')
|
.take_while(|c| *c == '.')
|
||||||
.count()
|
.count()
|
||||||
|
|
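The `level` computation above counts leading dots to recover the relative-import depth before categorizing the module. A freestanding version of that split, with the examples from the comment; only the dot-counting appears in the hunk, so the base-module extraction here is an illustrative guess:

```rust
/// Splits a (possibly relative) dotted name into its import level and base
/// module, e.g. ".foo.bar.baz" -> (1, "foo").
fn level_and_base(qualified_name: &str) -> (usize, &str) {
    // Ex) `foo.bar.baz` has 0 leading dots; `.foo.bar.baz` has 1.
    let level = qualified_name.chars().take_while(|c| *c == '.').count();
    // Leading dots are single-byte, so byte slicing past them is safe.
    let rest = &qualified_name[level..];
    let base = rest.split('.').next().unwrap_or(rest);
    (level, base)
}

fn main() {
    assert_eq!(level_and_base("foo.bar.baz"), (0, "foo"));
    assert_eq!(level_and_base(".foo.bar.baz"), (1, "foo"));
    assert_eq!(level_and_base("..foo"), (2, "foo"));
    println!("ok");
}
```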
@ -320,8 +263,8 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
// Categorize the import.
|
// Categorize the import.
|
||||||
let mut diagnostic = match categorize(
|
let kind: DiagnosticKind = match categorize(
|
||||||
full_name,
|
qualified_name,
|
||||||
Some(level),
|
Some(level),
|
||||||
&checker.settings.src,
|
&checker.settings.src,
|
||||||
checker.package(),
|
checker.package(),
|
||||||
|
|
@ -329,32 +272,35 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
checker.settings.target_version,
|
checker.settings.target_version,
|
||||||
) {
|
) {
|
||||||
ImportSection::Known(ImportType::LocalFolder | ImportType::FirstParty) => {
|
ImportSection::Known(ImportType::LocalFolder | ImportType::FirstParty) => {
|
||||||
Diagnostic::new(
|
TypingOnlyFirstPartyImport {
|
||||||
TypingOnlyFirstPartyImport {
|
qualified_name: qualified_name.to_string(),
|
||||||
full_name: full_name.to_string(),
|
}
|
||||||
},
|
.into()
|
||||||
binding.range,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
ImportSection::Known(ImportType::ThirdParty) | ImportSection::UserDefined(_) => {
|
ImportSection::Known(ImportType::ThirdParty) | ImportSection::UserDefined(_) => {
|
||||||
Diagnostic::new(
|
TypingOnlyThirdPartyImport {
|
||||||
TypingOnlyThirdPartyImport {
|
qualified_name: qualified_name.to_string(),
|
||||||
full_name: full_name.to_string(),
|
}
|
||||||
},
|
.into()
|
||||||
binding.range,
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
ImportSection::Known(ImportType::StandardLibrary) => Diagnostic::new(
|
ImportSection::Known(ImportType::StandardLibrary) => TypingOnlyStandardLibraryImport {
|
||||||
TypingOnlyStandardLibraryImport {
|
qualified_name: qualified_name.to_string(),
|
||||||
full_name: full_name.to_string(),
|
}
|
||||||
},
|
.into(),
|
||||||
binding.range,
|
|
||||||
),
|
|
||||||
ImportSection::Known(ImportType::Future) => {
|
ImportSection::Known(ImportType::Future) => {
|
||||||
unreachable!("`__future__` imports should be marked as used")
|
unreachable!("`__future__` imports should be marked as used")
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mut diagnostic = Diagnostic::new(
|
||||||
|
kind,
|
||||||
|
binding.trimmed_range(checker.semantic_model(), checker.locator),
|
||||||
|
);
|
||||||
|
if let Some(range) = binding.parent_range(checker.semantic_model()) {
|
||||||
|
diagnostic.set_parent(range.start());
|
||||||
|
}
|
||||||
|
|
||||||
if checker.patch(diagnostic.kind.rule()) {
|
if checker.patch(diagnostic.kind.rule()) {
|
||||||
diagnostic.try_set_fix(|| {
|
diagnostic.try_set_fix(|| {
|
||||||
// Step 1) Remove the import.
|
// Step 1) Remove the import.
|
||||||
|
|
@ -363,7 +309,7 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
let stmt = checker.semantic_model().stmts[source];
|
let stmt = checker.semantic_model().stmts[source];
|
||||||
let parent = checker.semantic_model().stmts.parent(stmt);
|
let parent = checker.semantic_model().stmts.parent(stmt);
|
||||||
let remove_import_edit = autofix::edits::remove_unused_imports(
|
let remove_import_edit = autofix::edits::remove_unused_imports(
|
||||||
std::iter::once(full_name),
|
std::iter::once(qualified_name),
|
||||||
stmt,
|
stmt,
|
||||||
parent,
|
parent,
|
||||||
checker.locator,
|
checker.locator,
|
||||||
|
|
@ -374,7 +320,10 @@ pub(crate) fn typing_only_runtime_import(
|
||||||
// Step 2) Add the import to a `TYPE_CHECKING` block.
|
// Step 2) Add the import to a `TYPE_CHECKING` block.
|
||||||
let reference = checker.semantic_model().references.resolve(*reference_id);
|
let reference = checker.semantic_model().references.resolve(*reference_id);
|
||||||
let add_import_edit = checker.importer.typing_import_edit(
|
let add_import_edit = checker.importer.typing_import_edit(
|
||||||
&StmtImport { stmt, full_name },
|
&StmtImport {
|
||||||
|
stmt,
|
||||||
|
qualified_name,
|
||||||
|
},
|
||||||
reference.range().start(),
|
reference.range().start(),
|
||||||
checker.semantic_model(),
|
checker.semantic_model(),
|
||||||
)?;
|
)?;
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ source: crates/ruff/src/rules/flake8_type_checking/mod.rs
|
||||||
---
|
---
|
||||||
strict.py:27:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking block
|
strict.py:27:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking block
|
||||||
|
|
|
|
||||||
27 | # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
|
27 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
28 | import pkg
|
28 | import pkg
|
||||||
29 | from pkg import A
|
29 | from pkg import A
|
||||||
| ^ TCH002
|
| ^ TCH002
|
||||||
|
|
@ -23,7 +23,7 @@ strict.py:27:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
24 28 | def f():
|
24 28 | def f():
|
||||||
25 29 | # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
|
25 29 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
26 30 | import pkg
|
26 30 | import pkg
|
||||||
27 |- from pkg import A
|
27 |- from pkg import A
|
||||||
28 31 |
|
28 31 |
|
||||||
|
|
@ -33,7 +33,7 @@ strict.py:27:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking
|
||||||
strict.py:35:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking block
|
strict.py:35:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking block
|
||||||
|
|
|
|
||||||
35 | def f():
|
35 | def f():
|
||||||
36 | # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
|
36 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
37 | from pkg import A, B
|
37 | from pkg import A, B
|
||||||
| ^ TCH002
|
| ^ TCH002
|
||||||
38 |
|
38 |
|
||||||
|
|
@ -53,7 +53,7 @@ strict.py:35:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
32 36 |
|
32 36 |
|
||||||
33 37 | def f():
|
33 37 | def f():
|
||||||
34 38 | # In un-strict mode, this shouldn't rase an error, since `pkg` is used at runtime.
|
34 38 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
35 |- from pkg import A, B
|
35 |- from pkg import A, B
|
||||||
39 |+ from pkg import B
|
39 |+ from pkg import B
|
||||||
36 40 |
|
36 40 |
|
||||||
|
|
@ -62,7 +62,7 @@ strict.py:35:21: TCH002 [*] Move third-party import `pkg.A` into a type-checking
|
||||||
|
|
||||||
strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-checking block
|
strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-checking block
|
||||||
|
|
|
|
||||||
54 | # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
|
54 | # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
|
||||||
55 | import pkg
|
55 | import pkg
|
||||||
56 | from pkg.bar import A
|
56 | from pkg.bar import A
|
||||||
| ^ TCH002
|
| ^ TCH002
|
||||||
|
|
@ -82,7 +82,7 @@ strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-chec
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
51 55 | def f():
|
51 55 | def f():
|
||||||
52 56 | # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
|
52 56 | # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
|
||||||
53 57 | import pkg
|
53 57 | import pkg
|
||||||
54 |- from pkg.bar import A
|
54 |- from pkg.bar import A
|
||||||
55 58 |
|
55 58 |
|
||||||
|
|
@ -92,7 +92,7 @@ strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-chec
|
||||||
strict.py:62:12: TCH002 [*] Move third-party import `pkg` into a type-checking block
|
strict.py:62:12: TCH002 [*] Move third-party import `pkg` into a type-checking block
|
||||||
|
|
|
|
||||||
62 | def f():
|
62 | def f():
|
||||||
63 | # In un-strict mode, this shouldn't rase an error, since `pkg.bar` is used at runtime.
|
63 | # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
|
||||||
64 | import pkg
|
64 | import pkg
|
||||||
| ^^^ TCH002
|
| ^^^ TCH002
|
||||||
65 | import pkg.bar as B
|
65 | import pkg.bar as B
|
||||||
|
|
@ -111,7 +111,7 @@ strict.py:62:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
59 63 |
|
59 63 |
|
||||||
60 64 | def f():
|
60 64 | def f():
|
||||||
61 65 | # In un-strict mode, this shouldn't rase an error, since `pkg.bar` is used at runtime.
|
61 65 | # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
|
||||||
62 |- import pkg
|
62 |- import pkg
|
||||||
63 66 | import pkg.bar as B
|
63 66 | import pkg.bar as B
|
||||||
64 67 |
|
64 67 |
|
||||||
|
|
@ -120,7 +120,7 @@ strict.py:62:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
strict.py:71:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checking block
|
strict.py:71:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checking block
|
||||||
|
|
|
|
||||||
71 | def f():
|
71 | def f():
|
||||||
72 | # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
|
72 | # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
|
||||||
73 | import pkg.foo as F
|
73 | import pkg.foo as F
|
||||||
| ^^^^^^^^^^^^ TCH002
|
| ^^^^^^^^^^^^ TCH002
|
||||||
74 | import pkg.foo.bar as B
|
74 | import pkg.foo.bar as B
|
||||||
|
|
@ -139,7 +139,7 @@ strict.py:71:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checki
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
68 72 |
|
68 72 |
|
||||||
69 73 | def f():
|
69 73 | def f():
|
||||||
70 74 | # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
|
70 74 | # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
|
||||||
71 |- import pkg.foo as F
|
71 |- import pkg.foo as F
|
||||||
72 75 | import pkg.foo.bar as B
|
72 75 | import pkg.foo.bar as B
|
||||||
73 76 |
|
73 76 |
|
||||||
|
|
@ -148,7 +148,7 @@ strict.py:71:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checki
|
||||||
strict.py:80:12: TCH002 [*] Move third-party import `pkg` into a type-checking block
|
strict.py:80:12: TCH002 [*] Move third-party import `pkg` into a type-checking block
|
||||||
|
|
|
|
||||||
80 | def f():
|
80 | def f():
|
||||||
81 | # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
|
81 | # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
|
||||||
82 | import pkg
|
82 | import pkg
|
||||||
| ^^^ TCH002
|
| ^^^ TCH002
|
||||||
83 | import pkg.foo.bar as B
|
83 | import pkg.foo.bar as B
|
||||||
|
|
@ -167,7 +167,7 @@ strict.py:80:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
77 81 |
|
77 81 |
|
||||||
78 82 | def f():
|
78 82 | def f():
|
||||||
79 83 | # In un-strict mode, this shouldn't rase an error, since `pkg.foo.bar` is used at runtime.
|
79 83 | # In un-strict mode, this shouldn't raise an error, since `pkg.foo.bar` is used at runtime.
|
||||||
80 |- import pkg
|
80 |- import pkg
|
||||||
81 84 | import pkg.foo.bar as B
|
81 84 | import pkg.foo.bar as B
|
||||||
82 85 |
|
82 85 |
|
||||||
|
|
@ -193,7 +193,7 @@ strict.py:91:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
3 7 |
|
3 7 |
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
88 92 | # In un-strict mode, this _should_ rase an error, since `pkgfoo.bar` is used at runtime.
|
88 92 | # In un-strict mode, this _should_ raise an error, since `pkg` isn't used at runtime.
|
||||||
89 93 | # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
|
89 93 | # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
|
||||||
90 94 | # testing the implementation.
|
90 94 | # testing the implementation.
|
||||||
91 |- import pkg
|
91 |- import pkg
|
||||||
|
|
@ -203,7 +203,7 @@ strict.py:91:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
|
|
||||||
strict.py:101:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checking block
|
strict.py:101:12: TCH002 [*] Move third-party import `pkg.foo` into a type-checking block
|
||||||
|
|
|
|
||||||
101 | # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
|
101 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
102 | import pkg.bar as B
|
102 | import pkg.bar as B
|
||||||
103 | import pkg.foo as F
|
103 | import pkg.foo as F
|
||||||
| ^^^^^^^^^^^^ TCH002
|
| ^^^^^^^^^^^^ TCH002
|
||||||
|
|
@ -223,7 +223,7 @@ strict.py:101:12: TCH002 [*] Move third-party import `pkg.foo` into a type-check
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
98 102 | def f():
|
98 102 | def f():
|
||||||
99 103 | # In un-strict mode, this shouldn't raise an error, since `pkg.bar` is used at runtime.
|
99 103 | # In un-strict mode, this shouldn't raise an error, since `pkg` is used at runtime.
|
||||||
100 104 | import pkg.bar as B
|
100 104 | import pkg.bar as B
|
||||||
101 |- import pkg.foo as F
|
101 |- import pkg.foo as F
|
||||||
102 105 |
|
102 105 |
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@ source: crates/ruff/src/rules/flake8_type_checking/mod.rs
|
||||||
---
|
---
|
||||||
strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-checking block
|
strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-checking block
|
||||||
|
|
|
|
||||||
54 | # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
|
54 | # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
|
||||||
55 | import pkg
|
55 | import pkg
|
||||||
56 | from pkg.bar import A
|
56 | from pkg.bar import A
|
||||||
| ^ TCH002
|
| ^ TCH002
|
||||||
|
|
@ -23,7 +23,7 @@ strict.py:54:25: TCH002 [*] Move third-party import `pkg.bar.A` into a type-chec
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
51 55 | def f():
|
51 55 | def f():
|
||||||
52 56 | # In un-strict mode, this _should_ rase an error, since `pkg` is used at runtime.
|
52 56 | # In un-strict mode, this _should_ raise an error, since `pkg.bar` isn't used at runtime
|
||||||
53 57 | import pkg
|
53 57 | import pkg
|
||||||
54 |- from pkg.bar import A
|
54 |- from pkg.bar import A
|
||||||
55 58 |
|
55 58 |
|
||||||
|
|
@ -50,7 +50,7 @@ strict.py:91:12: TCH002 [*] Move third-party import `pkg` into a type-checking b
|
||||||
3 7 |
|
3 7 |
|
||||||
4 8 | def f():
|
4 8 | def f():
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
88 92 | # In un-strict mode, this _should_ rase an error, since `pkgfoo.bar` is used at runtime.
|
88 92 | # In un-strict mode, this _should_ raise an error, since `pkg` isn't used at runtime.
|
||||||
89 93 | # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
|
89 93 | # Note that `pkg` is a prefix of `pkgfoo` which are both different modules. This is
|
||||||
90 94 | # testing the implementation.
|
90 94 | # testing the implementation.
|
||||||
91 |- import pkg
|
91 |- import pkg
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,6 @@
|
||||||
|
use itertools::Itertools;
|
||||||
use ruff_text_size::TextRange;
|
use ruff_text_size::TextRange;
|
||||||
use rustpython_parser::ast::{self, Expr, Ranged};
|
use rustpython_parser::ast::{self, Constant, Expr, Ranged};
|
||||||
|
|
||||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
|
|
@ -27,15 +28,48 @@ impl AlwaysAutofixableViolation for StaticJoinToFString {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_static_length(elts: &[Expr]) -> bool {
|
fn is_static_length(elts: &[Expr]) -> bool {
|
||||||
elts.iter().all(|e| !matches!(e, Expr::Starred(_)))
|
elts.iter().all(|e| !e.is_starred_expr())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
fn build_fstring(joiner: &str, joinees: &[Expr]) -> Option<Expr> {
|
||||||
|
// If all elements are string constants, join them into a single string.
|
||||||
|
if joinees.iter().all(|expr| {
|
||||||
|
matches!(
|
||||||
|
expr,
|
||||||
|
Expr::Constant(ast::ExprConstant {
|
||||||
|
value: Constant::Str(_),
|
||||||
|
..
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}) {
|
||||||
|
let node = ast::ExprConstant {
|
||||||
|
value: Constant::Str(
|
||||||
|
joinees
|
||||||
|
.iter()
|
||||||
|
.filter_map(|expr| {
|
||||||
|
if let Expr::Constant(ast::ExprConstant {
|
||||||
|
value: Constant::Str(string),
|
||||||
|
..
|
||||||
|
}) = expr
|
||||||
|
{
|
||||||
|
Some(string.as_str())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.join(joiner),
|
||||||
|
),
|
||||||
|
range: TextRange::default(),
|
||||||
|
kind: None,
|
||||||
|
};
|
||||||
|
return Some(node.into());
|
||||||
|
}
|
||||||
|
|
||||||
let mut fstring_elems = Vec::with_capacity(joinees.len() * 2);
|
let mut fstring_elems = Vec::with_capacity(joinees.len() * 2);
|
||||||
let mut first = true;
|
let mut first = true;
|
||||||
|
|
||||||
for expr in joinees {
|
for expr in joinees {
|
||||||
if matches!(expr, Expr::JoinedStr(_)) {
|
if expr.is_joined_str_expr() {
|
||||||
// Oops, already an f-string. We don't know how to handle those
|
// Oops, already an f-string. We don't know how to handle those
|
||||||
// gracefully right now.
|
// gracefully right now.
|
||||||
return None;
|
return None;
|
||||||
|
|
@ -58,7 +92,7 @@ pub(crate) fn static_join_to_fstring(checker: &mut Checker, expr: &Expr, joiner:
|
||||||
args,
|
args,
|
||||||
keywords,
|
keywords,
|
||||||
..
|
..
|
||||||
})= expr else {
|
}) = expr else {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
||||||
|
|
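The new branch added to `build_fstring` above folds a `.join(...)` over all-literal elements into a single string constant instead of emitting an f-string, which is what the FLY002 snapshot changes below reflect (`f"1x2x3"` becomes `"1x2x3"`). A minimal sketch of that decision on plain Rust types, without rustpython's AST:

```rust
/// Drastically reduced stand-in for an AST expression: either a string
/// literal or "anything else".
enum Joinee {
    Str(String),
    Other,
}

/// If every element is a string literal, produce the folded constant;
/// otherwise signal that an f-string (or no fix at all) is needed.
fn fold_static_join(joiner: &str, joinees: &[Joinee]) -> Option<String> {
    if joinees.iter().all(|j| matches!(j, Joinee::Str(_))) {
        let parts: Vec<&str> = joinees
            .iter()
            .filter_map(|j| match j {
                Joinee::Str(s) => Some(s.as_str()),
                Joinee::Other => None,
            })
            .collect();
        return Some(parts.join(joiner));
    }
    None
}

fn main() {
    let elems = vec![
        Joinee::Str("1".into()),
        Joinee::Str("2".into()),
        Joinee::Str("3".into()),
    ];
    assert_eq!(fold_static_join("x", &elems), Some("1x2x3".into()));
    assert_eq!(fold_static_join("x", &[Joinee::Other]), None);
    println!("ok");
}
```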
@ -42,7 +42,7 @@ FLY002.py:6:7: FLY002 [*] Consider `f"Finally, {a} World"` instead of string joi
|
||||||
8 8 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
8 8 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
||||||
9 9 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
9 9 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
||||||
|
|
||||||
FLY002.py:7:7: FLY002 [*] Consider `f"1x2x3"` instead of string join
|
FLY002.py:7:7: FLY002 [*] Consider `"1x2x3"` instead of string join
|
||||||
|
|
|
|
||||||
7 | ok1 = " ".join([a, " World"]) # OK
|
7 | ok1 = " ".join([a, " World"]) # OK
|
||||||
8 | ok2 = "".join(["Finally, ", a, " World"]) # OK
|
8 | ok2 = "".join(["Finally, ", a, " World"]) # OK
|
||||||
|
|
@ -51,14 +51,14 @@ FLY002.py:7:7: FLY002 [*] Consider `f"1x2x3"` instead of string join
|
||||||
10 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
10 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
||||||
11 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
11 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
||||||
|
|
|
|
||||||
= help: Replace with `f"1x2x3"`
|
= help: Replace with `"1x2x3"`
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Suggested fix
|
||||||
4 4 | a = "Hello"
|
4 4 | a = "Hello"
|
||||||
5 5 | ok1 = " ".join([a, " World"]) # OK
|
5 5 | ok1 = " ".join([a, " World"]) # OK
|
||||||
6 6 | ok2 = "".join(["Finally, ", a, " World"]) # OK
|
6 6 | ok2 = "".join(["Finally, ", a, " World"]) # OK
|
||||||
7 |-ok3 = "x".join(("1", "2", "3")) # OK
|
7 |-ok3 = "x".join(("1", "2", "3")) # OK
|
||||||
7 |+ok3 = f"1x2x3" # OK
|
7 |+ok3 = "1x2x3" # OK
|
||||||
8 8 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
8 8 | ok4 = "y".join([1, 2, 3]) # Technically OK, though would've been an error originally
|
||||||
9 9 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
9 9 | ok5 = "a".join([random(), random()]) # OK (simple calls)
|
||||||
10 10 | ok6 = "a".join([secrets.token_urlsafe(), secrets.token_hex()]) # OK (attr calls)
|
10 10 | ok6 = "a".join([secrets.token_urlsafe(), secrets.token_hex()]) # OK (attr calls)
|
||||||
|
|
|
||||||
|
|
@ -40,11 +40,9 @@ pub(crate) fn test_expression(expr: &Expr, model: &SemanticModel) -> Resolution
|
||||||
| BindingKind::LoopVar
|
| BindingKind::LoopVar
|
||||||
| BindingKind::Global
|
| BindingKind::Global
|
||||||
| BindingKind::Nonlocal => Resolution::RelevantLocal,
|
| BindingKind::Nonlocal => Resolution::RelevantLocal,
|
||||||
BindingKind::Importation(Importation { full_name: module })
|
BindingKind::Importation(Importation {
|
||||||
if module == "pandas" =>
|
qualified_name: module,
|
||||||
{
|
}) if module == "pandas" => Resolution::PandasModule,
|
||||||
Resolution::PandasModule
|
|
||||||
}
|
|
||||||
_ => Resolution::IrrelevantBinding,
|
_ => Resolution::IrrelevantBinding,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
|
||||||
|
|
@ -71,7 +71,7 @@ pub(crate) fn inplace_argument(
|
||||||
matches!(
|
matches!(
|
||||||
binding.kind,
|
binding.kind,
|
||||||
BindingKind::Importation(Importation {
|
BindingKind::Importation(Importation {
|
||||||
full_name: "pandas"
|
qualified_name: "pandas"
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,6 @@
|
||||||
use anyhow::{bail, Ok, Result};
|
use anyhow::{bail, Ok, Result};
|
||||||
use libcst_native::{Codegen, CodegenState, DictElement, Expression};
|
use libcst_native::{DictElement, Expression};
|
||||||
use ruff_text_size::TextRange;
|
use ruff_text_size::TextRange;
|
||||||
use rustpython_format::{
|
|
||||||
FieldName, FieldNamePart, FieldType, FormatPart, FormatString, FromTemplate,
|
|
||||||
};
|
|
||||||
use rustpython_parser::ast::{Excepthandler, Expr, Ranged};
|
use rustpython_parser::ast::{Excepthandler, Expr, Ranged};
|
||||||
use rustpython_parser::{lexer, Mode, Tok};
|
use rustpython_parser::{lexer, Mode, Tok};
|
||||||
|
|
||||||
|
|
@ -11,9 +8,8 @@ use ruff_diagnostics::Edit;
|
||||||
use ruff_python_ast::source_code::{Locator, Stylist};
|
use ruff_python_ast::source_code::{Locator, Stylist};
|
||||||
use ruff_python_ast::str::raw_contents;
|
use ruff_python_ast::str::raw_contents;
|
||||||
|
|
||||||
use crate::cst::matchers::{
|
use crate::autofix::codemods::CodegenStylist;
|
||||||
match_attribute, match_call_mut, match_dict, match_expression, match_simple_string,
|
use crate::cst::matchers::{match_call_mut, match_dict, match_expression};
|
||||||
};
|
|
||||||
|
|
||||||
/// Generate a [`Edit`] to remove unused keys from format dict.
|
/// Generate a [`Edit`] to remove unused keys from format dict.
|
||||||
pub(crate) fn remove_unused_format_arguments_from_dict(
|
pub(crate) fn remove_unused_format_arguments_from_dict(
|
||||||
|
|
@ -33,14 +29,10 @@ pub(crate) fn remove_unused_format_arguments_from_dict(
|
||||||
} if raw_contents(name.value).map_or(false, |name| unused_arguments.contains(&name)))
|
} if raw_contents(name.value).map_or(false, |name| unused_arguments.contains(&name)))
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut state = CodegenState {
|
Ok(Edit::range_replacement(
|
||||||
default_newline: &stylist.line_ending(),
|
tree.codegen_stylist(stylist),
|
||||||
default_indent: stylist.indentation(),
|
stmt.range(),
|
||||||
..CodegenState::default()
|
))
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
Ok(Edit::range_replacement(state.to_string(), stmt.range()))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate a [`Edit`] to remove unused keyword arguments from a `format` call.
|
/// Generate a [`Edit`] to remove unused keyword arguments from a `format` call.
|
||||||
|
|
@ -57,72 +49,10 @@ pub(crate) fn remove_unused_keyword_arguments_from_format_call(
|
||||||
call.args
|
call.args
|
||||||
.retain(|e| !matches!(&e.keyword, Some(kw) if unused_arguments.contains(&kw.value)));
|
.retain(|e| !matches!(&e.keyword, Some(kw) if unused_arguments.contains(&kw.value)));
|
||||||
|
|
||||||
let mut state = CodegenState {
|
Ok(Edit::range_replacement(
|
||||||
default_newline: &stylist.line_ending(),
|
tree.codegen_stylist(stylist),
|
||||||
default_indent: stylist.indentation(),
|
location,
|
||||||
..CodegenState::default()
|
))
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
Ok(Edit::range_replacement(state.to_string(), location))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn unparse_format_part(format_part: FormatPart) -> String {
|
|
||||||
match format_part {
|
|
||||||
FormatPart::Literal(literal) => literal,
|
|
||||||
FormatPart::Field {
|
|
||||||
field_name,
|
|
||||||
conversion_spec,
|
|
||||||
format_spec,
|
|
||||||
} => {
|
|
||||||
let mut field_name = field_name;
|
|
||||||
if let Some(conversion) = conversion_spec {
|
|
||||||
field_name.push_str(&format!("!{conversion}"));
|
|
||||||
}
|
|
||||||
if !format_spec.is_empty() {
|
|
||||||
field_name.push_str(&format!(":{format_spec}"));
|
|
||||||
}
|
|
||||||
format!("{{{field_name}}}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update_field_types(format_string: &FormatString, min_unused: usize) -> String {
|
|
||||||
format_string
|
|
||||||
.format_parts
|
|
||||||
.iter()
|
|
||||||
.map(|part| match part {
|
|
||||||
FormatPart::Literal(literal) => FormatPart::Literal(literal.to_string()),
|
|
||||||
FormatPart::Field {
|
|
||||||
field_name,
|
|
||||||
conversion_spec,
|
|
||||||
format_spec,
|
|
||||||
} => {
|
|
||||||
let new_field_name = FieldName::parse(field_name).unwrap(); // This should never fail because we parsed it before
|
|
||||||
let mut new_field_name_string = match new_field_name.field_type {
|
|
||||||
FieldType::Auto => String::new(),
|
|
||||||
FieldType::Index(i) => (i - min_unused).to_string(),
|
|
||||||
FieldType::Keyword(keyword) => keyword,
|
|
||||||
};
|
|
||||||
for field_name_part in &new_field_name.parts {
|
|
||||||
let field_name_part_string = match field_name_part {
|
|
||||||
FieldNamePart::Attribute(attribute) => format!(".{attribute}"),
|
|
||||||
FieldNamePart::Index(i) => format!("[{i}]"),
|
|
||||||
FieldNamePart::StringIndex(s) => format!("[{s}]"),
|
|
||||||
};
|
|
||||||
new_field_name_string.push_str(&field_name_part_string);
|
|
||||||
}
|
|
||||||
let new_format_spec = FormatString::from_str(format_spec).unwrap(); // This should never fail because we parsed it before
|
|
||||||
let new_format_spec_string = update_field_types(&new_format_spec, min_unused);
|
|
||||||
FormatPart::Field {
|
|
||||||
field_name: new_field_name_string,
|
|
||||||
conversion_spec: *conversion_spec,
|
|
||||||
format_spec: new_format_spec_string,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.map(unparse_format_part)
|
|
||||||
.collect()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate a [`Edit`] to remove unused positional arguments from a `format` call.
|
/// Generate a [`Edit`] to remove unused positional arguments from a `format` call.
|
||||||
|
|
@ -131,44 +61,23 @@ pub(crate) fn remove_unused_positional_arguments_from_format_call(
|
||||||
location: TextRange,
|
location: TextRange,
|
||||||
locator: &Locator,
|
locator: &Locator,
|
||||||
stylist: &Stylist,
|
stylist: &Stylist,
|
||||||
format_string: &FormatString,
|
|
||||||
) -> Result<Edit> {
|
) -> Result<Edit> {
|
||||||
let module_text = locator.slice(location);
|
let module_text = locator.slice(location);
|
||||||
let mut tree = match_expression(module_text)?;
|
let mut tree = match_expression(module_text)?;
|
||||||
let call = match_call_mut(&mut tree)?;
|
let call = match_call_mut(&mut tree)?;
|
||||||
|
|
||||||
|
// Remove any unused arguments.
|
||||||
let mut index = 0;
|
let mut index = 0;
|
||||||
call.args.retain(|_| {
|
call.args.retain(|_| {
|
||||||
|
let is_unused = unused_arguments.contains(&index);
|
||||||
index += 1;
|
index += 1;
|
||||||
!unused_arguments.contains(&(index - 1))
|
!is_unused
|
||||||
});
|
});
|
||||||
|
|
||||||
let mut min_unused_index = 0;
|
Ok(Edit::range_replacement(
|
||||||
for index in unused_arguments {
|
tree.codegen_stylist(stylist),
|
||||||
if *index == min_unused_index {
|
location,
|
||||||
min_unused_index += 1;
|
))
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut new_format_string;
|
|
||||||
if min_unused_index > 0 {
|
|
||||||
let func = match_attribute(&mut call.func)?;
|
|
||||||
let simple_string = match_simple_string(&mut func.value)?;
|
|
||||||
new_format_string = update_field_types(format_string, min_unused_index);
|
|
||||||
new_format_string = format!(r#""{new_format_string}""#);
|
|
||||||
simple_string.value = new_format_string.as_str();
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut state = CodegenState {
|
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
Ok(Edit::range_replacement(state.to_string(), location))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Generate a [`Edit`] to remove the binding from an exception handler.
|
/// Generate a [`Edit`] to remove the binding from an exception handler.
|
||||||
|
|
|
||||||
|
|
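The rewritten positional-argument fix above keeps the `retain`-with-a-counter idiom while dropping the format-string re-indexing. A standalone sketch of removing items at a given set of positions that way (function and argument names are illustrative):

```rust
/// Drops the elements whose positions appear in `unused`, preserving order.
/// `Vec::retain` visits elements in order, so a running counter recovers
/// each element's original index.
fn remove_at_indexes<T>(items: &mut Vec<T>, unused: &[usize]) {
    let mut index = 0;
    items.retain(|_| {
        let is_unused = unused.contains(&index);
        index += 1;
        !is_unused
    });
}

fn main() {
    let mut args = vec!["a", "b", "c", "d"];
    remove_at_indexes(&mut args, &[1, 3]);
    assert_eq!(args, vec!["a", "c"]);
    println!("ok");
}
```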
@ -26,7 +26,6 @@ pub(crate) struct FormatSummary {
|
||||||
pub(crate) indices: Vec<usize>,
|
pub(crate) indices: Vec<usize>,
|
||||||
pub(crate) keywords: Vec<String>,
|
pub(crate) keywords: Vec<String>,
|
||||||
pub(crate) has_nested_parts: bool,
|
pub(crate) has_nested_parts: bool,
|
||||||
pub(crate) format_string: FormatString,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TryFrom<&str> for FormatSummary {
|
impl TryFrom<&str> for FormatSummary {
|
||||||
|
|
@ -75,7 +74,6 @@ impl TryFrom<&str> for FormatSummary {
|
||||||
indices,
|
indices,
|
||||||
keywords,
|
keywords,
|
||||||
has_nested_parts,
|
has_nested_parts,
|
||||||
format_string,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -40,6 +40,7 @@ mod tests {
|
||||||
#[test_case(Rule::UnusedImport, Path::new("F401_13.py"))]
|
#[test_case(Rule::UnusedImport, Path::new("F401_13.py"))]
|
||||||
#[test_case(Rule::UnusedImport, Path::new("F401_14.py"))]
|
#[test_case(Rule::UnusedImport, Path::new("F401_14.py"))]
|
||||||
#[test_case(Rule::UnusedImport, Path::new("F401_15.py"))]
|
#[test_case(Rule::UnusedImport, Path::new("F401_15.py"))]
|
||||||
|
#[test_case(Rule::UnusedImport, Path::new("F401_16.py"))]
|
||||||
#[test_case(Rule::ImportShadowedByLoopVar, Path::new("F402.py"))]
|
#[test_case(Rule::ImportShadowedByLoopVar, Path::new("F402.py"))]
|
||||||
#[test_case(Rule::UndefinedLocalWithImportStar, Path::new("F403.py"))]
|
#[test_case(Rule::UndefinedLocalWithImportStar, Path::new("F403.py"))]
|
||||||
#[test_case(Rule::LateFutureImport, Path::new("F404.py"))]
|
#[test_case(Rule::LateFutureImport, Path::new("F404.py"))]
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,7 @@ use ruff_text_size::TextRange;
|
||||||
use rustc_hash::FxHashSet;
|
use rustc_hash::FxHashSet;
|
||||||
use rustpython_parser::ast::{self, Constant, Expr, Identifier, Keyword};
|
use rustpython_parser::ast::{self, Constant, Expr, Identifier, Keyword};
|
||||||
|
|
||||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Violation};
|
use ruff_diagnostics::{AlwaysAutofixableViolation, AutofixKind, Diagnostic, Fix, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
|
|
||||||
use crate::checkers::ast::Checker;
|
use crate::checkers::ast::Checker;
|
||||||
|
|
@ -425,7 +425,9 @@ pub struct StringDotFormatExtraPositionalArguments {
|
||||||
missing: Vec<String>,
|
missing: Vec<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AlwaysAutofixableViolation for StringDotFormatExtraPositionalArguments {
|
impl Violation for StringDotFormatExtraPositionalArguments {
|
||||||
|
const AUTOFIX: AutofixKind = AutofixKind::Sometimes;
|
||||||
|
|
||||||
#[derive_message_formats]
|
#[derive_message_formats]
|
||||||
fn message(&self) -> String {
|
fn message(&self) -> String {
|
||||||
let StringDotFormatExtraPositionalArguments { missing } = self;
|
let StringDotFormatExtraPositionalArguments { missing } = self;
|
||||||
|
|
@ -433,10 +435,12 @@ impl AlwaysAutofixableViolation for StringDotFormatExtraPositionalArguments {
|
||||||
format!("`.format` call has unused arguments at position(s): {message}")
|
format!("`.format` call has unused arguments at position(s): {message}")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn autofix_title(&self) -> String {
|
fn autofix_title(&self) -> Option<String> {
|
||||||
let StringDotFormatExtraPositionalArguments { missing } = self;
|
let StringDotFormatExtraPositionalArguments { missing } = self;
|
||||||
let message = missing.join(", ");
|
let message = missing.join(", ");
|
||||||
format!("Remove extra positional arguments at position(s): {message}")
|
Some(format!(
|
||||||
|
"Remove extra positional arguments at position(s): {message}"
|
||||||
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
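The hunk above downgrades the rule from an always-fixable violation to one whose fix is only sometimes available, with `autofix_title` now returning `Option<String>`. A toy version of that shape; the trait here is a reduced mock, not the real `Violation` trait from `ruff_diagnostics`:

```rust
/// Toy "sometimes fixable" violation trait, for illustration only.
trait Violation {
    fn message(&self) -> String;
    /// `None` means "no fix is available for this particular instance".
    fn autofix_title(&self) -> Option<String> {
        None
    }
}

struct ExtraPositionalArguments {
    missing: Vec<usize>,
}

fn positions(missing: &[usize]) -> String {
    missing
        .iter()
        .map(ToString::to_string)
        .collect::<Vec<_>>()
        .join(", ")
}

impl Violation for ExtraPositionalArguments {
    fn message(&self) -> String {
        format!(
            "`.format` call has unused arguments at position(s): {}",
            positions(&self.missing)
        )
    }

    fn autofix_title(&self) -> Option<String> {
        Some(format!(
            "Remove extra positional arguments at position(s): {}",
            positions(&self.missing)
        ))
    }
}

fn main() {
    let violation = ExtraPositionalArguments { missing: vec![0, 2] };
    println!("{}", violation.message());
    if let Some(title) = violation.autofix_title() {
        println!("fix: {title}");
    }
}
```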
|
@ -600,14 +604,14 @@ pub(crate) fn percent_format_extra_named_arguments(
|
||||||
location,
|
location,
|
||||||
);
|
);
|
||||||
if checker.patch(diagnostic.kind.rule()) {
|
if checker.patch(diagnostic.kind.rule()) {
|
||||||
#[allow(deprecated)]
|
diagnostic.try_set_fix(|| {
|
||||||
diagnostic.try_set_fix_from_edit(|| {
|
let edit = remove_unused_format_arguments_from_dict(
|
||||||
remove_unused_format_arguments_from_dict(
|
|
||||||
&missing,
|
&missing,
|
||||||
right,
|
right,
|
||||||
checker.locator,
|
checker.locator,
|
||||||
checker.stylist,
|
checker.stylist,
|
||||||
)
|
)?;
|
||||||
|
Ok(Fix::automatic(edit))
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
checker.diagnostics.push(diagnostic);
|
checker.diagnostics.push(diagnostic);
|
||||||
|
|
@ -766,14 +770,14 @@ pub(crate) fn string_dot_format_extra_named_arguments(
|
||||||
location,
|
location,
|
||||||
);
|
);
|
||||||
if checker.patch(diagnostic.kind.rule()) {
|
if checker.patch(diagnostic.kind.rule()) {
|
||||||
#[allow(deprecated)]
|
diagnostic.try_set_fix(|| {
|
||||||
diagnostic.try_set_fix_from_edit(|| {
|
let edit = remove_unused_keyword_arguments_from_format_call(
|
||||||
remove_unused_keyword_arguments_from_format_call(
|
|
||||||
&missing,
|
&missing,
|
||||||
location,
|
location,
|
||||||
checker.locator,
|
checker.locator,
|
||||||
checker.stylist,
|
checker.stylist,
|
||||||
)
|
)?;
|
||||||
|
Ok(Fix::automatic(edit))
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
checker.diagnostics.push(diagnostic);
|
checker.diagnostics.push(diagnostic);
|
||||||
|
|
@ -805,22 +809,48 @@ pub(crate) fn string_dot_format_extra_positional_arguments(
|
||||||
StringDotFormatExtraPositionalArguments {
|
StringDotFormatExtraPositionalArguments {
|
||||||
missing: missing
|
missing: missing
|
||||||
.iter()
|
.iter()
|
||||||
.map(std::string::ToString::to_string)
|
.map(ToString::to_string)
|
||||||
.collect::<Vec<String>>(),
|
.collect::<Vec<String>>(),
|
||||||
},
|
},
|
||||||
location,
|
location,
|
||||||
);
|
);
|
||||||
if checker.patch(diagnostic.kind.rule()) {
|
if checker.patch(diagnostic.kind.rule()) {
|
||||||
#[allow(deprecated)]
|
// We can only fix if the positional arguments we're removing don't require re-indexing
|
||||||
diagnostic.try_set_fix_from_edit(|| {
|
// the format string itself. For example, we can't fix `"{1}{2}".format(0, 1, 2)"`, since
|
||||||
remove_unused_positional_arguments_from_format_call(
|
// this requires changing the format string to `"{0}{1}"`. But we can fix
|
||||||
&missing,
|
// `"{0}{1}".format(0, 1, 2)`, since this only requires modifying the call arguments.
|
||||||
location,
|
fn is_contiguous_from_end<T>(indexes: &[usize], target: &[T]) -> bool {
|
||||||
checker.locator,
|
if indexes.is_empty() {
|
||||||
checker.stylist,
|
return true;
|
||||||
&summary.format_string,
|
}
|
||||||
)
|
|
||||||
});
|
let mut expected_index = target.len() - 1;
|
||||||
|
for &index in indexes.iter().rev() {
|
||||||
|
if index != expected_index {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if expected_index == 0 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
expected_index -= 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
if is_contiguous_from_end(&missing, args) {
|
||||||
|
diagnostic.try_set_fix(|| {
|
||||||
|
let edit = remove_unused_positional_arguments_from_format_call(
|
||||||
|
&missing,
|
||||||
|
location,
|
||||||
|
checker.locator,
|
||||||
|
checker.stylist,
|
||||||
|
)?;
|
||||||
|
Ok(Fix::automatic(edit))
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
checker.diagnostics.push(diagnostic);
|
checker.diagnostics.push(diagnostic);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
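The guard added above only applies the F523 fix when the unused positions form a contiguous run at the end of the argument list, so removing them never forces re-indexing of the format string. The same check, restated as a free function with a couple of illustrative cases:

```rust
/// Returns `true` if `indexes` (sorted ascending) are exactly the trailing
/// positions of `target`, i.e. removing them never shifts a kept argument.
fn is_contiguous_from_end<T>(indexes: &[usize], target: &[T]) -> bool {
    if indexes.is_empty() {
        return true;
    }
    let mut expected_index = target.len() - 1;
    for &index in indexes.iter().rev() {
        if index != expected_index {
            return false;
        }
        if expected_index == 0 {
            break;
        }
        expected_index -= 1;
    }
    true
}

fn main() {
    let args = ["a", "b", "c"];
    assert!(is_contiguous_from_end(&[2], &args));     // "{0}{1}".format(a, b, c)
    assert!(is_contiguous_from_end(&[1, 2], &args));  // only trailing args removed
    assert!(!is_contiguous_from_end(&[0, 2], &args)); // would require re-indexing
    println!("ok");
}
```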
|
@ -48,18 +48,16 @@ impl Violation for UndefinedExport {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// F822
|
/// F822
|
||||||
pub(crate) fn undefined_export(names: &[&str], range: TextRange, scope: &Scope) -> Vec<Diagnostic> {
|
pub(crate) fn undefined_export(name: &str, range: TextRange, scope: &Scope) -> Vec<Diagnostic> {
|
||||||
let mut diagnostics = Vec::new();
|
let mut diagnostics = Vec::new();
|
||||||
if !scope.uses_star_imports() {
|
if !scope.uses_star_imports() {
|
||||||
for name in names {
|
if !scope.defines(name) {
|
||||||
if !scope.defines(name) {
|
diagnostics.push(Diagnostic::new(
|
||||||
diagnostics.push(Diagnostic::new(
|
UndefinedExport {
|
||||||
UndefinedExport {
|
name: (*name).to_string(),
|
||||||
name: (*name).to_string(),
|
},
|
||||||
},
|
range,
|
||||||
range,
|
));
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
diagnostics
|
diagnostics
|
||||||
|
|
|
||||||
|
|
@ -5,9 +5,7 @@ use rustpython_parser::ast::Ranged;
|
||||||
|
|
||||||
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, IsolationLevel, Violation};
|
use ruff_diagnostics::{AutofixKind, Diagnostic, Fix, IsolationLevel, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_semantic::binding::{
|
use ruff_python_semantic::binding::Exceptions;
|
||||||
BindingKind, Exceptions, FromImportation, Importation, SubmoduleImportation,
|
|
||||||
};
|
|
||||||
use ruff_python_semantic::node::NodeId;
|
use ruff_python_semantic::node::NodeId;
|
||||||
use ruff_python_semantic::scope::Scope;
|
use ruff_python_semantic::scope::Scope;
|
||||||
|
|
||||||
|
|
@ -102,8 +100,8 @@ impl Violation for UnusedImport {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type SpannedName<'a> = (&'a str, &'a TextRange);
|
type SpannedName<'a> = (&'a str, TextRange);
|
||||||
type BindingContext<'a> = (NodeId, Option<NodeId>, Exceptions);
|
type BindingContext = (NodeId, Option<NodeId>, Exceptions);
|
||||||
|
|
||||||
pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut Vec<Diagnostic>) {
|
pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut Vec<Diagnostic>) {
|
||||||
// Collect all unused imports by statement.
|
// Collect all unused imports by statement.
|
||||||
|
|
@ -117,11 +115,8 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let full_name = match &binding.kind {
|
let Some(qualified_name) = binding.qualified_name() else {
|
||||||
BindingKind::Importation(Importation { full_name }) => full_name,
|
continue;
|
||||||
BindingKind::FromImportation(FromImportation { full_name }) => full_name.as_str(),
|
|
||||||
BindingKind::SubmoduleImportation(SubmoduleImportation { full_name }) => full_name,
|
|
||||||
_ => continue,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let stmt_id = binding.source.unwrap();
|
let stmt_id = binding.source.unwrap();
|
||||||
|
|
@ -144,12 +139,12 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
ignored
|
ignored
|
||||||
.entry((stmt_id, parent_id, exceptions))
|
.entry((stmt_id, parent_id, exceptions))
|
||||||
.or_default()
|
.or_default()
|
||||||
.push((full_name, &binding.range));
|
.push((qualified_name, binding.range));
|
||||||
} else {
|
} else {
|
||||||
unused
|
unused
|
||||||
.entry((stmt_id, parent_id, exceptions))
|
.entry((stmt_id, parent_id, exceptions))
|
||||||
.or_default()
|
.or_default()
|
||||||
.push((full_name, &binding.range));
|
.push((qualified_name, binding.range));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -170,7 +165,9 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
|
|
||||||
let fix = if !in_init && !in_except_handler && checker.patch(Rule::UnusedImport) {
|
let fix = if !in_init && !in_except_handler && checker.patch(Rule::UnusedImport) {
|
||||||
autofix::edits::remove_unused_imports(
|
autofix::edits::remove_unused_imports(
|
||||||
unused_imports.iter().map(|(full_name, _)| *full_name),
|
unused_imports
|
||||||
|
.iter()
|
||||||
|
.map(|(qualified_name, _)| *qualified_name),
|
||||||
stmt,
|
stmt,
|
||||||
parent,
|
parent,
|
||||||
checker.locator,
|
checker.locator,
|
||||||
|
|
@ -182,10 +179,10 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
None
|
None
|
||||||
};
|
};
|
||||||
|
|
||||||
for (full_name, range) in unused_imports {
|
for (qualified_name, range) in unused_imports {
|
||||||
let mut diagnostic = Diagnostic::new(
|
let mut diagnostic = Diagnostic::new(
|
||||||
UnusedImport {
|
UnusedImport {
|
||||||
name: full_name.to_string(),
|
name: qualified_name.to_string(),
|
||||||
context: if in_except_handler {
|
context: if in_except_handler {
|
||||||
Some(UnusedImportContext::ExceptHandler)
|
Some(UnusedImportContext::ExceptHandler)
|
||||||
} else if in_init {
|
} else if in_init {
|
||||||
|
|
@ -195,7 +192,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
},
|
},
|
||||||
multiple,
|
multiple,
|
||||||
},
|
},
|
||||||
*range,
|
range,
|
||||||
);
|
);
|
||||||
if stmt.is_import_from_stmt() {
|
if stmt.is_import_from_stmt() {
|
||||||
diagnostic.set_parent(stmt.start());
|
diagnostic.set_parent(stmt.start());
|
||||||
|
|
@ -222,10 +219,10 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
let multiple = unused_imports.len() > 1;
|
let multiple = unused_imports.len() > 1;
|
||||||
let in_except_handler =
|
let in_except_handler =
|
||||||
exceptions.intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
|
exceptions.intersects(Exceptions::MODULE_NOT_FOUND_ERROR | Exceptions::IMPORT_ERROR);
|
||||||
for (full_name, range) in unused_imports {
|
for (qualified_name, range) in unused_imports {
|
||||||
let mut diagnostic = Diagnostic::new(
|
let mut diagnostic = Diagnostic::new(
|
||||||
UnusedImport {
|
UnusedImport {
|
||||||
name: full_name.to_string(),
|
name: qualified_name.to_string(),
|
||||||
context: if in_except_handler {
|
context: if in_except_handler {
|
||||||
Some(UnusedImportContext::ExceptHandler)
|
Some(UnusedImportContext::ExceptHandler)
|
||||||
} else if in_init {
|
} else if in_init {
|
||||||
|
|
@ -235,7 +232,7 @@ pub(crate) fn unused_import(checker: &Checker, scope: &Scope, diagnostics: &mut
|
||||||
},
|
},
|
||||||
multiple,
|
multiple,
|
||||||
},
|
},
|
||||||
*range,
|
range,
|
||||||
);
|
);
|
||||||
if stmt.is_import_from_stmt() {
|
if stmt.is_import_from_stmt() {
|
||||||
diagnostic.set_parent(stmt.start());
|
diagnostic.set_parent(stmt.start());
|
||||||
|
|
|
||||||
|
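The `unused_import` changes above keep the pattern of bucketing unused bindings by their originating statement before emitting one removal fix per statement. A simplified sketch of that accumulation; in ruff the key is the richer `(NodeId, Option<NodeId>, Exceptions)` tuple and the value carries the binding's qualified name and range, both reduced here for illustration:

```rust
use std::collections::HashMap;

/// Simplified stand-in for a statement identifier.
type StmtId = usize;

/// Groups unused import names by the statement that introduced them, using
/// the `entry(..).or_default().push(..)` accumulation seen in the hunk.
fn group_by_statement(unused: &[(StmtId, &'static str)]) -> HashMap<StmtId, Vec<&'static str>> {
    let mut grouped: HashMap<StmtId, Vec<&'static str>> = HashMap::new();
    for &(stmt_id, qualified_name) in unused {
        grouped.entry(stmt_id).or_default().push(qualified_name);
    }
    grouped
}

fn main() {
    let unused = [(0, "os"), (0, "sys"), (3, "collections.abc")];
    let grouped = group_by_statement(&unused);
    assert_eq!(grouped[&0], vec!["os", "sys"]);
    assert_eq!(grouped[&3], vec!["collections.abc"]);
    println!("one autofix edit per import statement: {} edits", grouped.len());
}
```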
|
@ -0,0 +1,4 @@
|
||||||
|
---
|
||||||
|
source: crates/ruff/src/rules/pyflakes/mod.rs
|
||||||
|
---
|
||||||
|
|
||||||
|
|
@ -12,7 +12,7 @@ F504.py:3:1: F504 [*] `%`-format string has unused named argument(s): b
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: b
|
= help: Remove extra named arguments: b
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
1 1 | # Ruff has no way of knowing if the following are F505s
|
1 1 | # Ruff has no way of knowing if the following are F505s
|
||||||
2 2 | a = "wrong"
|
2 2 | a = "wrong"
|
||||||
3 |-"%(a)s %(c)s" % {a: "?", "b": "!"} # F504 ("b" not used)
|
3 |-"%(a)s %(c)s" % {a: "?", "b": "!"} # F504 ("b" not used)
|
||||||
|
|
@ -31,7 +31,7 @@ F504.py:8:1: F504 [*] `%`-format string has unused named argument(s): b
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: b
|
= help: Remove extra named arguments: b
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
5 5 | hidden = {"a": "!"}
|
5 5 | hidden = {"a": "!"}
|
||||||
6 6 | "%(a)s %(c)s" % {"x": 1, **hidden} # Ok (cannot see through splat)
|
6 6 | "%(a)s %(c)s" % {"x": 1, **hidden} # Ok (cannot see through splat)
|
||||||
7 7 |
|
7 7 |
|
||||||
|
|
@ -47,7 +47,7 @@ F504.py:9:1: F504 [*] `%`-format string has unused named argument(s): b
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: b
|
= help: Remove extra named arguments: b
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
6 6 | "%(a)s %(c)s" % {"x": 1, **hidden} # Ok (cannot see through splat)
|
6 6 | "%(a)s %(c)s" % {"x": 1, **hidden} # Ok (cannot see through splat)
|
||||||
7 7 |
|
7 7 |
|
||||||
8 8 | "%(a)s" % {"a": 1, r"b": "!"} # F504 ("b" not used)
|
8 8 | "%(a)s" % {"a": 1, r"b": "!"} # F504 ("b" not used)
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ F50x.py:8:1: F504 [*] `%`-format string has unused named argument(s): baz
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: baz
|
= help: Remove extra named arguments: baz
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
5 5 | '%s %s' % (1,) # F507
|
5 5 | '%s %s' % (1,) # F507
|
||||||
6 6 | '%s %s' % (1, 2, 3) # F507
|
6 6 | '%s %s' % (1, 2, 3) # F507
|
||||||
7 7 | '%(bar)s' % {} # F505
|
7 7 | '%(bar)s' % {} # F505
|
||||||
|
|
|
||||||
|
|
@ -10,7 +10,7 @@ F522.py:1:1: F522 [*] `.format` call has unused named argument(s): bar
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: bar
|
= help: Remove extra named arguments: bar
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
1 |-"{}".format(1, bar=2) # F522
|
1 |-"{}".format(1, bar=2) # F522
|
||||||
1 |+"{}".format(1, ) # F522
|
1 |+"{}".format(1, ) # F522
|
||||||
2 2 | "{bar}{}".format(1, bar=2, spam=3) # F522
|
2 2 | "{bar}{}".format(1, bar=2, spam=3) # F522
|
||||||
|
|
@ -27,7 +27,7 @@ F522.py:2:1: F522 [*] `.format` call has unused named argument(s): spam
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: spam
|
= help: Remove extra named arguments: spam
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
1 1 | "{}".format(1, bar=2) # F522
|
1 1 | "{}".format(1, bar=2) # F522
|
||||||
2 |-"{bar}{}".format(1, bar=2, spam=3) # F522
|
2 |-"{bar}{}".format(1, bar=2, spam=3) # F522
|
||||||
2 |+"{bar}{}".format(1, bar=2, ) # F522
|
2 |+"{bar}{}".format(1, bar=2, ) # F522
|
||||||
|
|
@ -43,7 +43,7 @@ F522.py:4:1: F522 [*] `.format` call has unused named argument(s): eggs, ham
|
||||||
|
|
|
|
||||||
= help: Remove extra named arguments: eggs, ham
|
= help: Remove extra named arguments: eggs, ham
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
1 1 | "{}".format(1, bar=2) # F522
|
1 1 | "{}".format(1, bar=2) # F522
|
||||||
2 2 | "{bar}{}".format(1, bar=2, spam=3) # F522
|
2 2 | "{bar}{}".format(1, bar=2, spam=3) # F522
|
||||||
3 3 | "{bar:{spam}}".format(bar=2, spam=3) # No issues
|
3 3 | "{bar:{spam}}".format(bar=2, spam=3) # No issues
|
||||||
|
|
|
||||||
|
|
@ -11,7 +11,7 @@ F523.py:2:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 1
|
= help: Remove extra positional arguments at position(s): 1
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
1 1 | # With indexes
|
1 1 | # With indexes
|
||||||
2 |-"{0}".format(1, 2) # F523
|
2 |-"{0}".format(1, 2) # F523
|
||||||
2 |+"{0}".format(1, ) # F523
|
2 |+"{0}".format(1, ) # F523
|
||||||
|
|
@ -19,7 +19,7 @@ F523.py:2:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
4 4 | "{1:{0}}".format(1, 2) # No issues
|
4 4 | "{1:{0}}".format(1, 2) # No issues
|
||||||
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
||||||
|
|
||||||
F523.py:3:1: F523 [*] `.format` call has unused arguments at position(s): 0, 2
|
F523.py:3:1: F523 `.format` call has unused arguments at position(s): 0, 2
|
||||||
|
|
|
|
||||||
3 | # With indexes
|
3 | # With indexes
|
||||||
4 | "{0}".format(1, 2) # F523
|
4 | "{0}".format(1, 2) # F523
|
||||||
|
|
@ -30,15 +30,6 @@ F523.py:3:1: F523 [*] `.format` call has unused arguments at position(s): 0, 2
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 0, 2
|
= help: Remove extra positional arguments at position(s): 0, 2
|
||||||
|
|
||||||
ℹ Suggested fix
|
|
||||||
1 1 | # With indexes
|
|
||||||
2 2 | "{0}".format(1, 2) # F523
|
|
||||||
3 |-"{1}".format(1, 2, 3) # F523
|
|
||||||
3 |+"{0}".format(2, ) # F523
|
|
||||||
4 4 | "{1:{0}}".format(1, 2) # No issues
|
|
||||||
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
|
||||||
6 6 | "{0}{2}".format(1, 2) # F523, # F524
|
|
||||||
|
|
||||||
F523.py:5:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
F523.py:5:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
|
||||||
5 | "{1}".format(1, 2, 3) # F523
|
5 | "{1}".format(1, 2, 3) # F523
|
||||||
|
|
@ -50,7 +41,7 @@ F523.py:5:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 2
|
= help: Remove extra positional arguments at position(s): 2
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
2 2 | "{0}".format(1, 2) # F523
|
2 2 | "{0}".format(1, 2) # F523
|
||||||
3 3 | "{1}".format(1, 2, 3) # F523
|
3 3 | "{1}".format(1, 2, 3) # F523
|
||||||
4 4 | "{1:{0}}".format(1, 2) # No issues
|
4 4 | "{1:{0}}".format(1, 2) # No issues
|
||||||
|
|
@ -70,7 +61,7 @@ F523.py:6:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 1
|
= help: Remove extra positional arguments at position(s): 1
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
3 3 | "{1}".format(1, 2, 3) # F523
|
3 3 | "{1}".format(1, 2, 3) # F523
|
||||||
4 4 | "{1:{0}}".format(1, 2) # No issues
|
4 4 | "{1:{0}}".format(1, 2) # No issues
|
||||||
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
||||||
|
|
@ -80,7 +71,7 @@ F523.py:6:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
8 8 |
|
8 8 |
|
||||||
9 9 | # With no indexes
|
9 9 | # With no indexes
|
||||||
|
|
||||||
F523.py:7:1: F523 [*] `.format` call has unused arguments at position(s): 0, 3
|
F523.py:7:1: F523 `.format` call has unused arguments at position(s): 0, 3
|
||||||
|
|
|
|
||||||
7 | "{1:{0}}".format(1, 2, 3) # F523
|
7 | "{1:{0}}".format(1, 2, 3) # F523
|
||||||
8 | "{0}{2}".format(1, 2) # F523, # F524
|
8 | "{0}{2}".format(1, 2) # F523, # F524
|
||||||
|
|
@ -91,16 +82,6 @@ F523.py:7:1: F523 [*] `.format` call has unused arguments at position(s): 0, 3
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 0, 3
|
= help: Remove extra positional arguments at position(s): 0, 3
|
||||||
|
|
||||||
ℹ Suggested fix
|
|
||||||
4 4 | "{1:{0}}".format(1, 2) # No issues
|
|
||||||
5 5 | "{1:{0}}".format(1, 2, 3) # F523
|
|
||||||
6 6 | "{0}{2}".format(1, 2) # F523, # F524
|
|
||||||
7 |-"{1.arg[1]!r:0{2['arg']}{1}}".format(1, 2, 3, 4) # F523
|
|
||||||
7 |+"{0.arg[1]!r:0{1['arg']}{0}}".format(2, 3, ) # F523
|
|
||||||
8 8 |
|
|
||||||
9 9 | # With no indexes
|
|
||||||
10 10 | "{}".format(1, 2) # F523
|
|
||||||
|
|
||||||
F523.py:10:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
F523.py:10:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
|
|
|
|
||||||
10 | # With no indexes
|
10 | # With no indexes
|
||||||
|
|
@ -111,7 +92,7 @@ F523.py:10:1: F523 [*] `.format` call has unused arguments at position(s): 1
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 1
|
= help: Remove extra positional arguments at position(s): 1
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
7 7 | "{1.arg[1]!r:0{2['arg']}{1}}".format(1, 2, 3, 4) # F523
|
7 7 | "{1.arg[1]!r:0{2['arg']}{1}}".format(1, 2, 3, 4) # F523
|
||||||
8 8 |
|
8 8 |
|
||||||
9 9 | # With no indexes
|
9 9 | # With no indexes
|
||||||
|
|
@ -132,7 +113,7 @@ F523.py:11:1: F523 [*] `.format` call has unused arguments at position(s): 1, 2
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 1, 2
|
= help: Remove extra positional arguments at position(s): 1, 2
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
8 8 |
|
8 8 |
|
||||||
9 9 | # With no indexes
|
9 9 | # With no indexes
|
||||||
10 10 | "{}".format(1, 2) # F523
|
10 10 | "{}".format(1, 2) # F523
|
||||||
|
|
@ -153,7 +134,7 @@ F523.py:13:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 2
|
= help: Remove extra positional arguments at position(s): 2
|
||||||
|
|
||||||
ℹ Suggested fix
|
ℹ Fix
|
||||||
10 10 | "{}".format(1, 2) # F523
|
10 10 | "{}".format(1, 2) # F523
|
||||||
11 11 | "{}".format(1, 2, 3) # F523
|
11 11 | "{}".format(1, 2, 3) # F523
|
||||||
12 12 | "{:{}}".format(1, 2) # No issues
|
12 12 | "{:{}}".format(1, 2) # No issues
|
||||||
|
|
@ -163,20 +144,117 @@ F523.py:13:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
15 15 | # With *args
|
15 15 | # With *args
|
||||||
16 16 | "{0}{1}".format(*args) # No issues
|
16 16 | "{0}{1}".format(*args) # No issues
|
||||||
|
|
||||||
F523.py:19:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
F523.py:19:1: F523 `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
|
||||||
19 | "{0}{1}".format(1, *args) # No issues
|
19 | "{0}{1}".format(1, *args) # No issues
|
||||||
20 | "{0}{1}".format(1, 2, *args) # No issues
|
20 | "{0}{1}".format(1, 2, *args) # No issues
|
||||||
21 | "{0}{1}".format(1, 2, 3, *args) # F523
|
21 | "{0}{1}".format(1, 2, 3, *args) # F523
|
||||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
22 |
|
||||||
|
23 | # With nested quotes
|
||||||
|
|
|
|
||||||
= help: Remove extra positional arguments at position(s): 2
|
= help: Remove extra positional arguments at position(s): 2
|
||||||
|
|
||||||
ℹ Suggested fix
|
F523.py:22:1: F523 [*] `.format` call has unused arguments at position(s): 1, 2
|
||||||
16 16 | "{0}{1}".format(*args) # No issues
|
|
|
||||||
17 17 | "{0}{1}".format(1, *args) # No issues
|
22 | # With nested quotes
|
||||||
18 18 | "{0}{1}".format(1, 2, *args) # No issues
|
23 | "''1{0}".format(1, 2, 3) # F523
|
||||||
19 |-"{0}{1}".format(1, 2, 3, *args) # F523
|
| ^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
19 |+"{0}{1}".format(1, 2, *args) # F523
|
24 | "\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
25 | '""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 1, 2
|
||||||
|
|
||||||
|
ℹ Fix
|
||||||
|
19 19 | "{0}{1}".format(1, 2, 3, *args) # F523
|
||||||
|
20 20 |
|
||||||
|
21 21 | # With nested quotes
|
||||||
|
22 |-"''1{0}".format(1, 2, 3) # F523
|
||||||
|
22 |+"''1{0}".format(1, ) # F523
|
||||||
|
23 23 | "\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
24 24 | '""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
25 25 |
|
||||||
|
|
||||||
|
F523.py:23:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
||||||
|
23 | # With nested quotes
|
||||||
|
24 | "''1{0}".format(1, 2, 3) # F523
|
||||||
|
25 | "\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
26 | '""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 2
|
||||||
|
|
||||||
|
ℹ Fix
|
||||||
|
20 20 |
|
||||||
|
21 21 | # With nested quotes
|
||||||
|
22 22 | "''1{0}".format(1, 2, 3) # F523
|
||||||
|
23 |-"\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
23 |+"\"\"{1}{0}".format(1, 2, ) # F523
|
||||||
|
24 24 | '""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
25 25 |
|
||||||
|
26 26 | # With modified indexes
|
||||||
|
|
||||||
|
F523.py:24:1: F523 [*] `.format` call has unused arguments at position(s): 2
|
||||||
|
|
|
||||||
|
24 | "''1{0}".format(1, 2, 3) # F523
|
||||||
|
25 | "\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
26 | '""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
27 |
|
||||||
|
28 | # With modified indexes
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 2
|
||||||
|
|
||||||
|
ℹ Fix
|
||||||
|
21 21 | # With nested quotes
|
||||||
|
22 22 | "''1{0}".format(1, 2, 3) # F523
|
||||||
|
23 23 | "\"\"{1}{0}".format(1, 2, 3) # F523
|
||||||
|
24 |-'""{1}{0}'.format(1, 2, 3) # F523
|
||||||
|
24 |+'""{1}{0}'.format(1, 2, ) # F523
|
||||||
|
25 25 |
|
||||||
|
26 26 | # With modified indexes
|
||||||
|
27 27 | "{1}{2}".format(1, 2, 3) # F523, # F524
|
||||||
|
|
||||||
|
F523.py:27:1: F523 `.format` call has unused arguments at position(s): 0
|
||||||
|
|
|
||||||
|
27 | # With modified indexes
|
||||||
|
28 | "{1}{2}".format(1, 2, 3) # F523, # F524
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
29 | "{1}{3}".format(1, 2, 3, 4) # F523, # F524
|
||||||
|
30 | "{1} {8}".format(0, 1) # F523, # F524
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 0
|
||||||
|
|
||||||
|
F523.py:28:1: F523 `.format` call has unused arguments at position(s): 0, 2
|
||||||
|
|
|
||||||
|
28 | # With modified indexes
|
||||||
|
29 | "{1}{2}".format(1, 2, 3) # F523, # F524
|
||||||
|
30 | "{1}{3}".format(1, 2, 3, 4) # F523, # F524
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
31 | "{1} {8}".format(0, 1) # F523, # F524
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 0, 2
|
||||||
|
|
||||||
|
F523.py:29:1: F523 `.format` call has unused arguments at position(s): 0
|
||||||
|
|
|
||||||
|
29 | "{1}{2}".format(1, 2, 3) # F523, # F524
|
||||||
|
30 | "{1}{3}".format(1, 2, 3, 4) # F523, # F524
|
||||||
|
31 | "{1} {8}".format(0, 1) # F523, # F524
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^ F523
|
||||||
|
32 |
|
||||||
|
33 | # Not fixable
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 0
|
||||||
|
|
||||||
|
F523.py:32:2: F523 `.format` call has unused arguments at position(s): 0
|
||||||
|
|
|
||||||
|
32 | # Not fixable
|
||||||
|
33 | (''
|
||||||
|
| __^
|
||||||
|
34 | | .format(2))
|
||||||
|
| |__________^ F523
|
||||||
|
|
|
||||||
|
= help: Remove extra positional arguments at position(s): 0
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
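All of the F523 cases above boil down to one check: gather the positional indices that the `{N}` placeholders reference, including placeholders nested inside a format spec such as `{1:{0}}`, and report argument positions that are never used. A standalone sketch of that idea, assuming a simplified placeholder scanner rather than ruff's real format-string parser (which also handles `*args`, attribute and index accesses, and keyword fields):

```rust
fn unused_positions(format_string: &str, num_args: usize) -> Vec<usize> {
    let mut used = vec![false; num_args];
    let mut auto_index = 0usize;
    let mut chars = format_string.chars().peekable();
    while let Some(c) = chars.next() {
        if c != '{' {
            continue;
        }
        // `{{` is an escaped brace, not a placeholder.
        if chars.peek() == Some(&'{') {
            chars.next();
            continue;
        }
        // Read an optional explicit index; bare `{}` placeholders are auto-numbered.
        let mut digits = String::new();
        while let Some(&d) = chars.peek() {
            if d.is_ascii_digit() {
                digits.push(d);
                chars.next();
            } else {
                break;
            }
        }
        let index = if digits.is_empty() {
            let i = auto_index;
            auto_index += 1;
            i
        } else {
            digits.parse().unwrap()
        };
        if index < num_args {
            used[index] = true;
        }
    }
    (0..num_args).filter(|&i| !used[i]).collect()
}

fn main() {
    // Mirrors `"{0}".format(1, 2)` from the snapshot: position 1 is unused.
    assert_eq!(unused_positions("{0}", 2), vec![1]);
    // Mirrors `"{1}{2}".format(1, 2, 3)`: position 0 is unused.
    assert_eq!(unused_positions("{1}{2}", 3), vec![0]);
    // Nested specs count: `"{1:{0}}".format(1, 2)` uses both arguments.
    assert!(unused_positions("{1:{0}}", 2).is_empty());
}
```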
|
|
@ -45,6 +45,7 @@ F524.py:5:1: F524 `.format` call is missing argument(s) for placeholder(s): 0, b
|
||||||
7 | "{0} {bar}".format() # F524
|
7 | "{0} {bar}".format() # F524
|
||||||
| ^^^^^^^^^^^^^^^^^^^^ F524
|
| ^^^^^^^^^^^^^^^^^^^^ F524
|
||||||
8 | "{bar} {0}".format() # F524
|
8 | "{bar} {0}".format() # F524
|
||||||
|
9 | "{1} {8}".format(0, 1)
|
||||||
|
|
|
|
||||||
|
|
||||||
F524.py:6:1: F524 `.format` call is missing argument(s) for placeholder(s): 0, bar
|
F524.py:6:1: F524 `.format` call is missing argument(s) for placeholder(s): 0, bar
|
||||||
|
|
@ -53,6 +54,15 @@ F524.py:6:1: F524 `.format` call is missing argument(s) for placeholder(s): 0, b
|
||||||
7 | "{0} {bar}".format() # F524
|
7 | "{0} {bar}".format() # F524
|
||||||
8 | "{bar} {0}".format() # F524
|
8 | "{bar} {0}".format() # F524
|
||||||
| ^^^^^^^^^^^^^^^^^^^^ F524
|
| ^^^^^^^^^^^^^^^^^^^^ F524
|
||||||
|
9 | "{1} {8}".format(0, 1)
|
||||||
|
|
|
||||||
|
|
||||||
|
F524.py:7:1: F524 `.format` call is missing argument(s) for placeholder(s): 8
|
||||||
|
|
|
||||||
|
7 | "{0} {bar}".format() # F524
|
||||||
|
8 | "{bar} {0}".format() # F524
|
||||||
|
9 | "{1} {8}".format(0, 1)
|
||||||
|
| ^^^^^^^^^^^^^^^^^^^^^^ F524
|
||||||
|
|
|
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -52,8 +52,8 @@ mod tests {
|
||||||
#[test_case(Rule::ImportSelf, Path::new("import_self/module.py"))]
|
#[test_case(Rule::ImportSelf, Path::new("import_self/module.py"))]
|
||||||
#[test_case(Rule::InvalidAllFormat, Path::new("invalid_all_format.py"))]
|
#[test_case(Rule::InvalidAllFormat, Path::new("invalid_all_format.py"))]
|
||||||
#[test_case(Rule::InvalidAllObject, Path::new("invalid_all_object.py"))]
|
#[test_case(Rule::InvalidAllObject, Path::new("invalid_all_object.py"))]
|
||||||
|
#[test_case(Rule::InvalidStrReturnType, Path::new("invalid_return_type_str.py"))]
|
||||||
#[test_case(Rule::DuplicateBases, Path::new("duplicate_bases.py"))]
|
#[test_case(Rule::DuplicateBases, Path::new("duplicate_bases.py"))]
|
||||||
#[test_case(Rule::DuplicateValue, Path::new("duplicate_value.py"))]
|
|
||||||
#[test_case(Rule::InvalidCharacterBackspace, Path::new("invalid_characters.py"))]
|
#[test_case(Rule::InvalidCharacterBackspace, Path::new("invalid_characters.py"))]
|
||||||
#[test_case(Rule::InvalidCharacterEsc, Path::new("invalid_characters.py"))]
|
#[test_case(Rule::InvalidCharacterEsc, Path::new("invalid_characters.py"))]
|
||||||
#[test_case(Rule::InvalidCharacterNul, Path::new("invalid_characters.py"))]
|
#[test_case(Rule::InvalidCharacterNul, Path::new("invalid_characters.py"))]
|
||||||
|
|
|
||||||
|
|
@ -3,12 +3,13 @@ use std::str::FromStr;
|
||||||
use ruff_text_size::TextRange;
|
use ruff_text_size::TextRange;
|
||||||
use rustc_hash::FxHashMap;
|
use rustc_hash::FxHashMap;
|
||||||
use rustpython_format::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
|
use rustpython_format::cformat::{CFormatPart, CFormatSpec, CFormatStrOrBytes, CFormatString};
|
||||||
use rustpython_parser::ast::{self, Constant, Expr, Operator, Ranged};
|
use rustpython_parser::ast::{self, Constant, Expr, Ranged};
|
||||||
use rustpython_parser::{lexer, Mode, Tok};
|
use rustpython_parser::{lexer, Mode, Tok};
|
||||||
|
|
||||||
use ruff_diagnostics::{Diagnostic, Violation};
|
use ruff_diagnostics::{Diagnostic, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_ast::str::{leading_quote, trailing_quote};
|
use ruff_python_ast::str::{leading_quote, trailing_quote};
|
||||||
|
use ruff_python_semantic::analyze::type_inference::PythonType;
|
||||||
|
|
||||||
use crate::checkers::ast::Checker;
|
use crate::checkers::ast::Checker;
|
||||||
|
|
||||||
|
|
@ -38,87 +39,6 @@ impl Violation for BadStringFormatType {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
|
||||||
enum DataType {
|
|
||||||
String,
|
|
||||||
Integer,
|
|
||||||
Float,
|
|
||||||
Object,
|
|
||||||
Unknown,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl From<&Expr> for DataType {
|
|
||||||
fn from(expr: &Expr) -> Self {
|
|
||||||
match expr {
|
|
||||||
Expr::NamedExpr(ast::ExprNamedExpr { value, .. }) => (&**value).into(),
|
|
||||||
Expr::UnaryOp(ast::ExprUnaryOp { operand, .. }) => (&**operand).into(),
|
|
||||||
Expr::Dict(_) => DataType::Object,
|
|
||||||
Expr::Set(_) => DataType::Object,
|
|
||||||
Expr::ListComp(_) => DataType::Object,
|
|
||||||
Expr::SetComp(_) => DataType::Object,
|
|
||||||
Expr::DictComp(_) => DataType::Object,
|
|
||||||
Expr::GeneratorExp(_) => DataType::Object,
|
|
||||||
Expr::JoinedStr(_) => DataType::String,
|
|
||||||
Expr::BinOp(ast::ExprBinOp { left, op, .. }) => {
|
|
||||||
// Ex) "a" % "b"
|
|
||||||
if matches!(
|
|
||||||
left.as_ref(),
|
|
||||||
Expr::Constant(ast::ExprConstant {
|
|
||||||
value: Constant::Str(..),
|
|
||||||
..
|
|
||||||
})
|
|
||||||
) && matches!(op, Operator::Mod)
|
|
||||||
{
|
|
||||||
return DataType::String;
|
|
||||||
}
|
|
||||||
DataType::Unknown
|
|
||||||
}
|
|
||||||
Expr::Constant(ast::ExprConstant { value, .. }) => match value {
|
|
||||||
Constant::Str(_) => DataType::String,
|
|
||||||
Constant::Int(_) => DataType::Integer,
|
|
||||||
Constant::Float(_) => DataType::Float,
|
|
||||||
_ => DataType::Unknown,
|
|
||||||
},
|
|
||||||
Expr::List(_) => DataType::Object,
|
|
||||||
Expr::Tuple(_) => DataType::Object,
|
|
||||||
_ => DataType::Unknown,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DataType {
|
|
||||||
fn is_compatible_with(self, format: FormatType) -> bool {
|
|
||||||
match self {
|
|
||||||
DataType::String => matches!(
|
|
||||||
format,
|
|
||||||
FormatType::Unknown | FormatType::String | FormatType::Repr
|
|
||||||
),
|
|
||||||
DataType::Object => matches!(
|
|
||||||
format,
|
|
||||||
FormatType::Unknown | FormatType::String | FormatType::Repr
|
|
||||||
),
|
|
||||||
DataType::Integer => matches!(
|
|
||||||
format,
|
|
||||||
FormatType::Unknown
|
|
||||||
| FormatType::String
|
|
||||||
| FormatType::Repr
|
|
||||||
| FormatType::Integer
|
|
||||||
| FormatType::Float
|
|
||||||
| FormatType::Number
|
|
||||||
),
|
|
||||||
DataType::Float => matches!(
|
|
||||||
format,
|
|
||||||
FormatType::Unknown
|
|
||||||
| FormatType::String
|
|
||||||
| FormatType::Repr
|
|
||||||
| FormatType::Float
|
|
||||||
| FormatType::Number
|
|
||||||
),
|
|
||||||
DataType::Unknown => true,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
enum FormatType {
|
enum FormatType {
|
||||||
Repr,
|
Repr,
|
||||||
|
|
@ -129,6 +49,45 @@ enum FormatType {
|
||||||
Unknown,
|
Unknown,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl FormatType {
|
||||||
|
fn is_compatible_with(self, data_type: PythonType) -> bool {
|
||||||
|
match data_type {
|
||||||
|
PythonType::String
|
||||||
|
| PythonType::Bytes
|
||||||
|
| PythonType::List
|
||||||
|
| PythonType::Dict
|
||||||
|
| PythonType::Set
|
||||||
|
| PythonType::Tuple
|
||||||
|
| PythonType::Generator
|
||||||
|
| PythonType::Complex
|
||||||
|
| PythonType::Bool
|
||||||
|
| PythonType::Ellipsis
|
||||||
|
| PythonType::None => matches!(
|
||||||
|
self,
|
||||||
|
FormatType::Unknown | FormatType::String | FormatType::Repr
|
||||||
|
),
|
||||||
|
PythonType::Integer => matches!(
|
||||||
|
self,
|
||||||
|
FormatType::Unknown
|
||||||
|
| FormatType::String
|
||||||
|
| FormatType::Repr
|
||||||
|
| FormatType::Integer
|
||||||
|
| FormatType::Float
|
||||||
|
| FormatType::Number
|
||||||
|
),
|
||||||
|
PythonType::Float => matches!(
|
||||||
|
self,
|
||||||
|
FormatType::Unknown
|
||||||
|
| FormatType::String
|
||||||
|
| FormatType::Repr
|
||||||
|
| FormatType::Float
|
||||||
|
| FormatType::Number
|
||||||
|
),
|
||||||
|
PythonType::Unknown => true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl From<char> for FormatType {
|
impl From<char> for FormatType {
|
||||||
fn from(format: char) -> Self {
|
fn from(format: char) -> Self {
|
||||||
match format {
|
match format {
|
||||||
|
|
@ -159,9 +118,9 @@ fn collect_specs(formats: &[CFormatStrOrBytes<String>]) -> Vec<&CFormatSpec> {
|
||||||
|
|
||||||
/// Return `true` if the format string is equivalent to the constant type
|
/// Return `true` if the format string is equivalent to the constant type
|
||||||
fn equivalent(format: &CFormatSpec, value: &Expr) -> bool {
|
fn equivalent(format: &CFormatSpec, value: &Expr) -> bool {
|
||||||
let constant: DataType = value.into();
|
|
||||||
let format: FormatType = format.format_char.into();
|
let format: FormatType = format.format_char.into();
|
||||||
constant.is_compatible_with(format)
|
let constant: PythonType = value.into();
|
||||||
|
format.is_compatible_with(constant)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Return `true` if the [`Constant`] aligns with the format type.
|
/// Return `true` if the [`Constant`] aligns with the format type.
|
||||||
|
|
|
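The hunk above flips the direction of the `BadStringFormatType` check: instead of converting the right-hand value into a rule-local `DataType`, the format character is now asked whether it accepts the `PythonType` inferred by `ruff_python_semantic`. A self-contained sketch of that compatibility matrix, using simplified stand-in enums and mirroring the arms of the new `FormatType::is_compatible_with`:

```rust
// Simplified stand-ins for `PythonType` and `FormatType`.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum PyType {
    String,
    Integer,
    Float,
    Unknown,
}

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum FormatChar {
    String,  // %s
    Repr,    // %r
    Integer, // %d
    Float,   // %f
    Unknown,
}

fn is_compatible(format: FormatChar, value: PyType) -> bool {
    match value {
        // Everything has a `str()`/`repr()` form.
        PyType::String => matches!(
            format,
            FormatChar::Unknown | FormatChar::String | FormatChar::Repr
        ),
        // Integers additionally satisfy the numeric conversions.
        PyType::Integer => matches!(
            format,
            FormatChar::Unknown
                | FormatChar::String
                | FormatChar::Repr
                | FormatChar::Integer
                | FormatChar::Float
        ),
        // Per the matrix shown in the diff, floats accept float conversions but not `%d`.
        PyType::Float => matches!(
            format,
            FormatChar::Unknown | FormatChar::String | FormatChar::Repr | FormatChar::Float
        ),
        // If inference gives up, assume the author knows what they are doing.
        PyType::Unknown => true,
    }
}

fn main() {
    // `"%d" % "one"` would be flagged by this rule ...
    assert!(!is_compatible(FormatChar::Integer, PyType::String));
    // ... while `"%s" % 1` is fine.
    assert!(is_compatible(FormatChar::String, PyType::Integer));
}
```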
||||||
|
|
@ -0,0 +1,60 @@
|
||||||
|
use rustpython_parser::ast::{Ranged, Stmt};
|
||||||
|
|
||||||
|
use ruff_diagnostics::{Diagnostic, Violation};
|
||||||
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
|
use ruff_python_ast::{helpers::ReturnStatementVisitor, statement_visitor::StatementVisitor};
|
||||||
|
use ruff_python_semantic::analyze::type_inference::PythonType;
|
||||||
|
|
||||||
|
use crate::checkers::ast::Checker;
|
||||||
|
|
||||||
|
/// ## What it does
|
||||||
|
/// Checks for `__str__` implementations that return a type other than `str`.
|
||||||
|
///
|
||||||
|
/// ## Why is this bad?
|
||||||
|
/// The `__str__` method should return a `str` object. Returning a different
|
||||||
|
/// type may cause unexpected behavior.
|
||||||
|
#[violation]
|
||||||
|
pub struct InvalidStrReturnType;
|
||||||
|
|
||||||
|
impl Violation for InvalidStrReturnType {
|
||||||
|
#[derive_message_formats]
|
||||||
|
fn message(&self) -> String {
|
||||||
|
format!("`__str__` does not return `str`")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// E0307
|
||||||
|
pub(crate) fn invalid_str_return(checker: &mut Checker, name: &str, body: &[Stmt]) {
|
||||||
|
if name != "__str__" {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if !checker.semantic_model().scope().kind.is_class() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let returns = {
|
||||||
|
let mut visitor = ReturnStatementVisitor::default();
|
||||||
|
visitor.visit_body(body);
|
||||||
|
visitor.returns
|
||||||
|
};
|
||||||
|
|
||||||
|
for stmt in returns {
|
||||||
|
if let Some(value) = stmt.value.as_deref() {
|
||||||
|
// Disallow other, non-`str` return types.
|
||||||
|
if !matches!(
|
||||||
|
PythonType::from(value),
|
||||||
|
PythonType::String | PythonType::Unknown
|
||||||
|
) {
|
||||||
|
checker
|
||||||
|
.diagnostics
|
||||||
|
.push(Diagnostic::new(InvalidStrReturnType, value.range()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Disallow implicit `None`.
|
||||||
|
checker
|
||||||
|
.diagnostics
|
||||||
|
.push(Diagnostic::new(InvalidStrReturnType, stmt.range()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
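The new E0307 rule above visits every `return` in a `__str__` body and flags any whose value is provably not a string, as well as bare `return`, which implicitly yields `None`. A reduced, self-contained model of that decision, with `Value` standing in for the `PythonType` inferred from the returned expression:

```rust
#[allow(dead_code)]
enum Value {
    Str(&'static str),
    Int(i64),
    Bool(bool),
    Unknown,
}

enum Ret {
    // `return <expr>`
    Explicit(Value),
    // bare `return`, which implicitly returns `None`
    Implicit,
}

fn violations(returns: &[Ret]) -> usize {
    returns
        .iter()
        .filter(|ret| match ret {
            // A definite string, or a value we cannot infer, is accepted.
            Ret::Explicit(Value::Str(_)) | Ret::Explicit(Value::Unknown) => false,
            // Any other explicit value, or an implicit `None`, is a diagnostic.
            Ret::Explicit(_) | Ret::Implicit => true,
        })
        .count()
}

fn main() {
    // Mirrors the snapshot: `return 1` and `return False` are flagged,
    // `return "ok"` is not, and a bare `return` is flagged as implicit `None`.
    let body = [
        Ret::Explicit(Value::Int(1)),
        Ret::Explicit(Value::Bool(false)),
        Ret::Explicit(Value::Str("ok")),
        Ret::Implicit,
    ];
    assert_eq!(violations(&body), 3);
}
```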
|
@ -9,7 +9,6 @@ pub(crate) use compare_to_empty_string::{compare_to_empty_string, CompareToEmpty
|
||||||
pub(crate) use comparison_of_constant::{comparison_of_constant, ComparisonOfConstant};
|
pub(crate) use comparison_of_constant::{comparison_of_constant, ComparisonOfConstant};
|
||||||
pub(crate) use continue_in_finally::{continue_in_finally, ContinueInFinally};
|
pub(crate) use continue_in_finally::{continue_in_finally, ContinueInFinally};
|
||||||
pub(crate) use duplicate_bases::{duplicate_bases, DuplicateBases};
|
pub(crate) use duplicate_bases::{duplicate_bases, DuplicateBases};
|
||||||
pub(crate) use duplicate_value::{duplicate_value, DuplicateValue};
|
|
||||||
pub(crate) use global_statement::{global_statement, GlobalStatement};
|
pub(crate) use global_statement::{global_statement, GlobalStatement};
|
||||||
pub(crate) use global_variable_not_assigned::GlobalVariableNotAssigned;
|
pub(crate) use global_variable_not_assigned::GlobalVariableNotAssigned;
|
||||||
pub(crate) use import_self::{import_from_self, import_self, ImportSelf};
|
pub(crate) use import_self::{import_from_self, import_self, ImportSelf};
|
||||||
|
|
@ -17,6 +16,7 @@ pub(crate) use invalid_all_format::{invalid_all_format, InvalidAllFormat};
|
||||||
pub(crate) use invalid_all_object::{invalid_all_object, InvalidAllObject};
|
pub(crate) use invalid_all_object::{invalid_all_object, InvalidAllObject};
|
||||||
pub(crate) use invalid_envvar_default::{invalid_envvar_default, InvalidEnvvarDefault};
|
pub(crate) use invalid_envvar_default::{invalid_envvar_default, InvalidEnvvarDefault};
|
||||||
pub(crate) use invalid_envvar_value::{invalid_envvar_value, InvalidEnvvarValue};
|
pub(crate) use invalid_envvar_value::{invalid_envvar_value, InvalidEnvvarValue};
|
||||||
|
pub(crate) use invalid_str_return::{invalid_str_return, InvalidStrReturnType};
|
||||||
pub(crate) use invalid_string_characters::{
|
pub(crate) use invalid_string_characters::{
|
||||||
invalid_string_characters, InvalidCharacterBackspace, InvalidCharacterEsc, InvalidCharacterNul,
|
invalid_string_characters, InvalidCharacterBackspace, InvalidCharacterEsc, InvalidCharacterNul,
|
||||||
InvalidCharacterSub, InvalidCharacterZeroWidthSpace,
|
InvalidCharacterSub, InvalidCharacterZeroWidthSpace,
|
||||||
|
|
@ -65,7 +65,6 @@ mod compare_to_empty_string;
|
||||||
mod comparison_of_constant;
|
mod comparison_of_constant;
|
||||||
mod continue_in_finally;
|
mod continue_in_finally;
|
||||||
mod duplicate_bases;
|
mod duplicate_bases;
|
||||||
mod duplicate_value;
|
|
||||||
mod global_statement;
|
mod global_statement;
|
||||||
mod global_variable_not_assigned;
|
mod global_variable_not_assigned;
|
||||||
mod import_self;
|
mod import_self;
|
||||||
|
|
@ -73,6 +72,7 @@ mod invalid_all_format;
|
||||||
mod invalid_all_object;
|
mod invalid_all_object;
|
||||||
mod invalid_envvar_default;
|
mod invalid_envvar_default;
|
||||||
mod invalid_envvar_value;
|
mod invalid_envvar_value;
|
||||||
|
mod invalid_str_return;
|
||||||
mod invalid_string_characters;
|
mod invalid_string_characters;
|
||||||
mod iteration_over_set;
|
mod iteration_over_set;
|
||||||
mod load_before_global_declaration;
|
mod load_before_global_declaration;
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,44 @@
|
||||||
|
---
|
||||||
|
source: crates/ruff/src/rules/pylint/mod.rs
|
||||||
|
---
|
||||||
|
invalid_return_type_str.py:3:16: PLE0307 `__str__` does not return `str`
|
||||||
|
|
|
||||||
|
3 | class Str:
|
||||||
|
4 | def __str__(self):
|
||||||
|
5 | return 1
|
||||||
|
| ^ PLE0307
|
||||||
|
6 |
|
||||||
|
7 | class Float:
|
||||||
|
|
|
||||||
|
|
||||||
|
invalid_return_type_str.py:7:16: PLE0307 `__str__` does not return `str`
|
||||||
|
|
|
||||||
|
7 | class Float:
|
||||||
|
8 | def __str__(self):
|
||||||
|
9 | return 3.05
|
||||||
|
| ^^^^ PLE0307
|
||||||
|
10 |
|
||||||
|
11 | class Int:
|
||||||
|
|
|
||||||
|
|
||||||
|
invalid_return_type_str.py:11:16: PLE0307 `__str__` does not return `str`
|
||||||
|
|
|
||||||
|
11 | class Int:
|
||||||
|
12 | def __str__(self):
|
||||||
|
13 | return 0
|
||||||
|
| ^ PLE0307
|
||||||
|
14 |
|
||||||
|
15 | class Bool:
|
||||||
|
|
|
||||||
|
|
||||||
|
invalid_return_type_str.py:15:16: PLE0307 `__str__` does not return `str`
|
||||||
|
|
|
||||||
|
15 | class Bool:
|
||||||
|
16 | def __str__(self):
|
||||||
|
17 | return False
|
||||||
|
| ^^^^^ PLE0307
|
||||||
|
18 |
|
||||||
|
19 | class Str2:
|
||||||
|
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
---
|
|
||||||
source: crates/ruff/src/rules/pylint/mod.rs
|
|
||||||
---
|
|
||||||
duplicate_value.py:4:35: PLW0130 Duplicate value `"value1"` in set
|
|
||||||
|
|
|
||||||
4 | # Errors.
|
|
||||||
5 | ###
|
|
||||||
6 | incorrect_set = {"value1", 23, 5, "value1"}
|
|
||||||
| ^^^^^^^^ PLW0130
|
|
||||||
7 | incorrect_set = {1, 1}
|
|
||||||
|
|
|
||||||
|
|
||||||
duplicate_value.py:5:21: PLW0130 Duplicate value `1` in set
|
|
||||||
|
|
|
||||||
5 | ###
|
|
||||||
6 | incorrect_set = {"value1", 23, 5, "value1"}
|
|
||||||
7 | incorrect_set = {1, 1}
|
|
||||||
| ^ PLW0130
|
|
||||||
8 |
|
|
||||||
9 | ###
|
|
||||||
|
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use libcst_native::{Codegen, CodegenState, ParenthesizableWhitespace};
|
use libcst_native::ParenthesizableWhitespace;
|
||||||
use ruff_text_size::{TextRange, TextSize};
|
use ruff_text_size::{TextRange, TextSize};
|
||||||
use rustpython_parser::ast::{Expr, Ranged};
|
use rustpython_parser::ast::{Expr, Ranged};
|
||||||
use rustpython_parser::{lexer, Mode, Tok};
|
use rustpython_parser::{lexer, Mode, Tok};
|
||||||
|
|
||||||
|
use crate::autofix::codemods::CodegenStylist;
|
||||||
use ruff_diagnostics::Edit;
|
use ruff_diagnostics::Edit;
|
||||||
use ruff_python_ast::source_code::{Locator, Stylist};
|
use ruff_python_ast::source_code::{Locator, Stylist};
|
||||||
|
|
||||||
|
|
@ -29,14 +30,7 @@ pub(crate) fn adjust_indentation(
|
||||||
let indented_block = match_indented_block(&mut embedding.body)?;
|
let indented_block = match_indented_block(&mut embedding.body)?;
|
||||||
indented_block.indent = Some(indentation);
|
indented_block.indent = Some(indentation);
|
||||||
|
|
||||||
let mut state = CodegenState {
|
let module_text = indented_block.codegen_stylist(stylist);
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
indented_block.codegen(&mut state);
|
|
||||||
|
|
||||||
let module_text = state.to_string();
|
|
||||||
let module_text = module_text
|
let module_text = module_text
|
||||||
.strip_prefix(stylist.line_ending().as_str())
|
.strip_prefix(stylist.line_ending().as_str())
|
||||||
.unwrap()
|
.unwrap()
|
||||||
|
|
@ -61,14 +55,10 @@ pub(crate) fn remove_super_arguments(
|
||||||
body.whitespace_before_args = ParenthesizableWhitespace::default();
|
body.whitespace_before_args = ParenthesizableWhitespace::default();
|
||||||
body.whitespace_after_func = ParenthesizableWhitespace::default();
|
body.whitespace_after_func = ParenthesizableWhitespace::default();
|
||||||
|
|
||||||
let mut state = CodegenState {
|
Some(Edit::range_replacement(
|
||||||
default_newline: &stylist.line_ending(),
|
tree.codegen_stylist(stylist),
|
||||||
default_indent: stylist.indentation(),
|
range,
|
||||||
..CodegenState::default()
|
))
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
Some(Edit::range_replacement(state.to_string(), range))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Remove any imports matching `members` from an import-from statement.
|
/// Remove any imports matching `members` from an import-from statement.
|
||||||
|
|
|
||||||
|
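This file, and several below, replace the repeated `CodegenState { default_newline, default_indent, .. }` boilerplate with a single `codegen_stylist(stylist)` call from `crate::autofix::codemods::CodegenStylist`. The helper's definition is not part of this diff; the sketch below only illustrates the extension-trait shape such a helper plausibly has, with simplified `Codegen`/`Stylist` stand-ins instead of libcst_native's real types:

```rust
trait Codegen {
    fn codegen(&self, out: &mut String);
}

struct Stylist {
    line_ending: &'static str,
    indentation: &'static str,
}

// The extension trait: any node that can render itself can also render itself
// "through" a stylist, so call sites shrink to `node.codegen_stylist(stylist)`.
trait CodegenStylist: Codegen {
    fn codegen_stylist(&self, stylist: &Stylist) -> String;
}

impl<T: Codegen> CodegenStylist for T {
    fn codegen_stylist(&self, stylist: &Stylist) -> String {
        // The real helper seeds libcst's `CodegenState` with the stylist's
        // newline and indentation; this sketch just applies them afterwards.
        let mut out = String::new();
        self.codegen(&mut out);
        out.replace('\n', stylist.line_ending)
            .replace('\t', stylist.indentation)
    }
}

struct Pass;

impl Codegen for Pass {
    fn codegen(&self, out: &mut String) {
        out.push_str("if x:\n\tpass\n");
    }
}

fn main() {
    let stylist = Stylist {
        line_ending: "\n",
        indentation: "    ",
    };
    print!("{}", Pass.codegen_stylist(&stylist));
}
```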
|
@ -493,14 +493,14 @@ impl<'a> ImportReplacer<'a> {
|
||||||
fn format_import_from(names: &[&Alias], module: &str) -> String {
|
fn format_import_from(names: &[&Alias], module: &str) -> String {
|
||||||
// Construct the whitespace strings.
|
// Construct the whitespace strings.
|
||||||
// Generate the formatted names.
|
// Generate the formatted names.
|
||||||
let full_names: String = names
|
let qualified_names: String = names
|
||||||
.iter()
|
.iter()
|
||||||
.map(|name| match &name.asname {
|
.map(|name| match &name.asname {
|
||||||
Some(asname) => format!("{} as {}", name.name, asname),
|
Some(asname) => format!("{} as {}", name.name, asname),
|
||||||
None => format!("{}", name.name),
|
None => format!("{}", name.name),
|
||||||
})
|
})
|
||||||
.join(", ");
|
.join(", ");
|
||||||
format!("from {module} import {full_names}")
|
format!("from {module} import {qualified_names}")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
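The same rename (`full_names` to `qualified_names`) lands here in `ImportReplacer::format_import_from`; the joining logic itself is untouched. A self-contained version of that logic, using plain `(name, asname)` tuples in place of libcst's `Alias` and an illustrative `json` import:

```rust
fn format_import_from(names: &[(&str, Option<&str>)], module: &str) -> String {
    // Generate the formatted names, honouring `as` aliases.
    let qualified_names: Vec<String> = names
        .iter()
        .map(|(name, asname)| match asname {
            Some(asname) => format!("{name} as {asname}"),
            None => (*name).to_string(),
        })
        .collect();
    format!("from {module} import {}", qualified_names.join(", "))
}

fn main() {
    let names = [("dumps", None), ("loads", Some("parse"))];
    assert_eq!(
        format_import_from(&names, "json"),
        "from json import dumps, loads as parse"
    );
}
```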
|
@ -1,11 +1,12 @@
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
use libcst_native::{
|
use libcst_native::{
|
||||||
AsName, AssignTargetExpression, Attribute, Codegen, CodegenState, Dot, Expression, Import,
|
AsName, AssignTargetExpression, Attribute, Dot, Expression, Import, ImportAlias, ImportFrom,
|
||||||
ImportAlias, ImportFrom, ImportNames, Name, NameOrAttribute, ParenthesizableWhitespace,
|
ImportNames, Name, NameOrAttribute, ParenthesizableWhitespace,
|
||||||
};
|
};
|
||||||
use log::error;
|
use log::error;
|
||||||
use rustpython_parser::ast::{self, Expr, Ranged, Stmt};
|
use rustpython_parser::ast::{self, Expr, Ranged, Stmt};
|
||||||
|
|
||||||
|
use crate::autofix::codemods::CodegenStylist;
|
||||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_ast::call_path::collect_call_path;
|
use ruff_python_ast::call_path::collect_call_path;
|
||||||
|
|
@ -127,7 +128,7 @@ fn format_import(
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
let module_text = locator.slice(stmt.range());
|
let module_text = locator.slice(stmt.range());
|
||||||
let mut tree = match_statement(module_text)?;
|
let mut tree = match_statement(module_text)?;
|
||||||
let mut import = match_import(&mut tree)?;
|
let import = match_import(&mut tree)?;
|
||||||
|
|
||||||
let Import { names, .. } = import.clone();
|
let Import { names, .. } = import.clone();
|
||||||
let (clean_aliases, mock_aliases) = clean_import_aliases(names);
|
let (clean_aliases, mock_aliases) = clean_import_aliases(names);
|
||||||
|
|
@ -137,14 +138,7 @@ fn format_import(
|
||||||
} else {
|
} else {
|
||||||
import.names = clean_aliases;
|
import.names = clean_aliases;
|
||||||
|
|
||||||
let mut state = CodegenState {
|
let mut content = tree.codegen_stylist(stylist);
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
let mut content = state.to_string();
|
|
||||||
content.push_str(&stylist.line_ending());
|
content.push_str(&stylist.line_ending());
|
||||||
content.push_str(indent);
|
content.push_str(indent);
|
||||||
content.push_str(&format_mocks(mock_aliases, indent, stylist));
|
content.push_str(&format_mocks(mock_aliases, indent, stylist));
|
||||||
|
|
@ -161,7 +155,7 @@ fn format_import_from(
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
let module_text = locator.slice(stmt.range());
|
let module_text = locator.slice(stmt.range());
|
||||||
let mut tree = match_statement(module_text).unwrap();
|
let mut tree = match_statement(module_text).unwrap();
|
||||||
let mut import = match_import_from(&mut tree)?;
|
let import = match_import_from(&mut tree)?;
|
||||||
|
|
||||||
if let ImportFrom {
|
if let ImportFrom {
|
||||||
names: ImportNames::Star(..),
|
names: ImportNames::Star(..),
|
||||||
|
|
@ -187,13 +181,7 @@ fn format_import_from(
|
||||||
lpar: vec![],
|
lpar: vec![],
|
||||||
rpar: vec![],
|
rpar: vec![],
|
||||||
})));
|
})));
|
||||||
let mut state = CodegenState {
|
Ok(tree.codegen_stylist(stylist))
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
Ok(state.to_string())
|
|
||||||
} else if let ImportFrom {
|
} else if let ImportFrom {
|
||||||
names: ImportNames::Aliases(aliases),
|
names: ImportNames::Aliases(aliases),
|
||||||
..
|
..
|
||||||
|
|
@ -224,14 +212,7 @@ fn format_import_from(
|
||||||
rpar: vec![],
|
rpar: vec![],
|
||||||
})));
|
})));
|
||||||
|
|
||||||
let mut state = CodegenState {
|
let mut content = tree.codegen_stylist(stylist);
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
tree.codegen(&mut state);
|
|
||||||
|
|
||||||
let mut content = state.to_string();
|
|
||||||
if !mock_aliases.is_empty() {
|
if !mock_aliases.is_empty() {
|
||||||
content.push_str(&stylist.line_ending());
|
content.push_str(&stylist.line_ending());
|
||||||
content.push_str(indent);
|
content.push_str(indent);
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,10 @@
|
||||||
use anyhow::{anyhow, bail, Result};
|
use anyhow::{anyhow, bail, Result};
|
||||||
use libcst_native::{Arg, Codegen, CodegenState};
|
use libcst_native::Arg;
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use rustpython_parser::ast::{Expr, Ranged};
|
use rustpython_parser::ast::{Expr, Ranged};
|
||||||
|
|
||||||
|
use crate::autofix::codemods::CodegenStylist;
|
||||||
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
|
use ruff_diagnostics::{AutofixKind, Diagnostic, Edit, Fix, Violation};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_ast::source_code::{Locator, Stylist};
|
use ruff_python_ast::source_code::{Locator, Stylist};
|
||||||
|
|
@ -89,7 +90,7 @@ fn generate_call(
|
||||||
) -> Result<String> {
|
) -> Result<String> {
|
||||||
let module_text = locator.slice(expr.range());
|
let module_text = locator.slice(expr.range());
|
||||||
let mut expression = match_expression(module_text)?;
|
let mut expression = match_expression(module_text)?;
|
||||||
let mut call = match_call_mut(&mut expression)?;
|
let call = match_call_mut(&mut expression)?;
|
||||||
|
|
||||||
// Fix the call arguments.
|
// Fix the call arguments.
|
||||||
if !is_sequential(correct_order) {
|
if !is_sequential(correct_order) {
|
||||||
|
|
@ -99,27 +100,16 @@ fn generate_call(
|
||||||
// Fix the string itself.
|
// Fix the string itself.
|
||||||
let item = match_attribute(&mut call.func)?;
|
let item = match_attribute(&mut call.func)?;
|
||||||
|
|
||||||
let mut state = CodegenState {
|
let cleaned = remove_specifiers(&item.codegen_stylist(stylist));
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
item.codegen(&mut state);
|
|
||||||
let cleaned = remove_specifiers(&state.to_string());
|
|
||||||
|
|
||||||
call.func = Box::new(match_expression(&cleaned)?);
|
call.func = Box::new(match_expression(&cleaned)?);
|
||||||
|
|
||||||
let mut state = CodegenState {
|
let state = expression.codegen_stylist(stylist);
|
||||||
default_newline: &stylist.line_ending(),
|
if module_text == state {
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
expression.codegen(&mut state);
|
|
||||||
if module_text == state.to_string() {
|
|
||||||
// Ex) `'{' '0}'.format(1)`
|
// Ex) `'{' '0}'.format(1)`
|
||||||
bail!("Failed to generate call expression for: {module_text}")
|
bail!("Failed to generate call expression for: {module_text}")
|
||||||
}
|
}
|
||||||
Ok(state.to_string())
|
Ok(state)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// UP030
|
/// UP030
|
||||||
|
|
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
use anyhow::{bail, Result};
|
use anyhow::{bail, Result};
|
||||||
use libcst_native::{Codegen, CodegenState};
|
|
||||||
use rustpython_parser::ast::{self, Expr, Ranged};
|
use rustpython_parser::ast::{self, Expr, Ranged};
|
||||||
|
|
||||||
|
use crate::autofix::codemods::CodegenStylist;
|
||||||
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
use ruff_diagnostics::{AlwaysAutofixableViolation, Diagnostic, Edit, Fix};
|
||||||
use ruff_macros::{derive_message_formats, violation};
|
use ruff_macros::{derive_message_formats, violation};
|
||||||
use ruff_python_ast::source_code::{Locator, Stylist};
|
use ruff_python_ast::source_code::{Locator, Stylist};
|
||||||
|
|
@ -59,7 +59,7 @@ fn fix_explicit_f_string_type_conversion(
|
||||||
let formatted_string = match_formatted_string(&mut expression)?;
|
let formatted_string = match_formatted_string(&mut expression)?;
|
||||||
|
|
||||||
// Replace the formatted call expression at `index` with a conversion flag.
|
// Replace the formatted call expression at `index` with a conversion flag.
|
||||||
let mut formatted_string_expression =
|
let formatted_string_expression =
|
||||||
match_formatted_string_expression(&mut formatted_string.parts[index])?;
|
match_formatted_string_expression(&mut formatted_string.parts[index])?;
|
||||||
let call = match_call_mut(&mut formatted_string_expression.expression)?;
|
let call = match_call_mut(&mut formatted_string_expression.expression)?;
|
||||||
let name = match_name(&call.func)?;
|
let name = match_name(&call.func)?;
|
||||||
|
|
@ -77,15 +77,8 @@ fn fix_explicit_f_string_type_conversion(
|
||||||
}
|
}
|
||||||
formatted_string_expression.expression = call.args[0].value.clone();
|
formatted_string_expression.expression = call.args[0].value.clone();
|
||||||
|
|
||||||
let mut state = CodegenState {
|
|
||||||
default_newline: &stylist.line_ending(),
|
|
||||||
default_indent: stylist.indentation(),
|
|
||||||
..CodegenState::default()
|
|
||||||
};
|
|
||||||
expression.codegen(&mut state);
|
|
||||||
|
|
||||||
Ok(Fix::automatic(Edit::range_replacement(
|
Ok(Fix::automatic(Edit::range_replacement(
|
||||||
state.to_string(),
|
expression.codegen_stylist(stylist),
|
||||||
range,
|
range,
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
|
|
@ -104,9 +97,10 @@ pub(crate) fn explicit_f_string_type_conversion(
|
||||||
}) = &formatted_value else {
|
}) = &formatted_value else {
|
||||||
continue;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Skip if there's already a conversion flag.
|
// Skip if there's already a conversion flag.
|
||||||
if !conversion.is_none() {
|
if !conversion.is_none() {
|
||||||
return;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let Expr::Call(ast::ExprCall {
|
let Expr::Call(ast::ExprCall {
|
||||||
|
|
@ -115,24 +109,24 @@ pub(crate) fn explicit_f_string_type_conversion(
|
||||||
keywords,
|
keywords,
|
||||||
..
|
..
|
||||||
}) = value.as_ref() else {
|
}) = value.as_ref() else {
|
||||||
return;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
// Can't be a conversion otherwise.
|
// Can't be a conversion otherwise.
|
||||||
if args.len() != 1 || !keywords.is_empty() {
|
if args.len() != 1 || !keywords.is_empty() {
|
||||||
return;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() else {
|
let Expr::Name(ast::ExprName { id, .. }) = func.as_ref() else {
|
||||||
return;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
if !matches!(id.as_str(), "str" | "repr" | "ascii") {
|
if !matches!(id.as_str(), "str" | "repr" | "ascii") {
|
||||||
return;
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
if !checker.semantic_model().is_builtin(id) {
|
if !checker.semantic_model().is_builtin(id) {
|
||||||
return;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut diagnostic = Diagnostic::new(ExplicitFStringTypeConversion, value.range());
|
let mut diagnostic = Diagnostic::new(ExplicitFStringTypeConversion, value.range());
|
||||||
|
|
|
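The second change in this function swaps `return` for `continue` inside the loop over the f-string's formatted values: bailing out on the first part that is not a plain `str()`/`repr()`/`ascii()` call used to hide fixable calls later in the same string, which is exactly what the new RUF010.py:15 diagnostics in the snapshot below now report. A toy illustration of the difference:

```rust
fn count_with_return(parts: &[&str]) -> usize {
    let mut hits = 0;
    for part in parts {
        if !part.starts_with("str(") && !part.starts_with("repr(") && !part.starts_with("ascii(") {
            // Old behaviour: give up on the entire f-string.
            return hits;
        }
        hits += 1;
    }
    hits
}

fn count_with_continue(parts: &[&str]) -> usize {
    let mut hits = 0;
    for part in parts {
        if !part.starts_with("str(") && !part.starts_with("repr(") && !part.starts_with("ascii(") {
            // New behaviour: skip just this part and keep scanning.
            continue;
        }
        hits += 1;
    }
    hits
}

fn main() {
    // Mirrors `f"{bla!s}, {(repr(bla))}, {(ascii(bla))}"`: the first part already
    // uses a conversion flag, the remaining two are still fixable calls.
    let parts = ["bla!s", "repr(bla)", "ascii(bla)"];
    assert_eq!(count_with_return(&parts), 0);
    assert_eq!(count_with_continue(&parts), 2);
}
```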
||||||
|
|
@ -128,7 +128,7 @@ RUF010.py:13:5: RUF010 [*] Use conversion in f-string
|
||||||
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
| ^^^^^^^^ RUF010
|
| ^^^^^^^^ RUF010
|
||||||
16 |
|
16 |
|
||||||
17 | f"{foo(bla)}" # OK
|
17 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
|
|
|
||||||
= help: Replace f-string function call with conversion
|
= help: Replace f-string function call with conversion
|
||||||
|
|
||||||
|
|
@ -139,7 +139,7 @@ RUF010.py:13:5: RUF010 [*] Use conversion in f-string
|
||||||
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
13 |+f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
13 |+f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
14 14 |
|
14 14 |
|
||||||
15 15 | f"{foo(bla)}" # OK
|
15 15 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
16 16 |
|
16 16 |
|
||||||
|
|
||||||
RUF010.py:13:19: RUF010 [*] Use conversion in f-string
|
RUF010.py:13:19: RUF010 [*] Use conversion in f-string
|
||||||
|
|
@ -149,7 +149,7 @@ RUF010.py:13:19: RUF010 [*] Use conversion in f-string
|
||||||
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
| ^^^^^^^^^ RUF010
|
| ^^^^^^^^^ RUF010
|
||||||
16 |
|
16 |
|
||||||
17 | f"{foo(bla)}" # OK
|
17 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
|
|
|
||||||
= help: Replace f-string function call with conversion
|
= help: Replace f-string function call with conversion
|
||||||
|
|
||||||
|
|
@ -160,7 +160,7 @@ RUF010.py:13:19: RUF010 [*] Use conversion in f-string
|
||||||
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
13 |+f"{(str(bla))}, {bla!r}, {(ascii(bla))}" # RUF010
|
13 |+f"{(str(bla))}, {bla!r}, {(ascii(bla))}" # RUF010
|
||||||
14 14 |
|
14 14 |
|
||||||
15 15 | f"{foo(bla)}" # OK
|
15 15 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
16 16 |
|
16 16 |
|
||||||
|
|
||||||
RUF010.py:13:34: RUF010 [*] Use conversion in f-string
|
RUF010.py:13:34: RUF010 [*] Use conversion in f-string
|
||||||
|
|
@ -170,7 +170,7 @@ RUF010.py:13:34: RUF010 [*] Use conversion in f-string
|
||||||
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
| ^^^^^^^^^^ RUF010
|
| ^^^^^^^^^^ RUF010
|
||||||
16 |
|
16 |
|
||||||
17 | f"{foo(bla)}" # OK
|
17 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
|
|
|
||||||
= help: Replace f-string function call with conversion
|
= help: Replace f-string function call with conversion
|
||||||
|
|
||||||
|
|
@ -181,7 +181,49 @@ RUF010.py:13:34: RUF010 [*] Use conversion in f-string
|
||||||
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
13 |-f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
13 |+f"{(str(bla))}, {(repr(bla))}, {bla!a}" # RUF010
|
13 |+f"{(str(bla))}, {(repr(bla))}, {bla!a}" # RUF010
|
||||||
14 14 |
|
14 14 |
|
||||||
15 15 | f"{foo(bla)}" # OK
|
15 15 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
16 16 |
|
16 16 |
|
||||||
|
|
||||||
|
RUF010.py:15:14: RUF010 [*] Use conversion in f-string
|
||||||
|
|
|
||||||
|
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
16 |
|
||||||
|
17 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
| ^^^^^^^^^ RUF010
|
||||||
|
18 |
|
||||||
|
19 | f"{foo(bla)}" # OK
|
||||||
|
|
|
||||||
|
= help: Replace f-string function call with conversion
|
||||||
|
|
||||||
|
ℹ Fix
|
||||||
|
12 12 |
|
||||||
|
13 13 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
14 14 |
|
||||||
|
15 |-f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
15 |+f"{bla!s}, {bla!r}, {(ascii(bla))}" # RUF010
|
||||||
|
16 16 |
|
||||||
|
17 17 | f"{foo(bla)}" # OK
|
||||||
|
18 18 |
|
||||||
|
|
||||||
|
RUF010.py:15:29: RUF010 [*] Use conversion in f-string
|
||||||
|
|
|
||||||
|
15 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
16 |
|
||||||
|
17 | f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
| ^^^^^^^^^^ RUF010
|
||||||
|
18 |
|
||||||
|
19 | f"{foo(bla)}" # OK
|
||||||
|
|
|
||||||
|
= help: Replace f-string function call with conversion
|
||||||
|
|
||||||
|
ℹ Fix
|
||||||
|
12 12 |
|
||||||
|
13 13 | f"{(str(bla))}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
14 14 |
|
||||||
|
15 |-f"{bla!s}, {(repr(bla))}, {(ascii(bla))}" # RUF010
|
||||||
|
15 |+f"{bla!s}, {(repr(bla))}, {bla!a}" # RUF010
|
||||||
|
16 16 |
|
||||||
|
17 17 | f"{foo(bla)}" # OK
|
||||||
|
18 18 |
|
||||||
|
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -259,7 +259,10 @@ impl From<&Configuration> for RuleTable {
|
||||||
// The select_set keeps track of which rules have been selected.
|
// The select_set keeps track of which rules have been selected.
|
||||||
let mut select_set: RuleSet = defaults::PREFIXES.iter().flatten().collect();
|
let mut select_set: RuleSet = defaults::PREFIXES.iter().flatten().collect();
|
||||||
// The fixable set keeps track of which rules are fixable.
|
// The fixable set keeps track of which rules are fixable.
|
||||||
let mut fixable_set: RuleSet = RuleSelector::All.into_iter().collect();
|
let mut fixable_set: RuleSet = RuleSelector::All
|
||||||
|
.into_iter()
|
||||||
|
.chain(RuleSelector::Nursery.into_iter())
|
||||||
|
.collect();
|
||||||
|
|
||||||
// Ignores normally only subtract from the current set of selected
|
// Ignores normally only subtract from the current set of selected
|
||||||
// rules. By that logic the ignore in `select = [], ignore = ["E501"]`
|
// rules. By that logic the ignore in `select = [], ignore = ["E501"]`
|
||||||
|
|
|
||||||
|
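The fixable set now starts from the `All` selector chained with `Nursery`, instead of `All` alone, so rules behind the nursery selector can be autofixed as well. A small sketch of that construction; the selector and rule names are placeholders, and which rules are actually nursery-gated is not asserted here:

```rust
use std::collections::BTreeSet;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Rule {
    Stable1,
    Stable2,
    NurseryOnly,
}

enum Selector {
    All,
    Nursery,
}

// Stand-in for `RuleSelector::into_iter`, which expands a selector into concrete rules.
fn expand(selector: Selector) -> Vec<Rule> {
    match selector {
        Selector::All => vec![Rule::Stable1, Rule::Stable2],
        Selector::Nursery => vec![Rule::NurseryOnly],
    }
}

fn main() {
    // Before: `RuleSelector::All.into_iter().collect()`.
    // After: chain the nursery selector in as well, as in the hunk above.
    let fixable: BTreeSet<Rule> = expand(Selector::All)
        .into_iter()
        .chain(expand(Selector::Nursery))
        .collect();
    assert!(fixable.contains(&Rule::NurseryOnly));
    println!("{} fixable rules", fixable.len());
}
```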
|
@ -20,6 +20,10 @@ harness = false
|
||||||
name = "parser"
|
name = "parser"
|
||||||
harness = false
|
harness = false
|
||||||
|
|
||||||
|
[[bench]]
|
||||||
|
name = "formatter"
|
||||||
|
harness = false
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
once_cell.workspace = true
|
once_cell.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
|
|
@ -30,7 +34,8 @@ ureq = "2.6.2"
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
ruff.path = "../ruff"
|
ruff.path = "../ruff"
|
||||||
ruff_python_ast.path = "../ruff_python_ast"
|
ruff_python_ast.path = "../ruff_python_ast"
|
||||||
criterion = { version = "0.4.0"}
|
ruff_python_formatter = { path = "../ruff_python_formatter" }
|
||||||
|
criterion = { version = "0.5.1"}
|
||||||
rustpython-parser.workspace = true
|
rustpython-parser.workspace = true
|
||||||
|
|
||||||
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
[target.'cfg(target_os = "windows")'.dev-dependencies]
|
||||||
|
|
@ -38,3 +43,4 @@ mimalloc = "0.1.34"
|
||||||
|
|
||||||
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
[target.'cfg(all(not(target_os = "windows"), not(target_os = "openbsd"), any(target_arch = "x86_64", target_arch = "aarch64", target_arch = "powerpc64")))'.dev-dependencies]
|
||||||
tikv-jemallocator = "0.5.0"
|
tikv-jemallocator = "0.5.0"
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -0,0 +1,62 @@
|
||||||
|
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
|
||||||
|
use ruff_benchmark::{TestCase, TestCaseSpeed, TestFile, TestFileDownloadError};
|
||||||
|
use ruff_python_formatter::format_module;
|
||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
#[cfg(target_os = "windows")]
|
||||||
|
#[global_allocator]
|
||||||
|
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||||
|
|
||||||
|
#[cfg(all(
|
||||||
|
not(target_os = "windows"),
|
||||||
|
not(target_os = "openbsd"),
|
||||||
|
any(
|
||||||
|
target_arch = "x86_64",
|
||||||
|
target_arch = "aarch64",
|
||||||
|
target_arch = "powerpc64"
|
||||||
|
)
|
||||||
|
))]
|
||||||
|
#[global_allocator]
|
||||||
|
static GLOBAL: tikv_jemallocator::Jemalloc = tikv_jemallocator::Jemalloc;
|
||||||
|
|
||||||
|
fn create_test_cases() -> Result<Vec<TestCase>, TestFileDownloadError> {
|
||||||
|
Ok(vec![
|
||||||
|
TestCase::fast(TestFile::try_download("numpy/globals.py", "https://raw.githubusercontent.com/numpy/numpy/89d64415e349ca75a25250f22b874aa16e5c0973/numpy/_globals.py")?),
|
||||||
|
TestCase::normal(TestFile::try_download(
|
||||||
|
"pydantic/types.py",
|
||||||
|
"https://raw.githubusercontent.com/pydantic/pydantic/83b3c49e99ceb4599d9286a3d793cea44ac36d4b/pydantic/types.py",
|
||||||
|
)?),
|
||||||
|
TestCase::normal(TestFile::try_download("numpy/ctypeslib.py", "https://raw.githubusercontent.com/numpy/numpy/e42c9503a14d66adfd41356ef5640c6975c45218/numpy/ctypeslib.py")?),
|
||||||
|
TestCase::slow(TestFile::try_download(
|
||||||
|
"large/dataset.py",
|
||||||
|
"https://raw.githubusercontent.com/DHI/mikeio/b7d26418f4db2909b0aa965253dbe83194d7bb5b/tests/test_dataset.py",
|
||||||
|
)?),
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn benchmark_formatter(criterion: &mut Criterion) {
|
||||||
|
let mut group = criterion.benchmark_group("formatter");
|
||||||
|
let test_cases = create_test_cases().unwrap();
|
||||||
|
|
||||||
|
for case in test_cases {
|
||||||
|
group.throughput(Throughput::Bytes(case.code().len() as u64));
|
||||||
|
group.measurement_time(match case.speed() {
|
||||||
|
TestCaseSpeed::Fast => Duration::from_secs(5),
|
||||||
|
TestCaseSpeed::Normal => Duration::from_secs(10),
|
||||||
|
TestCaseSpeed::Slow => Duration::from_secs(20),
|
||||||
|
});
|
||||||
|
|
||||||
|
group.bench_with_input(
|
||||||
|
BenchmarkId::from_parameter(case.name()),
|
||||||
|
&case,
|
||||||
|
|b, case| {
|
||||||
|
b.iter(|| format_module(case.code()).expect("Formatting to succeed"));
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
group.finish();
|
||||||
|
}
|
||||||
|
|
||||||
|
criterion_group!(formatter, benchmark_formatter);
|
||||||
|
criterion_main!(formatter);
|
||||||
|
|
@ -2131,11 +2131,11 @@ impl<'a, 'buf, Context> FillBuilder<'a, 'buf, Context> {
|
||||||
/// The first variant is the most flat, and the last is the most expanded variant.
|
/// The first variant is the most flat, and the last is the most expanded variant.
|
||||||
/// See the [`best_fitting!`] macro for more detailed documentation
|
/// See the [`best_fitting!`] macro for more detailed documentation
|
||||||
#[derive(Copy, Clone)]
|
#[derive(Copy, Clone)]
|
||||||
pub struct BestFitting<'a, Context> {
|
pub struct FormatBestFitting<'a, Context> {
|
||||||
variants: Arguments<'a, Context>,
|
variants: Arguments<'a, Context>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, Context> BestFitting<'a, Context> {
|
impl<'a, Context> FormatBestFitting<'a, Context> {
|
||||||
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe
|
/// Creates a new best fitting IR with the given variants. The method itself isn't unsafe
|
||||||
/// but it is to discourage people from using it because the printer will panic if
|
/// but it is to discourage people from using it because the printer will panic if
|
||||||
/// the slice doesn't contain at least the least and most expanded variants.
|
/// the slice doesn't contain at least the least and most expanded variants.
|
||||||
|
|
@ -2154,7 +2154,7 @@ impl<'a, Context> BestFitting<'a, Context> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<Context> Format<Context> for BestFitting<'_, Context> {
|
impl<Context> Format<Context> for FormatBestFitting<'_, Context> {
|
||||||
fn fmt(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
|
fn fmt(&self, f: &mut Formatter<Context>) -> FormatResult<()> {
|
||||||
let mut buffer = VecBuffer::new(f.state_mut());
|
let mut buffer = VecBuffer::new(f.state_mut());
|
||||||
let variants = self.variants.items();
|
let variants = self.variants.items();
|
||||||
|
|
|
||||||
|
|
@ -48,7 +48,7 @@ pub use buffer::{
|
||||||
Buffer, BufferExtensions, BufferSnapshot, Inspect, PreambleBuffer, RemoveSoftLinesBuffer,
|
Buffer, BufferExtensions, BufferSnapshot, Inspect, PreambleBuffer, RemoveSoftLinesBuffer,
|
||||||
VecBuffer,
|
VecBuffer,
|
||||||
};
|
};
|
||||||
pub use builders::BestFitting;
|
pub use builders::FormatBestFitting;
|
||||||
pub use source_code::{SourceCode, SourceCodeSlice};
|
pub use source_code::{SourceCode, SourceCodeSlice};
|
||||||
|
|
||||||
pub use crate::diagnostics::{ActualStart, FormatError, InvalidDocumentError, PrintError};
|
pub use crate::diagnostics::{ActualStart, FormatError, InvalidDocumentError, PrintError};
|
||||||
|
|
|
||||||
|
|
@@ -320,7 +320,7 @@ macro_rules! format {
 /// the content up to the first non-soft line break without exceeding the configured print width.
 /// This definition differs from groups as that non-soft line breaks make group expand.
 ///
-/// [crate::BestFitting] acts as a "break" boundary, meaning that it is considered to fit
+/// [crate::FormatBestFitting] acts as a "break" boundary, meaning that it is considered to fit
 ///
 ///
 /// [`Flat`]: crate::format_element::PrintMode::Flat
@@ -330,7 +330,7 @@ macro_rules! format {
 macro_rules! best_fitting {
     ($least_expanded:expr, $($tail:expr),+ $(,)?) => {{
         unsafe {
-            $crate::BestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
+            $crate::FormatBestFitting::from_arguments_unchecked($crate::format_args!($least_expanded, $($tail),+))
         }
     }}
 }
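Conceptually, the macro documented above picks the flattest variant that still fits the configured print width, falling back to the most expanded one. A plain-Rust toy model of that selection rule follows; the variants here are pre-rendered strings rather than formatter IR, which is a deliberate simplification and not how `ruff_formatter` works internally.

/// Toy model of "best fitting": given variants ordered from most flat to most
/// expanded, pick the first one whose longest line fits within `print_width`,
/// falling back to the most expanded variant.
fn best_fitting<'a>(variants: &[&'a str], print_width: usize) -> &'a str {
    assert!(
        variants.len() >= 2,
        "callers must supply at least the least and most expanded variants"
    );
    variants
        .iter()
        .find(|variant| {
            variant
                .lines()
                .all(|line| line.chars().count() <= print_width)
        })
        .copied()
        .unwrap_or_else(|| variants[variants.len() - 1])
}

fn main() {
    let flat = "foo(a, b, c)";
    let expanded = "foo(\n    a,\n    b,\n    c,\n)";
    assert_eq!(best_fitting(&[flat, expanded], 80), flat);
    assert_eq!(best_fitting(&[flat, expanded], 10), expanded);
    println!("ok");
}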
@@ -800,7 +800,7 @@ pub fn format_import_from(level: Option<u32>, module: Option<&str>) -> String {
 /// assert_eq!(format_import_from_member(Some(1), Some("foo"), "bar"), ".foo.bar".to_string());
 /// ```
 pub fn format_import_from_member(level: Option<u32>, module: Option<&str>, member: &str) -> String {
-    let mut full_name = String::with_capacity(
+    let mut qualified_name = String::with_capacity(
         (level.unwrap_or(0) as usize)
             + module.as_ref().map_or(0, |module| module.len())
             + 1
@@ -808,15 +808,15 @@ pub fn format_import_from_member(level: Option<u32>, module: Option<&str>, membe
     );
     if let Some(level) = level {
         for _ in 0..level {
-            full_name.push('.');
+            qualified_name.push('.');
         }
     }
     if let Some(module) = module {
-        full_name.push_str(module);
-        full_name.push('.');
+        qualified_name.push_str(module);
+        qualified_name.push('.');
    }
-    full_name.push_str(member);
-    full_name
+    qualified_name.push_str(member);
+    qualified_name
 }

 /// Create a module path from a (package, path) pair.
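The `full_name` → `qualified_name` rename is purely cosmetic; the construction itself is unchanged: `level` leading dots, then the module (if any) followed by a dot, then the member. A standalone copy of that logic with the documented example outputs, for illustration only (the function name here is hypothetical):

/// Standalone re-statement of the qualified-name construction used by
/// `format_import_from_member`, shown with the documented example outputs.
fn qualified_import_name(level: Option<u32>, module: Option<&str>, member: &str) -> String {
    let mut qualified_name = String::with_capacity(
        (level.unwrap_or(0) as usize) + module.map_or(0, str::len) + 1 + member.len(),
    );
    if let Some(level) = level {
        for _ in 0..level {
            qualified_name.push('.');
        }
    }
    if let Some(module) = module {
        qualified_name.push_str(module);
        qualified_name.push('.');
    }
    qualified_name.push_str(member);
    qualified_name
}

fn main() {
    assert_eq!(qualified_import_name(None, Some("foo"), "bar"), "foo.bar");
    assert_eq!(qualified_import_name(Some(1), Some("foo"), "bar"), ".foo.bar");
    assert_eq!(qualified_import_name(Some(2), None, "bar"), "..bar");
    println!("ok");
}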
@@ -907,7 +907,7 @@ pub fn resolve_imported_module_path<'a>(
 /// A [`StatementVisitor`] that collects all `return` statements in a function or method.
 #[derive(Default)]
 pub struct ReturnStatementVisitor<'a> {
-    pub returns: Vec<Option<&'a Expr>>,
+    pub returns: Vec<&'a ast::StmtReturn>,
 }

 impl<'a, 'b> StatementVisitor<'b> for ReturnStatementVisitor<'a>
@@ -919,10 +919,7 @@ where
             Stmt::FunctionDef(_) | Stmt::AsyncFunctionDef(_) => {
                 // Don't recurse.
             }
-            Stmt::Return(ast::StmtReturn {
-                value,
-                range: _range,
-            }) => self.returns.push(value.as_deref()),
+            Stmt::Return(stmt) => self.returns.push(stmt),
            _ => walk_stmt(self, stmt),
         }
     }
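The visitor now stores whole `StmtReturn` nodes instead of just their values, and it still refuses to descend into nested function definitions. Below is a toy, self-contained model of that traversal rule; the `Stmt` enum is a stand-in, not the ruff AST.

// Toy model (not the ruff AST) of the visitor behaviour: collect `return`
// statements in a body, but do not descend into nested function definitions.
#[derive(Debug)]
enum Stmt {
    FunctionDef { body: Vec<Stmt> },
    If { body: Vec<Stmt> },
    Return { value: Option<String> },
    Pass,
}

#[derive(Default)]
struct ReturnCollector<'a> {
    returns: Vec<&'a Stmt>,
}

impl<'a> ReturnCollector<'a> {
    fn visit_body(&mut self, body: &'a [Stmt]) {
        for stmt in body {
            match stmt {
                // Don't recurse: returns of a nested function belong to that function.
                Stmt::FunctionDef { .. } => {}
                // Keep the whole statement, mirroring `Vec<&ast::StmtReturn>`.
                Stmt::Return { .. } => self.returns.push(stmt),
                Stmt::If { body } => self.visit_body(body),
                Stmt::Pass => {}
            }
        }
    }
}

fn main() {
    let body = vec![
        Stmt::If {
            body: vec![Stmt::Return { value: Some("1".to_string()) }],
        },
        Stmt::FunctionDef {
            body: vec![Stmt::Return { value: None }],
        },
        Stmt::Return { value: None },
    ];

    let mut collector = ReturnCollector::default();
    collector.visit_body(&body);
    // The nested function's `return` is not collected.
    assert_eq!(collector.returns.len(), 2);
    println!("ok");
}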
@@ -209,14 +209,13 @@ impl<'a> Generator<'a> {
                 ..
             }) => {
                 self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
+                for decorator in decorator_list {
+                    statement!({
+                        self.p("@");
+                        self.unparse_expr(decorator, precedence::MAX);
+                    });
+                }
                 statement!({
-                    for decorator in decorator_list {
-                        statement!({
-                            self.p("@");
-                            self.unparse_expr(decorator, precedence::MAX);
-                        });
-                    }
-                    self.newline();
                     self.p("def ");
                     self.p_id(name);
                     self.p("(");
@@ -242,13 +241,13 @@ impl<'a> Generator<'a> {
                 ..
             }) => {
                 self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
+                for decorator in decorator_list {
+                    statement!({
+                        self.p("@");
+                        self.unparse_expr(decorator, precedence::MAX);
+                    });
+                }
                 statement!({
-                    for decorator in decorator_list {
-                        statement!({
-                            self.unparse_expr(decorator, precedence::MAX);
-                        });
-                    }
-                    self.newline();
                     self.p("async def ");
                     self.p_id(name);
                     self.p("(");
@@ -274,13 +273,13 @@ impl<'a> Generator<'a> {
                 range: _range,
             }) => {
                 self.newlines(if self.indent_depth == 0 { 2 } else { 1 });
+                for decorator in decorator_list {
+                    statement!({
+                        self.p("@");
+                        self.unparse_expr(decorator, precedence::MAX);
+                    });
+                }
                 statement!({
-                    for decorator in decorator_list {
-                        statement!({
-                            self.unparse_expr(decorator, precedence::MAX);
-                        });
-                    }
-                    self.newline();
                     self.p("class ");
                     self.p_id(name);
                     let mut first = true;
@@ -1614,6 +1613,29 @@ except* Exception as e:
         );
         assert_eq!(round_trip(r#"x = (1, 2, 3)"#), r#"x = 1, 2, 3"#);
         assert_eq!(round_trip(r#"-(1) + ~(2) + +(3)"#), r#"-1 + ~2 + +3"#);
+        assert_round_trip!(
+            r#"def f():
+
+    def f():
+        pass"#
+        );
+        assert_round_trip!(
+            r#"@foo
+def f():
+
+    @foo
+    def f():
+        pass"#
+        );
+
+        assert_round_trip!(
+            r#"@foo
+class Foo:
+
+    @foo
+    def f():
+        pass"#
+        );
     }

     #[test]
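The generator hunks above move decorator emission out of the `def`/`class` statement so that every decorator is written on its own line, prefixed with `@`, before the header; the new `assert_round_trip!` cases exercise exactly that output. Below is a standalone sketch of that output order using a toy string builder, not the real `Generator`.

// Toy sketch of the emission order the diff establishes: each decorator is a
// separate `@...` line before the `def` header.
fn unparse_function(name: &str, decorators: &[&str], indent: usize) -> String {
    let pad = "    ".repeat(indent);
    let mut out = String::new();
    for decorator in decorators {
        out.push_str(&pad);
        out.push('@');
        out.push_str(decorator);
        out.push('\n');
    }
    out.push_str(&pad);
    out.push_str("def ");
    out.push_str(name);
    out.push_str("():\n");
    out.push_str(&pad);
    out.push_str("    pass\n");
    out
}

fn main() {
    let generated = unparse_function("f", &["foo", "bar.baz"], 0);
    assert_eq!(generated, "@foo\n@bar.baz\ndef f():\n    pass\n");
    println!("{generated}");
}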
crates/ruff_python_formatter/resources/test/fixtures/ruff/statement/while.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+while 34: # trailing test comment
+    pass # trailing last statement comment
+
+    # trailing while body comment
+
+# leading else comment
+
+else: # trailing else comment
+    pass
+
+    # trailing else body comment
+
+
+while aVeryLongConditionThatSpillsOverToTheNextLineBecauseItIsExtremelyLongAndGoesOnAndOnAndOnAndOnAndOnAndOnAndOnAndOnAndOn: # trailing comment
+    pass
+
+else:
+    ...
+
+while (
+    some_condition(unformatted, args) and anotherCondition or aThirdCondition
+): # comment
+    print("Do something")
+
+
+while (
+    some_condition(unformatted, args) # trailing some condition
+    and anotherCondition or aThirdCondition # trailing third condition
+): # comment
+    print("Do something")
@@ -11,7 +11,7 @@ pub(crate) trait PyFormatterExtensions<'ast, 'buf> {
     /// empty lines between any two nodes. Separates any two nodes by at least a hard line break.
     ///
     /// * [`NodeLevel::Module`]: Up to two empty lines
-    /// * [`NodeLevel::Statement`]: Up to one empty line
+    /// * [`NodeLevel::CompoundStatement`]: Up to one empty line
     /// * [`NodeLevel::Parenthesized`]: No empty lines
     fn join_nodes<'fmt>(&'fmt mut self, level: NodeLevel) -> JoinNodesBuilder<'fmt, 'ast, 'buf>;
 }
@@ -53,10 +53,12 @@ impl<'fmt, 'ast, 'buf> JoinNodesBuilder<'fmt, 'ast, 'buf> {
                 2 => empty_line().fmt(f),
                 _ => write!(f, [empty_line(), empty_line()]),
             },
-            NodeLevel::Statement => match lines_before(f.context().contents(), node.start()) {
-                0 | 1 => hard_line_break().fmt(f),
-                _ => empty_line().fmt(f),
-            },
+            NodeLevel::CompoundStatement => {
+                match lines_before(f.context().contents(), node.start()) {
+                    0 | 1 => hard_line_break().fmt(f),
+                    _ => empty_line().fmt(f),
+                }
+            }
             NodeLevel::Parenthesized => hard_line_break().fmt(f),
         });

@@ -180,7 +182,7 @@ no_leading_newline = 30"#
     // Should keep at most one empty level
     #[test]
     fn ranged_builder_statement_level() {
-        let printed = format_ranged(NodeLevel::Statement);
+        let printed = format_ranged(NodeLevel::CompoundStatement);

         assert_eq!(
             &printed,
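The `JoinNodesBuilder` rule being renamed here caps how many empty lines survive between two nodes by nesting level. A plain-Rust sketch of that cap follows; the `lines_before` input is assumed to count line breaks between the two nodes in the original source, as the real helper does, and the enum below is a local stand-in.

// Plain-Rust sketch of the empty-line rule from `JoinNodesBuilder`: the number
// of blank lines kept between two nodes is capped by the nesting level.
#[derive(Copy, Clone)]
enum NodeLevel {
    /// Top level of a module: keep up to two empty lines.
    TopLevel,
    /// Body of a compound statement (`if`, `while`, ...): keep at most one.
    CompoundStatement,
    /// Inside parentheses: keep none, only a line break.
    Parenthesized,
}

fn empty_lines_to_keep(level: NodeLevel, lines_before: u32) -> u32 {
    match level {
        // `lines_before == 2` means one blank line in the source, and so on.
        NodeLevel::TopLevel => lines_before.saturating_sub(1).min(2),
        NodeLevel::CompoundStatement => if lines_before > 1 { 1 } else { 0 },
        NodeLevel::Parenthesized => 0,
    }
}

fn main() {
    // Four blank lines at the top level collapse to two…
    assert_eq!(empty_lines_to_keep(NodeLevel::TopLevel, 5), 2);
    // …to one inside a compound statement body…
    assert_eq!(empty_lines_to_keep(NodeLevel::CompoundStatement, 5), 1);
    // …and to none inside parentheses.
    assert_eq!(empty_lines_to_keep(NodeLevel::Parenthesized, 5), 0);
    println!("ok");
}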
@@ -1,11 +1,75 @@
+#![allow(clippy::print_stdout)]
+
 use std::path::PathBuf;

-use clap::{command, Parser};
+use anyhow::{bail, Context, Result};
+use clap::{command, Parser, ValueEnum};
+use rustpython_parser::lexer::lex;
+use rustpython_parser::{parse_tokens, Mode};
+
+use ruff_formatter::SourceCode;
+use ruff_python_ast::source_code::CommentRangesBuilder;
+
+use crate::format_node;
+
+#[derive(ValueEnum, Clone, Debug)]
+pub enum Emit {
+    /// Write back to the original files
+    Files,
+    /// Write to stdout
+    Stdout,
+}

 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 pub struct Cli {
-    /// Python file to round-trip.
-    #[arg(required = true)]
-    pub file: PathBuf,
+    /// Python files to format. If there are none, stdin will be used. `-` as stdin is not supported
+    pub files: Vec<PathBuf>,
+    #[clap(long)]
+    pub emit: Option<Emit>,
+    /// Run in 'check' mode. Exits with 0 if input is formatted correctly. Exits with 1 and prints
+    /// a diff if formatting is required.
+    #[clap(long)]
+    pub check: bool,
+    #[clap(long)]
+    pub print_ir: bool,
+    #[clap(long)]
+    pub print_comments: bool,
+}
+
+pub fn format_and_debug_print(input: &str, cli: &Cli) -> Result<String> {
+    let mut tokens = Vec::new();
+    let mut comment_ranges = CommentRangesBuilder::default();
+
+    for result in lex(input, Mode::Module) {
+        let (token, range) = match result {
+            Ok((token, range)) => (token, range),
+            Err(err) => bail!("Source contains syntax errors {err:?}"),
+        };
+
+        comment_ranges.visit_token(&token, range);
+        tokens.push(Ok((token, range)));
+    }
+
+    let comment_ranges = comment_ranges.finish();
+
+    // Parse the AST.
+    let python_ast = parse_tokens(tokens, Mode::Module, "<filename>")
+        .with_context(|| "Syntax error in input")?;
+
+    let formatted = format_node(&python_ast, &comment_ranges, input)?;
+    if cli.print_ir {
+        println!("{}", formatted.document().display(SourceCode::new(input)));
+    }
+    if cli.print_comments {
+        println!(
+            "{:?}",
+            formatted.context().comments().debug(SourceCode::new(input))
+        );
+    }
+    Ok(formatted
+        .print()
+        .with_context(|| "Failed to print the formatter IR")?
+        .as_code()
+        .to_string())
 }
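Below is a self-contained sketch of just the CLI surface added above, using the public clap 4 derive API with `#[arg(...)]` attributes; the wiring into the real `format_and_debug_print` and the ruff-internal crates is omitted, and the struct/field names mirror the diff rather than define it.

// Self-contained sketch of the new CLI surface (clap 4 derive API).
use std::path::PathBuf;

use clap::{Parser, ValueEnum};

#[derive(ValueEnum, Clone, Debug)]
enum Emit {
    /// Write back to the original files
    Files,
    /// Write to stdout
    Stdout,
}

#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Cli {
    /// Python files to format. If there are none, stdin will be used.
    files: Vec<PathBuf>,
    #[arg(long, value_enum)]
    emit: Option<Emit>,
    /// Exit with 0 if the input is formatted correctly, otherwise exit with 1 and print a diff.
    #[arg(long)]
    check: bool,
    #[arg(long)]
    print_ir: bool,
    #[arg(long)]
    print_comments: bool,
}

fn main() {
    // e.g. `cargo run -- foo.py bar.py --emit stdout --print-ir`
    let cli = Cli::parse();
    println!("{cli:?}");
}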
@@ -183,7 +183,6 @@ mod tests {
     use ruff_python_ast::node::AnyNode;
     use ruff_text_size::{TextRange, TextSize};
     use rustpython_parser::ast::{StmtBreak, StmtContinue};
-    use std::cell::Cell;

     #[test]
     fn debug() {
@@ -210,7 +209,7 @@ break;
             SourceComment {
                 slice: source_code.slice(TextRange::at(TextSize::new(0), TextSize::new(17))),
                 #[cfg(debug_assertions)]
-                formatted: Cell::new(false),
+                formatted: std::cell::Cell::new(false),
                 position: CommentTextPosition::OwnLine,
             },
         );
@@ -220,7 +219,7 @@ break;
             SourceComment {
                 slice: source_code.slice(TextRange::at(TextSize::new(28), TextSize::new(10))),
                 #[cfg(debug_assertions)]
-                formatted: Cell::new(false),
+                formatted: std::cell::Cell::new(false),
                 position: CommentTextPosition::EndOfLine,
             },
         );
@@ -230,7 +229,7 @@ break;
             SourceComment {
                 slice: source_code.slice(TextRange::at(TextSize::new(39), TextSize::new(15))),
                 #[cfg(debug_assertions)]
-                formatted: Cell::new(false),
+                formatted: std::cell::Cell::new(false),
                 position: CommentTextPosition::OwnLine,
             },
         );
@@ -6,27 +6,37 @@ use ruff_formatter::{format_args, write, FormatError, SourceCode};
 use ruff_python_ast::node::AnyNodeRef;
 use ruff_python_ast::prelude::AstNode;
 use ruff_text_size::{TextLen, TextRange, TextSize};
+use rustpython_parser::ast::Ranged;

 /// Formats the leading comments of a node.
-pub(crate) fn leading_comments<T>(node: &T) -> FormatLeadingComments
+pub(crate) fn leading_node_comments<T>(node: &T) -> FormatLeadingComments
 where
     T: AstNode,
 {
-    FormatLeadingComments {
-        node: node.as_any_node_ref(),
-    }
+    FormatLeadingComments::Node(node.as_any_node_ref())
+}
+
+/// Formats the passed comments as leading comments
+pub(crate) const fn leading_comments(comments: &[SourceComment]) -> FormatLeadingComments {
+    FormatLeadingComments::Comments(comments)
 }

 #[derive(Copy, Clone, Debug)]
-pub(crate) struct FormatLeadingComments<'a> {
-    node: AnyNodeRef<'a>,
+pub(crate) enum FormatLeadingComments<'a> {
+    Node(AnyNodeRef<'a>),
+    Comments(&'a [SourceComment]),
 }

 impl Format<PyFormatContext<'_>> for FormatLeadingComments<'_> {
     fn fmt(&self, f: &mut PyFormatter) -> FormatResult<()> {
         let comments = f.context().comments().clone();

-        for comment in comments.leading_comments(self.node) {
+        let leading_comments = match self {
+            FormatLeadingComments::Node(node) => comments.leading_comments(*node),
+            FormatLeadingComments::Comments(comments) => comments,
+        };
+
+        for comment in leading_comments {
             let slice = comment.slice();

             let lines_after_comment = lines_after(f.context().contents(), slice.end());
@@ -42,32 +52,88 @@ impl Format<PyFormatContext<'_>> for FormatLeadingComments<'_> {
         }
     }

-/// Formats the trailing comments of `node`
-pub(crate) fn trailing_comments<T>(node: &T) -> FormatTrailingComments
+/// Formats the leading `comments` of an alternate branch and ensures that it preserves the right
+/// number of empty lines before. The `last_node` is the last node of the preceding body.
+///
+/// For example, `last_node` is the last statement in the if body when formatting the leading
+/// comments of the `else` branch.
+pub(crate) fn leading_alternate_branch_comments<'a, T>(
+    comments: &'a [SourceComment],
+    last_node: Option<T>,
+) -> FormatLeadingAlternateBranchComments<'a>
 where
-    T: AstNode,
+    T: Into<AnyNodeRef<'a>>,
 {
-    FormatTrailingComments {
-        node: node.as_any_node_ref(),
+    FormatLeadingAlternateBranchComments {
+        comments,
+        last_node: last_node.map(std::convert::Into::into),
     }
 }

-pub(crate) struct FormatTrailingComments<'a> {
-    node: AnyNodeRef<'a>,
+pub(crate) struct FormatLeadingAlternateBranchComments<'a> {
+    comments: &'a [SourceComment],
+    last_node: Option<AnyNodeRef<'a>>,
+}
+
+impl Format<PyFormatContext<'_>> for FormatLeadingAlternateBranchComments<'_> {
+    fn fmt(&self, f: &mut Formatter<PyFormatContext<'_>>) -> FormatResult<()> {
+        if let Some(first_leading) = self.comments.first() {
+            // Leading comments only preserves the lines after the comment but not before.
+            // Insert the necessary lines.
+            if lines_before(f.context().contents(), first_leading.slice().start()) > 1 {
+                write!(f, [empty_line()])?;
+            }
+
+            write!(f, [leading_comments(self.comments)])?;
+        } else if let Some(last_preceding) = self.last_node {
+            // The leading comments formatting ensures that it preserves the right amount of lines after
+            // We need to take care of this ourselves, if there's no leading `else` comment.
+            if lines_after(f.context().contents(), last_preceding.end()) > 1 {
+                write!(f, [empty_line()])?;
+            }
+        }
+
+        Ok(())
+    }
+}
+
+/// Formats the trailing comments of `node`
+pub(crate) fn trailing_node_comments<T>(node: &T) -> FormatTrailingComments
+where
+    T: AstNode,
+{
+    FormatTrailingComments::Node(node.as_any_node_ref())
+}
+
+/// Formats the passed comments as trailing comments
+pub(crate) fn trailing_comments(comments: &[SourceComment]) -> FormatTrailingComments {
+    FormatTrailingComments::Comments(comments)
+}
+
+pub(crate) enum FormatTrailingComments<'a> {
+    Node(AnyNodeRef<'a>),
+    Comments(&'a [SourceComment]),
 }

 impl Format<PyFormatContext<'_>> for FormatTrailingComments<'_> {
     fn fmt(&self, f: &mut Formatter<PyFormatContext<'_>>) -> FormatResult<()> {
         let comments = f.context().comments().clone();
-        let mut has_empty_lines_before = false;

-        for trailing in comments.trailing_comments(self.node) {
+        let trailing_comments = match self {
+            FormatTrailingComments::Node(node) => comments.trailing_comments(*node),
+            FormatTrailingComments::Comments(comments) => comments,
+        };
+
+        let mut has_trailing_own_line_comment = false;
+
+        for trailing in trailing_comments {
             let slice = trailing.slice();

-            let lines_before_comment = lines_before(f.context().contents(), slice.start());
-            has_empty_lines_before |= lines_before_comment > 0;
+            has_trailing_own_line_comment |= trailing.position().is_own_line();

-            if has_empty_lines_before {
+            if has_trailing_own_line_comment {
+                let lines_before_comment = lines_before(f.context().contents(), slice.start());
+
                 // A trailing comment at the end of a body or list
                 // ```python
                 // def test():
@@ -105,7 +171,7 @@ impl Format<PyFormatContext<'_>> for FormatTrailingComments<'_> {
 }

 /// Formats the dangling comments of `node`.
-pub(crate) fn dangling_comments<T>(node: &T) -> FormatDanglingComments
+pub(crate) fn dangling_node_comments<T>(node: &T) -> FormatDanglingComments
 where
     T: AstNode,
 {
@@ -229,7 +295,7 @@ impl Format<PyFormatContext<'_>> for FormatEmptyLines {
                 _ => write!(f, [empty_line(), empty_line()]),
             },

-            NodeLevel::Statement => match self.lines {
+            NodeLevel::CompoundStatement => match self.lines {
                 0 | 1 => write!(f, [hard_line_break()]),
                 _ => write!(f, [empty_line()]),
             },
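`leading_alternate_branch_comments` re-inserts an empty line before an `else`/`elif` branch when the source had one, based on the `lines_before`/`lines_after` trivia helpers. Toy versions of those helpers are sketched below; they are simplified in that they only skip whitespace, whereas the real helpers also skip other trivia.

// Toy `lines_before` / `lines_after`: count line breaks between a position and
// the nearest non-whitespace character, so a caller can decide whether to
// re-insert one empty line before an alternate branch.
fn lines_after(source: &str, offset: usize) -> u32 {
    source[offset..]
        .chars()
        .take_while(|c| c.is_whitespace())
        .filter(|&c| c == '\n')
        .count() as u32
}

fn lines_before(source: &str, offset: usize) -> u32 {
    source[..offset]
        .chars()
        .rev()
        .take_while(|c| c.is_whitespace())
        .filter(|&c| c == '\n')
        .count() as u32
}

fn main() {
    let source = "while True:\n    pass\n\n# leading else comment\nelse:\n    ...\n";
    let comment_start = source.find('#').unwrap();
    let body_end = source.find("pass").unwrap() + "pass".len();

    // A blank line separates the body from the `else` comment, so the
    // formatter would write one `empty_line()` before the leading comments.
    assert_eq!(lines_before(source, comment_start), 2);
    assert_eq!(lines_after(source, body_end), 2);
    assert!(lines_before(source, comment_start) > 1);
    println!("ok");
}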
@@ -88,7 +88,6 @@
 //! It is possible to add an additional optional label to [`SourceComment`] If ever the need arises to distinguish two *dangling comments* in the formatting logic,

 use rustpython_parser::ast::Mod;
-use std::cell::Cell;
 use std::fmt::Debug;
 use std::rc::Rc;

@@ -103,7 +102,10 @@ use crate::comments::debug::{DebugComment, DebugComments};
 use crate::comments::map::MultiMap;
 use crate::comments::node_key::NodeRefEqualityKey;
 use crate::comments::visitor::CommentsVisitor;
-pub(crate) use format::{dangling_comments, leading_comments, trailing_comments};
+pub(crate) use format::{
+    dangling_node_comments, leading_alternate_branch_comments, leading_node_comments,
+    trailing_comments, trailing_node_comments,
+};
 use ruff_formatter::{SourceCode, SourceCodeSlice};
 use ruff_python_ast::node::AnyNodeRef;
 use ruff_python_ast::source_code::CommentRanges;
@@ -116,13 +118,11 @@ pub(crate) struct SourceComment {

     /// Whether the comment has been formatted or not.
     #[cfg(debug_assertions)]
-    formatted: Cell<bool>,
+    formatted: std::cell::Cell<bool>,

     position: CommentTextPosition,
 }

-#[allow(unused)]
-// TODO(micha): Remove after using the new comments infrastructure in the formatter.
 impl SourceComment {
     /// Returns the location of the comment in the original source code.
     /// Allows retrieving the text of the comment.
@@ -136,7 +136,7 @@ impl SourceComment {

     #[cfg(not(debug_assertions))]
     #[inline(always)]
-    pub fn mark_formatted(&self) {}
+    pub(crate) fn mark_formatted(&self) {}

     /// Marks the comment as formatted
     #[cfg(debug_assertions)]
@@ -184,8 +184,6 @@ pub(crate) enum CommentTextPosition {
     OwnLine,
 }

-#[allow(unused)]
-// TODO(micha): Remove after using the new comments infrastructure in the formatter.
 impl CommentTextPosition {
     pub(crate) const fn is_own_line(self) -> bool {
         matches!(self, CommentTextPosition::OwnLine)
@@ -858,4 +856,33 @@ a = (

         assert_debug_snapshot!(comments.debug(test_case.source_code));
     }
+
+    #[test]
+    fn while_trailing_end_of_line_comment() {
+        let source = r#"while True:
+    if something.changed:
+        do.stuff() # trailing comment
+"#;
+
+        let test_case = CommentsTestCase::from_code(source);
+
+        let comments = test_case.to_comments();
+
+        assert_debug_snapshot!(comments.debug(test_case.source_code));
+    }
+
+    #[test]
+    fn while_trailing_else_end_of_line_comment() {
+        let source = r#"while True:
+    pass
+else: # trailing comment
+    pass
+"#;
+
+        let test_case = CommentsTestCase::from_code(source);
+
+        let comments = test_case.to_comments();
+
+        assert_debug_snapshot!(comments.debug(test_case.source_code));
+    }
 }
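The `SourceComment` changes above keep the `formatted` bookkeeping flag debug-only. Below is a self-contained sketch of that pattern — a `Cell<bool>` behind `cfg(debug_assertions)` plus a no-op release-mode `mark_formatted` — using a toy `Comment` type that is not the real `SourceComment`.

// Sketch of a debug-only "formatted" flag: interior mutability in debug
// builds, zero cost in release builds.
#[derive(Debug)]
struct Comment {
    text: String,
    #[cfg(debug_assertions)]
    formatted: std::cell::Cell<bool>,
}

impl Comment {
    fn new(text: impl Into<String>) -> Self {
        Self {
            text: text.into(),
            #[cfg(debug_assertions)]
            formatted: std::cell::Cell::new(false),
        }
    }

    /// Marks the comment as formatted.
    #[cfg(debug_assertions)]
    fn mark_formatted(&self) {
        self.formatted.set(true);
    }

    #[cfg(not(debug_assertions))]
    #[inline(always)]
    fn mark_formatted(&self) {}
}

fn main() {
    let comment = Comment::new("# trailing comment");
    comment.mark_formatted();
    #[cfg(debug_assertions)]
    assert!(comment.formatted.get());
    println!("{}", comment.text);
}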
@@ -5,7 +5,7 @@ use crate::trivia::find_first_non_trivia_character_in_range;
 use ruff_python_ast::node::AnyNodeRef;
 use ruff_python_ast::source_code::Locator;
 use ruff_python_ast::whitespace;
-use ruff_text_size::{TextRange, TextSize};
+use ruff_text_size::{TextLen, TextRange, TextSize};
 use rustpython_parser::ast::Ranged;
 use std::cmp::Ordering;

@@ -16,8 +16,11 @@ pub(super) fn place_comment<'a>(
 ) -> CommentPlacement<'a> {
     handle_in_between_excepthandlers_or_except_handler_and_else_or_finally_comment(comment, locator)
         .or_else(|comment| handle_match_comment(comment, locator))
-        .or_else(|comment| handle_in_between_bodies_comment(comment, locator))
+        .or_else(|comment| handle_in_between_bodies_own_line_comment(comment, locator))
+        .or_else(|comment| handle_in_between_bodies_end_of_line_comment(comment, locator))
         .or_else(|comment| handle_trailing_body_comment(comment, locator))
+        .or_else(handle_trailing_end_of_line_body_comment)
+        .or_else(|comment| handle_trailing_end_of_line_condition_comment(comment, locator))
         .or_else(|comment| handle_positional_only_arguments_separator_comment(comment, locator))
         .or_else(|comment| {
             handle_trailing_binary_expression_left_or_operator_comment(comment, locator)
@@ -177,7 +180,7 @@ fn handle_in_between_excepthandlers_or_except_handler_and_else_or_finally_commen
     CommentPlacement::Default(comment)
 }

-/// Handles comments between the last statement and the first statement of two bodies.
+/// Handles own line comments between the last statement and the first statement of two bodies.
 ///
 /// ```python
 /// if x == y:
@@ -187,15 +190,11 @@ fn handle_in_between_excepthandlers_or_except_handler_and_else_or_finally_commen
 /// else:
 ///     print("I have no comments")
 /// ```
-fn handle_in_between_bodies_comment<'a>(
+fn handle_in_between_bodies_own_line_comment<'a>(
     comment: DecoratedComment<'a>,
     locator: &Locator,
 ) -> CommentPlacement<'a> {
-    use ruff_python_ast::prelude::*;
-
-    // The rule only applies to own line comments. The default logic associates end of line comments
-    // correctly.
-    if comment.text_position().is_end_of_line() {
+    if !comment.text_position().is_own_line() {
         return CommentPlacement::Default(comment);
     }

@@ -203,39 +202,7 @@ fn handle_in_between_bodies_own_line_comment<'a>(
     if let (Some(preceding), Some(following)) = (comment.preceding_node(), comment.following_node())
     {
         // ...and the following statement must be the first statement in an alternate body of the parent...
-        let is_following_the_first_statement_in_a_parents_alternate_body =
-            match comment.enclosing_node() {
-                AnyNodeRef::StmtIf(StmtIf { orelse, .. })
-                | AnyNodeRef::StmtFor(StmtFor { orelse, .. })
-                | AnyNodeRef::StmtAsyncFor(StmtAsyncFor { orelse, .. })
-                | AnyNodeRef::StmtWhile(StmtWhile { orelse, .. }) => {
-                    are_same_optional(following, orelse.first())
-                }
-
-                AnyNodeRef::StmtTry(StmtTry {
-                    handlers,
-                    orelse,
-                    finalbody,
-                    ..
-                })
-                | AnyNodeRef::StmtTryStar(StmtTryStar {
-                    handlers,
-                    orelse,
-                    finalbody,
-                    ..
-                }) => {
-                    are_same_optional(following, handlers.first())
-                        // Comments between the handlers and the `else`, or comments between the `handlers` and the `finally`
-                        // are already handled by `handle_in_between_excepthandlers_or_except_handler_and_else_or_finally_comment`
-                        || handlers.is_empty() && are_same_optional(following, orelse.first())
-                        || (handlers.is_empty() || !orelse.is_empty())
-                            && are_same_optional(following, finalbody.first())
-                }
-
-                _ => false,
-            };
-
-        if !is_following_the_first_statement_in_a_parents_alternate_body {
+        if !is_first_statement_in_enclosing_alternate_body(following, comment.enclosing_node()) {
             // ```python
             // if test:
             //     a
@@ -304,6 +271,75 @@
     CommentPlacement::Default(comment)
 }

+/// Handles end of line comments comments between the last statement and the first statement of two bodies.
+///
+/// ```python
+/// if x == y:
+///     pass # trailing comment of pass
+/// else: # trailing comment of `else`
+///     print("I have no comments")
+/// ```
+fn handle_in_between_bodies_end_of_line_comment<'a>(
+    comment: DecoratedComment<'a>,
+    locator: &Locator,
+) -> CommentPlacement<'a> {
+    if !comment.text_position().is_end_of_line() {
+        return CommentPlacement::Default(comment);
+    }
+
+    // The comment must be between two statements...
+    if let (Some(preceding), Some(following)) = (comment.preceding_node(), comment.following_node())
+    {
+        // ...and the following statement must be the first statement in an alternate body of the parent...
+        if !is_first_statement_in_enclosing_alternate_body(following, comment.enclosing_node()) {
+            // ```python
+            // if test:
+            //     a
+            //     # comment
+            //     b
+            // ```
+            return CommentPlacement::Default(comment);
+        }
+
+        if !locator.contains_line_break(TextRange::new(preceding.end(), comment.slice().start())) {
+            // Trailing comment of the preceding statement
+            // ```python
+            // while test:
+            //     a # comment
+            // else:
+            //     b
+            // ```
+            CommentPlacement::trailing(preceding, comment)
+        } else if following.is_stmt_if() || following.is_except_handler() {
+            // The `elif` or except handlers have their own body to which we can attach the trailing comment
+            // ```python
+            // if test:
+            //     a
+            // elif c: # comment
+            //     b
+            // ```
+            CommentPlacement::trailing(following, comment)
+        } else {
+            // There are no bodies for the "else" branch and other bodies that are represented as a `Vec<Stmt>`.
+            // This means, there's no good place to attach the comments to.
+            // Make this a dangling comments and manually format the comment in
+            // in the enclosing node's formatting logic. For `try`, it's the formatters responsibility
+            // to correctly identify the comments for the `finally` and `orelse` block by looking
+            // at the comment's range.
+            //
+            // ```python
+            // while x == y:
+            //     pass
+            // else: # trailing
+            //     print("nooop")
+            // ```
+            CommentPlacement::dangling(comment.enclosing_node(), comment)
+        }
+    } else {
+        CommentPlacement::Default(comment)
+    }
+}
+
 /// Handles trailing comments at the end of a body block (or any other block that is indented).
 /// ```python
 /// def test():
@@ -401,6 +437,126 @@ fn handle_trailing_body_comment<'a>(
     }
 }

+/// Handles end of line comments of the last statement in an indented body:
+///
+/// ```python
+/// while True:
+///     if something.changed:
+///         do.stuff() # trailing comment
+/// ```
+fn handle_trailing_end_of_line_body_comment(comment: DecoratedComment<'_>) -> CommentPlacement<'_> {
+    // Must be an end of line comment
+    if comment.text_position().is_own_line() {
+        return CommentPlacement::Default(comment);
+    }
+
+    // Must be *after* a statement
+    let Some(preceding) = comment.preceding_node() else {
+        return CommentPlacement::Default(comment);
+    };
+
+    // Recursively get the last child of statements with a body.
+    let last_children = std::iter::successors(last_child_in_body(preceding), |parent| {
+        last_child_in_body(*parent)
+    });
+
+    if let Some(last_child) = last_children.last() {
+        CommentPlacement::trailing(last_child, comment)
+    } else {
+        // End of line comment of a statement that has no body. This is not what we're looking for.
+        // ```python
+        // a # trailing comment
+        // b
+        // ```
+        CommentPlacement::Default(comment)
+    }
+}
+
+/// Handles end of line comments after the `:` of a condition
+///
+/// ```python
+/// while True: # comment
+///     pass
+/// ```
+///
+/// It attaches the comment as dangling comment to the enclosing `while` statement.
+fn handle_trailing_end_of_line_condition_comment<'a>(
+    comment: DecoratedComment<'a>,
+    locator: &Locator,
+) -> CommentPlacement<'a> {
+    use ruff_python_ast::prelude::*;
+
+    // Must be an end of line comment
+    if comment.text_position().is_own_line() {
+        return CommentPlacement::Default(comment);
+    }
+
+    // Must be between the condition expression and the first body element
+    let (Some(preceding), Some(following)) = (comment.preceding_node(), comment.following_node()) else {
+        return CommentPlacement::Default(comment);
+    };
+
+    let expression_before_colon = match comment.enclosing_node() {
+        AnyNodeRef::StmtIf(StmtIf { test: expr, .. })
+        | AnyNodeRef::StmtWhile(StmtWhile { test: expr, .. })
+        | AnyNodeRef::StmtFor(StmtFor { iter: expr, .. })
+        | AnyNodeRef::StmtAsyncFor(StmtAsyncFor { iter: expr, .. }) => {
+            Some(AnyNodeRef::from(expr.as_ref()))
+        }
+
+        AnyNodeRef::StmtWith(StmtWith { items, .. })
+        | AnyNodeRef::StmtAsyncWith(StmtAsyncWith { items, .. }) => {
+            items.last().map(AnyNodeRef::from)
+        }
+        _ => None,
+    };
+
+    let Some(last_before_colon) = expression_before_colon else {
+        return CommentPlacement::Default(comment);
+    };
+
+    // If the preceding is the node before the `colon`
+    // `while true:` The node before the `colon` is the `true` constant.
+    if preceding.ptr_eq(last_before_colon) {
+        let mut start = preceding.end();
+        while let Some((offset, c)) = find_first_non_trivia_character_in_range(
+            locator.contents(),
+            TextRange::new(start, following.start()),
+        ) {
+            match c {
+                ':' => {
+                    if comment.slice().start() > offset {
+                        // Comment comes after the colon
+                        // ```python
+                        // while a: # comment
+                        //     ...
+                        // ```
+                        return CommentPlacement::dangling(comment.enclosing_node(), comment);
+                    }
+
+                    // Comment comes before the colon
+                    // ```python
+                    // while (
+                    //     a # comment
+                    // ):
+                    //     ...
+                    // ```
+                    break;
+                }
+                ')' => {
+                    // Skip over any closing parentheses
+                    start = offset + ')'.text_len();
+                }
+                _ => {
+                    unreachable!("Only ')' or ':' should follow the condition")
+                }
+            }
+        }
+    }
+
+    CommentPlacement::Default(comment)
+}
+
 /// Attaches comments for the positional-only arguments separator `/` as trailing comments to the
 /// enclosing [`Arguments`] node.
 ///
@@ -667,3 +823,42 @@ fn last_child_in_body(node: AnyNodeRef) -> Option<AnyNodeRef> {

     body.last().map(AnyNodeRef::from)
 }
+
+/// Returns `true` if `following` is the first statement in an alternate `body` (e.g. the else of an if statement) of the `enclosing` node.
+fn is_first_statement_in_enclosing_alternate_body(
+    following: AnyNodeRef,
+    enclosing: AnyNodeRef,
+) -> bool {
+    use ruff_python_ast::prelude::*;
+
+    match enclosing {
+        AnyNodeRef::StmtIf(StmtIf { orelse, .. })
+        | AnyNodeRef::StmtFor(StmtFor { orelse, .. })
+        | AnyNodeRef::StmtAsyncFor(StmtAsyncFor { orelse, .. })
+        | AnyNodeRef::StmtWhile(StmtWhile { orelse, .. }) => {
+            are_same_optional(following, orelse.first())
+        }
+
+        AnyNodeRef::StmtTry(StmtTry {
+            handlers,
+            orelse,
+            finalbody,
+            ..
+        })
+        | AnyNodeRef::StmtTryStar(StmtTryStar {
+            handlers,
+            orelse,
+            finalbody,
+            ..
+        }) => {
+            are_same_optional(following, handlers.first())
+                // Comments between the handlers and the `else`, or comments between the `handlers` and the `finally`
+                // are already handled by `handle_in_between_excepthandlers_or_except_handler_and_else_or_finally_comment`
+                || handlers.is_empty() && are_same_optional(following, orelse.first())
+                || (handlers.is_empty() || !orelse.is_empty())
+                    && are_same_optional(following, finalbody.first())
+        }
+
+        _ => false,
+    }
+}
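Taken together, the new end-of-line handlers implement a small decision table: a comment on the same line as the preceding statement trails that statement; one after an `elif`/`except` header trails the following branch; otherwise (e.g. after a bare `else:`) it becomes a dangling comment on the enclosing node. A toy encoding of that table follows, independent of the real `DecoratedComment`/`CommentPlacement` types.

// Toy decision table for the end-of-line placement rule.
#[derive(Debug, PartialEq)]
enum Placement {
    TrailingOfPreceding,
    TrailingOfFollowing,
    DanglingOnEnclosing,
}

fn place_end_of_line_comment(
    same_line_as_preceding: bool,
    following_has_own_body: bool, // e.g. an `elif` branch or an `except` handler
) -> Placement {
    if same_line_as_preceding {
        // while test:
        //     a  # comment
        // else:
        //     b
        Placement::TrailingOfPreceding
    } else if following_has_own_body {
        // if test:
        //     a
        // elif c:  # comment
        //     b
        Placement::TrailingOfFollowing
    } else {
        // while x == y:
        //     pass
        // else:  # comment
        //     print("nooop")
        Placement::DanglingOnEnclosing
    }
}

fn main() {
    assert_eq!(place_end_of_line_comment(true, false), Placement::TrailingOfPreceding);
    assert_eq!(place_end_of_line_comment(false, true), Placement::TrailingOfFollowing);
    assert_eq!(place_end_of_line_comment(false, false), Placement::DanglingOnEnclosing);
    println!("ok");
}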
@@ -19,19 +19,19 @@ expression: comments.debug(test_case.source_code)
         "trailing": [],
     },
     Node {
-        kind: ExprCompare,
-        range: 51..57,
-        source: `x == y`,
+        kind: StmtIf,
+        range: 48..212,
+        source: `if x == y: # if statement e...ne comment⏎`,
     }: {
         "leading": [],
-        "dangling": [],
-        "trailing": [
+        "dangling": [
             SourceComment {
                 text: "# if statement end of line comment",
                 position: EndOfLine,
                 formatted: false,
             },
         ],
+        "trailing": [],
     },
     Node {
         kind: StmtIf,
@@ -0,0 +1,21 @@
+---
+source: crates/ruff_python_formatter/src/comments/mod.rs
+expression: comments.debug(test_case.source_code)
+---
+{
+    Node {
+        kind: StmtWhile,
+        range: 0..54,
+        source: `while True:⏎`,
+    }: {
+        "leading": [],
+        "dangling": [
+            SourceComment {
+                text: "# trailing comment",
+                position: EndOfLine,
+                formatted: false,
+            },
+        ],
+        "trailing": [],
+    },
+}
@@ -0,0 +1,21 @@
+---
+source: crates/ruff_python_formatter/src/comments/mod.rs
+expression: comments.debug(test_case.source_code)
+---
+{
+    Node {
+        kind: StmtExpr,
+        range: 46..56,
+        source: `do.stuff()`,
+    }: {
+        "leading": [],
+        "dangling": [],
+        "trailing": [
+            SourceComment {
+                text: "# trailing comment",
+                position: EndOfLine,
+                formatted: false,
+            },
+        ],
+    },
+}
@@ -5,7 +5,6 @@ use ruff_formatter::{SourceCode, SourceCodeSlice};
 use ruff_python_ast::node::AnyNodeRef;
 use ruff_python_ast::prelude::*;
 use ruff_python_ast::source_code::{CommentRanges, Locator};
-use std::cell::Cell;
 // The interface is designed to only export the members relevant for iterating nodes in
 // pre-order.
 #[allow(clippy::wildcard_imports)]
@@ -418,7 +417,7 @@ impl From<DecoratedComment<'_>> for SourceComment {
             slice: decorated.slice,
             position: decorated.text_position,
             #[cfg(debug_assertions)]
-            formatted: Cell::new(false),
+            formatted: std::cell::Cell::new(false),
         }
     }
 }
@@ -25,7 +25,6 @@ impl<'a> PyFormatContext<'a> {
         }
     }

-    #[allow(unused)]
     pub(crate) fn contents(&self) -> &'a str {
         self.contents
     }
@@ -35,7 +34,6 @@ impl<'a> PyFormatContext<'a> {
         Locator::new(self.contents)
     }

-    #[allow(unused)]
     pub(crate) fn set_node_level(&mut self, level: NodeLevel) {
         self.node_level = level;
     }
@@ -44,7 +42,6 @@ impl<'a> PyFormatContext<'a> {
         self.node_level
     }

-    #[allow(unused)]
     pub(crate) fn comments(&self) -> &Comments<'a> {
         &self.comments
     }
@@ -80,11 +77,10 @@ pub(crate) enum NodeLevel {
     #[default]
     TopLevel,

-    /// Formatting nodes that are enclosed by a statement.
-    #[allow(unused)]
-    Statement,
+    /// Formatting the body statements of a [compound statement](https://docs.python.org/3/reference/compound_stmts.html#compound-statements)
+    /// (`if`, `while`, `match`, etc.).
+    CompoundStatement,

     /// Formatting nodes that are enclosed in a parenthesized expression.
-    #[allow(unused)]
     Parenthesized,
 }
@@ -1,5 +1,6 @@
-use crate::{verbatim_text, FormatNodeRule, PyFormatter};
-use ruff_formatter::{write, Buffer, FormatResult};
+use crate::prelude::*;
+use crate::FormatNodeRule;
+use ruff_formatter::{write, FormatContext};
 use rustpython_parser::ast::ExprName;

 #[derive(Default)]
@@ -7,6 +8,44 @@ pub struct FormatExprName;

 impl FormatNodeRule<ExprName> for FormatExprName {
     fn fmt_fields(&self, item: &ExprName, f: &mut PyFormatter) -> FormatResult<()> {
-        write!(f, [verbatim_text(item.range)])
+        let ExprName { id, range, ctx: _ } = item;
+
+        debug_assert_eq!(
+            id.as_str(),
+            f.context()
+                .source_code()
+                .slice(*range)
+                .text(f.context().source_code())
+        );
+
+        write!(f, [source_text_slice(*range, ContainsNewlines::No)])
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use ruff_text_size::{TextRange, TextSize};
+    use rustpython_parser::ast::{ModModule, Ranged};
+    use rustpython_parser::Parse;
+
+    #[test]
+    fn name_range_with_comments() {
+        let source = ModModule::parse("a # comment", "file.py").unwrap();
+
+        let expression_statement = source
+            .body
+            .first()
+            .expect("Expected non-empty body")
+            .as_expr_stmt()
+            .unwrap();
+        let name = expression_statement
+            .value
+            .as_name_expr()
+            .expect("Expected name expression");
+
+        assert_eq!(
+            name.range(),
+            TextRange::at(TextSize::new(0), TextSize::new(1))
+        );
     }
 }
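The new `debug_assert_eq!` and test rely on the invariant that an `ExprName`'s range covers exactly the identifier, so slicing the source by that range reproduces the name even when an end-of-line comment follows. A toy illustration using plain byte offsets instead of `TextRange` (the `(0, 1)` range below is simply the expected value from the test above, not derived from a parser):

// Toy illustration: slicing the source by the name's (start, length) yields
// the identifier text, excluding the trailing comment.
fn slice(source: &str, start: usize, len: usize) -> &str {
    &source[start..start + len]
}

fn main() {
    let source = "a # comment";
    let (start, len) = (0, 1);
    assert_eq!(slice(source, start, len), "a");
    println!("ok");
}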
Some files were not shown because too many files have changed in this diff.