mirror of https://github.com/astral-sh/ruff

Merge branch 'astral-sh:main' into feature/slots-type-inference

commit 80468d15e8
@@ -8,7 +8,3 @@ benchmark = "bench -p ruff_benchmark --bench linter --bench formatter --"
# See: https://github.com/astral-sh/ruff/issues/11503
[target.'cfg(all(target_env="msvc", target_os = "windows"))']
rustflags = ["-C", "target-feature=+crt-static"]

[target.'wasm32-unknown-unknown']
# See https://docs.rs/getrandom/latest/getrandom/#webassembly-support
rustflags = ["--cfg", 'getrandom_backend="wasm_js"']
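The `getrandom_backend="wasm_js"` flag above follows getrandom 0.3's WebAssembly support notes. As a hedged companion sketch (not part of this diff), the crate side of that setup typically also enables the `wasm_js` feature:

```toml
# Sketch, assuming getrandom 0.3 is used on wasm32-unknown-unknown:
# the --cfg flag above selects the backend, and the dependency opts into it here.
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
getrandom = { version = "0.3", features = ["wasm_js"] }
```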
@@ -1,3 +1,12 @@
# Define serial test group for running tests sequentially.
[test-groups]
serial = { max-threads = 1 }

# Run ty file watching tests sequentially to avoid race conditions.
[[profile.default.overrides]]
filter = 'binary(file_watching)'
test-group = 'serial'

[profile.ci]
# Print out output for failing tests as soon as they fail, and also at the end
# of the run (for easy scrollability).
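These nextest settings are exercised by the commands used elsewhere in this change; a short usage sketch (the `-E` filterset flag is standard nextest, not something added here):

```sh
# Run the whole suite with the CI profile; tests matching the override above
# (the file_watching binary) are limited to one thread by the `serial` group.
cargo nextest run --all-features --profile ci

# Run only the file-watching tests, which now execute sequentially.
cargo nextest run -E 'binary(file_watching)' --profile ci
```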
@@ -10,7 +10,7 @@ indent_style = space
insert_final_newline = true
indent_size = 2

[*.{rs,py,pyi}]
[*.{rs,py,pyi,toml}]
indent_size = 4

[*.snap]

@@ -18,6 +18,3 @@ trim_trailing_whitespace = false

[*.md]
max_line_length = 100

[*.toml]
indent_size = 4
@@ -12,13 +12,18 @@ concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true

defaults:
run:
shell: bash

env:
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
CARGO_TERM_COLOR: always
RUSTUP_MAX_RETRIES: 10
PACKAGE_NAME: ruff
PYTHON_VERSION: "3.13"
PYTHON_VERSION: "3.14"
NEXTEST_PROFILE: ci

jobs:
determine_changes:
@ -142,7 +147,7 @@ jobs:
|
|||
env:
|
||||
MERGE_BASE: ${{ steps.merge_base.outputs.sha }}
|
||||
run: |
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- 'python/py_fuzzer/**' \
|
||||
if git diff --quiet "${MERGE_BASE}...HEAD" -- 'python/py-fuzzer/**' \
|
||||
; then
|
||||
echo "changed=false" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
|
|
@@ -237,7 +242,7 @@ jobs:

cargo-test-linux:
name: "cargo test (linux)"
runs-on: depot-ubuntu-22.04-16
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
needs: determine_changes
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
timeout-minutes: 20
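The `runs-on` change above uses the usual GitHub Actions `cond && a || b` fallback idiom: forks that aren't `astral-sh/ruff` fall back to a hosted runner. A minimal sketch of the pattern (job name and step are placeholders, not from this diff):

```yaml
jobs:
  example:
    # Selects the Depot runner only in the upstream repository; forks get ubuntu-latest.
    runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
    steps:
      - run: echo "runner selected when the workflow expression is evaluated"
```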
@ -271,11 +276,9 @@ jobs:
|
|||
# This step is just to get nice GitHub annotations on the PR diff in the files-changed tab.
|
||||
run: cargo test -p ty_python_semantic --test mdtest || true
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
# Dogfood ty on py-fuzzer
|
||||
- run: uv run --project=./python/py-fuzzer cargo run -p ty check --project=./python/py-fuzzer
|
||||
# Check for broken links in the documentation.
|
||||
- run: cargo doc --all --no-deps
|
||||
env:
|
||||
|
|
@ -299,9 +302,13 @@ jobs:
|
|||
|
||||
cargo-test-linux-release:
|
||||
name: "cargo test (linux, release)"
|
||||
# release builds timeout on GitHub runners, so this job is just skipped on forks in the `if` check
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
if: |
|
||||
github.repository == 'astral-sh/ruff' &&
|
||||
!contains(github.event.pull_request.labels.*.name, 'no-test') &&
|
||||
(needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main')
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
@ -325,14 +332,16 @@ jobs:
|
|||
with:
|
||||
enable-cache: "true"
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
run: cargo insta test --release --all-features --unreferenced reject --test-runner nextest
|
||||
|
||||
cargo-test-windows:
|
||||
name: "cargo test (windows)"
|
||||
runs-on: depot-windows-2022-16
|
||||
cargo-test-other:
|
||||
strategy:
|
||||
matrix:
|
||||
platform:
|
||||
- ${{ github.repository == 'astral-sh/ruff' && 'depot-windows-2022-16' || 'windows-latest' }}
|
||||
- macos-latest
|
||||
name: "cargo test (${{ matrix.platform }})"
|
||||
runs-on: ${{ matrix.platform }}
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
timeout-minutes: 20
|
||||
|
|
@ -352,11 +361,6 @@ jobs:
|
|||
with:
|
||||
enable-cache: "true"
|
||||
- name: "Run tests"
|
||||
shell: bash
|
||||
env:
|
||||
NEXTEST_PROFILE: "ci"
|
||||
# Workaround for <https://github.com/nextest-rs/nextest/issues/1493>.
|
||||
RUSTUP_WINDOWS_PATH_ADD_BIN: 1
|
||||
run: |
|
||||
cargo nextest run --all-features --profile ci
|
||||
cargo test --all-features --doc
|
||||
|
|
@ -391,26 +395,9 @@ jobs:
|
|||
cd crates/ty_wasm
|
||||
wasm-pack test --node
|
||||
|
||||
cargo-build-release:
|
||||
name: "cargo build (release)"
|
||||
runs-on: macos-latest
|
||||
if: ${{ github.ref == 'refs/heads/main' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
||||
- name: "Build"
|
||||
run: cargo build --release --locked
|
||||
|
||||
cargo-build-msrv:
|
||||
name: "cargo build (msrv)"
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }}
|
||||
needs: determine_changes
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && (needs.determine_changes.outputs.code == 'true' || github.ref == 'refs/heads/main') }}
|
||||
timeout-minutes: 20
|
||||
|
|
@ -431,7 +418,6 @@ jobs:
|
|||
- name: "Install mold"
|
||||
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
||||
- name: "Build tests"
|
||||
shell: bash
|
||||
env:
|
||||
MSRV: ${{ steps.msrv.outputs.value }}
|
||||
run: cargo "+${MSRV}" test --no-run --all-features
|
||||
|
|
@ -452,7 +438,7 @@ jobs:
|
|||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Install cargo-binstall"
|
||||
uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
|
||||
uses: cargo-bins/cargo-binstall@a66119fbb1c952daba62640c2609111fe0803621 # v1.15.7
|
||||
- name: "Install cargo-fuzz"
|
||||
# Download the latest version from quick install and not the github releases because github releases only has MUSL targets.
|
||||
run: cargo binstall cargo-fuzz --force --disable-strategies crate-meta-data --no-confirm
|
||||
|
|
@ -487,9 +473,10 @@ jobs:
|
|||
chmod +x "${DOWNLOAD_PATH}/ruff"
|
||||
|
||||
(
|
||||
uvx \
|
||||
uv run \
|
||||
--python="${PYTHON_VERSION}" \
|
||||
--from=./python/py-fuzzer \
|
||||
--project=./python/py-fuzzer \
|
||||
--locked \
|
||||
fuzz \
|
||||
--test-executable="${DOWNLOAD_PATH}/ruff" \
|
||||
--bin=ruff \
|
||||
|
|
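The fuzzing steps switch from `uvx --from` to `uv run --project … --locked`. A hedged before/after sketch of the two invocation styles (the trailing `--help` is illustrative only):

```sh
# Before: run the py-fuzzer entry point from an ad-hoc uvx environment.
uvx --from=./python/py-fuzzer fuzz --help

# After: run inside the project's own environment and fail if uv.lock is stale,
# so CI always uses the locked dependency set.
uv run --project=./python/py-fuzzer --locked fuzz --help
```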
@ -507,6 +494,7 @@ jobs:
|
|||
with:
|
||||
persist-credentials: false
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
- name: "Install Rust toolchain"
|
||||
run: rustup component add rustfmt
|
||||
# Run all code generation scripts, and verify that the current output is
|
||||
|
|
@ -521,10 +509,15 @@ jobs:
|
|||
./scripts/add_plugin.py test --url https://pypi.org/project/-test/0.1.0/ --prefix TST
|
||||
./scripts/add_rule.py --name FirstRule --prefix TST --code 001 --linter test
|
||||
- run: cargo check
|
||||
# Lint/format/type-check py-fuzzer
|
||||
# (dogfooding with ty is done in a separate job)
|
||||
- run: uv run --directory=./python/py-fuzzer mypy
|
||||
- run: uv run --directory=./python/py-fuzzer ruff format --check
|
||||
- run: uv run --directory=./python/py-fuzzer ruff check
|
||||
|
||||
ecosystem:
|
||||
name: "ecosystem"
|
||||
runs-on: depot-ubuntu-latest-8
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-latest-8' || 'ubuntu-latest' }}
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
|
|
@ -536,9 +529,11 @@ jobs:
|
|||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
- uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
with:
|
||||
python-version: ${{ env.PYTHON_VERSION }}
|
||||
# TODO: figure out why `ruff-ecosystem` crashes on Python 3.14
|
||||
python-version: "3.13"
|
||||
activate-environment: true
|
||||
|
||||
- uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
name: Download comparison Ruff binary
|
||||
|
|
@ -557,7 +552,7 @@ jobs:
|
|||
|
||||
- name: Install ruff-ecosystem
|
||||
run: |
|
||||
pip install ./python/ruff-ecosystem
|
||||
uv pip install ./python/ruff-ecosystem
|
||||
|
||||
- name: Run `ruff check` stable ecosystem check
|
||||
if: ${{ needs.determine_changes.outputs.linter == 'true' }}
|
||||
|
|
@ -649,13 +644,13 @@ jobs:
|
|||
|
||||
fuzz-ty:
|
||||
name: "Fuzz for new ty panics"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
|
||||
needs:
|
||||
- cargo-test-linux
|
||||
- determine_changes
|
||||
# Only runs on pull requests, since that is the only we way we can find the base version for comparison.
|
||||
if: ${{ !contains(github.event.pull_request.labels.*.name, 'no-test') && github.event_name == 'pull_request' && (needs.determine_changes.outputs.ty == 'true' || needs.determine_changes.outputs.py-fuzzer == 'true') }}
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: ${{ github.repository == 'astral-sh/ruff' && 5 || 20 }}
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
|
|
@ -683,15 +678,16 @@ jobs:
|
|||
chmod +x "${PWD}/ty" "${NEW_TY}/ty"
|
||||
|
||||
(
|
||||
uvx \
|
||||
uv run \
|
||||
--python="${PYTHON_VERSION}" \
|
||||
--from=./python/py-fuzzer \
|
||||
--project=./python/py-fuzzer \
|
||||
--locked \
|
||||
fuzz \
|
||||
--test-executable="${NEW_TY}/ty" \
|
||||
--baseline-executable="${PWD}/ty" \
|
||||
--only-new-bugs \
|
||||
--bin=ty \
|
||||
0-500
|
||||
0-1000
|
||||
)
|
||||
|
||||
cargo-shear:
|
||||
|
|
@ -703,13 +699,13 @@ jobs:
|
|||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: cargo-bins/cargo-binstall@38e8f5e4c386b611d51e8aa997b9a06a3c8eb67a # v1.15.6
|
||||
- uses: cargo-bins/cargo-binstall@a66119fbb1c952daba62640c2609111fe0803621 # v1.15.7
|
||||
- run: cargo binstall --no-confirm cargo-shear
|
||||
- run: cargo shear
|
||||
|
||||
ty-completion-evaluation:
|
||||
name: "ty completion evaluation"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
|
||||
needs: determine_changes
|
||||
if: ${{ needs.determine_changes.outputs.ty == 'true' || github.ref == 'refs/heads/main' }}
|
||||
steps:
|
||||
|
|
@ -721,7 +717,7 @@ jobs:
|
|||
- name: "Install Rust toolchain"
|
||||
run: rustup show
|
||||
- name: "Run ty completion evaluation"
|
||||
run: cargo run --release --package ty_completion_eval -- all --threshold 0.1 --tasks /tmp/completion-evaluation-tasks.csv
|
||||
run: cargo run --release --package ty_completion_eval -- all --threshold 0.4 --tasks /tmp/completion-evaluation-tasks.csv
|
||||
- name: "Ensure there are no changes"
|
||||
run: diff ./crates/ty_completion_eval/completion-evaluation-tasks.csv /tmp/completion-evaluation-tasks.csv
|
||||
|
||||
|
|
@ -755,7 +751,7 @@ jobs:
|
|||
|
||||
pre-commit:
|
||||
name: "pre-commit"
|
||||
runs-on: depot-ubuntu-22.04-16
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-16' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
@ -792,9 +788,6 @@ jobs:
|
|||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- name: "Add SSH key"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
|
|
@ -805,12 +798,15 @@ jobs:
|
|||
run: rustup show
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
|
||||
with:
|
||||
python-version: 3.13
|
||||
activate-environment: true
|
||||
- name: "Install Insiders dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS == 'true' }}
|
||||
run: uv pip install -r docs/requirements-insiders.txt --system
|
||||
run: uv pip install -r docs/requirements-insiders.txt
|
||||
- name: "Install dependencies"
|
||||
if: ${{ env.MKDOCS_INSIDERS_SSH_KEY_EXISTS != 'true' }}
|
||||
run: uv pip install -r docs/requirements.txt --system
|
||||
run: uv pip install -r docs/requirements.txt
|
||||
- name: "Update README File"
|
||||
run: python scripts/transform_readme.py --target mkdocs
|
||||
- name: "Generate docs"
|
||||
|
|
@ -929,8 +925,12 @@ jobs:
|
|||
runs-on: ubuntu-24.04
|
||||
needs: determine_changes
|
||||
if: |
|
||||
github.ref == 'refs/heads/main' ||
|
||||
(needs.determine_changes.outputs.formatter == 'true' || needs.determine_changes.outputs.linter == 'true')
|
||||
github.repository == 'astral-sh/ruff' &&
|
||||
(
|
||||
github.ref == 'refs/heads/main' ||
|
||||
needs.determine_changes.outputs.formatter == 'true' ||
|
||||
needs.determine_changes.outputs.linter == 'true'
|
||||
)
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
|
|
@ -953,7 +953,7 @@ jobs:
|
|||
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench formatter --bench lexer --bench linter --bench parser
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@3959e9e296ef25296e93e32afcc97196f966e57f # v4.1.0
|
||||
uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
|
||||
with:
|
||||
mode: instrumentation
|
||||
run: cargo codspeed run
|
||||
|
|
@ -964,8 +964,11 @@ jobs:
|
|||
runs-on: ubuntu-24.04
|
||||
needs: determine_changes
|
||||
if: |
|
||||
github.ref == 'refs/heads/main' ||
|
||||
needs.determine_changes.outputs.ty == 'true'
|
||||
github.repository == 'astral-sh/ruff' &&
|
||||
(
|
||||
github.ref == 'refs/heads/main' ||
|
||||
needs.determine_changes.outputs.ty == 'true'
|
||||
)
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: "Checkout Branch"
|
||||
|
|
@ -988,7 +991,7 @@ jobs:
|
|||
run: cargo codspeed build --features "codspeed,instrumented" --no-default-features -p ruff_benchmark --bench ty
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@3959e9e296ef25296e93e32afcc97196f966e57f # v4.1.0
|
||||
uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
|
||||
with:
|
||||
mode: instrumentation
|
||||
run: cargo codspeed run
|
||||
|
|
@ -1026,7 +1029,7 @@ jobs:
|
|||
run: cargo codspeed build --features "codspeed,walltime" --no-default-features -p ruff_benchmark
|
||||
|
||||
- name: "Run benchmarks"
|
||||
uses: CodSpeedHQ/action@3959e9e296ef25296e93e32afcc97196f966e57f # v4.1.0
|
||||
uses: CodSpeedHQ/action@6b43a0cd438f6ca5ad26f9ed03ed159ed2df7da9 # v4.1.1
|
||||
env:
|
||||
# enabling walltime flamegraphs adds ~6 minutes to the CI time, and they don't
|
||||
# appear to provide much useful insight for our walltime benchmarks right now
|
||||
|
|
|
|||
|
|
@ -48,9 +48,10 @@ jobs:
|
|||
run: |
|
||||
# shellcheck disable=SC2046
|
||||
(
|
||||
uvx \
|
||||
--python=3.12 \
|
||||
--from=./python/py-fuzzer \
|
||||
uv run \
|
||||
--python=3.14 \
|
||||
--project=./python/py-fuzzer \
|
||||
--locked \
|
||||
fuzz \
|
||||
--test-executable=target/debug/ruff \
|
||||
--bin=ruff \
|
||||
|
|
|
|||
|
|
@ -19,6 +19,10 @@ concurrency:
|
|||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
CARGO_NET_RETRY: 10
|
||||
|
|
@ -29,7 +33,7 @@ env:
|
|||
jobs:
|
||||
mypy_primer:
|
||||
name: Run mypy_primer
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
@ -49,7 +53,6 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: Run mypy_primer
|
||||
shell: bash
|
||||
env:
|
||||
PRIMER_SELECTOR: crates/ty_python_semantic/resources/primer/good.txt
|
||||
DIFF_FILE: mypy_primer.diff
|
||||
|
|
@ -72,7 +75,7 @@ jobs:
|
|||
|
||||
memory_usage:
|
||||
name: Run memory statistics
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
@ -92,7 +95,6 @@ jobs:
|
|||
run: rustup show
|
||||
|
||||
- name: Run mypy_primer
|
||||
shell: bash
|
||||
env:
|
||||
TY_MAX_PARALLELISM: 1 # for deterministic memory numbers
|
||||
TY_MEMORY_REPORT: mypy_primer
|
||||
|
|
|
|||
|
|
@ -16,8 +16,10 @@ name: Sync typeshed
|
|||
# 3. Once the Windows worker is done, a MacOS worker:
|
||||
# a. Checks out the branch created by the Linux worker
|
||||
# b. Syncs all docstrings available on MacOS that are not available on Linux or Windows
|
||||
# c. Commits the changes and pushes them to the same upstream branch
|
||||
# d. Creates a PR against the `main` branch using the branch all three workers have pushed to
|
||||
# c. Attempts to update any snapshots that might have changed
|
||||
# (this sub-step is allowed to fail)
|
||||
# d. Commits the changes and pushes them to the same upstream branch
|
||||
# e. Creates a PR against the `main` branch using the branch all three workers have pushed to
|
||||
# 4. If any of steps 1-3 failed, an issue is created in the `astral-sh/ruff` repository
|
||||
|
||||
on:
|
||||
|
|
@ -26,8 +28,18 @@ on:
|
|||
# Run on the 1st and the 15th of every month:
|
||||
- cron: "0 0 1,15 * *"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
# Don't set this flag globally for the workflow: it does strange things
|
||||
# to the snapshots in the `cargo insta test --accept` step in the MacOS job.
|
||||
#
|
||||
# FORCE_COLOR: 1
|
||||
|
||||
CARGO_TERM_COLOR: always
|
||||
NEXTEST_PROFILE: "ci"
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
# The name of the upstream branch that the first worker creates,
|
||||
|
|
@ -86,6 +98,8 @@ jobs:
|
|||
git commit -m "Sync typeshed. Source commit: https://github.com/python/typeshed/commit/$(git -C ../typeshed rev-parse HEAD)" --allow-empty
|
||||
- name: Sync Linux docstrings
|
||||
if: ${{ success() }}
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
run: |
|
||||
cd ruff
|
||||
./scripts/codemod_docstrings.sh
|
||||
|
|
@ -124,7 +138,8 @@ jobs:
|
|||
git config --global user.email '<>'
|
||||
- name: Sync Windows docstrings
|
||||
id: docstrings
|
||||
shell: bash
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
run: ./scripts/codemod_docstrings.sh
|
||||
- name: Commit the changes
|
||||
if: ${{ steps.docstrings.outcome == 'success' }}
|
||||
|
|
@ -161,26 +176,63 @@ jobs:
|
|||
git config --global user.name typeshedbot
|
||||
git config --global user.email '<>'
|
||||
- name: Sync macOS docstrings
|
||||
run: ./scripts/codemod_docstrings.sh
|
||||
- name: Commit and push the changes
|
||||
if: ${{ success() }}
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
run: |
|
||||
./scripts/codemod_docstrings.sh
|
||||
git commit -am "Sync macOS docstrings" --allow-empty
|
||||
|
||||
- name: Format the changes
|
||||
if: ${{ success() }}
|
||||
env:
|
||||
FORCE_COLOR: 1
|
||||
run: |
|
||||
# Here we just reformat the codemodded stubs so that they are
|
||||
# consistent with the other typeshed stubs around them.
|
||||
# Typeshed formats code using black in their CI, so we just invoke
|
||||
# black on the stubs the same way that typeshed does.
|
||||
uvx black "${VENDORED_TYPESHED}/stdlib" --config "${VENDORED_TYPESHED}/pyproject.toml" || true
|
||||
git commit -am "Format codemodded docstrings" --allow-empty
|
||||
|
||||
rm "${VENDORED_TYPESHED}/pyproject.toml"
|
||||
git commit -am "Remove pyproject.toml file"
|
||||
|
||||
git push
|
||||
- name: Create a PR
|
||||
- name: Remove typeshed pyproject.toml file
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
rm "${VENDORED_TYPESHED}/pyproject.toml"
|
||||
git commit -am "Remove pyproject.toml file"
|
||||
- uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
|
||||
- name: "Install Rust toolchain"
|
||||
if: ${{ success() }}
|
||||
run: rustup show
|
||||
- name: "Install mold"
|
||||
if: ${{ success() }}
|
||||
uses: rui314/setup-mold@725a8794d15fc7563f59595bd9556495c0564878 # v1
|
||||
- name: "Install cargo nextest"
|
||||
if: ${{ success() }}
|
||||
uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
|
||||
with:
|
||||
tool: cargo-nextest
|
||||
- name: "Install cargo insta"
|
||||
if: ${{ success() }}
|
||||
uses: taiki-e/install-action@522492a8c115f1b6d4d318581f09638e9442547b # v2.62.21
|
||||
with:
|
||||
tool: cargo-insta
|
||||
- name: Update snapshots
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
# The `cargo insta` docs indicate that `--unreferenced=delete` might be a good option,
|
||||
# but from local testing it appears to just revert all changes made by `cargo insta test --accept`.
|
||||
#
|
||||
# If there were only snapshot-related failures, `cargo insta test --accept` will have exit code 0,
|
||||
# but if there were also other mdtest failures (for example), it will return a nonzero exit code.
|
||||
# We don't care about other tests failing here, we just want snapshots updated where possible,
|
||||
# so we use `|| true` here to ignore the exit code.
|
||||
cargo insta test --accept --color=always --all-features --test-runner=nextest || true
|
||||
- name: Commit snapshot changes
|
||||
if: ${{ success() }}
|
||||
run: git commit -am "Update snapshots" || echo "No snapshot changes to commit"
|
||||
- name: Push changes upstream and create a PR
|
||||
if: ${{ success() }}
|
||||
run: |
|
||||
git push
|
||||
gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length | grep 1 && exit 0 # exit if there is existing pr
|
||||
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
|
||||
|
||||
|
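The final step above pushes the branch and only opens a PR when none exists yet; the same guard written out as a hedged standalone sketch (`--jq length` counts the matching PRs):

```sh
# Skip PR creation if one is already open for the sync branch.
if [ "$(gh pr list --repo "${GITHUB_REPOSITORY}" --head "${UPSTREAM_BRANCH}" --json id --jq length)" -gt 0 ]; then
  exit 0
fi
gh pr create --title "[ty] Sync vendored typeshed stubs" --body "Close and reopen this PR to trigger CI" --label "ty"
```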
|
|
|||
|
|
@ -22,7 +22,7 @@ env:
|
|||
jobs:
|
||||
ty-ecosystem-analyzer:
|
||||
name: Compute diagnostic diff
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
if: contains(github.event.label.name, 'ecosystem-analyzer')
|
||||
steps:
|
||||
|
|
@ -64,12 +64,12 @@ jobs:
|
|||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@279f8a15b0e7f77213bf9096dbc2335a19ef89c5"
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--repository ruff \
|
||||
diff \
|
||||
--profile=release \
|
||||
--profile=profiling \
|
||||
--projects-old ruff/projects_old.txt \
|
||||
--projects-new ruff/projects_new.txt \
|
||||
--old old_commit \
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ env:
|
|||
jobs:
|
||||
ty-ecosystem-report:
|
||||
name: Create ecosystem report
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
@ -49,13 +49,13 @@ jobs:
|
|||
|
||||
cd ..
|
||||
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@279f8a15b0e7f77213bf9096dbc2335a19ef89c5"
|
||||
uv tool install "git+https://github.com/astral-sh/ecosystem-analyzer@908758da02a73ef3f3308e1dbb2248510029bbe4"
|
||||
|
||||
ecosystem-analyzer \
|
||||
--verbose \
|
||||
--repository ruff \
|
||||
analyze \
|
||||
--profile=release \
|
||||
--profile=profiling \
|
||||
--projects ruff/crates/ty_python_semantic/resources/primer/good.txt \
|
||||
--output ecosystem-diagnostics.json
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ env:
|
|||
jobs:
|
||||
typing_conformance:
|
||||
name: Compute diagnostic diff
|
||||
runs-on: depot-ubuntu-22.04-32
|
||||
runs-on: ${{ github.repository == 'astral-sh/ruff' && 'depot-ubuntu-22.04-32' || 'ubuntu-latest' }}
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
|
|
|||
CHANGELOG.md (58 lines changed)
|
|
@@ -1,5 +1,63 @@
# Changelog

## 0.14.1

Released on 2025-10-16.

### Preview features

- [formatter] Remove parentheses around multiple exception types on Python 3.14+ ([#20768](https://github.com/astral-sh/ruff/pull/20768))
- \[`flake8-bugbear`\] Omit annotation in preview fix for `B006` ([#20877](https://github.com/astral-sh/ruff/pull/20877))
- \[`flake8-logging-format`\] Avoid dropping implicitly concatenated pieces in the `G004` fix ([#20793](https://github.com/astral-sh/ruff/pull/20793))
- \[`pydoclint`\] Implement `docstring-extraneous-parameter` (`DOC102`) ([#20376](https://github.com/astral-sh/ruff/pull/20376))
- \[`pyupgrade`\] Extend `UP019` to detect `typing_extensions.Text` (`UP019`) ([#20825](https://github.com/astral-sh/ruff/pull/20825))
- \[`pyupgrade`\] Fix false negative for `TypeVar` with default argument in `non-pep695-generic-class` (`UP046`) ([#20660](https://github.com/astral-sh/ruff/pull/20660))

### Bug fixes

- Fix false negatives in `Truthiness::from_expr` for lambdas, generators, and f-strings ([#20704](https://github.com/astral-sh/ruff/pull/20704))
- Fix syntax error false positives for escapes and quotes in f-strings ([#20867](https://github.com/astral-sh/ruff/pull/20867))
- Fix syntax error false positives on parenthesized context managers ([#20846](https://github.com/astral-sh/ruff/pull/20846))
- \[`fastapi`\] Fix false positives for path parameters that FastAPI doesn't recognize (`FAST003`) ([#20687](https://github.com/astral-sh/ruff/pull/20687))
- \[`flake8-pyi`\] Fix operator precedence by adding parentheses when needed (`PYI061`) ([#20508](https://github.com/astral-sh/ruff/pull/20508))
- \[`ruff`\] Suppress diagnostic for f-string interpolations with debug text (`RUF010`) ([#20525](https://github.com/astral-sh/ruff/pull/20525))

### Rule changes

- \[`airflow`\] Add warning to `airflow.datasets.DatasetEvent` usage (`AIR301`) ([#20551](https://github.com/astral-sh/ruff/pull/20551))
- \[`flake8-bugbear`\] Mark `B905` and `B912` fixes as unsafe ([#20695](https://github.com/astral-sh/ruff/pull/20695))
- Use `DiagnosticTag` for more rules - changes display in editors ([#20758](https://github.com/astral-sh/ruff/pull/20758),[#20734](https://github.com/astral-sh/ruff/pull/20734))

### Documentation

- Update Python compatibility from 3.13 to 3.14 in README.md ([#20852](https://github.com/astral-sh/ruff/pull/20852))
- Update `lint.flake8-type-checking.quoted-annotations` docs ([#20765](https://github.com/astral-sh/ruff/pull/20765))
- Update setup instructions for Zed 0.208.0+ ([#20902](https://github.com/astral-sh/ruff/pull/20902))
- \[`flake8-datetimez`\] Clarify docs for several rules ([#20778](https://github.com/astral-sh/ruff/pull/20778))
- Fix typo in `RUF015` description ([#20873](https://github.com/astral-sh/ruff/pull/20873))

### Other changes

- Reduce binary size ([#20863](https://github.com/astral-sh/ruff/pull/20863))
- Improved error recovery for unclosed strings (including f- and t-strings) ([#20848](https://github.com/astral-sh/ruff/pull/20848))

### Contributors

- [@ntBre](https://github.com/ntBre)
- [@Paillat-dev](https://github.com/Paillat-dev)
- [@terror](https://github.com/terror)
- [@pieterh-oai](https://github.com/pieterh-oai)
- [@MichaReiser](https://github.com/MichaReiser)
- [@TaKO8Ki](https://github.com/TaKO8Ki)
- [@ageorgou](https://github.com/ageorgou)
- [@danparizher](https://github.com/danparizher)
- [@mgaitan](https://github.com/mgaitan)
- [@augustelalande](https://github.com/augustelalande)
- [@dylwil3](https://github.com/dylwil3)
- [@Lee-W](https://github.com/Lee-W)
- [@injust](https://github.com/injust)
- [@CarrotManMatt](https://github.com/CarrotManMatt)

## 0.14.0

Released on 2025-10-07.
|
|
|||
|
|
@ -50,9 +50,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.20"
|
||||
version = "0.6.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
|
||||
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
|
|
@ -65,9 +65,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.11"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
|
||||
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-lossy"
|
||||
|
|
@ -243,7 +243,7 @@ dependencies = [
|
|||
"bitflags 2.9.4",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools 0.13.0",
|
||||
"itertools 0.10.5",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
|
|
@ -327,9 +327,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "camino"
|
||||
version = "1.2.0"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e1de8bc0aa9e9385ceb3bf0c152e3a9b9544f6c4a912c8ae504e80c1f0368603"
|
||||
checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
|
|
@ -433,9 +433,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.48"
|
||||
version = "4.5.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
|
||||
checksum = "f4512b90fa68d3a9932cea5184017c5d200f5921df706d45e853537dea51508f"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
|
@ -443,9 +443,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.48"
|
||||
version = "4.5.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
|
||||
checksum = "0025e98baa12e766c67ba13ff4695a887a1eba19569aad00a472546795bd6730"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
|
@ -486,9 +486,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.47"
|
||||
version = "4.5.49"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
|
||||
checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
|
|
@ -633,7 +633,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -642,7 +642,7 @@ version = "3.0.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fde0e0ec90c9dfb3b4b1a0891a7dcd0e2bffde2f7efed5fe7c9bb00e5bfb915e"
|
||||
dependencies = [
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1007,7 +1007,7 @@ dependencies = [
|
|||
"libc",
|
||||
"option-ext",
|
||||
"redox_users",
|
||||
"windows-sys 0.61.0",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1093,7 +1093,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.61.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1262,20 +1262,20 @@ checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
|
|||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi 0.11.1+wasi-snapshot-preview1",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.3"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"js-sys",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi 0.14.7+wasi-0.2.4",
|
||||
"wasip2",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
|
|
@ -1287,9 +1287,9 @@ checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"
|
|||
|
||||
[[package]]
|
||||
name = "globset"
|
||||
version = "0.4.16"
|
||||
version = "0.4.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "54a1028dfc5f5df5da8a56a73e6c153c9a9708ec57232470703592a3f18e49f5"
|
||||
checksum = "eab69130804d941f8075cfd713bf8848a2c3b3f201a9457a11e6f87e1ab62305"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"bstr",
|
||||
|
|
@ -1563,7 +1563,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown 0.16.0",
|
||||
"hashbrown 0.15.5",
|
||||
"serde",
|
||||
"serde_core",
|
||||
]
|
||||
|
|
@ -1690,7 +1690,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
|
|||
dependencies = [
|
||||
"hermit-abi",
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1754,7 +1754,7 @@ dependencies = [
|
|||
"portable-atomic",
|
||||
"portable-atomic-util",
|
||||
"serde",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -1789,7 +1789,7 @@ version = "0.1.34"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
|
||||
dependencies = [
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
"libc",
|
||||
]
|
||||
|
||||
|
|
@ -1837,15 +1837,15 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
|||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.175"
|
||||
version = "0.2.177"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
|
||||
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
|
||||
|
||||
[[package]]
|
||||
name = "libcst"
|
||||
version = "1.8.4"
|
||||
version = "1.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "052ef5d9fc958a51aeebdf3713573b36c6fd6eed0bf0e60e204d2c0f8cf19b9f"
|
||||
checksum = "9d56bcd52d9b5e5f43e7fba20eb1f423ccb18c84cdf1cb506b8c1b95776b0b49"
|
||||
dependencies = [
|
||||
"annotate-snippets",
|
||||
"libcst_derive",
|
||||
|
|
@ -1858,9 +1858,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "libcst_derive"
|
||||
version = "1.8.4"
|
||||
version = "1.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a91a751afee92cbdd59d4bc6754c7672712eec2d30a308f23de4e3287b2929cb"
|
||||
checksum = "3fcf5a725c4db703660124fe0edb98285f1605d0b87b7ee8684b699764a4f01a"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
|
|
@ -2017,9 +2017,9 @@ checksum = "2f926ade0c4e170215ae43342bf13b9310a437609c81f29f86c5df6657582ef9"
|
|||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.5"
|
||||
version = "2.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
|
||||
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
|
||||
|
||||
[[package]]
|
||||
name = "memoffset"
|
||||
|
|
@ -2072,7 +2072,7 @@ checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
|
|||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi 0.11.1+wasi-snapshot-preview1",
|
||||
"wasi",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
|
|
@ -2569,9 +2569,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "pyproject-toml"
|
||||
version = "0.13.6"
|
||||
version = "0.13.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec768e063102b426e8962989758115e8659485124de9207bc365fab524125d65"
|
||||
checksum = "f6d755483ad14b49e76713b52285235461a5b4f73f17612353e11a5de36a5fd2"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"pep440_rs",
|
||||
|
|
@ -2627,9 +2627,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.40"
|
||||
version = "1.0.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
|
||||
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
|
@ -2724,7 +2724,7 @@ version = "0.9.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
|
||||
dependencies = [
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -2767,6 +2767,26 @@ dependencies = [
|
|||
"thiserror 2.0.16",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ref-cast"
|
||||
version = "1.0.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d"
|
||||
dependencies = [
|
||||
"ref-cast-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ref-cast-impl"
|
||||
version = "1.0.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.3"
|
||||
|
|
@ -2781,9 +2801,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.11"
|
||||
version = "0.4.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "833eb9ce86d40ef33cb1306d8accf7bc8ec2bfea4355cbdebb3df68b40925cad"
|
||||
checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
|
|
@ -2815,7 +2835,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"argfile",
|
||||
|
|
@ -3072,7 +3092,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff_linter"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"anyhow",
|
||||
|
|
@ -3197,6 +3217,7 @@ dependencies = [
|
|||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.16",
|
||||
]
|
||||
|
||||
|
|
@ -3426,11 +3447,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "ruff_wasm"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
"js-sys",
|
||||
"log",
|
||||
"ruff_formatter",
|
||||
|
|
@ -3484,6 +3505,7 @@ dependencies = [
|
|||
"rustc-hash",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand",
|
||||
"strum",
|
||||
"tempfile",
|
||||
|
|
@ -3523,7 +3545,7 @@ dependencies = [
|
|||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.61.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3540,8 +3562,8 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
|
|||
|
||||
[[package]]
|
||||
name = "salsa"
|
||||
version = "0.23.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=ef9f9329be6923acd050c8dddd172e3bc93e8051#ef9f9329be6923acd050c8dddd172e3bc93e8051"
|
||||
dependencies = [
|
||||
"boxcar",
|
||||
"compact_str",
|
||||
|
|
@ -3564,13 +3586,13 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "salsa-macro-rules"
|
||||
version = "0.23.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=ef9f9329be6923acd050c8dddd172e3bc93e8051#ef9f9329be6923acd050c8dddd172e3bc93e8051"
|
||||
|
||||
[[package]]
|
||||
name = "salsa-macros"
|
||||
version = "0.23.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=29ab321b45d00daa4315fa2a06f7207759a8c87e#29ab321b45d00daa4315fa2a06f7207759a8c87e"
|
||||
version = "0.24.0"
|
||||
source = "git+https://github.com/salsa-rs/salsa.git?rev=ef9f9329be6923acd050c8dddd172e3bc93e8051#ef9f9329be6923acd050c8dddd172e3bc93e8051"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -3589,11 +3611,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars"
|
||||
version = "0.8.22"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615"
|
||||
checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
|
||||
dependencies = [
|
||||
"dyn-clone",
|
||||
"ref-cast",
|
||||
"schemars_derive",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
|
@ -3601,9 +3624,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "schemars_derive"
|
||||
version = "0.8.22"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d"
|
||||
checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -3625,9 +3648,9 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
|||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.226"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd"
|
||||
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
"serde_derive",
|
||||
|
|
@ -3646,18 +3669,18 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "serde_core"
|
||||
version = "1.0.226"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4"
|
||||
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.226"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33"
|
||||
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
|
@ -3795,9 +3818,9 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
|||
|
||||
[[package]]
|
||||
name = "snapbox"
|
||||
version = "0.6.21"
|
||||
version = "0.6.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96dcfc4581e3355d70ac2ee14cfdf81dce3d85c85f1ed9e2c1d3013f53b3436b"
|
||||
checksum = "805d09a74586d9b17061e5be6ee5f8cc37e5982c349948114ffc5f68093fe5ec"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
|
@ -3810,7 +3833,7 @@ dependencies = [
|
|||
"similar",
|
||||
"snapbox-macros",
|
||||
"wait-timeout",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -3915,10 +3938,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53"
|
||||
dependencies = [
|
||||
"fastrand",
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
"once_cell",
|
||||
"rustix",
|
||||
"windows-sys 0.61.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -4384,6 +4407,7 @@ dependencies = [
|
|||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.16",
|
||||
"toml",
|
||||
"tracing",
|
||||
|
|
@ -4431,6 +4455,7 @@ dependencies = [
|
|||
"salsa",
|
||||
"schemars",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smallvec",
|
||||
"static_assertions",
|
||||
"strsim",
|
||||
|
|
@ -4539,7 +4564,7 @@ version = "0.0.0"
|
|||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"console_log",
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
"js-sys",
|
||||
"log",
|
||||
"ruff_db",
|
||||
|
|
@ -4732,7 +4757,7 @@ version = "1.18.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
|
||||
dependencies = [
|
||||
"getrandom 0.3.3",
|
||||
"getrandom 0.3.4",
|
||||
"js-sys",
|
||||
"rand 0.9.2",
|
||||
"uuid-macro-internal",
|
||||
|
|
@ -4844,15 +4869,6 @@ version = "0.11.1+wasi-snapshot-preview1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.14.7+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
|
||||
dependencies = [
|
||||
"wasip2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasip2"
|
||||
version = "1.0.1+wasi-0.2.4"
|
||||
|
|
@ -5004,7 +5020,7 @@ version = "0.1.11"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
|||
Cargo.toml (34 lines changed)
|
|
@@ -146,13 +146,13 @@ regex-automata = { version = "0.4.9" }
rustc-hash = { version = "2.0.0" }
rustc-stable-hash = { version = "0.1.2" }
# When updating salsa, make sure to also update the revision in `fuzz/Cargo.toml`
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "29ab321b45d00daa4315fa2a06f7207759a8c87e", default-features = false, features = [
salsa = { git = "https://github.com/salsa-rs/salsa.git", rev = "ef9f9329be6923acd050c8dddd172e3bc93e8051", default-features = false, features = [
"compact_str",
"macros",
"salsa_unstable",
"inventory",
] }
schemars = { version = "0.8.16" }
schemars = { version = "1.0.4" }
seahash = { version = "4.1.0" }
serde = { version = "1.0.197", features = ["derive"] }
serde-wasm-bindgen = { version = "0.6.4" }
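The workspace dependency moves from schemars 0.8 to 1.0 here, matching the Cargo.lock changes above. For orientation, a hedged sketch of the 1.x derive API as documented upstream (the struct is hypothetical, not taken from this repository):

```rust
use schemars::{JsonSchema, schema_for};
use serde::Serialize;

// Hypothetical settings struct, only to show the derive in use.
#[derive(Serialize, JsonSchema)]
struct ExampleOptions {
    /// Maximum line length enforced by the tool.
    line_length: u16,
}

fn main() {
    // `schema_for!` still exists in schemars 1.x and yields a serializable schema.
    let schema = schema_for!(ExampleOptions);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```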
@@ -268,12 +268,7 @@ large_stack_arrays = "allow"


[profile.release]
# Note that we set these explicitly, and these values
# were chosen based on a trade-off between compile times
# and runtime performance[1].
#
# [1]: https://github.com/astral-sh/ruff/pull/9031
lto = "thin"
lto = "fat"
codegen-units = 16

# Some crates don't change as much but benefit more from
@ -283,6 +278,8 @@ codegen-units = 16
|
|||
codegen-units = 1
[profile.release.package.ruff_python_ast]
codegen-units = 1
[profile.release.package.salsa]
codegen-units = 1

[profile.dev.package.insta]
opt-level = 3
@ -298,11 +295,30 @@ opt-level = 3
|
|||
[profile.dev.package.ruff_python_parser]
opt-level = 1

# This profile is meant to mimic the `release` profile as closely as
# possible, but using settings that are more beneficial for iterative
# development. That is, the `release` profile is intended for actually
# building the release, where as `profiling` is meant for building ty/ruff
# for running benchmarks.
#
# The main differences here are to avoid stripping debug information
# and disabling fat lto. This does result in a mismatch between our release
# configuration and our benchmarking configuration, which is unfortunate.
# But compile times with `lto = fat` are completely untenable.
#
# This setup does risk that we are measuring something in benchmarks
# that we aren't shipping, but in order to make those two the same, we'd
# either need to make compile times way worse for development, or take
# a hit to binary size and a slight hit to runtime performance in our
# release builds.
#
# Use the `--profile profiling` flag to show symbols in release mode.
# e.g. `cargo build --profile profiling`
[profile.profiling]
inherits = "release"
debug = 1
strip = false
debug = "full"
lto = false

# The profile that 'cargo dist' will build with.
[profile.dist]
|||
|
|
@@ -28,7 +28,7 @@ An extremely fast Python linter and code formatter, written in Rust.
- ⚡️ 10-100x faster than existing linters (like Flake8) and formatters (like Black)
- 🐍 Installable via `pip`
- 🛠️ `pyproject.toml` support
- 🤝 Python 3.13 compatibility
- 🤝 Python 3.14 compatibility
- ⚖️ Drop-in parity with [Flake8](https://docs.astral.sh/ruff/faq/#how-does-ruffs-linter-compare-to-flake8), isort, and [Black](https://docs.astral.sh/ruff/faq/#how-does-ruffs-formatter-compare-to-black)
- 📦 Built-in caching, to avoid re-analyzing unchanged files
- 🔧 Fix support, for automatic error correction (e.g., automatically remove unused imports)

@@ -148,8 +148,8 @@ curl -LsSf https://astral.sh/ruff/install.sh | sh
powershell -c "irm https://astral.sh/ruff/install.ps1 | iex"

# For a specific version.
curl -LsSf https://astral.sh/ruff/0.14.0/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.0/install.ps1 | iex"
curl -LsSf https://astral.sh/ruff/0.14.1/install.sh | sh
powershell -c "irm https://astral.sh/ruff/0.14.1/install.ps1 | iex"
```

You can also install Ruff via [Homebrew](https://formulae.brew.sh/formula/ruff), [Conda](https://anaconda.org/conda-forge/ruff),
@@ -182,7 +182,7 @@ Ruff can also be used as a [pre-commit](https://pre-commit.com/) hook via [`ruff
```yaml
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.14.0
rev: v0.14.1
hooks:
# Run the linter.
- id: ruff-check
@@ -1,6 +1,6 @@
[package]
name = "ruff"
version = "0.14.0"
version = "0.14.1"
publish = true
authors = { workspace = true }
edition = { workspace = true }
@@ -7,6 +7,7 @@ use serde::{Serialize, Serializer};
use strum::IntoEnumIterator;

use ruff_linter::FixAvailability;
use ruff_linter::codes::RuleGroup;
use ruff_linter::registry::{Linter, Rule, RuleNamespace};

use crate::args::HelpFormat;

@@ -19,9 +20,11 @@ struct Explanation<'a> {
summary: &'a str,
message_formats: &'a [&'a str],
fix: String,
fix_availability: FixAvailability,
#[expect(clippy::struct_field_names)]
explanation: Option<&'a str>,
preview: bool,
status: RuleGroup,
}

impl<'a> Explanation<'a> {

@@ -36,8 +39,10 @@ impl<'a> Explanation<'a> {
summary: rule.message_formats()[0],
message_formats: rule.message_formats(),
fix,
fix_availability: rule.fixable(),
explanation: rule.explanation(),
preview: rule.is_preview(),
status: rule.group(),
}
}
}
@@ -13,7 +13,6 @@ use log::{debug, warn};
use ruff_db::diagnostic::Diagnostic;
use ruff_linter::codes::Rule;
use ruff_linter::linter::{FixTable, FixerResult, LinterResult, ParseSource, lint_fix, lint_only};
use ruff_linter::message::create_syntax_error_diagnostic;
use ruff_linter::package::PackageRoot;
use ruff_linter::pyproject_toml::lint_pyproject_toml;
use ruff_linter::settings::types::UnsafeFixes;

@@ -103,11 +102,7 @@ impl Diagnostics {
let name = path.map_or_else(|| "-".into(), Path::to_string_lossy);
let dummy = SourceFileBuilder::new(name, "").finish();
Self::new(
vec![create_syntax_error_diagnostic(
dummy,
err,
TextRange::default(),
)],
vec![Diagnostic::invalid_syntax(dummy, err, TextRange::default())],
FxHashMap::default(),
)
}
|||
File diff suppressed because it is too large
|
|
@@ -15,6 +15,7 @@ use std::{
};
use tempfile::TempDir;

mod format;
mod lint;

const BIN_NAME: &str = "ruff";

@@ -57,6 +58,16 @@ impl CliTest {
Self::with_settings(|_, settings| settings)
}

pub(crate) fn with_files<'a>(
files: impl IntoIterator<Item = (&'a str, &'a str)>,
) -> anyhow::Result<Self> {
let case = Self::new()?;
for file in files {
case.write_file(file.0, file.1)?;
}
Ok(case)
}

pub(crate) fn with_settings(
setup_settings: impl FnOnce(&Path, insta::Settings) -> insta::Settings,
) -> Result<Self> {

@@ -174,4 +185,10 @@ impl CliTest {

command
}

pub(crate) fn format_command(&self) -> Command {
let mut command = self.command();
command.args(["format", "--no-cache"]);
command
}
}
|||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -7,6 +7,7 @@ info:
|
|||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- grouped
|
||||
- "--preview"
|
||||
- "--check"
|
||||
- input.py
|
||||
---
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -7,6 +7,7 @@ info:
|
|||
- "--no-cache"
|
||||
- "--output-format"
|
||||
- pylint
|
||||
- "--preview"
|
||||
- "--check"
|
||||
- input.py
|
||||
---
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
---
|
||||
source: crates/ruff/tests/format.rs
|
||||
source: crates/ruff/tests/cli/format.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
|
|
@ -951,6 +951,16 @@ fn rule_f401() {
|
|||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_f401_output_json() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401", "--output-format", "json"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_f401_output_text() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "F401", "--output-format", "text"]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404"]), @r"
|
||||
|
|
@ -965,6 +975,34 @@ fn rule_invalid_rule_name() {
|
|||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name_output_json() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "json"]), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'RUF404' for '[RULE]'
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rule_invalid_rule_name_output_text() {
|
||||
assert_cmd_snapshot!(ruff_cmd().args(["rule", "RUF404", "--output-format", "text"]), @r"
|
||||
success: false
|
||||
exit_code: 2
|
||||
----- stdout -----
|
||||
|
||||
----- stderr -----
|
||||
error: invalid value 'RUF404' for '[RULE]'
|
||||
|
||||
For more information, try '--help'.
|
||||
");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn show_statistics() {
|
||||
let mut cmd = RuffCheck::default()
|
||||
|
|
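The new tests above exercise `ruff rule` with an explicit `--output-format`. A minimal sketch of consuming the JSON form, assuming a `ruff` binary is on `PATH`; the field names come from the snapshot that follows:

```python
# Sketch only: run `ruff rule F401 --output-format json` and read the fields the
# new Explanation serialization exposes (name, code, linter, fix_availability,
# status, ...). Assumes `ruff` is installed and on PATH.
import json
import subprocess

result = subprocess.run(
    ["ruff", "rule", "F401", "--output-format", "json"],
    capture_output=True,
    text=True,
    check=True,
)
info = json.loads(result.stdout)
print(info["code"], info["fix_availability"], info["status"])
```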
|
|||
|
|
@ -0,0 +1,30 @@
|
|||
---
|
||||
source: crates/ruff/tests/integration_test.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- rule
|
||||
- F401
|
||||
- "--output-format"
|
||||
- json
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
{
|
||||
"name": "unused-import",
|
||||
"code": "F401",
|
||||
"linter": "Pyflakes",
|
||||
"summary": "`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability",
|
||||
"message_formats": [
|
||||
"`{name}` imported but unused; consider using `importlib.util.find_spec` to test for availability",
|
||||
"`{name}` imported but unused; consider removing, adding to `__all__`, or using a redundant alias",
|
||||
"`{name}` imported but unused"
|
||||
],
|
||||
"fix": "Fix is sometimes available.",
|
||||
"fix_availability": "Sometimes",
|
||||
"explanation": "## What it does\nChecks for unused imports.\n\n## Why is this bad?\nUnused imports add a performance overhead at runtime, and risk creating\nimport cycles. They also increase the cognitive load of reading the code.\n\nIf an import statement is used to check for the availability or existence\nof a module, consider using `importlib.util.find_spec` instead.\n\nIf an import statement is used to re-export a symbol as part of a module's\npublic interface, consider using a \"redundant\" import alias, which\ninstructs Ruff (and other tools) to respect the re-export, and avoid\nmarking it as unused, as in:\n\n```python\nfrom module import member as member\n```\n\nAlternatively, you can use `__all__` to declare a symbol as part of the module's\ninterface, as in:\n\n```python\n# __init__.py\nimport some_module\n\n__all__ = [\"some_module\"]\n```\n\n## Preview\nWhen [preview] is enabled (and certain simplifying assumptions\nare met), we analyze all import statements for a given module\nwhen determining whether an import is used, rather than simply\nthe last of these statements. This can result in both different and\nmore import statements being marked as unused.\n\nFor example, if a module consists of\n\n```python\nimport a\nimport a.b\n```\n\nthen both statements are marked as unused under [preview], whereas\nonly the second is marked as unused under stable behavior.\n\nAs another example, if a module consists of\n\n```python\nimport a.b\nimport a\n\na.b.foo()\n```\n\nthen a diagnostic will only be emitted for the first line under [preview],\nwhereas a diagnostic would only be emitted for the second line under\nstable behavior.\n\nNote that this behavior is somewhat subjective and is designed\nto conform to the developer's intuition rather than Python's actual\nexecution. To wit, the statement `import a.b` automatically executes\n`import a`, so in some sense `import a` is _always_ redundant\nin the presence of `import a.b`.\n\n\n## Fix safety\n\nFixes to remove unused imports are safe, except in `__init__.py` files.\n\nApplying fixes to `__init__.py` files is currently in preview. The fix offered depends on the\ntype of the unused import. Ruff will suggest a safe fix to export first-party imports with\neither a redundant alias or, if already present in the file, an `__all__` entry. If multiple\n`__all__` declarations are present, Ruff will not offer a fix. 
Ruff will suggest an unsafe fix\nto remove third-party and standard library imports -- the fix is unsafe because the module's\ninterface changes.\n\nSee [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)\nfor more details on how Ruff\ndetermines whether an import is first or third-party.\n\n## Example\n\n```python\nimport numpy as np # unused import\n\n\ndef area(radius):\n return 3.14 * radius**2\n```\n\nUse instead:\n\n```python\ndef area(radius):\n return 3.14 * radius**2\n```\n\nTo check the availability of a module, use `importlib.util.find_spec`:\n\n```python\nfrom importlib.util import find_spec\n\nif find_spec(\"numpy\") is not None:\n print(\"numpy is installed\")\nelse:\n print(\"numpy is not installed\")\n```\n\n## Options\n- `lint.ignore-init-module-imports`\n- `lint.pyflakes.allowed-unused-imports`\n\n## References\n- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)\n- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)\n- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)\n\n[preview]: https://docs.astral.sh/ruff/preview/\n",
|
||||
"preview": false,
|
||||
"status": "Stable"
|
||||
}
|
||||
----- stderr -----
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
---
|
||||
source: crates/ruff/tests/integration_test.rs
|
||||
info:
|
||||
program: ruff
|
||||
args:
|
||||
- rule
|
||||
- F401
|
||||
- "--output-format"
|
||||
- text
|
||||
---
|
||||
success: true
|
||||
exit_code: 0
|
||||
----- stdout -----
|
||||
# unused-import (F401)
|
||||
|
||||
Derived from the **Pyflakes** linter.
|
||||
|
||||
Fix is sometimes available.
|
||||
|
||||
## What it does
|
||||
Checks for unused imports.
|
||||
|
||||
## Why is this bad?
|
||||
Unused imports add a performance overhead at runtime, and risk creating
|
||||
import cycles. They also increase the cognitive load of reading the code.
|
||||
|
||||
If an import statement is used to check for the availability or existence
|
||||
of a module, consider using `importlib.util.find_spec` instead.
|
||||
|
||||
If an import statement is used to re-export a symbol as part of a module's
|
||||
public interface, consider using a "redundant" import alias, which
|
||||
instructs Ruff (and other tools) to respect the re-export, and avoid
|
||||
marking it as unused, as in:
|
||||
|
||||
```python
|
||||
from module import member as member
|
||||
```
|
||||
|
||||
Alternatively, you can use `__all__` to declare a symbol as part of the module's
|
||||
interface, as in:
|
||||
|
||||
```python
|
||||
# __init__.py
|
||||
import some_module
|
||||
|
||||
__all__ = ["some_module"]
|
||||
```
|
||||
|
||||
## Preview
|
||||
When [preview] is enabled (and certain simplifying assumptions
|
||||
are met), we analyze all import statements for a given module
|
||||
when determining whether an import is used, rather than simply
|
||||
the last of these statements. This can result in both different and
|
||||
more import statements being marked as unused.
|
||||
|
||||
For example, if a module consists of
|
||||
|
||||
```python
|
||||
import a
|
||||
import a.b
|
||||
```
|
||||
|
||||
then both statements are marked as unused under [preview], whereas
|
||||
only the second is marked as unused under stable behavior.
|
||||
|
||||
As another example, if a module consists of
|
||||
|
||||
```python
|
||||
import a.b
|
||||
import a
|
||||
|
||||
a.b.foo()
|
||||
```
|
||||
|
||||
then a diagnostic will only be emitted for the first line under [preview],
|
||||
whereas a diagnostic would only be emitted for the second line under
|
||||
stable behavior.
|
||||
|
||||
Note that this behavior is somewhat subjective and is designed
|
||||
to conform to the developer's intuition rather than Python's actual
|
||||
execution. To wit, the statement `import a.b` automatically executes
|
||||
`import a`, so in some sense `import a` is _always_ redundant
|
||||
in the presence of `import a.b`.
|
||||
|
||||
|
||||
## Fix safety
|
||||
|
||||
Fixes to remove unused imports are safe, except in `__init__.py` files.
|
||||
|
||||
Applying fixes to `__init__.py` files is currently in preview. The fix offered depends on the
|
||||
type of the unused import. Ruff will suggest a safe fix to export first-party imports with
|
||||
either a redundant alias or, if already present in the file, an `__all__` entry. If multiple
|
||||
`__all__` declarations are present, Ruff will not offer a fix. Ruff will suggest an unsafe fix
|
||||
to remove third-party and standard library imports -- the fix is unsafe because the module's
|
||||
interface changes.
|
||||
|
||||
See [this FAQ section](https://docs.astral.sh/ruff/faq/#how-does-ruff-determine-which-of-my-imports-are-first-party-third-party-etc)
|
||||
for more details on how Ruff
|
||||
determines whether an import is first or third-party.
|
||||
|
||||
## Example
|
||||
|
||||
```python
|
||||
import numpy as np # unused import
|
||||
|
||||
|
||||
def area(radius):
|
||||
return 3.14 * radius**2
|
||||
```
|
||||
|
||||
Use instead:
|
||||
|
||||
```python
|
||||
def area(radius):
|
||||
return 3.14 * radius**2
|
||||
```
|
||||
|
||||
To check the availability of a module, use `importlib.util.find_spec`:
|
||||
|
||||
```python
|
||||
from importlib.util import find_spec
|
||||
|
||||
if find_spec("numpy") is not None:
|
||||
print("numpy is installed")
|
||||
else:
|
||||
print("numpy is not installed")
|
||||
```
|
||||
|
||||
## Options
|
||||
- `lint.ignore-init-module-imports`
|
||||
- `lint.pyflakes.allowed-unused-imports`
|
||||
|
||||
## References
|
||||
- [Python documentation: `import`](https://docs.python.org/3/reference/simple_stmts.html#the-import-statement)
|
||||
- [Python documentation: `importlib.util.find_spec`](https://docs.python.org/3/library/importlib.html#importlib.util.find_spec)
|
||||
- [Typing documentation: interface conventions](https://typing.python.org/en/latest/spec/distributing.html#library-interface-public-and-private-symbols)
|
||||
|
||||
[preview]: https://docs.astral.sh/ruff/preview/
|
||||
|
||||
----- stderr -----
|
||||
|
|
@ -7,7 +7,8 @@ use ruff_annotate_snippets::Level as AnnotateLevel;
|
|||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
|
||||
pub use self::render::{
|
||||
DisplayDiagnostic, DisplayDiagnostics, FileResolver, Input, ceil_char_boundary,
|
||||
DisplayDiagnostic, DisplayDiagnostics, DummyFileResolver, FileResolver, Input,
|
||||
ceil_char_boundary,
|
||||
github::{DisplayGithubDiagnostics, GithubRenderer},
|
||||
};
|
||||
use crate::{Db, files::File};
|
||||
|
|
@ -83,17 +84,14 @@ impl Diagnostic {
|
|||
/// at time of writing, `ruff_db` depends on `ruff_python_parser` instead of
|
||||
/// the other way around. And since we want to do this conversion in a couple
|
||||
/// places, it makes sense to centralize it _somewhere_. So it's here for now.
|
||||
///
|
||||
/// Note that `message` is stored in the primary annotation, _not_ in the primary diagnostic
|
||||
/// message.
|
||||
pub fn invalid_syntax(
|
||||
span: impl Into<Span>,
|
||||
message: impl IntoDiagnosticMessage,
|
||||
range: impl Ranged,
|
||||
) -> Diagnostic {
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, "");
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
|
||||
let span = span.into().with_range(range.range());
|
||||
diag.annotate(Annotation::primary(span).message(message));
|
||||
diag.annotate(Annotation::primary(span));
|
||||
diag
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1170,6 +1170,31 @@ pub fn ceil_char_boundary(text: &str, offset: TextSize) -> TextSize {
|
|||
.unwrap_or_else(|| TextSize::from(upper_bound))
|
||||
}
|
||||
|
||||
/// A stub implementation of [`FileResolver`] intended for testing.
|
||||
pub struct DummyFileResolver;
|
||||
|
||||
impl FileResolver for DummyFileResolver {
|
||||
fn path(&self, _file: File) -> &str {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn input(&self, _file: File) -> Input {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn notebook_index(&self, _file: &UnifiedFile) -> Option<NotebookIndex> {
|
||||
None
|
||||
}
|
||||
|
||||
fn is_notebook(&self, _file: &UnifiedFile) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn current_directory(&self) -> &Path {
|
||||
Path::new(".")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
|
|
|
|||
|
|
@ -193,6 +193,17 @@ impl Files {
|
|||
roots.at(&absolute)
|
||||
}
|
||||
|
||||
/// The same as [`Self::root`] but panics if no root is found.
|
||||
#[track_caller]
|
||||
pub fn expect_root(&self, db: &dyn Db, path: &SystemPath) -> FileRoot {
|
||||
if let Some(root) = self.root(db, path) {
|
||||
return root;
|
||||
}
|
||||
|
||||
let roots = self.inner.roots.read().unwrap();
|
||||
panic!("No root found for path '{path}'. Known roots: {roots:#?}");
|
||||
}
|
||||
|
||||
/// Adds a new root for `path` and returns the root.
|
||||
///
|
||||
/// The root isn't added nor is the file root's kind updated if a root for `path` already exists.
|
||||
|
|
|
|||
|
|
@ -81,6 +81,8 @@ impl FileRoots {
|
|||
}
|
||||
}
|
||||
|
||||
tracing::debug!("Adding new file root '{path}' of kind {kind:?}");
|
||||
|
||||
// normalize the path to use `/` separators and escape the '{' and '}' characters,
|
||||
// which matchit uses for routing parameters
|
||||
let mut route = normalized_path.replace('{', "{{").replace('}', "}}");
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ use std::path::PathBuf;
|
|||
|
||||
use anyhow::{Result, bail};
|
||||
use pretty_assertions::StrComparison;
|
||||
use schemars::schema_for;
|
||||
use schemars::generate::SchemaSettings;
|
||||
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND};
|
||||
|
|
@ -17,7 +17,9 @@ pub(crate) struct Args {
|
|||
}
|
||||
|
||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
let schema = schema_for!(Options);
|
||||
let settings = SchemaSettings::draft07();
|
||||
let generator = settings.into_generator();
|
||||
let schema = generator.into_root_schema_for::<Options>();
|
||||
let schema_string = serde_json::to_string_pretty(&schema).unwrap();
|
||||
let filename = "ruff.schema.json";
|
||||
let schema_path = PathBuf::from(ROOT_DIR).join(filename);
|
||||
|
|
|
|||
|
|
@ -93,14 +93,39 @@ fn generate_markdown() -> String {
|
|||
})
|
||||
.join("\n");
|
||||
|
||||
let status_text = match lint.status() {
|
||||
ty_python_semantic::lint::LintStatus::Stable { since } => {
|
||||
format!(
|
||||
r#"Added in <a href="https://github.com/astral-sh/ty/releases/tag/{since}">{since}</a>"#
|
||||
)
|
||||
}
|
||||
ty_python_semantic::lint::LintStatus::Preview { since } => {
|
||||
format!(
|
||||
r#"Preview (since <a href="https://github.com/astral-sh/ty/releases/tag/{since}">{since}</a>)"#
|
||||
)
|
||||
}
|
||||
ty_python_semantic::lint::LintStatus::Deprecated { since, .. } => {
|
||||
format!(
|
||||
r#"Deprecated (since <a href="https://github.com/astral-sh/ty/releases/tag/{since}">{since}</a>)"#
|
||||
)
|
||||
}
|
||||
ty_python_semantic::lint::LintStatus::Removed { since, .. } => {
|
||||
format!(
|
||||
r#"Removed (since <a href="https://github.com/astral-sh/ty/releases/tag/{since}">{since}</a>)"#
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let _ = writeln!(
|
||||
&mut output,
|
||||
r#"<small>
|
||||
Default level: [`{level}`](../rules.md#rule-levels "This lint has a default level of '{level}'.") ·
|
||||
[Related issues](https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20{encoded_name}) ·
|
||||
[View source](https://github.com/astral-sh/ruff/blob/main/{file}#L{line})
|
||||
Default level: <a href="../rules.md#rule-levels" title="This lint has a default level of '{level}'."><code>{level}</code></a> ·
|
||||
{status_text} ·
|
||||
<a href="https://github.com/astral-sh/ty/issues?q=sort%3Aupdated-desc%20is%3Aissue%20is%3Aopen%20{encoded_name}" target="_blank">Related issues</a> ·
|
||||
<a href="https://github.com/astral-sh/ruff/blob/main/{file}#L{line}" target="_blank">View source</a>
|
||||
</small>
|
||||
|
||||
|
||||
{documentation}
|
||||
"#,
|
||||
level = lint.default_level(),
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ use std::path::PathBuf;
|
|||
|
||||
use anyhow::{Result, bail};
|
||||
use pretty_assertions::StrComparison;
|
||||
use schemars::schema_for;
|
||||
use schemars::generate::SchemaSettings;
|
||||
|
||||
use crate::ROOT_DIR;
|
||||
use crate::generate_all::{Mode, REGENERATE_ALL_COMMAND};
|
||||
|
|
@ -17,7 +17,9 @@ pub(crate) struct Args {
|
|||
}
|
||||
|
||||
pub(crate) fn main(args: &Args) -> Result<()> {
|
||||
let schema = schema_for!(Options);
|
||||
let settings = SchemaSettings::draft07();
|
||||
let generator = settings.into_generator();
|
||||
let schema = generator.into_root_schema_for::<Options>();
|
||||
let schema_string = serde_json::to_string_pretty(&schema).unwrap();
|
||||
let filename = "ty.schema.json";
|
||||
let schema_path = PathBuf::from(ROOT_DIR).join(filename);
|
||||
|
|
|
|||
|
|
@ -98,6 +98,10 @@ impl Db for ModuleDb {
|
|||
fn lint_registry(&self) -> &LintRegistry {
|
||||
default_lint_registry()
|
||||
}
|
||||
|
||||
fn verbose(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[salsa::db]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[package]
|
||||
name = "ruff_linter"
|
||||
version = "0.14.0"
|
||||
version = "0.14.1"
|
||||
publish = false
|
||||
authors = { workspace = true }
|
||||
edition = { workspace = true }
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ from airflow import (
|
|||
)
|
||||
from airflow.api_connexion.security import requires_access
|
||||
from airflow.contrib.aws_athena_hook import AWSAthenaHook
|
||||
from airflow.datasets import DatasetAliasEvent
|
||||
from airflow.datasets import DatasetAliasEvent, DatasetEvent
|
||||
from airflow.operators.postgres_operator import Mapping
|
||||
from airflow.operators.subdag import SubDagOperator
|
||||
from airflow.secrets.cache import SecretCache
|
||||
|
|
@ -48,6 +48,7 @@ AWSAthenaHook()
|
|||
|
||||
# airflow.datasets
|
||||
DatasetAliasEvent()
|
||||
DatasetEvent()
|
||||
|
||||
|
||||
# airflow.operators.subdag.*
|
||||
|
|
|
|||
|
|
@ -33,3 +33,10 @@ class ShellConfig:
|
|||
|
||||
def run(self, username):
|
||||
Popen("true", shell={**self.shell_defaults, **self.fetch_shell_config(username)})
|
||||
|
||||
# Additional truthiness cases for generator, lambda, and f-strings
|
||||
Popen("true", shell=(i for i in ()))
|
||||
Popen("true", shell=lambda: 0)
|
||||
Popen("true", shell=f"{b''}")
|
||||
x = 1
|
||||
Popen("true", shell=f"{x=}")
|
||||
|
|
|
|||
|
|
@ -6,3 +6,19 @@ foo(shell=True)
|
|||
|
||||
foo(shell={**{}})
|
||||
foo(shell={**{**{}}})
|
||||
|
||||
# Truthy non-bool values for `shell`
|
||||
foo(shell=(i for i in ()))
|
||||
foo(shell=lambda: 0)
|
||||
|
||||
# f-strings guaranteed non-empty
|
||||
foo(shell=f"{b''}")
|
||||
x = 1
|
||||
foo(shell=f"{x=}")
|
||||
|
||||
# Additional truthiness cases for generator, lambda, and f-strings
|
||||
foo(shell=(i for i in ()))
|
||||
foo(shell=lambda: 0)
|
||||
foo(shell=f"{b''}")
|
||||
x = 1
|
||||
foo(shell=f"{x=}")
|
||||
|
|
|
|||
|
|
@ -9,3 +9,10 @@ os.system("tar cf foo.tar bar/*")
|
|||
|
||||
subprocess.Popen(["chmod", "+w", "*.py"], shell={**{}})
|
||||
subprocess.Popen(["chmod", "+w", "*.py"], shell={**{**{}}})
|
||||
|
||||
# Additional truthiness cases for generator, lambda, and f-strings
|
||||
subprocess.Popen("chmod +w foo*", shell=(i for i in ()))
|
||||
subprocess.Popen("chmod +w foo*", shell=lambda: 0)
|
||||
subprocess.Popen("chmod +w foo*", shell=f"{b''}")
|
||||
x = 1
|
||||
subprocess.Popen("chmod +w foo*", shell=f"{x=}")
|
||||
|
|
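The new fixture lines above all pass values that are always truthy, which is why they are flagged the same way as an explicit `shell=True`. A small standalone check of that truthiness reasoning (illustrative only, not part of the fixtures):

```python
# Each of these objects is truthy regardless of its "contents", so using one as
# the `shell=` argument behaves like `shell=True` at runtime.
gen = (i for i in ())   # a generator object is truthy even if it yields nothing
fn = lambda: 0          # any function object is truthy
fs = f"{b''}"           # renders as "b''" -> a non-empty string
x = 1
dbg = f"{x=}"           # renders as "x=1" -> also non-empty

for value in (gen, fn, fs, dbg):
    assert bool(value)
print("all truthy")
```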
|
|||
|
|
@ -1,10 +1,10 @@
|
|||
# The lexer doesn't emit a string token if it's unterminated
|
||||
# The lexer emits a string token if it's unterminated
|
||||
"a" "b
|
||||
"a" "b" "c
|
||||
"a" """b
|
||||
c""" "d
|
||||
|
||||
# For f-strings, the `FStringRanges` won't contain the range for
|
||||
# This is also true for
|
||||
# unterminated f-strings.
|
||||
f"a" f"b
|
||||
f"a" f"b" f"c
|
||||
|
|
|
|||
|
|
@ -78,3 +78,11 @@ b: None | Literal[None] | None
|
|||
c: (None | Literal[None]) | None
|
||||
d: None | (Literal[None] | None)
|
||||
e: None | ((None | Literal[None]) | None) | None
|
||||
|
||||
# Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
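The new fixture lines exercise the operator-precedence concern from issue 20265: when `Literal[1, None]` is rewritten to `Literal[1] | None`, attribute access, subscripting, and arithmetic bind tighter than `|`, so the rewritten form has to be parenthesized. A minimal runtime illustration, assuming Python 3.10+ so `|` works on typing forms:

```python
from typing import Literal, Optional

# The two spellings are equivalent as type expressions...
assert (Literal[1] | None) == Optional[Literal[1]]

# ...but precedence matters once the subscript sits in a larger expression:
# `Literal[1] | None.__dict__` would evaluate `None.__dict__` first, so the fix
# must emit `(Literal[1] | None).__dict__` to preserve the original meaning.
print("parenthesized rewrite preserves attribute access")
```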
|
|
|||
|
|
@ -197,3 +197,10 @@ for x in {**a, **b} or [None]:
|
|||
|
||||
# https://github.com/astral-sh/ruff/issues/7127
|
||||
def f(a: "'b' or 'c'"): ...
|
||||
|
||||
# https://github.com/astral-sh/ruff/issues/20703
|
||||
print(f"{b''}" or "bar") # SIM222
|
||||
x = 1
|
||||
print(f"{x=}" or "bar") # SIM222
|
||||
(lambda: 1) or True # SIM222
|
||||
(i for i in range(1)) or "bar" # SIM222
|
||||
|
|
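These SIM222 additions follow from the same truthiness facts: when the left operand of `or` is always truthy, the right operand is dead code and the whole expression reduces to the left operand. A quick runtime confirmation (illustration only):

```python
x = 1

# `left or right` returns `left` whenever `left` is truthy, so the "bar"
# alternatives below can never be the result.
assert (f"{x=}" or "bar") == "x=1"

fn = (lambda: 1) or True
assert callable(fn)            # the lambda itself is returned, not True

gen = (i for i in range(1)) or "bar"
assert gen != "bar"            # the generator object is returned
```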
|
|||
|
|
@ -0,0 +1,264 @@
|
|||
# DOC102
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Args:
|
||||
a (int): The first number to add.
|
||||
b (int): The second number to add.
|
||||
|
||||
Returns:
|
||||
int: The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
||||
|
||||
# DOC102
|
||||
def multiply_list_elements(lst):
|
||||
"""
|
||||
Multiplies each element in a list by a given multiplier.
|
||||
|
||||
Args:
|
||||
lst (list of int): A list of integers.
|
||||
multiplier (int): The multiplier for each element in the list.
|
||||
|
||||
Returns:
|
||||
list of int: A new list with each element multiplied.
|
||||
"""
|
||||
return [x * multiplier for x in lst]
|
||||
|
||||
|
||||
# DOC102
|
||||
def find_max_value():
|
||||
"""
|
||||
Finds the maximum value in a list of numbers.
|
||||
|
||||
Args:
|
||||
numbers (list of int): A list of integers to search through.
|
||||
|
||||
Returns:
|
||||
int: The maximum value found in the list.
|
||||
"""
|
||||
return max(numbers)
|
||||
|
||||
|
||||
# DOC102
|
||||
def create_user_profile(location="here"):
|
||||
"""
|
||||
Creates a user profile with basic information.
|
||||
|
||||
Args:
|
||||
name (str): The name of the user.
|
||||
age (int): The age of the user.
|
||||
email (str): The user's email address.
|
||||
location (str): The location of the user.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the user's profile.
|
||||
"""
|
||||
return {
|
||||
'name': name,
|
||||
'age': age,
|
||||
'email': email,
|
||||
'location': location
|
||||
}
|
||||
|
||||
|
||||
# DOC102
|
||||
def calculate_total_price(item_prices, discount):
|
||||
"""
|
||||
Calculates the total price after applying tax and a discount.
|
||||
|
||||
Args:
|
||||
item_prices (list of float): A list of prices for each item.
|
||||
tax_rate (float): The tax rate to apply.
|
||||
discount (float): The discount to subtract from the total.
|
||||
|
||||
Returns:
|
||||
float: The final total price after tax and discount.
|
||||
"""
|
||||
total = sum(item_prices)
|
||||
total_with_tax = total + (total * tax_rate)
|
||||
final_total = total_with_tax - discount
|
||||
return final_total
|
||||
|
||||
|
||||
# DOC102
|
||||
def send_email(subject, body, bcc_address=None):
|
||||
"""
|
||||
Sends an email to the specified recipients.
|
||||
|
||||
Args:
|
||||
subject (str): The subject of the email.
|
||||
body (str): The content of the email.
|
||||
to_address (str): The recipient's email address.
|
||||
cc_address (str, optional): The email address for CC. Defaults to None.
|
||||
bcc_address (str, optional): The email address for BCC. Defaults to None.
|
||||
|
||||
Returns:
|
||||
bool: True if the email was sent successfully, False otherwise.
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
# DOC102
|
||||
def concatenate_strings(*args):
|
||||
"""
|
||||
Concatenates multiple strings with a specified separator.
|
||||
|
||||
Args:
|
||||
separator (str): The separator to use between strings.
|
||||
*args (str): Variable length argument list of strings to concatenate.
|
||||
|
||||
Returns:
|
||||
str: A single concatenated string.
|
||||
"""
|
||||
return separator.join(args)
|
||||
|
||||
|
||||
# DOC102
|
||||
def process_order(order_id):
|
||||
"""
|
||||
Processes an order with a list of items and optional order details.
|
||||
|
||||
Args:
|
||||
order_id (int): The unique identifier for the order.
|
||||
*items (str): Variable length argument list of items in the order.
|
||||
**details (dict): Additional details such as shipping method and address.
|
||||
|
||||
Returns:
|
||||
dict: A dictionary containing the order summary.
|
||||
"""
|
||||
return {
|
||||
'order_id': order_id,
|
||||
'items': items,
|
||||
'details': details
|
||||
}
|
||||
|
||||
|
||||
class Calculator:
|
||||
"""
|
||||
A simple calculator class that can perform basic arithmetic operations.
|
||||
"""
|
||||
|
||||
# DOC102
|
||||
def __init__(self):
|
||||
"""
|
||||
Initializes the calculator with an initial value.
|
||||
|
||||
Args:
|
||||
value (int, optional): The initial value of the calculator. Defaults to 0.
|
||||
"""
|
||||
self.value = value
|
||||
|
||||
# DOC102
|
||||
def add(self, number2):
|
||||
"""
|
||||
Adds a number to the current value.
|
||||
|
||||
Args:
|
||||
number (int or float): The number to add to the current value.
|
||||
|
||||
Returns:
|
||||
int or float: The updated value after addition.
|
||||
"""
|
||||
self.value += number + number2
|
||||
return self.value
|
||||
|
||||
# DOC102
|
||||
@classmethod
|
||||
def from_string(cls):
|
||||
"""
|
||||
Creates a Calculator instance from a string representation of a number.
|
||||
|
||||
Args:
|
||||
value_str (str): The string representing the initial value.
|
||||
|
||||
Returns:
|
||||
Calculator: A new instance of Calculator initialized with the value from the string.
|
||||
"""
|
||||
value = float(value_str)
|
||||
return cls(value)
|
||||
|
||||
# DOC102
|
||||
@staticmethod
|
||||
def is_valid_number():
|
||||
"""
|
||||
Checks if a given number is valid (int or float).
|
||||
|
||||
Args:
|
||||
number (any): The value to check.
|
||||
|
||||
Returns:
|
||||
bool: True if the number is valid, False otherwise.
|
||||
"""
|
||||
return isinstance(number, (int, float))
|
||||
|
||||
# OK
|
||||
def foo(param1, param2, *args, **kwargs):
|
||||
"""Foo.
|
||||
|
||||
Args:
|
||||
param1 (int): The first parameter.
|
||||
param2 (:obj:`str`, optional): The second parameter. Defaults to None.
|
||||
Second line of description: should be indented.
|
||||
*args: Variable length argument list.
|
||||
**kwargs: Arbitrary keyword arguments.
|
||||
"""
|
||||
return
|
||||
|
||||
# OK
|
||||
def on_server_unloaded(self, server_context: ServerContext) -> None:
|
||||
''' Execute ``on_server_unloaded`` from ``server_lifecycle.py`` (if
|
||||
it is defined) when the server cleanly exits. (Before stopping the
|
||||
server's ``IOLoop``.)
|
||||
|
||||
Args:
|
||||
server_context (ServerContext) :
|
||||
|
||||
.. warning::
|
||||
In practice this code may not run, since servers are often killed
|
||||
by a signal.
|
||||
|
||||
|
||||
'''
|
||||
return self._lifecycle_handler.on_server_unloaded(server_context)
|
||||
|
||||
# OK
|
||||
def function_with_kwargs(param1, param2, **kwargs):
|
||||
"""Function with **kwargs parameter.
|
||||
|
||||
Args:
|
||||
param1 (int): The first parameter.
|
||||
param2 (str): The second parameter.
|
||||
extra_param (str): An extra parameter that may be passed via **kwargs.
|
||||
another_extra (int): Another extra parameter.
|
||||
"""
|
||||
return
|
||||
|
||||
# OK
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Args:
|
||||
b: The second number to add.
|
||||
|
||||
Returns:
|
||||
int: The sum of the two numbers.
|
||||
"""
|
||||
return
|
||||
|
||||
# DOC102
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Args:
|
||||
a: The first number to add.
|
||||
b: The second number to add.
|
||||
|
||||
Returns:
|
||||
int: The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
|
@ -0,0 +1,372 @@
|
|||
# DOC102
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
a : int
|
||||
The first number to add.
|
||||
b : int
|
||||
The second number to add.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
||||
|
||||
# DOC102
|
||||
def multiply_list_elements(lst):
|
||||
"""
|
||||
Multiplies each element in a list by a given multiplier.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
lst : list of int
|
||||
A list of integers.
|
||||
multiplier : int
|
||||
The multiplier for each element in the list.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list of int
|
||||
A new list with each element multiplied.
|
||||
"""
|
||||
return [x * multiplier for x in lst]
|
||||
|
||||
|
||||
# DOC102
|
||||
def find_max_value():
|
||||
"""
|
||||
Finds the maximum value in a list of numbers.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
numbers : list of int
|
||||
A list of integers to search through.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The maximum value found in the list.
|
||||
"""
|
||||
return max(numbers)
|
||||
|
||||
|
||||
# DOC102
|
||||
def create_user_profile(location="here"):
|
||||
"""
|
||||
Creates a user profile with basic information.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
name : str
|
||||
The name of the user.
|
||||
age : int
|
||||
The age of the user.
|
||||
email : str
|
||||
The user's email address.
|
||||
location : str, optional
|
||||
The location of the user, by default "here".
|
||||
|
||||
Returns
|
||||
-------
|
||||
dict
|
||||
A dictionary containing the user's profile.
|
||||
"""
|
||||
return {
|
||||
'name': name,
|
||||
'age': age,
|
||||
'email': email,
|
||||
'location': location
|
||||
}
|
||||
|
||||
|
||||
# DOC102
|
||||
def calculate_total_price(item_prices, discount):
|
||||
"""
|
||||
Calculates the total price after applying tax and a discount.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
item_prices : list of float
|
||||
A list of prices for each item.
|
||||
tax_rate : float
|
||||
The tax rate to apply.
|
||||
discount : float
|
||||
The discount to subtract from the total.
|
||||
|
||||
Returns
|
||||
-------
|
||||
float
|
||||
The final total price after tax and discount.
|
||||
"""
|
||||
total = sum(item_prices)
|
||||
total_with_tax = total + (total * tax_rate)
|
||||
final_total = total_with_tax - discount
|
||||
return final_total
|
||||
|
||||
|
||||
# DOC102
|
||||
def send_email(subject, body, bcc_address=None):
|
||||
"""
|
||||
Sends an email to the specified recipients.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
subject : str
|
||||
The subject of the email.
|
||||
body : str
|
||||
The content of the email.
|
||||
to_address : str
|
||||
The recipient's email address.
|
||||
cc_address : str, optional
|
||||
The email address for CC, by default None.
|
||||
bcc_address : str, optional
|
||||
The email address for BCC, by default None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if the email was sent successfully, False otherwise.
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
# DOC102
|
||||
def concatenate_strings(*args):
|
||||
"""
|
||||
Concatenates multiple strings with a specified separator.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
separator : str
|
||||
The separator to use between strings.
|
||||
*args : str
|
||||
Variable length argument list of strings to concatenate.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
A single concatenated string.
|
||||
"""
|
||||
return True
|
||||
|
||||
|
||||
# DOC102
|
||||
def process_order(order_id):
|
||||
"""
|
||||
Processes an order with a list of items and optional order details.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
order_id : int
|
||||
The unique identifier for the order.
|
||||
*items : str
|
||||
Variable length argument list of items in the order.
|
||||
**details : dict
|
||||
Additional details such as shipping method and address.
|
||||
|
||||
Returns
|
||||
-------
|
||||
dict
|
||||
A dictionary containing the order summary.
|
||||
"""
|
||||
return {
|
||||
'order_id': order_id,
|
||||
'items': items,
|
||||
'details': details
|
||||
}
|
||||
|
||||
|
||||
class Calculator:
|
||||
"""
|
||||
A simple calculator class that can perform basic arithmetic operations.
|
||||
"""
|
||||
|
||||
# DOC102
|
||||
def __init__(self):
|
||||
"""
|
||||
Initializes the calculator with an initial value.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
value : int, optional
|
||||
The initial value of the calculator, by default 0.
|
||||
"""
|
||||
self.value = value
|
||||
|
||||
# DOC102
|
||||
def add(self, number2):
|
||||
"""
|
||||
Adds two numbers to the current value.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
number : int or float
|
||||
The first number to add.
|
||||
number2 : int or float
|
||||
The second number to add.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int or float
|
||||
The updated value after addition.
|
||||
"""
|
||||
self.value += number + number2
|
||||
return self.value
|
||||
|
||||
# DOC102
|
||||
@classmethod
|
||||
def from_string(cls):
|
||||
"""
|
||||
Creates a Calculator instance from a string representation of a number.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
value_str : str
|
||||
The string representing the initial value.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Calculator
|
||||
A new instance of Calculator initialized with the value from the string.
|
||||
"""
|
||||
value = float(value_str)
|
||||
return cls(value)
|
||||
|
||||
# DOC102
|
||||
@staticmethod
|
||||
def is_valid_number():
|
||||
"""
|
||||
Checks if a given number is valid (int or float).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
number : any
|
||||
The value to check.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if the number is valid, False otherwise.
|
||||
"""
|
||||
return isinstance(number, (int, float))
|
||||
|
||||
# OK
|
||||
def function_with_kwargs(param1, param2, **kwargs):
|
||||
"""Function with **kwargs parameter.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
param1 : int
|
||||
The first parameter.
|
||||
param2 : str
|
||||
The second parameter.
|
||||
extra_param : str
|
||||
An extra parameter that may be passed via **kwargs.
|
||||
another_extra : int
|
||||
Another extra parameter.
|
||||
"""
|
||||
return True
|
||||
|
||||
# OK
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
b
|
||||
The second number to add.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
||||
# DOC102
|
||||
def add_numbers(b):
|
||||
"""
|
||||
Adds two numbers and returns the result.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
a
|
||||
The first number to add.
|
||||
b
|
||||
The second number to add.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The sum of the two numbers.
|
||||
"""
|
||||
return a + b
|
||||
|
||||
class Foo:
|
||||
# OK
|
||||
def send_help(self, *args: Any) -> Any:
|
||||
"""|coro|
|
||||
|
||||
Shows the help command for the specified entity if given.
|
||||
The entity can be a command or a cog.
|
||||
|
||||
If no entity is given, then it'll show help for the
|
||||
entire bot.
|
||||
|
||||
If the entity is a string, then it looks up whether it's a
|
||||
:class:`Cog` or a :class:`Command`.
|
||||
|
||||
.. note::
|
||||
|
||||
Due to the way this function works, instead of returning
|
||||
something similar to :meth:`~.commands.HelpCommand.command_not_found`
|
||||
this returns :class:`None` on bad input or no help command.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]]
|
||||
The entity to show help for.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Any
|
||||
The result of the help command, if any.
|
||||
"""
|
||||
return
|
||||
|
||||
# OK
|
||||
@classmethod
|
||||
async def convert(cls, ctx: Context, argument: str) -> Self:
|
||||
"""|coro|
|
||||
|
||||
The method that actually converts an argument to the flag mapping.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
cls: Type[:class:`FlagConverter`]
|
||||
The flag converter class.
|
||||
ctx: :class:`Context`
|
||||
The invocation context.
|
||||
argument: :class:`str`
|
||||
The argument to convert from.
|
||||
|
||||
Raises
|
||||
------
|
||||
FlagError
|
||||
A flag related parsing error.
|
||||
CommandError
|
||||
A command related error.
|
||||
|
||||
Returns
|
||||
-------
|
||||
:class:`FlagConverter`
|
||||
The flag converter instance with all flags parsed.
|
||||
"""
|
||||
return
|
||||
|
|
@ -18,3 +18,20 @@ def print_third_word(word: Hello.Text) -> None:
|
|||
|
||||
def print_fourth_word(word: Goodbye) -> None:
|
||||
print(word)
|
||||
|
||||
|
||||
import typing_extensions
|
||||
import typing_extensions as TypingExt
|
||||
from typing_extensions import Text as TextAlias
|
||||
|
||||
|
||||
def print_fifth_word(word: typing_extensions.Text) -> None:
|
||||
print(word)
|
||||
|
||||
|
||||
def print_sixth_word(word: TypingExt.Text) -> None:
|
||||
print(word)
|
||||
|
||||
|
||||
def print_seventh_word(word: TextAlias) -> None:
|
||||
print(word)
|
||||
|
|
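The appended fixture lines extend the check to `typing_extensions.Text` imports; the preview gate for this appears later in the diff as `is_typing_extensions_str_alias_enabled`. A hedged sketch of the pattern being targeted, assuming `typing_extensions` is installed:

```python
# `Text` is just an alias of `str`, whether it comes from `typing` or
# `typing_extensions`, so these annotations can be rewritten to plain `str`.
import typing_extensions
from typing_extensions import Text as TextAlias


def shout(word: typing_extensions.Text) -> str:  # suggested rewrite: `word: str`
    return word.upper()


def whisper(word: TextAlias) -> str:  # suggested rewrite: `word: str`
    return word.lower()


print(shout("hi"), whisper("BYE"))
```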
|
|||
|
|
@ -43,7 +43,7 @@ class Foo:
|
|||
T = typing.TypeVar(*args)
|
||||
x: typing.TypeAlias = list[T]
|
||||
|
||||
# `default` should be skipped for now, added in Python 3.13
|
||||
# `default` was added in Python 3.13
|
||||
T = typing.TypeVar("T", default=Any)
|
||||
x: typing.TypeAlias = list[T]
|
||||
|
||||
|
|
@ -90,9 +90,9 @@ PositiveList = TypeAliasType(
|
|||
"PositiveList2", list[Annotated[T, Gt(0)]], type_params=(T,)
|
||||
)
|
||||
|
||||
# `default` should be skipped for now, added in Python 3.13
|
||||
# `default` was added in Python 3.13
|
||||
T = typing.TypeVar("T", default=Any)
|
||||
AnyList = TypeAliasType("AnyList", list[T], typep_params=(T,))
|
||||
AnyList = TypeAliasType("AnyList", list[T], type_params=(T,))
|
||||
|
||||
# unsafe fix if comments within the fix
|
||||
T = TypeVar("T")
|
||||
|
|
@ -128,3 +128,7 @@ T: TypeAlias = ( # comment0
|
|||
str # comment6
|
||||
# comment7
|
||||
) # comment8
|
||||
|
||||
# Test case for TypeVar with default - should be converted when preview mode is enabled
|
||||
T_default = TypeVar("T_default", default=int)
|
||||
DefaultList: TypeAlias = list[T_default]
|
||||
|
|
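The appended test case covers a `TypeVar` carrying a PEP 696 `default`, which the alias rewrite can now fold into PEP 695 syntax when preview mode is enabled and the target version is 3.13+. A hypothetical before/after sketch; the `default=` keyword itself needs Python 3.13 (or `typing_extensions.TypeVar`):

```python
from typing import TypeAlias, TypeVar

# Before: an explicit TypeVar with a default (PEP 696, Python 3.13+).
T_default = TypeVar("T_default", default=int)
DefaultList: TypeAlias = list[T_default]

# After the preview fix, the alias and its default collapse into PEP 695 syntax:
#     type DefaultList[T_default = int] = list[T_default]
```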
|
|||
|
|
@ -122,7 +122,7 @@ class MixedGenerics[U]:
|
|||
return (u, t)
|
||||
|
||||
|
||||
# TODO(brent) default requires 3.13
|
||||
# default requires 3.13
|
||||
V = TypeVar("V", default=Any, bound=str)
|
||||
|
||||
|
||||
|
|
@ -130,6 +130,14 @@ class DefaultTypeVar(Generic[V]): # -> [V: str = Any]
|
|||
var: V
|
||||
|
||||
|
||||
# Test case for TypeVar with default but no bound
|
||||
W = TypeVar("W", default=int)
|
||||
|
||||
|
||||
class DefaultOnlyTypeVar(Generic[W]): # -> [W = int]
|
||||
var: W
|
||||
|
||||
|
||||
# nested classes and functions are skipped
|
||||
class Outer:
|
||||
class Inner(Generic[T]):
|
||||
|
|
|
|||
|
|
@ -44,9 +44,7 @@ def any_str_param(s: AnyStr) -> AnyStr:
|
|||
return s
|
||||
|
||||
|
||||
# these cases are not handled
|
||||
|
||||
# TODO(brent) default requires 3.13
|
||||
# default requires 3.13
|
||||
V = TypeVar("V", default=Any, bound=str)
|
||||
|
||||
|
||||
|
|
@ -54,6 +52,8 @@ def default_var(v: V) -> V:
|
|||
return v
|
||||
|
||||
|
||||
# these cases are not handled
|
||||
|
||||
def outer():
|
||||
def inner(t: T) -> T:
|
||||
return t
|
||||
|
|
|
|||
|
|
@ -81,6 +81,7 @@ pub(crate) fn definitions(checker: &mut Checker) {
|
|||
Rule::UndocumentedPublicPackage,
|
||||
]);
|
||||
let enforce_pydoclint = checker.any_rule_enabled(&[
|
||||
Rule::DocstringExtraneousParameter,
|
||||
Rule::DocstringMissingReturns,
|
||||
Rule::DocstringExtraneousReturns,
|
||||
Rule::DocstringMissingYields,
|
||||
|
|
|
|||
|
|
@ -50,24 +50,6 @@ pub(crate) fn statement(stmt: &Stmt, checker: &mut Checker) {
|
|||
pylint::rules::nonlocal_and_global(checker, nonlocal);
|
||||
}
|
||||
}
|
||||
Stmt::Break(_) => {
|
||||
if checker.is_rule_enabled(Rule::BreakOutsideLoop) {
|
||||
pyflakes::rules::break_outside_loop(
|
||||
checker,
|
||||
stmt,
|
||||
&mut checker.semantic.current_statements().skip(1),
|
||||
);
|
||||
}
|
||||
}
|
||||
Stmt::Continue(_) => {
|
||||
if checker.is_rule_enabled(Rule::ContinueOutsideLoop) {
|
||||
pyflakes::rules::continue_outside_loop(
|
||||
checker,
|
||||
stmt,
|
||||
&mut checker.semantic.current_statements().skip(1),
|
||||
);
|
||||
}
|
||||
}
|
||||
Stmt::FunctionDef(
|
||||
function_def @ ast::StmtFunctionDef {
|
||||
is_async,
|
||||
|
|
|
|||
|
|
@ -697,6 +697,7 @@ impl SemanticSyntaxContext for Checker<'_> {
|
|||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::FutureFeatureNotDefined(name) => {
|
||||
// F407
|
||||
if self.is_rule_enabled(Rule::FutureFeatureNotDefined) {
|
||||
self.report_diagnostic(
|
||||
pyflakes::rules::FutureFeatureNotDefined { name },
|
||||
|
|
@ -704,12 +705,25 @@ impl SemanticSyntaxContext for Checker<'_> {
|
|||
);
|
||||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::BreakOutsideLoop => {
|
||||
// F701
|
||||
if self.is_rule_enabled(Rule::BreakOutsideLoop) {
|
||||
self.report_diagnostic(pyflakes::rules::BreakOutsideLoop, error.range);
|
||||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::ContinueOutsideLoop => {
|
||||
// F702
|
||||
if self.is_rule_enabled(Rule::ContinueOutsideLoop) {
|
||||
self.report_diagnostic(pyflakes::rules::ContinueOutsideLoop, error.range);
|
||||
}
|
||||
}
|
||||
SemanticSyntaxErrorKind::ReboundComprehensionVariable
|
||||
| SemanticSyntaxErrorKind::DuplicateTypeParameter
|
||||
| SemanticSyntaxErrorKind::MultipleCaseAssignment(_)
|
||||
| SemanticSyntaxErrorKind::IrrefutableCasePattern(_)
|
||||
| SemanticSyntaxErrorKind::SingleStarredAssignment
|
||||
| SemanticSyntaxErrorKind::WriteToDebug(_)
|
||||
| SemanticSyntaxErrorKind::DifferentMatchPatternBindings
|
||||
| SemanticSyntaxErrorKind::InvalidExpression(..)
|
||||
| SemanticSyntaxErrorKind::DuplicateMatchKey(_)
|
||||
| SemanticSyntaxErrorKind::DuplicateMatchClassAttribute(_)
|
||||
|
|
@ -811,19 +825,40 @@ impl SemanticSyntaxContext for Checker<'_> {
|
|||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn in_loop_context(&self) -> bool {
|
||||
let mut child = self.semantic.current_statement();
|
||||
|
||||
for parent in self.semantic.current_statements().skip(1) {
|
||||
match parent {
|
||||
Stmt::For(ast::StmtFor { orelse, .. })
|
||||
| Stmt::While(ast::StmtWhile { orelse, .. }) => {
|
||||
if !orelse.contains(child) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
Stmt::FunctionDef(_) | Stmt::ClassDef(_) => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
child = parent;
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Visitor<'a> for Checker<'a> {
|
||||
fn visit_stmt(&mut self, stmt: &'a Stmt) {
|
||||
// Step 0: Pre-processing
|
||||
self.semantic.push_node(stmt);
|
||||
|
||||
// For functions, defer semantic syntax error checks until the body of the function is
|
||||
// visited
|
||||
if !stmt.is_function_def_stmt() {
|
||||
self.with_semantic_checker(|semantic, context| semantic.visit_stmt(stmt, context));
|
||||
}
|
||||
|
||||
// Step 0: Pre-processing
|
||||
self.semantic.push_node(stmt);
|
||||
|
||||
// For Jupyter Notebooks, we'll reset the `IMPORT_BOUNDARY` flag when
|
||||
// we encounter a cell boundary.
|
||||
if self.source_type.is_ipynb()
|
||||
|
|
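The checker changes above move the `break`/`continue` placement checks (F701/F702) onto the semantic-syntax-error path and add `in_loop_context`, which treats a loop's `else` clause and nested function or class bodies as outside the loop. The same rule can be observed with plain `compile()` (illustrative, not Ruff's implementation):

```python
good = """
for i in range(3):
    if i == 1:
        break
"""
in_else_clause = """
for i in range(3):
    pass
else:
    break
"""
in_nested_function = """
for i in range(3):
    def inner():
        continue
"""

compile(good, "<good>", "exec")  # compiles fine: `break` is in the loop body

for name, src in [("else clause", in_else_clause), ("nested function", in_nested_function)]:
    try:
        compile(src, f"<{name}>", "exec")
    except SyntaxError as err:
        print(f"{name}: {err.msg}")  # CPython rejects both, matching F701/F702
```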
|
|||
|
|
@ -130,6 +130,7 @@ pub(crate) fn check_noqa(
|
|||
let edit = delete_comment(directive.range(), locator);
|
||||
let mut diagnostic = context
|
||||
.report_diagnostic(UnusedNOQA { codes: None }, directive.range());
|
||||
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);
|
||||
diagnostic.set_fix(Fix::safe_edit(edit));
|
||||
}
|
||||
}
|
||||
|
|
@ -226,6 +227,7 @@ pub(crate) fn check_noqa(
|
|||
},
|
||||
directive.range(),
|
||||
);
|
||||
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);
|
||||
diagnostic.set_fix(Fix::safe_edit(edit));
|
||||
}
|
||||
}
|
||||
|
|
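Both hunks above attach `DiagnosticTag::Unnecessary` to unused-suppression diagnostics (RUF100), so LSP clients can render the stale `# noqa` comment as faded. A minimal file that would trigger it (hypothetical example):

```python
x = 1  # noqa: F401  <- RUF100: nothing on this line can raise F401, so the directive is unused
```

Checking such a file with RUF100 selected reports the unused directive and offers the safe fix shown above, which simply deletes the comment.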
|
|||
|
|
@ -5,6 +5,7 @@
|
|||
use std::fmt::Formatter;
|
||||
|
||||
use ruff_db::diagnostic::SecondaryCode;
|
||||
use serde::Serialize;
|
||||
use strum_macros::EnumIter;
|
||||
|
||||
use crate::registry::Linter;
|
||||
|
|
@ -74,7 +75,7 @@ impl serde::Serialize for NoqaCode {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
#[derive(Debug, Copy, Clone, Serialize)]
|
||||
pub enum RuleGroup {
|
||||
/// The rule is stable.
|
||||
Stable,
|
||||
|
|
@ -988,6 +989,7 @@ pub fn code_to_rule(linter: Linter, code: &str) -> Option<(RuleGroup, Rule)> {
|
|||
(FastApi, "003") => (RuleGroup::Stable, rules::fastapi::rules::FastApiUnusedPathParameter),
|
||||
|
||||
// pydoclint
|
||||
(Pydoclint, "102") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringExtraneousParameter),
|
||||
(Pydoclint, "201") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringMissingReturns),
|
||||
(Pydoclint, "202") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringExtraneousReturns),
|
||||
(Pydoclint, "402") => (RuleGroup::Preview, rules::pydoclint::rules::DocstringMissingYields),
|
||||
|
|
|
|||
|
|
@ -11,8 +11,7 @@ use crate::settings::types::CompiledPerFileIgnoreList;
pub fn get_cwd() -> &'static Path {
    #[cfg(target_arch = "wasm32")]
    {
        static CWD: std::sync::LazyLock<PathBuf> = std::sync::LazyLock::new(|| PathBuf::from("."));
        &CWD
        Path::new(".")
    }
    #[cfg(not(target_arch = "wasm32"))]
    path_absolutize::path_dedot::CWD.as_path()

@ -24,7 +24,6 @@ use crate::checkers::tokens::check_tokens;
|
|||
use crate::directives::Directives;
|
||||
use crate::doc_lines::{doc_lines_from_ast, doc_lines_from_tokens};
|
||||
use crate::fix::{FixResult, fix_file};
|
||||
use crate::message::create_syntax_error_diagnostic;
|
||||
use crate::noqa::add_noqa;
|
||||
use crate::package::PackageRoot;
|
||||
use crate::registry::Rule;
|
||||
|
|
@ -496,15 +495,15 @@ fn diagnostics_to_messages(
|
|||
parse_errors
|
||||
.iter()
|
||||
.map(|parse_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||
Diagnostic::invalid_syntax(source_file.clone(), &parse_error.error, parse_error)
|
||||
})
|
||||
.chain(unsupported_syntax_errors.iter().map(|syntax_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), syntax_error, syntax_error)
|
||||
Diagnostic::invalid_syntax(source_file.clone(), syntax_error, syntax_error)
|
||||
}))
|
||||
.chain(
|
||||
semantic_syntax_errors
|
||||
.iter()
|
||||
.map(|error| create_syntax_error_diagnostic(source_file.clone(), error, error)),
|
||||
.map(|error| Diagnostic::invalid_syntax(source_file.clone(), error, error)),
|
||||
)
|
||||
.chain(diagnostics.into_iter().map(|mut diagnostic| {
|
||||
if let Some(range) = diagnostic.range() {
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ use ruff_db::files::File;
|
|||
pub use grouped::GroupedEmitter;
|
||||
use ruff_notebook::NotebookIndex;
|
||||
use ruff_source_file::{SourceFile, SourceFileBuilder};
|
||||
use ruff_text_size::{Ranged, TextRange, TextSize};
|
||||
use ruff_text_size::{TextRange, TextSize};
|
||||
pub use sarif::SarifEmitter;
|
||||
|
||||
use crate::Fix;
|
||||
|
|
@ -26,24 +26,6 @@ use crate::settings::types::{OutputFormat, RuffOutputFormat};
|
|||
mod grouped;
|
||||
mod sarif;
|
||||
|
||||
/// Creates a `Diagnostic` from a syntax error, with the format expected by Ruff.
|
||||
///
|
||||
/// This is almost identical to `ruff_db::diagnostic::create_syntax_error_diagnostic`, except the
|
||||
/// `message` is stored as the primary diagnostic message instead of on the primary annotation.
|
||||
///
|
||||
/// TODO(brent) These should be unified at some point, but we keep them separate for now to avoid a
|
||||
/// ton of snapshot changes while combining ruff's diagnostic type with `Diagnostic`.
|
||||
pub fn create_syntax_error_diagnostic(
|
||||
span: impl Into<Span>,
|
||||
message: impl std::fmt::Display,
|
||||
range: impl Ranged,
|
||||
) -> Diagnostic {
|
||||
let mut diag = Diagnostic::new(DiagnosticId::InvalidSyntax, Severity::Error, message);
|
||||
let span = span.into().with_range(range.range());
|
||||
diag.annotate(Annotation::primary(span));
|
||||
diag
|
||||
}
|
||||
|
||||
/// Create a `Diagnostic` from a panic.
|
||||
pub fn create_panic_diagnostic(error: &PanicError, path: Option<&Path>) -> Diagnostic {
|
||||
let mut diagnostic = Diagnostic::new(
|
||||
|
|
@ -260,8 +242,6 @@ mod tests {
|
|||
use crate::message::{Emitter, EmitterContext, create_lint_diagnostic};
|
||||
use crate::{Edit, Fix};
|
||||
|
||||
use super::create_syntax_error_diagnostic;
|
||||
|
||||
pub(super) fn create_syntax_error_diagnostics() -> Vec<Diagnostic> {
|
||||
let source = r"from os import
|
||||
|
||||
|
|
@ -274,7 +254,7 @@ if call(foo
|
|||
.errors()
|
||||
.iter()
|
||||
.map(|parse_error| {
|
||||
create_syntax_error_diagnostic(source_file.clone(), &parse_error.error, parse_error)
|
||||
Diagnostic::invalid_syntax(source_file.clone(), &parse_error.error, parse_error)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -242,6 +242,11 @@ pub(crate) const fn is_refined_submodule_import_match_enabled(settings: &LinterS
|
|||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// https://github.com/astral-sh/ruff/pull/20660
|
||||
pub(crate) const fn is_type_var_default_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
// github.com/astral-sh/ruff/issues/20004
|
||||
pub(crate) const fn is_b006_check_guaranteed_mutable_expr_enabled(
|
||||
settings: &LinterSettings,
|
||||
|
|
@ -265,3 +270,7 @@ pub(crate) const fn is_fix_read_whole_file_enabled(settings: &LinterSettings) ->
|
|||
pub(crate) const fn is_fix_write_whole_file_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
||||
pub(crate) const fn is_typing_extensions_str_alias_enabled(settings: &LinterSettings) -> bool {
|
||||
settings.preview.is_enabled()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -257,9 +257,8 @@ pub struct PreviewOptions {
#[cfg(feature = "schemars")]
mod schema {
use itertools::Itertools;
use schemars::_serde_json::Value;
use schemars::JsonSchema;
use schemars::schema::{InstanceType, Schema, SchemaObject};
use schemars::{JsonSchema, Schema, SchemaGenerator};
use serde_json::Value;
use strum::IntoEnumIterator;

use crate::RuleSelector;
@ -267,64 +266,65 @@ mod schema {
use crate::rule_selector::{Linter, RuleCodePrefix};

impl JsonSchema for RuleSelector {
fn schema_name() -> String {
"RuleSelector".to_string()
fn schema_name() -> std::borrow::Cow<'static, str> {
std::borrow::Cow::Borrowed("RuleSelector")
}

fn json_schema(_gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
Schema::Object(SchemaObject {
instance_type: Some(InstanceType::String.into()),
enum_values: Some(
[
// Include the non-standard "ALL" selectors.
"ALL".to_string(),
// Include the legacy "C" and "T" selectors.
"C".to_string(),
"T".to_string(),
// Include some common redirect targets for those legacy selectors.
"C9".to_string(),
"T1".to_string(),
"T2".to_string(),
]
.into_iter()
.chain(
RuleCodePrefix::iter()
.map(|p| {
let prefix = p.linter().common_prefix();
let code = p.short_code();
format!("{prefix}{code}")
})
.chain(Linter::iter().filter_map(|l| {
let prefix = l.common_prefix();
(!prefix.is_empty()).then(|| prefix.to_string())
})),
)
.filter(|p| {
// Exclude any prefixes where all of the rules are removed
if let Ok(Self::Rule { prefix, .. } | Self::Prefix { prefix, .. }) =
RuleSelector::parse_no_redirect(p)
{
!prefix.rules().all(|rule| rule.is_removed())
} else {
true
}
fn json_schema(_gen: &mut SchemaGenerator) -> Schema {
let enum_values: Vec<String> = [
// Include the non-standard "ALL" selectors.
"ALL".to_string(),
// Include the legacy "C" and "T" selectors.
"C".to_string(),
"T".to_string(),
// Include some common redirect targets for those legacy selectors.
"C9".to_string(),
"T1".to_string(),
"T2".to_string(),
]
.into_iter()
.chain(
RuleCodePrefix::iter()
.map(|p| {
let prefix = p.linter().common_prefix();
let code = p.short_code();
format!("{prefix}{code}")
})
.filter(|_rule| {
// Filter out all test-only rules
#[cfg(any(feature = "test-rules", test))]
#[expect(clippy::used_underscore_binding)]
if _rule.starts_with("RUF9") || _rule == "PLW0101" {
return false;
}

true
})
.sorted()
.map(Value::String)
.collect(),
),
..SchemaObject::default()
.chain(Linter::iter().filter_map(|l| {
let prefix = l.common_prefix();
(!prefix.is_empty()).then(|| prefix.to_string())
})),
)
.filter(|p| {
// Exclude any prefixes where all of the rules are removed
if let Ok(Self::Rule { prefix, .. } | Self::Prefix { prefix, .. }) =
RuleSelector::parse_no_redirect(p)
{
!prefix.rules().all(|rule| rule.is_removed())
} else {
true
}
})
.filter(|_rule| {
// Filter out all test-only rules
#[cfg(any(feature = "test-rules", test))]
#[expect(clippy::used_underscore_binding)]
if _rule.starts_with("RUF9") || _rule == "PLW0101" {
return false;
}

true
})
.sorted()
.collect();

let mut schema = schemars::json_schema!({ "type": "string" });
schema.ensure_object().insert(
"enum".to_string(),
Value::Array(enum_values.into_iter().map(Value::String).collect()),
);

schema
}
}
}
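The hunk above ports `RuleSelector`'s hand-written `JsonSchema` impl from the schemars 0.8 API (`SchemaObject`, `InstanceType`, `enum_values`) to schemars 1.0, where `schema_name` returns a `Cow<'static, str>` and schemas start from the `json_schema!` macro and are then mutated as plain JSON objects. A minimal standalone sketch of the same pattern; the `Weekday` type and its values are hypothetical and only illustrate the API shapes used in the diff:

use std::borrow::Cow;

use schemars::{JsonSchema, Schema, SchemaGenerator, json_schema};
use serde_json::Value;

struct Weekday;

impl JsonSchema for Weekday {
    fn schema_name() -> Cow<'static, str> {
        Cow::Borrowed("Weekday")
    }

    fn json_schema(_gen: &mut SchemaGenerator) -> Schema {
        // Start from a plain string schema and attach an explicit `enum` list,
        // mirroring how the RuleSelector schema is assembled above.
        let mut schema = json_schema!({ "type": "string" });
        schema.ensure_object().insert(
            "enum".to_string(),
            Value::Array(
                ["Mon", "Tue", "Wed"]
                    .into_iter()
                    .map(|d| Value::String(d.to_string()))
                    .collect(),
            ),
        );
        schema
    }
}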
@ -655,6 +655,11 @@ fn check_name(checker: &Checker, expr: &Expr, range: TextRange) {
},
// airflow.datasets
["airflow", "datasets", "DatasetAliasEvent"] => Replacement::None,
["airflow", "datasets", "DatasetEvent"] => Replacement::Message(
"`DatasetEvent` has been made private in Airflow 3. \
Use `dict[str, Any]` for the time being. \
An `AssetEvent` type will be added to the apache-airflow-task-sdk in a future version.",
),

// airflow.hooks
["airflow", "hooks", "base_hook", "BaseHook"] => Replacement::Rename {
@ -104,38 +104,49 @@ AIR301 `airflow.datasets.DatasetAliasEvent` is removed in Airflow 3.0
|
|||
49 | # airflow.datasets
|
||||
50 | DatasetAliasEvent()
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
51 | DatasetEvent()
|
||||
|
|
||||
|
||||
AIR301 `airflow.operators.subdag.SubDagOperator` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:54:1
|
||||
AIR301 `airflow.datasets.DatasetEvent` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:51:1
|
||||
|
|
||||
53 | # airflow.operators.subdag.*
|
||||
54 | SubDagOperator()
|
||||
49 | # airflow.datasets
|
||||
50 | DatasetAliasEvent()
|
||||
51 | DatasetEvent()
|
||||
| ^^^^^^^^^^^^
|
||||
|
|
||||
help: `DatasetEvent` has been made private in Airflow 3. Use `dict[str, Any]` for the time being. An `AssetEvent` type will be added to the apache-airflow-task-sdk in a future version.
|
||||
|
||||
AIR301 `airflow.operators.subdag.SubDagOperator` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:55:1
|
||||
|
|
||||
54 | # airflow.operators.subdag.*
|
||||
55 | SubDagOperator()
|
||||
| ^^^^^^^^^^^^^^
|
||||
55 |
|
||||
56 | # airflow.operators.postgres_operator
|
||||
56 |
|
||||
57 | # airflow.operators.postgres_operator
|
||||
|
|
||||
help: The whole `airflow.subdag` module has been removed.
|
||||
|
||||
AIR301 `airflow.operators.postgres_operator.Mapping` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:57:1
|
||||
--> AIR301_names.py:58:1
|
||||
|
|
||||
56 | # airflow.operators.postgres_operator
|
||||
57 | Mapping()
|
||||
57 | # airflow.operators.postgres_operator
|
||||
58 | Mapping()
|
||||
| ^^^^^^^
|
||||
58 |
|
||||
59 | # airflow.secrets
|
||||
59 |
|
||||
60 | # airflow.secrets
|
||||
|
|
||||
|
||||
AIR301 [*] `airflow.secrets.cache.SecretCache` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:64:1
|
||||
--> AIR301_names.py:65:1
|
||||
|
|
||||
63 | # airflow.secrets.cache
|
||||
64 | SecretCache()
|
||||
64 | # airflow.secrets.cache
|
||||
65 | SecretCache()
|
||||
| ^^^^^^^^^^^
|
||||
|
|
||||
help: Use `SecretCache` from `airflow.sdk` instead.
|
||||
14 | from airflow.datasets import DatasetAliasEvent
|
||||
14 | from airflow.datasets import DatasetAliasEvent, DatasetEvent
|
||||
15 | from airflow.operators.postgres_operator import Mapping
|
||||
16 | from airflow.operators.subdag import SubDagOperator
|
||||
- from airflow.secrets.cache import SecretCache
|
||||
|
|
@ -153,211 +164,211 @@ help: Use `SecretCache` from `airflow.sdk` instead.
|
|||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
AIR301 `airflow.triggers.external_task.TaskStateTrigger` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:68:1
|
||||
--> AIR301_names.py:69:1
|
||||
|
|
||||
67 | # airflow.triggers.external_task
|
||||
68 | TaskStateTrigger()
|
||||
68 | # airflow.triggers.external_task
|
||||
69 | TaskStateTrigger()
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
69 |
|
||||
70 | # airflow.utils.date
|
||||
70 |
|
||||
71 | # airflow.utils.date
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.date_range` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:71:1
|
||||
--> AIR301_names.py:72:1
|
||||
|
|
||||
70 | # airflow.utils.date
|
||||
71 | dates.date_range
|
||||
71 | # airflow.utils.date
|
||||
72 | dates.date_range
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
72 | dates.days_ago
|
||||
73 | dates.days_ago
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.days_ago` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:72:1
|
||||
--> AIR301_names.py:73:1
|
||||
|
|
||||
70 | # airflow.utils.date
|
||||
71 | dates.date_range
|
||||
72 | dates.days_ago
|
||||
71 | # airflow.utils.date
|
||||
72 | dates.date_range
|
||||
73 | dates.days_ago
|
||||
| ^^^^^^^^^^^^^^
|
||||
73 |
|
||||
74 | date_range
|
||||
74 |
|
||||
75 | date_range
|
||||
|
|
||||
help: Use `pendulum.today('UTC').add(days=-N, ...)` instead
|
||||
|
||||
AIR301 `airflow.utils.dates.date_range` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:74:1
|
||||
--> AIR301_names.py:75:1
|
||||
|
|
||||
72 | dates.days_ago
|
||||
73 |
|
||||
74 | date_range
|
||||
73 | dates.days_ago
|
||||
74 |
|
||||
75 | date_range
|
||||
| ^^^^^^^^^^
|
||||
75 | days_ago
|
||||
76 | infer_time_unit
|
||||
76 | days_ago
|
||||
77 | infer_time_unit
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.days_ago` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:75:1
|
||||
--> AIR301_names.py:76:1
|
||||
|
|
||||
74 | date_range
|
||||
75 | days_ago
|
||||
75 | date_range
|
||||
76 | days_ago
|
||||
| ^^^^^^^^
|
||||
76 | infer_time_unit
|
||||
77 | parse_execution_date
|
||||
77 | infer_time_unit
|
||||
78 | parse_execution_date
|
||||
|
|
||||
help: Use `pendulum.today('UTC').add(days=-N, ...)` instead
|
||||
|
||||
AIR301 `airflow.utils.dates.infer_time_unit` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:76:1
|
||||
--> AIR301_names.py:77:1
|
||||
|
|
||||
74 | date_range
|
||||
75 | days_ago
|
||||
76 | infer_time_unit
|
||||
75 | date_range
|
||||
76 | days_ago
|
||||
77 | infer_time_unit
|
||||
| ^^^^^^^^^^^^^^^
|
||||
77 | parse_execution_date
|
||||
78 | round_time
|
||||
78 | parse_execution_date
|
||||
79 | round_time
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.parse_execution_date` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:77:1
|
||||
--> AIR301_names.py:78:1
|
||||
|
|
||||
75 | days_ago
|
||||
76 | infer_time_unit
|
||||
77 | parse_execution_date
|
||||
76 | days_ago
|
||||
77 | infer_time_unit
|
||||
78 | parse_execution_date
|
||||
| ^^^^^^^^^^^^^^^^^^^^
|
||||
78 | round_time
|
||||
79 | scale_time_units
|
||||
79 | round_time
|
||||
80 | scale_time_units
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.round_time` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:78:1
|
||||
--> AIR301_names.py:79:1
|
||||
|
|
||||
76 | infer_time_unit
|
||||
77 | parse_execution_date
|
||||
78 | round_time
|
||||
77 | infer_time_unit
|
||||
78 | parse_execution_date
|
||||
79 | round_time
|
||||
| ^^^^^^^^^^
|
||||
79 | scale_time_units
|
||||
80 | scale_time_units
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dates.scale_time_units` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:79:1
|
||||
--> AIR301_names.py:80:1
|
||||
|
|
||||
77 | parse_execution_date
|
||||
78 | round_time
|
||||
79 | scale_time_units
|
||||
78 | parse_execution_date
|
||||
79 | round_time
|
||||
80 | scale_time_units
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
80 |
|
||||
81 | # This one was not deprecated.
|
||||
81 |
|
||||
82 | # This one was not deprecated.
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.dag_cycle_tester.test_cycle` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:86:1
|
||||
--> AIR301_names.py:87:1
|
||||
|
|
||||
85 | # airflow.utils.dag_cycle_tester
|
||||
86 | test_cycle
|
||||
86 | # airflow.utils.dag_cycle_tester
|
||||
87 | test_cycle
|
||||
| ^^^^^^^^^^
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.db.create_session` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:90:1
|
||||
--> AIR301_names.py:91:1
|
||||
|
|
||||
89 | # airflow.utils.db
|
||||
90 | create_session
|
||||
90 | # airflow.utils.db
|
||||
91 | create_session
|
||||
| ^^^^^^^^^^^^^^
|
||||
91 |
|
||||
92 | # airflow.utils.decorators
|
||||
92 |
|
||||
93 | # airflow.utils.decorators
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.decorators.apply_defaults` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:93:1
|
||||
--> AIR301_names.py:94:1
|
||||
|
|
||||
92 | # airflow.utils.decorators
|
||||
93 | apply_defaults
|
||||
93 | # airflow.utils.decorators
|
||||
94 | apply_defaults
|
||||
| ^^^^^^^^^^^^^^
|
||||
94 |
|
||||
95 | # airflow.utils.file
|
||||
95 |
|
||||
96 | # airflow.utils.file
|
||||
|
|
||||
help: `apply_defaults` is now unconditionally done and can be safely removed.
|
||||
|
||||
AIR301 `airflow.utils.file.mkdirs` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:96:1
|
||||
--> AIR301_names.py:97:1
|
||||
|
|
||||
95 | # airflow.utils.file
|
||||
96 | mkdirs
|
||||
96 | # airflow.utils.file
|
||||
97 | mkdirs
|
||||
| ^^^^^^
|
||||
|
|
||||
help: Use `pathlib.Path({path}).mkdir` instead
|
||||
|
||||
AIR301 `airflow.utils.state.SHUTDOWN` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:100:1
|
||||
--> AIR301_names.py:101:1
|
||||
|
|
||||
99 | # airflow.utils.state
|
||||
100 | SHUTDOWN
|
||||
100 | # airflow.utils.state
|
||||
101 | SHUTDOWN
|
||||
| ^^^^^^^^
|
||||
101 | terminating_states
|
||||
102 | terminating_states
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.state.terminating_states` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:101:1
|
||||
--> AIR301_names.py:102:1
|
||||
|
|
||||
99 | # airflow.utils.state
|
||||
100 | SHUTDOWN
|
||||
101 | terminating_states
|
||||
100 | # airflow.utils.state
|
||||
101 | SHUTDOWN
|
||||
102 | terminating_states
|
||||
| ^^^^^^^^^^^^^^^^^^
|
||||
102 |
|
||||
103 | # airflow.utils.trigger_rule
|
||||
103 |
|
||||
104 | # airflow.utils.trigger_rule
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.trigger_rule.TriggerRule.DUMMY` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:104:1
|
||||
--> AIR301_names.py:105:1
|
||||
|
|
||||
103 | # airflow.utils.trigger_rule
|
||||
104 | TriggerRule.DUMMY
|
||||
104 | # airflow.utils.trigger_rule
|
||||
105 | TriggerRule.DUMMY
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
105 | TriggerRule.NONE_FAILED_OR_SKIPPED
|
||||
106 | TriggerRule.NONE_FAILED_OR_SKIPPED
|
||||
|
|
||||
|
||||
AIR301 `airflow.utils.trigger_rule.TriggerRule.NONE_FAILED_OR_SKIPPED` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:105:1
|
||||
--> AIR301_names.py:106:1
|
||||
|
|
||||
103 | # airflow.utils.trigger_rule
|
||||
104 | TriggerRule.DUMMY
|
||||
105 | TriggerRule.NONE_FAILED_OR_SKIPPED
|
||||
104 | # airflow.utils.trigger_rule
|
||||
105 | TriggerRule.DUMMY
|
||||
106 | TriggerRule.NONE_FAILED_OR_SKIPPED
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
|
||||
AIR301 `airflow.www.auth.has_access` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:109:1
|
||||
--> AIR301_names.py:110:1
|
||||
|
|
||||
108 | # airflow.www.auth
|
||||
109 | has_access
|
||||
109 | # airflow.www.auth
|
||||
110 | has_access
|
||||
| ^^^^^^^^^^
|
||||
110 | has_access_dataset
|
||||
111 | has_access_dataset
|
||||
|
|
||||
|
||||
AIR301 `airflow.www.auth.has_access_dataset` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:110:1
|
||||
--> AIR301_names.py:111:1
|
||||
|
|
||||
108 | # airflow.www.auth
|
||||
109 | has_access
|
||||
110 | has_access_dataset
|
||||
109 | # airflow.www.auth
|
||||
110 | has_access
|
||||
111 | has_access_dataset
|
||||
| ^^^^^^^^^^^^^^^^^^
|
||||
111 |
|
||||
112 | # airflow.www.utils
|
||||
112 |
|
||||
113 | # airflow.www.utils
|
||||
|
|
||||
|
||||
AIR301 `airflow.www.utils.get_sensitive_variables_fields` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:113:1
|
||||
--> AIR301_names.py:114:1
|
||||
|
|
||||
112 | # airflow.www.utils
|
||||
113 | get_sensitive_variables_fields
|
||||
113 | # airflow.www.utils
|
||||
114 | get_sensitive_variables_fields
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
114 | should_hide_value_for_key
|
||||
115 | should_hide_value_for_key
|
||||
|
|
||||
|
||||
AIR301 `airflow.www.utils.should_hide_value_for_key` is removed in Airflow 3.0
|
||||
--> AIR301_names.py:114:1
|
||||
--> AIR301_names.py:115:1
|
||||
|
|
||||
112 | # airflow.www.utils
|
||||
113 | get_sensitive_variables_fields
|
||||
114 | should_hide_value_for_key
|
||||
113 | # airflow.www.utils
|
||||
114 | get_sensitive_variables_fields
|
||||
115 | should_hide_value_for_key
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
|
|
|
|||
|
|
@ -1091,9 +1091,12 @@ fn suspicious_function(
] => checker.report_diagnostic_if_enabled(SuspiciousInsecureCipherModeUsage, range),

// Mktemp
["tempfile", "mktemp"] => {
checker.report_diagnostic_if_enabled(SuspiciousMktempUsage, range)
}
["tempfile", "mktemp"] => checker
.report_diagnostic_if_enabled(SuspiciousMktempUsage, range)
.map(|mut diagnostic| {
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
diagnostic
}),

// Eval
["" | "builtins", "eval"] => {
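Several rules in this commit (S306 above, and the PYI057, PT020, numpy, and pandas changes below) now attach a `Deprecated` tag to the primary annotation of their diagnostics, so clients such as editors can render the flagged code as deprecated. A minimal self-contained sketch of that tag-then-return shape, using stand-in types rather than ruff's real `Checker`, diagnostic guard, and `ruff_db::diagnostic::DiagnosticTag`:

// Stand-ins for illustration only.
#[derive(Debug)]
enum DiagnosticTag {
    Deprecated,
}

#[derive(Debug, Default)]
struct Diagnostic {
    primary_tags: Vec<DiagnosticTag>,
}

impl Diagnostic {
    fn add_primary_tag(&mut self, tag: DiagnosticTag) {
        self.primary_tags.push(tag);
    }
}

// Mirrors `report_diagnostic_if_enabled`: a diagnostic is produced only when
// the rule is turned on.
fn report_if_enabled(enabled: bool) -> Option<Diagnostic> {
    enabled.then(Diagnostic::default)
}

fn main() {
    // Same shape as the mktemp arm above: tag the diagnostic before handing it back.
    let diagnostic = report_if_enabled(true).map(|mut diagnostic| {
        diagnostic.add_primary_tag(DiagnosticTag::Deprecated);
        diagnostic
    });
    assert!(diagnostic.is_some());
    println!("{diagnostic:?}");
}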
@ -127,3 +127,44 @@ S602 `subprocess` call with `shell=True` identified, security issue
|
|||
21 |
|
||||
22 | # Check dict display with only double-starred expressions can be falsey.
|
||||
|
|
||||
|
||||
S602 `subprocess` call with truthy `shell` seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
--> S602.py:38:1
|
||||
|
|
||||
37 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
38 | Popen("true", shell=(i for i in ()))
|
||||
| ^^^^^
|
||||
39 | Popen("true", shell=lambda: 0)
|
||||
40 | Popen("true", shell=f"{b''}")
|
||||
|
|
||||
|
||||
S602 `subprocess` call with truthy `shell` seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
--> S602.py:39:1
|
||||
|
|
||||
37 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
38 | Popen("true", shell=(i for i in ()))
|
||||
39 | Popen("true", shell=lambda: 0)
|
||||
| ^^^^^
|
||||
40 | Popen("true", shell=f"{b''}")
|
||||
41 | x = 1
|
||||
|
|
||||
|
||||
S602 `subprocess` call with truthy `shell` seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
--> S602.py:40:1
|
||||
|
|
||||
38 | Popen("true", shell=(i for i in ()))
|
||||
39 | Popen("true", shell=lambda: 0)
|
||||
40 | Popen("true", shell=f"{b''}")
|
||||
| ^^^^^
|
||||
41 | x = 1
|
||||
42 | Popen("true", shell=f"{x=}")
|
||||
|
|
||||
|
||||
S602 `subprocess` call with truthy `shell` seems safe, but may be changed in the future; consider rewriting without `shell`
|
||||
--> S602.py:42:1
|
||||
|
|
||||
40 | Popen("true", shell=f"{b''}")
|
||||
41 | x = 1
|
||||
42 | Popen("true", shell=f"{x=}")
|
||||
| ^^^^^
|
||||
|
|
||||
|
|
|
|||
|
|
@ -9,3 +9,85 @@ S604 Function call with `shell=True` parameter identified, security issue
|
|||
6 |
|
||||
7 | foo(shell={**{}})
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:11:1
|
||||
|
|
||||
10 | # Truthy non-bool values for `shell`
|
||||
11 | foo(shell=(i for i in ()))
|
||||
| ^^^
|
||||
12 | foo(shell=lambda: 0)
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:12:1
|
||||
|
|
||||
10 | # Truthy non-bool values for `shell`
|
||||
11 | foo(shell=(i for i in ()))
|
||||
12 | foo(shell=lambda: 0)
|
||||
| ^^^
|
||||
13 |
|
||||
14 | # f-strings guaranteed non-empty
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:15:1
|
||||
|
|
||||
14 | # f-strings guaranteed non-empty
|
||||
15 | foo(shell=f"{b''}")
|
||||
| ^^^
|
||||
16 | x = 1
|
||||
17 | foo(shell=f"{x=}")
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:17:1
|
||||
|
|
||||
15 | foo(shell=f"{b''}")
|
||||
16 | x = 1
|
||||
17 | foo(shell=f"{x=}")
|
||||
| ^^^
|
||||
18 |
|
||||
19 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:20:1
|
||||
|
|
||||
19 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
20 | foo(shell=(i for i in ()))
|
||||
| ^^^
|
||||
21 | foo(shell=lambda: 0)
|
||||
22 | foo(shell=f"{b''}")
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:21:1
|
||||
|
|
||||
19 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
20 | foo(shell=(i for i in ()))
|
||||
21 | foo(shell=lambda: 0)
|
||||
| ^^^
|
||||
22 | foo(shell=f"{b''}")
|
||||
23 | x = 1
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:22:1
|
||||
|
|
||||
20 | foo(shell=(i for i in ()))
|
||||
21 | foo(shell=lambda: 0)
|
||||
22 | foo(shell=f"{b''}")
|
||||
| ^^^
|
||||
23 | x = 1
|
||||
24 | foo(shell=f"{x=}")
|
||||
|
|
||||
|
||||
S604 Function call with truthy `shell` parameter identified, security issue
|
||||
--> S604.py:24:1
|
||||
|
|
||||
22 | foo(shell=f"{b''}")
|
||||
23 | x = 1
|
||||
24 | foo(shell=f"{x=}")
|
||||
| ^^^
|
||||
|
|
||||
|
|
|
|||
|
|
@ -43,3 +43,44 @@ S609 Possible wildcard injection in call due to `*` usage
|
|||
9 |
|
||||
10 | subprocess.Popen(["chmod", "+w", "*.py"], shell={**{}})
|
||||
|
|
||||
|
||||
S609 Possible wildcard injection in call due to `*` usage
|
||||
--> S609.py:14:18
|
||||
|
|
||||
13 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
14 | subprocess.Popen("chmod +w foo*", shell=(i for i in ()))
|
||||
| ^^^^^^^^^^^^^^^
|
||||
15 | subprocess.Popen("chmod +w foo*", shell=lambda: 0)
|
||||
16 | subprocess.Popen("chmod +w foo*", shell=f"{b''}")
|
||||
|
|
||||
|
||||
S609 Possible wildcard injection in call due to `*` usage
|
||||
--> S609.py:15:18
|
||||
|
|
||||
13 | # Additional truthiness cases for generator, lambda, and f-strings
|
||||
14 | subprocess.Popen("chmod +w foo*", shell=(i for i in ()))
|
||||
15 | subprocess.Popen("chmod +w foo*", shell=lambda: 0)
|
||||
| ^^^^^^^^^^^^^^^
|
||||
16 | subprocess.Popen("chmod +w foo*", shell=f"{b''}")
|
||||
17 | x = 1
|
||||
|
|
||||
|
||||
S609 Possible wildcard injection in call due to `*` usage
|
||||
--> S609.py:16:18
|
||||
|
|
||||
14 | subprocess.Popen("chmod +w foo*", shell=(i for i in ()))
|
||||
15 | subprocess.Popen("chmod +w foo*", shell=lambda: 0)
|
||||
16 | subprocess.Popen("chmod +w foo*", shell=f"{b''}")
|
||||
| ^^^^^^^^^^^^^^^
|
||||
17 | x = 1
|
||||
18 | subprocess.Popen("chmod +w foo*", shell=f"{x=}")
|
||||
|
|
||||
|
||||
S609 Possible wildcard injection in call due to `*` usage
|
||||
--> S609.py:18:18
|
||||
|
|
||||
16 | subprocess.Popen("chmod +w foo*", shell=f"{b''}")
|
||||
17 | x = 1
|
||||
18 | subprocess.Popen("chmod +w foo*", shell=f"{x=}")
|
||||
| ^^^^^^^^^^^^^^^
|
||||
|
|
||||
|
|
|
|||
|
|
@ -188,16 +188,10 @@ fn move_initialization(
content.push_str(stylist.line_ending().as_str());
content.push_str(stylist.indentation());
if is_b006_unsafe_fix_preserve_assignment_expr_enabled(checker.settings()) {
let annotation = if let Some(ann) = parameter.annotation() {
format!(": {}", locator.slice(ann))
} else {
String::new()
};
let _ = write!(
&mut content,
"{}{} = {}",
"{} = {}",
parameter.parameter.name(),
annotation,
locator.slice(
parenthesized_range(
default.into(),
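The hunk above stops re-emitting the parameter's annotation when the B006 fix moves a mutable default into the function body, so the generated statement is now `value = {}` rather than `value: dict[str, str] = {}` (as the snapshot updates below show). The replacement text is accumulated with `write!` into a `String`; a minimal sketch of that formatting step, with illustrative variable names:

use std::fmt::Write;

fn main() {
    let mut content = String::new();
    let parameter_name = "value";
    let default_source = "{}";

    // Mirrors the `write!(&mut content, "{} = {}", ...)` call in the hunk:
    // re-assign the parameter to its old default inside the function body.
    let _ = write!(&mut content, "{parameter_name} = {default_source}");
    assert_eq!(content, "value = {}");
}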
@ -16,7 +16,7 @@ help: Replace with `None`; initialize within function
|
|||
5 + def import_module_wrong(value: dict[str, str] = None):
|
||||
6 | import os
|
||||
7 + if value is None:
|
||||
8 + value: dict[str, str] = {}
|
||||
8 + value = {}
|
||||
9 |
|
||||
10 |
|
||||
11 | def import_module_with_values_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -38,7 +38,7 @@ help: Replace with `None`; initialize within function
|
|||
10 | import os
|
||||
11 |
|
||||
12 + if value is None:
|
||||
13 + value: dict[str, str] = {}
|
||||
13 + value = {}
|
||||
14 | return 2
|
||||
15 |
|
||||
16 |
|
||||
|
|
@ -62,7 +62,7 @@ help: Replace with `None`; initialize within function
|
|||
17 | import sys
|
||||
18 | import itertools
|
||||
19 + if value is None:
|
||||
20 + value: dict[str, str] = {}
|
||||
20 + value = {}
|
||||
21 |
|
||||
22 |
|
||||
23 | def from_import_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -83,7 +83,7 @@ help: Replace with `None`; initialize within function
|
|||
21 + def from_import_module_wrong(value: dict[str, str] = None):
|
||||
22 | from os import path
|
||||
23 + if value is None:
|
||||
24 + value: dict[str, str] = {}
|
||||
24 + value = {}
|
||||
25 |
|
||||
26 |
|
||||
27 | def from_imports_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -106,7 +106,7 @@ help: Replace with `None`; initialize within function
|
|||
26 | from os import path
|
||||
27 | from sys import version_info
|
||||
28 + if value is None:
|
||||
29 + value: dict[str, str] = {}
|
||||
29 + value = {}
|
||||
30 |
|
||||
31 |
|
||||
32 | def import_and_from_imports_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -129,7 +129,7 @@ help: Replace with `None`; initialize within function
|
|||
31 | import os
|
||||
32 | from sys import version_info
|
||||
33 + if value is None:
|
||||
34 + value: dict[str, str] = {}
|
||||
34 + value = {}
|
||||
35 |
|
||||
36 |
|
||||
37 | def import_docstring_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -152,7 +152,7 @@ help: Replace with `None`; initialize within function
|
|||
36 | """Docstring"""
|
||||
37 | import os
|
||||
38 + if value is None:
|
||||
39 + value: dict[str, str] = {}
|
||||
39 + value = {}
|
||||
40 |
|
||||
41 |
|
||||
42 | def import_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -175,7 +175,7 @@ help: Replace with `None`; initialize within function
|
|||
41 | """Docstring"""
|
||||
42 | import os; import sys
|
||||
43 + if value is None:
|
||||
44 + value: dict[str, str] = {}
|
||||
44 + value = {}
|
||||
45 |
|
||||
46 |
|
||||
47 | def import_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -197,7 +197,7 @@ help: Replace with `None`; initialize within function
|
|||
45 + def import_module_wrong(value: dict[str, str] = None):
|
||||
46 | """Docstring"""
|
||||
47 + if value is None:
|
||||
48 + value: dict[str, str] = {}
|
||||
48 + value = {}
|
||||
49 | import os; import sys; x = 1
|
||||
50 |
|
||||
51 |
|
||||
|
|
@ -220,7 +220,7 @@ help: Replace with `None`; initialize within function
|
|||
51 | """Docstring"""
|
||||
52 | import os; import sys
|
||||
53 + if value is None:
|
||||
54 + value: dict[str, str] = {}
|
||||
54 + value = {}
|
||||
55 |
|
||||
56 |
|
||||
57 | def import_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -241,7 +241,7 @@ help: Replace with `None`; initialize within function
|
|||
55 + def import_module_wrong(value: dict[str, str] = None):
|
||||
56 | import os; import sys
|
||||
57 + if value is None:
|
||||
58 + value: dict[str, str] = {}
|
||||
58 + value = {}
|
||||
59 |
|
||||
60 |
|
||||
61 | def import_module_wrong(value: dict[str, str] = {}):
|
||||
|
|
@ -261,7 +261,7 @@ help: Replace with `None`; initialize within function
|
|||
- def import_module_wrong(value: dict[str, str] = {}):
|
||||
59 + def import_module_wrong(value: dict[str, str] = None):
|
||||
60 + if value is None:
|
||||
61 + value: dict[str, str] = {}
|
||||
61 + value = {}
|
||||
62 | import os; import sys; x = 1
|
||||
63 |
|
||||
64 |
|
||||
|
|
@ -282,7 +282,7 @@ help: Replace with `None`; initialize within function
|
|||
63 + def import_module_wrong(value: dict[str, str] = None):
|
||||
64 | import os; import sys
|
||||
65 + if value is None:
|
||||
66 + value: dict[str, str] = {}
|
||||
66 + value = {}
|
||||
67 |
|
||||
68 |
|
||||
69 | def import_module_wrong(value: dict[str, str] = {}): import os
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ help: Replace with `None`; initialize within function
|
|||
10 + def baz(a: list = None):
|
||||
11 | """This one raises a different exception"""
|
||||
12 + if a is None:
|
||||
13 + a: list = []
|
||||
13 + a = []
|
||||
14 | raise IndexError()
|
||||
15 |
|
||||
16 |
|
||||
|
|
|
|||
|
|
@ -1,22 +1,23 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/flake8_implicit_str_concat/mod.rs
|
||||
---
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:2:5
|
||||
ISC001 Implicitly concatenated string literals on one line
|
||||
--> ISC_syntax_error.py:2:1
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
| ^^
|
||||
| ^^^^^^
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
|
|
||||
help: Combine string literals
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:2:7
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:2:5
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
| ^
|
||||
| ^^
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
|
|
||||
|
|
@ -24,7 +25,7 @@ invalid-syntax: Expected a statement
|
|||
ISC001 Implicitly concatenated string literals on one line
|
||||
--> ISC_syntax_error.py:3:1
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
| ^^^^^^^
|
||||
|
|
@ -33,24 +34,25 @@ ISC001 Implicitly concatenated string literals on one line
|
|||
|
|
||||
help: Combine string literals
|
||||
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:3:9
|
||||
ISC001 Implicitly concatenated string literals on one line
|
||||
--> ISC_syntax_error.py:3:5
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
| ^^
|
||||
| ^^^^^^
|
||||
4 | "a" """b
|
||||
5 | c""" "d
|
||||
|
|
||||
help: Combine string literals
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:3:11
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:3:9
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
| ^
|
||||
| ^^
|
||||
4 | "a" """b
|
||||
5 | c""" "d
|
||||
|
|
||||
|
|
@ -64,7 +66,21 @@ ISC001 Implicitly concatenated string literals on one line
|
|||
5 | | c""" "d
|
||||
| |____^
|
||||
6 |
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
|
|
||||
help: Combine string literals
|
||||
|
||||
ISC001 Implicitly concatenated string literals on one line
|
||||
--> ISC_syntax_error.py:4:5
|
||||
|
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
| _____^
|
||||
5 | | c""" "d
|
||||
| |_______^
|
||||
6 |
|
||||
7 | # This is also true for
|
||||
|
|
||||
help: Combine string literals
|
||||
|
||||
|
|
@ -76,24 +92,13 @@ invalid-syntax: missing closing quote in string literal
|
|||
5 | c""" "d
|
||||
| ^^
|
||||
6 |
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:5:8
|
||||
|
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
5 | c""" "d
|
||||
| ^
|
||||
6 |
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
|
|
||||
|
||||
invalid-syntax: f-string: unterminated string
|
||||
--> ISC_syntax_error.py:9:8
|
||||
|
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
8 | # unterminated f-strings.
|
||||
9 | f"a" f"b
|
||||
| ^
|
||||
|
|
@ -104,7 +109,7 @@ invalid-syntax: f-string: unterminated string
|
|||
invalid-syntax: Expected FStringEnd, found newline
|
||||
--> ISC_syntax_error.py:9:9
|
||||
|
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
8 | # unterminated f-strings.
|
||||
9 | f"a" f"b
|
||||
| ^
|
||||
|
|
@ -183,14 +188,6 @@ invalid-syntax: f-string: unterminated triple-quoted string
|
|||
| |__^
|
||||
|
|
||||
|
||||
invalid-syntax: unexpected EOF while parsing
|
||||
--> ISC_syntax_error.py:30:1
|
||||
|
|
||||
28 | "i" "j"
|
||||
29 | )
|
||||
| ^
|
||||
|
|
||||
|
||||
invalid-syntax: f-string: unterminated string
|
||||
--> ISC_syntax_error.py:30:1
|
||||
|
|
||||
|
|
|
|||
|
|
@ -4,27 +4,17 @@ source: crates/ruff_linter/src/rules/flake8_implicit_str_concat/mod.rs
|
|||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:2:5
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
| ^^
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:2:7
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
| ^
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
|
|
||||
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:3:9
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
1 | # The lexer emits a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
| ^^
|
||||
|
|
@ -32,17 +22,6 @@ invalid-syntax: missing closing quote in string literal
|
|||
5 | c""" "d
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:3:11
|
||||
|
|
||||
1 | # The lexer doesn't emit a string token if it's unterminated
|
||||
2 | "a" "b
|
||||
3 | "a" "b" "c
|
||||
| ^
|
||||
4 | "a" """b
|
||||
5 | c""" "d
|
||||
|
|
||||
|
||||
invalid-syntax: missing closing quote in string literal
|
||||
--> ISC_syntax_error.py:5:6
|
||||
|
|
||||
|
|
@ -51,24 +30,13 @@ invalid-syntax: missing closing quote in string literal
|
|||
5 | c""" "d
|
||||
| ^^
|
||||
6 |
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> ISC_syntax_error.py:5:8
|
||||
|
|
||||
3 | "a" "b" "c
|
||||
4 | "a" """b
|
||||
5 | c""" "d
|
||||
| ^
|
||||
6 |
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
|
|
||||
|
||||
invalid-syntax: f-string: unterminated string
|
||||
--> ISC_syntax_error.py:9:8
|
||||
|
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
8 | # unterminated f-strings.
|
||||
9 | f"a" f"b
|
||||
| ^
|
||||
|
|
@ -79,7 +47,7 @@ invalid-syntax: f-string: unterminated string
|
|||
invalid-syntax: Expected FStringEnd, found newline
|
||||
--> ISC_syntax_error.py:9:9
|
||||
|
|
||||
7 | # For f-strings, the `FStringRanges` won't contain the range for
|
||||
7 | # This is also true for
|
||||
8 | # unterminated f-strings.
|
||||
9 | f"a" f"b
|
||||
| ^
|
||||
|
|
@ -133,14 +101,6 @@ invalid-syntax: f-string: unterminated triple-quoted string
|
|||
| |__^
|
||||
|
|
||||
|
||||
invalid-syntax: unexpected EOF while parsing
|
||||
--> ISC_syntax_error.py:30:1
|
||||
|
|
||||
28 | "i" "j"
|
||||
29 | )
|
||||
| ^
|
||||
|
|
||||
|
||||
invalid-syntax: f-string: unterminated string
|
||||
--> ISC_syntax_error.py:30:1
|
||||
|
|
||||
|
|
|
|||
|
|
@ -74,7 +74,8 @@ pub(crate) fn bytestring_attribute(checker: &Checker, attribute: &Expr) {
["collections", "abc", "ByteString"] => ByteStringOrigin::CollectionsAbc,
_ => return,
};
checker.report_diagnostic(ByteStringUsage { origin }, attribute.range());
let mut diagnostic = checker.report_diagnostic(ByteStringUsage { origin }, attribute.range());
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
}

/// PYI057
@ -94,7 +95,9 @@ pub(crate) fn bytestring_import(checker: &Checker, import_from: &ast::StmtImport

for name in names {
if name.name.as_str() == "ByteString" {
checker.report_diagnostic(ByteStringUsage { origin }, name.range());
let mut diagnostic =
checker.report_diagnostic(ByteStringUsage { origin }, name.range());
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
}
}
}
@ -4,6 +4,8 @@ use ruff_python_ast::{
self as ast, Expr, ExprBinOp, ExprContext, ExprNoneLiteral, Operator, PythonVersion,
helpers::{pep_604_union, typing_optional},
name::Name,
operator_precedence::OperatorPrecedence,
parenthesize::parenthesized_range,
};
use ruff_python_semantic::analyze::typing::{traverse_literal, traverse_union};
use ruff_text_size::{Ranged, TextRange};
@ -238,7 +240,19 @@ fn create_fix(
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
});
let union_expr = pep_604_union(&[new_literal_expr, none_expr]);
let content = checker.generator().expr(&union_expr);

// Check if we need parentheses to preserve operator precedence
let content = if needs_parentheses_for_precedence(
semantic,
literal_expr,
checker.comment_ranges(),
checker.source(),
) {
format!("({})", checker.generator().expr(&union_expr))
} else {
checker.generator().expr(&union_expr)
};

let union_edit = Edit::range_replacement(content, literal_expr.range());
Fix::applicable_edit(union_edit, applicability)
}
@ -256,3 +270,37 @@ enum UnionKind {
TypingOptional,
BitOr,
}

/// Check if the union expression needs parentheses to preserve operator precedence.
/// This is needed when the union is part of a larger expression where the `|` operator
/// has lower precedence than the surrounding operations (like attribute access).
fn needs_parentheses_for_precedence(
semantic: &ruff_python_semantic::SemanticModel,
literal_expr: &Expr,
comment_ranges: &ruff_python_trivia::CommentRanges,
source: &str,
) -> bool {
// Get the parent expression to check if we're in a context that needs parentheses
let Some(parent_expr) = semantic.current_expression_parent() else {
return false;
};

// Check if the literal expression is already parenthesized
if parenthesized_range(
literal_expr.into(),
parent_expr.into(),
comment_ranges,
source,
)
.is_some()
{
return false; // Already parenthesized, don't add more
}

// Check if the parent expression has higher precedence than the `|` operator
let union_precedence = OperatorPrecedence::BitOr;
let parent_precedence = OperatorPrecedence::from(parent_expr);

// If the parent operation has higher precedence than `|`, we need parentheses
parent_precedence > union_precedence
}
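`needs_parentheses_for_precedence` above decides whether the generated `Literal[...] | None` union has to be wrapped in parentheses: it leaves already-parenthesized expressions alone and otherwise compares the parent expression's operator precedence against `|` (BitOr). A minimal self-contained sketch of that comparison, using a simplified precedence enum instead of ruff's `OperatorPrecedence`:

// Simplified stand-in for OperatorPrecedence. Later variants bind tighter,
// so deriving Ord gives the comparison used above.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Precedence {
    BitOr,     // `a | b`
    Add,       // `a + b`
    Subscript, // `a[b]`
    Attribute, // `a.b`
}

/// Parenthesize the union when the surrounding operation binds tighter than `|`.
fn needs_parens(parent: Precedence) -> bool {
    parent > Precedence::BitOr
}

fn main() {
    // `Literal[1, None].__dict__`: attribute access binds tighter, so the fix
    // must emit `(Literal[1] | None).__dict__` (see the PYI061 snapshots below).
    assert!(needs_parens(Precedence::Attribute));
    // Another `|` at the same level does not require parentheses.
    assert!(!needs_parens(Precedence::BitOr));
    println!("precedence checks passed");
}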
@ -423,5 +423,117 @@ PYI061 Use `None` rather than `Literal[None]`
|
|||
79 | d: None | (Literal[None] | None)
|
||||
80 | e: None | ((None | Literal[None]) | None) | None
|
||||
| ^^^^
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
|
|
||||
help: Replace with `None`
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:83:18
|
||||
|
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
| ^^^^
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
80 | e: None | ((None | Literal[None]) | None) | None
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
- print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
83 + print((Literal[1] | None).__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:84:18
|
||||
|
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
| ^^^^
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
- print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
84 + print((Literal[1] | None).method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:85:18
|
||||
|
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
| ^^^^
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
- print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
85 + print((Literal[1] | None)[0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:86:18
|
||||
|
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
| ^^^^
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
- print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
86 + print((Literal[1] | None) + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:87:18
|
||||
|
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
| ^^^^
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
- print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
87 + print((Literal[1] | None) * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Literal[...] | None` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:88:19
|
||||
|
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
| ^^^^
|
||||
|
|
||||
help: Replace with `Literal[...] | None`
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
- print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
88 + print((Literal[1] | None).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
|
|||
|
|
@ -465,5 +465,153 @@ PYI061 Use `None` rather than `Literal[None]`
|
|||
79 | d: None | (Literal[None] | None)
|
||||
80 | e: None | ((None | Literal[None]) | None) | None
|
||||
| ^^^^
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
|
|
||||
help: Replace with `None`
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:83:18
|
||||
|
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
| ^^^^
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
80 | e: None | ((None | Literal[None]) | None) | None
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
- print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
83 + print(Optional[Literal[1]].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:84:18
|
||||
|
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
| ^^^^
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
81 |
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
- print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
84 + print(Optional[Literal[1]].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:85:18
|
||||
|
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
| ^^^^
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
82 | # Test cases for operator precedence issue (https://github.com/astral-sh/ruff/issues/20265)
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
- print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
85 + print(Optional[Literal[1]][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:86:18
|
||||
|
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
| ^^^^
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
83 | print(Literal[1, None].__dict__) # Should become (Literal[1] | None).__dict__
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
- print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
86 + print(Optional[Literal[1]] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:87:18
|
||||
|
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
| ^^^^
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
84 | print(Literal[1, None].method()) # Should become (Literal[1] | None).method()
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
- print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
87 + print(Optional[Literal[1]] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
||||
PYI061 [*] Use `Optional[Literal[...]]` rather than `Literal[None, ...]`
|
||||
--> PYI061.py:88:19
|
||||
|
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
88 | print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
| ^^^^
|
||||
|
|
||||
help: Replace with `Optional[Literal[...]]`
|
||||
- from typing import Literal, Union
|
||||
1 + from typing import Literal, Union, Optional
|
||||
2 |
|
||||
3 |
|
||||
4 | def func1(arg1: Literal[None]):
|
||||
--------------------------------------------------------------------------------
|
||||
85 | print(Literal[1, None][0]) # Should become (Literal[1] | None)[0]
|
||||
86 | print(Literal[1, None] + 1) # Should become (Literal[1] | None) + 1
|
||||
87 | print(Literal[1, None] * 2) # Should become (Literal[1] | None) * 2
|
||||
- print((Literal[1, None]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
88 + print((Optional[Literal[1]]).__dict__) # Should become ((Literal[1] | None)).__dict__
|
||||
|
|
|
|||
|
|
@ -898,7 +898,9 @@ fn check_test_function_args(checker: &Checker, parameters: &Parameters, decorato
/// PT020
fn check_fixture_decorator_name(checker: &Checker, decorator: &Decorator) {
if is_pytest_yield_fixture(decorator, checker.semantic()) {
checker.report_diagnostic(PytestDeprecatedYieldFixture, decorator.range());
let mut diagnostic =
checker.report_diagnostic(PytestDeprecatedYieldFixture, decorator.range());
diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
}
}
@ -1062,3 +1062,77 @@ help: Replace with `"bar"`
|
|||
170 |
|
||||
171 |
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
SIM222 [*] Use `f"{b''}"` instead of `f"{b''}" or ...`
|
||||
--> SIM222.py:202:7
|
||||
|
|
||||
201 | # https://github.com/astral-sh/ruff/issues/20703
|
||||
202 | print(f"{b''}" or "bar") # SIM222
|
||||
| ^^^^^^^^^^^^^^^^^
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
|
|
||||
help: Replace with `f"{b''}"`
|
||||
199 | def f(a: "'b' or 'c'"): ...
|
||||
200 |
|
||||
201 | # https://github.com/astral-sh/ruff/issues/20703
|
||||
- print(f"{b''}" or "bar") # SIM222
|
||||
202 + print(f"{b''}") # SIM222
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
SIM222 [*] Use `f"{x=}"` instead of `f"{x=}" or ...`
|
||||
--> SIM222.py:204:7
|
||||
|
|
||||
202 | print(f"{b''}" or "bar") # SIM222
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
206 | (i for i in range(1)) or "bar" # SIM222
|
||||
|
|
||||
help: Replace with `f"{x=}"`
|
||||
201 | # https://github.com/astral-sh/ruff/issues/20703
|
||||
202 | print(f"{b''}" or "bar") # SIM222
|
||||
203 | x = 1
|
||||
- print(f"{x=}" or "bar") # SIM222
|
||||
204 + print(f"{x=}") # SIM222
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
206 | (i for i in range(1)) or "bar" # SIM222
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
SIM222 [*] Use `lambda: 1` instead of `lambda: 1 or ...`
|
||||
--> SIM222.py:205:1
|
||||
|
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
| ^^^^^^^^^^^^^^^^^^^
|
||||
206 | (i for i in range(1)) or "bar" # SIM222
|
||||
|
|
||||
help: Replace with `lambda: 1`
|
||||
202 | print(f"{b''}" or "bar") # SIM222
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
- (lambda: 1) or True # SIM222
|
||||
205 + lambda: 1 # SIM222
|
||||
206 | (i for i in range(1)) or "bar" # SIM222
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
||||
SIM222 [*] Use `(i for i in range(1))` instead of `(i for i in range(1)) or ...`
|
||||
--> SIM222.py:206:1
|
||||
|
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
206 | (i for i in range(1)) or "bar" # SIM222
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
|
||||
help: Replace with `(i for i in range(1))`
|
||||
203 | x = 1
|
||||
204 | print(f"{x=}" or "bar") # SIM222
|
||||
205 | (lambda: 1) or True # SIM222
|
||||
- (i for i in range(1)) or "bar" # SIM222
|
||||
206 + (i for i in range(1)) # SIM222
|
||||
note: This is an unsafe fix and may change runtime behavior
|
||||
|
|
|
|||
|
|
@@ -223,7 +223,7 @@ enum Argumentable

 impl Argumentable {
     fn check_for(self, checker: &Checker, name: String, range: TextRange) {
-        match self {
+        let mut diagnostic = match self {
             Self::Function => checker.report_diagnostic(UnusedFunctionArgument { name }, range),
             Self::Method => checker.report_diagnostic(UnusedMethodArgument { name }, range),
             Self::ClassMethod => {
@@ -234,6 +234,7 @@ impl Argumentable {
             }
             Self::Lambda => checker.report_diagnostic(UnusedLambdaArgument { name }, range),
         };
+        diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);
     }

     const fn rule_code(self) -> Rule {
@@ -80,6 +80,7 @@ pub(crate) fn deprecated_function(checker: &Checker, expr: &Expr) {
         },
         expr.range(),
     );
+    diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
     diagnostic.try_set_fix(|| {
         let (import_edit, binding) = checker.importer().get_or_import_symbol(
             &ImportRequest::import_from("numpy", replacement),
@@ -80,6 +80,7 @@ pub(crate) fn deprecated_type_alias(checker: &Checker, expr: &Expr) {
         },
         expr.range(),
     );
+    diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
     let type_name = match type_name {
         "unicode" => "str",
         _ => type_name,
@@ -165,16 +165,17 @@ pub(crate) fn subscript(checker: &Checker, value: &Expr, expr: &Expr) {
     match attr.as_str() {
         // PD007
         "ix" if checker.is_rule_enabled(Rule::PandasUseOfDotIx) => {
-            checker.report_diagnostic(PandasUseOfDotIx, range)
+            let mut diagnostic = checker.report_diagnostic(PandasUseOfDotIx, range);
+            diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Deprecated);
         }
         // PD008
         "at" if checker.is_rule_enabled(Rule::PandasUseOfDotAt) => {
-            checker.report_diagnostic(PandasUseOfDotAt, range)
+            checker.report_diagnostic(PandasUseOfDotAt, range);
         }
         // PD009
         "iat" if checker.is_rule_enabled(Rule::PandasUseOfDotIat) => {
-            checker.report_diagnostic(PandasUseOfDotIat, range)
+            checker.report_diagnostic(PandasUseOfDotIat, range);
         }
-        _ => return,
-    };
+        _ => (),
+    }
 }
@@ -28,6 +28,7 @@ mod tests {
         Ok(())
     }

+    #[test_case(Rule::DocstringExtraneousParameter, Path::new("DOC102_google.py"))]
     #[test_case(Rule::DocstringMissingReturns, Path::new("DOC201_google.py"))]
     #[test_case(Rule::DocstringExtraneousReturns, Path::new("DOC202_google.py"))]
     #[test_case(Rule::DocstringMissingYields, Path::new("DOC402_google.py"))]
@@ -50,6 +51,7 @@ mod tests {
         Ok(())
     }

+    #[test_case(Rule::DocstringExtraneousParameter, Path::new("DOC102_numpy.py"))]
     #[test_case(Rule::DocstringMissingReturns, Path::new("DOC201_numpy.py"))]
     #[test_case(Rule::DocstringExtraneousReturns, Path::new("DOC202_numpy.py"))]
     #[test_case(Rule::DocstringMissingYields, Path::new("DOC402_numpy.py"))]
@@ -1,14 +1,14 @@
 use itertools::Itertools;
 use ruff_macros::{ViolationMetadata, derive_message_formats};
-use ruff_python_ast::helpers::map_callable;
-use ruff_python_ast::helpers::map_subscript;
+use ruff_python_ast::helpers::{map_callable, map_subscript};
 use ruff_python_ast::name::QualifiedName;
 use ruff_python_ast::visitor::Visitor;
 use ruff_python_ast::{self as ast, Expr, Stmt, visitor};
 use ruff_python_semantic::analyze::{function_type, visibility};
 use ruff_python_semantic::{Definition, SemanticModel};
-use ruff_source_file::NewlineWithTrailingNewline;
-use ruff_text_size::{Ranged, TextRange};
+use ruff_python_stdlib::identifiers::is_identifier;
+use ruff_source_file::{LineRanges, NewlineWithTrailingNewline};
+use ruff_text_size::{Ranged, TextLen, TextRange, TextSize};

 use crate::Violation;
 use crate::checkers::ast::Checker;
@@ -18,6 +18,62 @@ use crate::docstrings::styles::SectionStyle;
 use crate::registry::Rule;
 use crate::rules::pydocstyle::settings::Convention;

+/// ## What it does
+/// Checks for function docstrings that include parameters which are not
+/// in the function signature.
+///
+/// ## Why is this bad?
+/// If a docstring documents a parameter which is not in the function signature,
+/// it can be misleading to users and/or a sign of incomplete documentation or
+/// refactors.
+///
+/// ## Example
+/// ```python
+/// def calculate_speed(distance: float, time: float) -> float:
+///     """Calculate speed as distance divided by time.
+///
+///     Args:
+///         distance: Distance traveled.
+///         time: Time spent traveling.
+///         acceleration: Rate of change of speed.
+///
+///     Returns:
+///         Speed as distance divided by time.
+///     """
+///     return distance / time
+/// ```
+///
+/// Use instead:
+/// ```python
+/// def calculate_speed(distance: float, time: float) -> float:
+///     """Calculate speed as distance divided by time.
+///
+///     Args:
+///         distance: Distance traveled.
+///         time: Time spent traveling.
+///
+///     Returns:
+///         Speed as distance divided by time.
+///     """
+///     return distance / time
+/// ```
+#[derive(ViolationMetadata)]
+pub(crate) struct DocstringExtraneousParameter {
+    id: String,
+}
+
+impl Violation for DocstringExtraneousParameter {
+    #[derive_message_formats]
+    fn message(&self) -> String {
+        let DocstringExtraneousParameter { id } = self;
+        format!("Documented parameter `{id}` is not in the function's signature")
+    }
+
+    fn fix_title(&self) -> Option<String> {
+        Some("Remove the extraneous parameter from the docstring".to_string())
+    }
+}
+
 /// ## What it does
 /// Checks for functions with `return` statements that do not have "Returns"
 /// sections in their docstrings.
@@ -396,6 +452,19 @@ impl GenericSection
     }
 }

+/// A parameter in a docstring with its text range.
+#[derive(Debug, Clone)]
+struct ParameterEntry<'a> {
+    name: &'a str,
+    range: TextRange,
+}
+
+impl Ranged for ParameterEntry<'_> {
+    fn range(&self) -> TextRange {
+        self.range
+    }
+}
+
 /// A "Raises" section in a docstring.
 #[derive(Debug)]
 struct RaisesSection<'a> {
@ -414,17 +483,46 @@ impl<'a> RaisesSection<'a> {
|
|||
/// a "Raises" section.
|
||||
fn from_section(section: &SectionContext<'a>, style: Option<SectionStyle>) -> Self {
|
||||
Self {
|
||||
raised_exceptions: parse_entries(section.following_lines_str(), style),
|
||||
raised_exceptions: parse_raises(section.following_lines_str(), style),
|
||||
range: section.range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An "Args" or "Parameters" section in a docstring.
|
||||
#[derive(Debug)]
|
||||
struct ParametersSection<'a> {
|
||||
parameters: Vec<ParameterEntry<'a>>,
|
||||
range: TextRange,
|
||||
}
|
||||
|
||||
impl Ranged for ParametersSection<'_> {
|
||||
fn range(&self) -> TextRange {
|
||||
self.range
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ParametersSection<'a> {
|
||||
/// Return the parameters for the docstring, or `None` if the docstring does not contain
|
||||
/// an "Args" or "Parameters" section.
|
||||
fn from_section(section: &SectionContext<'a>, style: Option<SectionStyle>) -> Self {
|
||||
Self {
|
||||
parameters: parse_parameters(
|
||||
section.following_lines_str(),
|
||||
section.following_range().start(),
|
||||
style,
|
||||
),
|
||||
range: section.section_name_range(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct DocstringSections<'a> {
|
||||
returns: Option<GenericSection>,
|
||||
yields: Option<GenericSection>,
|
||||
raises: Option<RaisesSection<'a>>,
|
||||
parameters: Option<ParametersSection<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> DocstringSections<'a> {
|
||||
|
|
@@ -432,6 +530,10 @@ impl<'a> DocstringSections<'a> {
         let mut docstring_sections = Self::default();
         for section in sections {
             match section.kind() {
+                SectionKind::Args | SectionKind::Arguments | SectionKind::Parameters => {
+                    docstring_sections.parameters =
+                        Some(ParametersSection::from_section(&section, style));
+                }
                 SectionKind::Raises => {
                     docstring_sections.raises = Some(RaisesSection::from_section(&section, style));
                 }
@@ -448,18 +550,22 @@ impl<'a> DocstringSections<'a> {
     }
 }

-/// Parse the entries in a "Raises" section of a docstring.
+/// Parse the entries in a "Parameters" section of a docstring.
 ///
 /// Attempts to parse using the specified [`SectionStyle`], falling back to the other style if no
 /// entries are found.
-fn parse_entries(content: &str, style: Option<SectionStyle>) -> Vec<QualifiedName<'_>> {
+fn parse_parameters(
+    content: &str,
+    content_start: TextSize,
+    style: Option<SectionStyle>,
+) -> Vec<ParameterEntry<'_>> {
     match style {
-        Some(SectionStyle::Google) => parse_entries_google(content),
-        Some(SectionStyle::Numpy) => parse_entries_numpy(content),
+        Some(SectionStyle::Google) => parse_parameters_google(content, content_start),
+        Some(SectionStyle::Numpy) => parse_parameters_numpy(content, content_start),
        None => {
-            let entries = parse_entries_google(content);
+            let entries = parse_parameters_google(content, content_start);
             if entries.is_empty() {
-                parse_entries_numpy(content)
+                parse_parameters_numpy(content, content_start)
             } else {
                 entries
             }
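For orientation, the two docstring shapes this dispatcher is asked to handle look roughly like the hypothetical Python functions below (the names are invented here, not taken from the test fixtures). Per the fallback above, Google-style "Args" parsing is tried first and NumPy-style "Parameters" parsing is used only when the former yields no entries.

```python
def add_google(a: int, b: int) -> int:
    """Add two numbers.

    Args:
        a (int): The first number to add.
        b (int): The second number to add.
    """
    return a + b


def add_numpy(a: int, b: int) -> int:
    """Add two numbers.

    Parameters
    ----------
    a : int
        The first number to add.
    b : int
        The second number to add.
    """
    return a + b
```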
@ -467,14 +573,134 @@ fn parse_entries(content: &str, style: Option<SectionStyle>) -> Vec<QualifiedNam
|
|||
}
|
||||
}
|
||||
|
||||
/// Parses Google-style docstring sections of the form:
|
||||
/// Parses Google-style "Args" sections of the form:
|
||||
///
|
||||
/// ```python
|
||||
/// Args:
|
||||
/// a (int): The first number to add.
|
||||
/// b (int): The second number to add.
|
||||
/// ```
|
||||
fn parse_parameters_google(content: &str, content_start: TextSize) -> Vec<ParameterEntry<'_>> {
|
||||
let mut entries: Vec<ParameterEntry> = Vec::new();
|
||||
// Find first entry to determine indentation
|
||||
let Some(first_arg) = content.lines().next() else {
|
||||
return entries;
|
||||
};
|
||||
let indentation = &first_arg[..first_arg.len() - first_arg.trim_start().len()];
|
||||
|
||||
let mut current_pos = TextSize::ZERO;
|
||||
for line in content.lines() {
|
||||
let line_start = current_pos;
|
||||
current_pos = content.full_line_end(line_start);
|
||||
|
||||
if let Some(entry) = line.strip_prefix(indentation) {
|
||||
if entry
|
||||
.chars()
|
||||
.next()
|
||||
.is_some_and(|first_char| !first_char.is_whitespace())
|
||||
{
|
||||
let Some((before_colon, _)) = entry.split_once(':') else {
|
||||
continue;
|
||||
};
|
||||
if let Some(param) = before_colon.split_whitespace().next() {
|
||||
let param_name = param.trim_start_matches('*');
|
||||
if is_identifier(param_name) {
|
||||
let param_start = line_start + indentation.text_len();
|
||||
let param_end = param_start + param.text_len();
|
||||
|
||||
entries.push(ParameterEntry {
|
||||
name: param_name,
|
||||
range: TextRange::new(
|
||||
content_start + param_start,
|
||||
content_start + param_end,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
/// Parses NumPy-style "Parameters" sections of the form:
|
||||
///
|
||||
/// ```python
|
||||
/// Parameters
|
||||
/// ----------
|
||||
/// a : int
|
||||
/// The first number to add.
|
||||
/// b : int
|
||||
/// The second number to add.
|
||||
/// ```
|
||||
fn parse_parameters_numpy(content: &str, content_start: TextSize) -> Vec<ParameterEntry<'_>> {
|
||||
let mut entries: Vec<ParameterEntry> = Vec::new();
|
||||
let mut lines = content.lines();
|
||||
let Some(dashes) = lines.next() else {
|
||||
return entries;
|
||||
};
|
||||
let indentation = &dashes[..dashes.len() - dashes.trim_start().len()];
|
||||
|
||||
let mut current_pos = content.full_line_end(dashes.text_len());
|
||||
for potential in lines {
|
||||
let line_start = current_pos;
|
||||
current_pos = content.full_line_end(line_start);
|
||||
|
||||
if let Some(entry) = potential.strip_prefix(indentation) {
|
||||
if entry
|
||||
.chars()
|
||||
.next()
|
||||
.is_some_and(|first_char| !first_char.is_whitespace())
|
||||
{
|
||||
if let Some(before_colon) = entry.split(':').next() {
|
||||
let param = before_colon.trim_end();
|
||||
let param_name = param.trim_start_matches('*');
|
||||
if is_identifier(param_name) {
|
||||
let param_start = line_start + indentation.text_len();
|
||||
let param_end = param_start + param.text_len();
|
||||
|
||||
entries.push(ParameterEntry {
|
||||
name: param_name,
|
||||
range: TextRange::new(
|
||||
content_start + param_start,
|
||||
content_start + param_end,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
/// Parse the entries in a "Raises" section of a docstring.
|
||||
///
|
||||
/// Attempts to parse using the specified [`SectionStyle`], falling back to the other style if no
|
||||
/// entries are found.
|
||||
fn parse_raises(content: &str, style: Option<SectionStyle>) -> Vec<QualifiedName<'_>> {
|
||||
match style {
|
||||
Some(SectionStyle::Google) => parse_raises_google(content),
|
||||
Some(SectionStyle::Numpy) => parse_raises_numpy(content),
|
||||
None => {
|
||||
let entries = parse_raises_google(content);
|
||||
if entries.is_empty() {
|
||||
parse_raises_numpy(content)
|
||||
} else {
|
||||
entries
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses Google-style "Raises" section of the form:
|
||||
///
|
||||
/// ```python
|
||||
/// Raises:
|
||||
/// FasterThanLightError: If speed is greater than the speed of light.
|
||||
/// DivisionByZero: If attempting to divide by zero.
|
||||
/// ```
|
||||
fn parse_entries_google(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
fn parse_raises_google(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
let mut entries: Vec<QualifiedName> = Vec::new();
|
||||
for potential in content.lines() {
|
||||
let Some(colon_idx) = potential.find(':') else {
|
||||
|
|
@ -486,7 +712,7 @@ fn parse_entries_google(content: &str) -> Vec<QualifiedName<'_>> {
|
|||
entries
|
||||
}
|
||||
|
||||
/// Parses NumPy-style docstring sections of the form:
|
||||
/// Parses NumPy-style "Raises" section of the form:
|
||||
///
|
||||
/// ```python
|
||||
/// Raises
|
||||
|
|
@ -496,7 +722,7 @@ fn parse_entries_google(content: &str) -> Vec<QualifiedName<'_>> {
|
|||
/// DivisionByZero
|
||||
/// If attempting to divide by zero.
|
||||
/// ```
|
||||
fn parse_entries_numpy(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
fn parse_raises_numpy(content: &str) -> Vec<QualifiedName<'_>> {
|
||||
let mut entries: Vec<QualifiedName> = Vec::new();
|
||||
let mut lines = content.lines();
|
||||
let Some(dashes) = lines.next() else {
|
||||
|
|
@@ -867,6 +1093,17 @@ fn is_generator_function_annotated_as_returning_none(
         .is_some_and(GeneratorOrIteratorArguments::indicates_none_returned)
 }

+fn parameters_from_signature<'a>(docstring: &'a Docstring) -> Vec<&'a str> {
+    let mut parameters = Vec::new();
+    let Some(function) = docstring.definition.as_function_def() else {
+        return parameters;
+    };
+    for param in &function.parameters {
+        parameters.push(param.name());
+    }
+    parameters
+}
+
 fn is_one_line(docstring: &Docstring) -> bool {
     let mut non_empty_line_count = 0;
     for line in NewlineWithTrailingNewline::from(docstring.body().as_str()) {
@@ -880,7 +1117,7 @@ fn is_one_line(docstring: &Docstring) -> bool {
     true
 }

-/// DOC201, DOC202, DOC402, DOC403, DOC501, DOC502
+/// DOC102, DOC201, DOC202, DOC402, DOC403, DOC501, DOC502
 pub(crate) fn check_docstring(
     checker: &Checker,
     definition: &Definition,
@@ -920,6 +1157,8 @@ pub(crate) fn check_docstring(
         visitor.finish()
     };

+    let signature_parameters = parameters_from_signature(docstring);
+
     // DOC201
     if checker.is_rule_enabled(Rule::DocstringMissingReturns) {
         if should_document_returns(function_def)
@@ -1008,6 +1247,25 @@ pub(crate) fn check_docstring(
         }
     }

+    // DOC102
+    if checker.is_rule_enabled(Rule::DocstringExtraneousParameter) {
+        // Don't report extraneous parameters if the signature defines *args or **kwargs
+        if function_def.parameters.vararg.is_none() && function_def.parameters.kwarg.is_none() {
+            if let Some(docstring_params) = docstring_sections.parameters {
+                for docstring_param in &docstring_params.parameters {
+                    if !signature_parameters.contains(&docstring_param.name) {
+                        checker.report_diagnostic(
+                            DocstringExtraneousParameter {
+                                id: docstring_param.name.to_string(),
+                            },
+                            docstring_param.range(),
+                        );
+                    }
+                }
+            }
+        }
+    }
+
     // Avoid applying "extraneous" rules to abstract methods. An abstract method's docstring _could_
     // document that it raises an exception without including the exception in the implementation.
     if !visibility::is_abstract(&function_def.decorator_list, semantic) {
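As a rough illustration of the behaviour this block implies (a hypothetical example, not one of the DOC102 fixtures below): a documented name missing from the signature is reported, while functions whose signatures declare `*args` or `**kwargs` are skipped entirely.

```python
def speed(distance: float, time: float) -> float:
    """Compute average speed.

    Args:
        distance: Distance traveled.
        time: Time spent traveling.
        acceleration: Not a parameter of this function; DOC102 would flag it.
    """
    return distance / time


def log(*args, **kwargs) -> None:
    """Print the given arguments.

    Args:
        verbose: Also not in the signature, but skipped because the
            signature declares *args/**kwargs.
    """
    print(*args, **kwargs)
```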
@ -0,0 +1,180 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
|
||||
---
|
||||
DOC102 Documented parameter `a` is not in the function's signature
|
||||
--> DOC102_google.py:7:9
|
||||
|
|
||||
6 | Args:
|
||||
7 | a (int): The first number to add.
|
||||
| ^
|
||||
8 | b (int): The second number to add.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `multiplier` is not in the function's signature
|
||||
--> DOC102_google.py:23:9
|
||||
|
|
||||
21 | Args:
|
||||
22 | lst (list of int): A list of integers.
|
||||
23 | multiplier (int): The multiplier for each element in the list.
|
||||
| ^^^^^^^^^^
|
||||
24 |
|
||||
25 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `numbers` is not in the function's signature
|
||||
--> DOC102_google.py:37:9
|
||||
|
|
||||
36 | Args:
|
||||
37 | numbers (list of int): A list of integers to search through.
|
||||
| ^^^^^^^
|
||||
38 |
|
||||
39 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `name` is not in the function's signature
|
||||
--> DOC102_google.py:51:9
|
||||
|
|
||||
50 | Args:
|
||||
51 | name (str): The name of the user.
|
||||
| ^^^^
|
||||
52 | age (int): The age of the user.
|
||||
53 | email (str): The user's email address.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `age` is not in the function's signature
|
||||
--> DOC102_google.py:52:9
|
||||
|
|
||||
50 | Args:
|
||||
51 | name (str): The name of the user.
|
||||
52 | age (int): The age of the user.
|
||||
| ^^^
|
||||
53 | email (str): The user's email address.
|
||||
54 | location (str): The location of the user.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `email` is not in the function's signature
|
||||
--> DOC102_google.py:53:9
|
||||
|
|
||||
51 | name (str): The name of the user.
|
||||
52 | age (int): The age of the user.
|
||||
53 | email (str): The user's email address.
|
||||
| ^^^^^
|
||||
54 | location (str): The location of the user.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `tax_rate` is not in the function's signature
|
||||
--> DOC102_google.py:74:9
|
||||
|
|
||||
72 | Args:
|
||||
73 | item_prices (list of float): A list of prices for each item.
|
||||
74 | tax_rate (float): The tax rate to apply.
|
||||
| ^^^^^^^^
|
||||
75 | discount (float): The discount to subtract from the total.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `to_address` is not in the function's signature
|
||||
--> DOC102_google.py:94:9
|
||||
|
|
||||
92 | subject (str): The subject of the email.
|
||||
93 | body (str): The content of the email.
|
||||
94 | to_address (str): The recipient's email address.
|
||||
| ^^^^^^^^^^
|
||||
95 | cc_address (str, optional): The email address for CC. Defaults to None.
|
||||
96 | bcc_address (str, optional): The email address for BCC. Defaults to None.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `cc_address` is not in the function's signature
|
||||
--> DOC102_google.py:95:9
|
||||
|
|
||||
93 | body (str): The content of the email.
|
||||
94 | to_address (str): The recipient's email address.
|
||||
95 | cc_address (str, optional): The email address for CC. Defaults to None.
|
||||
| ^^^^^^^^^^
|
||||
96 | bcc_address (str, optional): The email address for BCC. Defaults to None.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `items` is not in the function's signature
|
||||
--> DOC102_google.py:126:9
|
||||
|
|
||||
124 | Args:
|
||||
125 | order_id (int): The unique identifier for the order.
|
||||
126 | *items (str): Variable length argument list of items in the order.
|
||||
| ^^^^^^
|
||||
127 | **details (dict): Additional details such as shipping method and address.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `details` is not in the function's signature
|
||||
--> DOC102_google.py:127:9
|
||||
|
|
||||
125 | order_id (int): The unique identifier for the order.
|
||||
126 | *items (str): Variable length argument list of items in the order.
|
||||
127 | **details (dict): Additional details such as shipping method and address.
|
||||
| ^^^^^^^^^
|
||||
128 |
|
||||
129 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `value` is not in the function's signature
|
||||
--> DOC102_google.py:150:13
|
||||
|
|
||||
149 | Args:
|
||||
150 | value (int, optional): The initial value of the calculator. Defaults to 0.
|
||||
| ^^^^^
|
||||
151 | """
|
||||
152 | self.value = value
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `number` is not in the function's signature
|
||||
--> DOC102_google.py:160:13
|
||||
|
|
||||
159 | Args:
|
||||
160 | number (int or float): The number to add to the current value.
|
||||
| ^^^^^^
|
||||
161 |
|
||||
162 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `value_str` is not in the function's signature
|
||||
--> DOC102_google.py:175:13
|
||||
|
|
||||
174 | Args:
|
||||
175 | value_str (str): The string representing the initial value.
|
||||
| ^^^^^^^^^
|
||||
176 |
|
||||
177 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `number` is not in the function's signature
|
||||
--> DOC102_google.py:190:13
|
||||
|
|
||||
189 | Args:
|
||||
190 | number (any): The value to check.
|
||||
| ^^^^^^
|
||||
191 |
|
||||
192 | Returns:
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `a` is not in the function's signature
|
||||
--> DOC102_google.py:258:9
|
||||
|
|
||||
257 | Args:
|
||||
258 | a: The first number to add.
|
||||
| ^
|
||||
259 | b: The second number to add.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
|
@ -0,0 +1,189 @@
|
|||
---
|
||||
source: crates/ruff_linter/src/rules/pydoclint/mod.rs
|
||||
---
|
||||
DOC102 Documented parameter `a` is not in the function's signature
|
||||
--> DOC102_numpy.py:8:5
|
||||
|
|
||||
6 | Parameters
|
||||
7 | ----------
|
||||
8 | a : int
|
||||
| ^
|
||||
9 | The first number to add.
|
||||
10 | b : int
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `multiplier` is not in the function's signature
|
||||
--> DOC102_numpy.py:30:5
|
||||
|
|
||||
28 | lst : list of int
|
||||
29 | A list of integers.
|
||||
30 | multiplier : int
|
||||
| ^^^^^^^^^^
|
||||
31 | The multiplier for each element in the list.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `numbers` is not in the function's signature
|
||||
--> DOC102_numpy.py:48:5
|
||||
|
|
||||
46 | Parameters
|
||||
47 | ----------
|
||||
48 | numbers : list of int
|
||||
| ^^^^^^^
|
||||
49 | A list of integers to search through.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `name` is not in the function's signature
|
||||
--> DOC102_numpy.py:66:5
|
||||
|
|
||||
64 | Parameters
|
||||
65 | ----------
|
||||
66 | name : str
|
||||
| ^^^^
|
||||
67 | The name of the user.
|
||||
68 | age : int
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `age` is not in the function's signature
|
||||
--> DOC102_numpy.py:68:5
|
||||
|
|
||||
66 | name : str
|
||||
67 | The name of the user.
|
||||
68 | age : int
|
||||
| ^^^
|
||||
69 | The age of the user.
|
||||
70 | email : str
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `email` is not in the function's signature
|
||||
--> DOC102_numpy.py:70:5
|
||||
|
|
||||
68 | age : int
|
||||
69 | The age of the user.
|
||||
70 | email : str
|
||||
| ^^^^^
|
||||
71 | The user's email address.
|
||||
72 | location : str, optional
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `tax_rate` is not in the function's signature
|
||||
--> DOC102_numpy.py:97:5
|
||||
|
|
||||
95 | item_prices : list of float
|
||||
96 | A list of prices for each item.
|
||||
97 | tax_rate : float
|
||||
| ^^^^^^^^
|
||||
98 | The tax rate to apply.
|
||||
99 | discount : float
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `to_address` is not in the function's signature
|
||||
--> DOC102_numpy.py:124:5
|
||||
|
|
||||
122 | body : str
|
||||
123 | The content of the email.
|
||||
124 | to_address : str
|
||||
| ^^^^^^^^^^
|
||||
125 | The recipient's email address.
|
||||
126 | cc_address : str, optional
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `cc_address` is not in the function's signature
|
||||
--> DOC102_numpy.py:126:5
|
||||
|
|
||||
124 | to_address : str
|
||||
125 | The recipient's email address.
|
||||
126 | cc_address : str, optional
|
||||
| ^^^^^^^^^^
|
||||
127 | The email address for CC, by default None.
|
||||
128 | bcc_address : str, optional
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `items` is not in the function's signature
|
||||
--> DOC102_numpy.py:168:5
|
||||
|
|
||||
166 | order_id : int
|
||||
167 | The unique identifier for the order.
|
||||
168 | *items : str
|
||||
| ^^^^^^
|
||||
169 | Variable length argument list of items in the order.
|
||||
170 | **details : dict
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `details` is not in the function's signature
|
||||
--> DOC102_numpy.py:170:5
|
||||
|
|
||||
168 | *items : str
|
||||
169 | Variable length argument list of items in the order.
|
||||
170 | **details : dict
|
||||
| ^^^^^^^^^
|
||||
171 | Additional details such as shipping method and address.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `value` is not in the function's signature
|
||||
--> DOC102_numpy.py:197:9
|
||||
|
|
||||
195 | Parameters
|
||||
196 | ----------
|
||||
197 | value : int, optional
|
||||
| ^^^^^
|
||||
198 | The initial value of the calculator, by default 0.
|
||||
199 | """
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `number` is not in the function's signature
|
||||
--> DOC102_numpy.py:209:9
|
||||
|
|
||||
207 | Parameters
|
||||
208 | ----------
|
||||
209 | number : int or float
|
||||
| ^^^^^^
|
||||
210 | The first number to add.
|
||||
211 | number2 : int or float
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `value_str` is not in the function's signature
|
||||
--> DOC102_numpy.py:230:9
|
||||
|
|
||||
228 | Parameters
|
||||
229 | ----------
|
||||
230 | value_str : str
|
||||
| ^^^^^^^^^
|
||||
231 | The string representing the initial value.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `number` is not in the function's signature
|
||||
--> DOC102_numpy.py:249:9
|
||||
|
|
||||
247 | Parameters
|
||||
248 | ----------
|
||||
249 | number : any
|
||||
| ^^^^^^
|
||||
250 | The value to check.
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
||||
DOC102 Documented parameter `a` is not in the function's signature
|
||||
--> DOC102_numpy.py:300:5
|
||||
|
|
||||
298 | Parameters
|
||||
299 | ----------
|
||||
300 | a
|
||||
| ^
|
||||
301 | The first number to add.
|
||||
302 | b
|
||||
|
|
||||
help: Remove the extraneous parameter from the docstring
|
||||
|
|
@ -1,9 +1,6 @@
|
|||
use ruff_python_ast::{self as ast, Stmt};
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::{Violation, checkers::ast::Checker};
|
||||
use crate::Violation;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `break` statements outside of loops.
|
||||
|
|
@ -29,28 +26,3 @@ impl Violation for BreakOutsideLoop {
|
|||
"`break` outside loop".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// F701
|
||||
pub(crate) fn break_outside_loop<'a>(
|
||||
checker: &Checker,
|
||||
stmt: &'a Stmt,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt>,
|
||||
) {
|
||||
let mut child = stmt;
|
||||
for parent in parents {
|
||||
match parent {
|
||||
Stmt::For(ast::StmtFor { orelse, .. }) | Stmt::While(ast::StmtWhile { orelse, .. }) => {
|
||||
if !orelse.contains(child) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
Stmt::FunctionDef(_) | Stmt::ClassDef(_) => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
child = parent;
|
||||
}
|
||||
|
||||
checker.report_diagnostic(BreakOutsideLoop, stmt.range());
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,9 +1,6 @@
|
|||
use ruff_python_ast::{self as ast, Stmt};
|
||||
|
||||
use ruff_macros::{ViolationMetadata, derive_message_formats};
|
||||
use ruff_text_size::Ranged;
|
||||
|
||||
use crate::{Violation, checkers::ast::Checker};
|
||||
use crate::Violation;
|
||||
|
||||
/// ## What it does
|
||||
/// Checks for `continue` statements outside of loops.
|
||||
|
|
@ -29,28 +26,3 @@ impl Violation for ContinueOutsideLoop {
|
|||
"`continue` not properly in loop".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// F702
|
||||
pub(crate) fn continue_outside_loop<'a>(
|
||||
checker: &Checker,
|
||||
stmt: &'a Stmt,
|
||||
parents: &mut impl Iterator<Item = &'a Stmt>,
|
||||
) {
|
||||
let mut child = stmt;
|
||||
for parent in parents {
|
||||
match parent {
|
||||
Stmt::For(ast::StmtFor { orelse, .. }) | Stmt::While(ast::StmtWhile { orelse, .. }) => {
|
||||
if !orelse.contains(child) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
Stmt::FunctionDef(_) | Stmt::ClassDef(_) => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
child = parent;
|
||||
}
|
||||
|
||||
checker.report_diagnostic(ContinueOutsideLoop, stmt.range());
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -191,6 +191,7 @@ pub(crate) fn redefined_while_unused(checker: &Checker, scope_id: ScopeId, scope
         },
         binding.range(),
     );
+    diagnostic.add_primary_tag(ruff_db::diagnostic::DiagnosticTag::Unnecessary);

     diagnostic.secondary_annotation(
         format_args!("previous definition of `{name}` here"),
@ -23,16 +23,17 @@ invalid-syntax: missing closing quote in string literal
|
|||
9 | # Unterminated f-string
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> invalid_characters_syntax_error.py:7:7
|
||||
PLE2510 Invalid unescaped character backspace, use "\b" instead
|
||||
--> invalid_characters_syntax_error.py:7:6
|
||||
|
|
||||
5 | b = '␈'
|
||||
6 | # Unterminated string
|
||||
7 | b = '␈
|
||||
| ^
|
||||
| ^
|
||||
8 | b = '␈'
|
||||
9 | # Unterminated f-string
|
||||
|
|
||||
help: Replace with escape sequence
|
||||
|
||||
PLE2510 Invalid unescaped character backspace, use "\b" instead
|
||||
--> invalid_characters_syntax_error.py:8:6
|
||||
|
|
@ -46,6 +47,18 @@ PLE2510 Invalid unescaped character backspace, use "\b" instead
|
|||
|
|
||||
help: Replace with escape sequence
|
||||
|
||||
PLE2510 Invalid unescaped character backspace, use "\b" instead
|
||||
--> invalid_characters_syntax_error.py:10:7
|
||||
|
|
||||
8 | b = '␈'
|
||||
9 | # Unterminated f-string
|
||||
10 | b = f'␈
|
||||
| ^
|
||||
11 | b = f'␈'
|
||||
12 | # Implicitly concatenated
|
||||
|
|
||||
help: Replace with escape sequence
|
||||
|
||||
invalid-syntax: f-string: unterminated string
|
||||
--> invalid_characters_syntax_error.py:10:7
|
||||
|
|
||||
|
|
@ -109,11 +122,12 @@ invalid-syntax: missing closing quote in string literal
|
|||
| ^^
|
||||
|
|
||||
|
||||
invalid-syntax: Expected a statement
|
||||
--> invalid_characters_syntax_error.py:13:16
|
||||
PLE2510 Invalid unescaped character backspace, use "\b" instead
|
||||
--> invalid_characters_syntax_error.py:13:15
|
||||
|
|
||||
11 | b = f'␈'
|
||||
12 | # Implicitly concatenated
|
||||
13 | b = '␈' f'␈' '␈
|
||||
| ^
|
||||
| ^
|
||||
|
|
||||
help: Replace with escape sequence
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ mod tests {
|
|||
use crate::rules::{isort, pyupgrade};
|
||||
use crate::settings::types::PreviewMode;
|
||||
use crate::test::{test_path, test_snippet};
|
||||
use crate::{assert_diagnostics, settings};
|
||||
use crate::{assert_diagnostics, assert_diagnostics_diff, settings};
|
||||
|
||||
#[test_case(Rule::ConvertNamedTupleFunctionalToClass, Path::new("UP014.py"))]
|
||||
#[test_case(Rule::ConvertTypedDictFunctionalToClass, Path::new("UP013.py"))]
|
||||
|
|
@ -126,6 +126,7 @@ mod tests {
|
|||
}
|
||||
|
||||
#[test_case(Rule::SuperCallWithParameters, Path::new("UP008.py"))]
|
||||
#[test_case(Rule::TypingTextStrAlias, Path::new("UP019.py"))]
|
||||
fn rules_preview(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}__preview", path.to_string_lossy());
|
||||
let diagnostics = test_path(
|
||||
|
|
@ -139,6 +140,28 @@ mod tests {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::NonPEP695TypeAlias, Path::new("UP040.py"))]
|
||||
#[test_case(Rule::NonPEP695TypeAlias, Path::new("UP040.pyi"))]
|
||||
#[test_case(Rule::NonPEP695GenericClass, Path::new("UP046_0.py"))]
|
||||
#[test_case(Rule::NonPEP695GenericClass, Path::new("UP046_1.py"))]
|
||||
#[test_case(Rule::NonPEP695GenericFunction, Path::new("UP047.py"))]
|
||||
fn type_var_default_preview(rule_code: Rule, path: &Path) -> Result<()> {
|
||||
let snapshot = format!("{}__preview_diff", path.to_string_lossy());
|
||||
assert_diagnostics_diff!(
|
||||
snapshot,
|
||||
Path::new("pyupgrade").join(path).as_path(),
|
||||
&settings::LinterSettings {
|
||||
preview: PreviewMode::Disabled,
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
&settings::LinterSettings {
|
||||
preview: PreviewMode::Enabled,
|
||||
..settings::LinterSettings::for_rule(rule_code)
|
||||
},
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_0.py"))]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_1.py"))]
|
||||
#[test_case(Rule::QuotedAnnotation, Path::new("UP037_2.pyi"))]
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,7 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use ruff_macros::{ViolationMetadata, derive_message_formats};
 use ruff_python_ast::helpers::any_over_expr;
 use ruff_python_ast::str::{leading_quote, trailing_quote};
-use ruff_python_ast::{self as ast, Expr, Keyword};
+use ruff_python_ast::{self as ast, Expr, Keyword, StringFlags};
 use ruff_python_literal::format::{
     FieldName, FieldNamePart, FieldType, FormatPart, FormatString, FromTemplate,
 };
@@ -430,7 +430,7 @@ pub(crate) fn f_strings(checker: &Checker, call: &ast::ExprCall, summary: &Forma
             // dot is the start of an attribute access.
             break token.start();
         }
-        TokenKind::String => {
+        TokenKind::String if !token.unwrap_string_flags().is_unclosed() => {
             match FStringConversion::try_convert(token.range(), &mut summary, checker.locator())
             {
                 // If the format string contains side effects that would need to be repeated,
@ -14,13 +14,14 @@ use ruff_python_ast::{
|
|||
use ruff_python_semantic::SemanticModel;
|
||||
use ruff_text_size::{Ranged, TextRange};
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
use crate::preview::is_type_var_default_enabled;
|
||||
|
||||
pub(crate) use non_pep695_generic_class::*;
|
||||
pub(crate) use non_pep695_generic_function::*;
|
||||
pub(crate) use non_pep695_type_alias::*;
|
||||
pub(crate) use private_type_parameter::*;
|
||||
|
||||
use crate::checkers::ast::Checker;
|
||||
|
||||
mod non_pep695_generic_class;
|
||||
mod non_pep695_generic_function;
|
||||
mod non_pep695_type_alias;
|
||||
|
|
@ -122,6 +123,10 @@ impl Display for DisplayTypeVar<'_> {
|
|||
}
|
||||
}
|
||||
}
|
||||
if let Some(default) = self.type_var.default {
|
||||
f.write_str(" = ")?;
|
||||
f.write_str(&self.source[default.range()])?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
|
@ -133,66 +138,63 @@ impl<'a> From<&'a TypeVar<'a>> for TypeParam {
|
|||
name,
|
||||
restriction,
|
||||
kind,
|
||||
default: _, // TODO(brent) see below
|
||||
default,
|
||||
}: &'a TypeVar<'a>,
|
||||
) -> Self {
|
||||
let default = default.map(|expr| Box::new(expr.clone()));
|
||||
match kind {
|
||||
TypeParamKind::TypeVar => {
|
||||
TypeParam::TypeVar(TypeParamTypeVar {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
name: Identifier::new(*name, TextRange::default()),
|
||||
bound: match restriction {
|
||||
Some(TypeVarRestriction::Bound(bound)) => Some(Box::new((*bound).clone())),
|
||||
Some(TypeVarRestriction::Constraint(constraints)) => {
|
||||
Some(Box::new(Expr::Tuple(ast::ExprTuple {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
elts: constraints.iter().map(|expr| (*expr).clone()).collect(),
|
||||
ctx: ast::ExprContext::Load,
|
||||
parenthesized: true,
|
||||
})))
|
||||
}
|
||||
Some(TypeVarRestriction::AnyStr) => {
|
||||
Some(Box::new(Expr::Tuple(ast::ExprTuple {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
elts: vec![
|
||||
Expr::Name(ExprName {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
id: Name::from("str"),
|
||||
ctx: ast::ExprContext::Load,
|
||||
}),
|
||||
Expr::Name(ExprName {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
id: Name::from("bytes"),
|
||||
ctx: ast::ExprContext::Load,
|
||||
}),
|
||||
],
|
||||
ctx: ast::ExprContext::Load,
|
||||
parenthesized: true,
|
||||
})))
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
// We don't handle defaults here yet. Should perhaps be a different rule since
|
||||
// defaults are only valid in 3.13+.
|
||||
default: None,
|
||||
})
|
||||
}
|
||||
TypeParamKind::TypeVar => TypeParam::TypeVar(TypeParamTypeVar {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
name: Identifier::new(*name, TextRange::default()),
|
||||
bound: match restriction {
|
||||
Some(TypeVarRestriction::Bound(bound)) => Some(Box::new((*bound).clone())),
|
||||
Some(TypeVarRestriction::Constraint(constraints)) => {
|
||||
Some(Box::new(Expr::Tuple(ast::ExprTuple {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
elts: constraints.iter().map(|expr| (*expr).clone()).collect(),
|
||||
ctx: ast::ExprContext::Load,
|
||||
parenthesized: true,
|
||||
})))
|
||||
}
|
||||
Some(TypeVarRestriction::AnyStr) => {
|
||||
Some(Box::new(Expr::Tuple(ast::ExprTuple {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
elts: vec![
|
||||
Expr::Name(ExprName {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
id: Name::from("str"),
|
||||
ctx: ast::ExprContext::Load,
|
||||
}),
|
||||
Expr::Name(ExprName {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
id: Name::from("bytes"),
|
||||
ctx: ast::ExprContext::Load,
|
||||
}),
|
||||
],
|
||||
ctx: ast::ExprContext::Load,
|
||||
parenthesized: true,
|
||||
})))
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
default,
|
||||
}),
|
||||
TypeParamKind::TypeVarTuple => TypeParam::TypeVarTuple(TypeParamTypeVarTuple {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
name: Identifier::new(*name, TextRange::default()),
|
||||
default: None,
|
||||
default,
|
||||
}),
|
||||
TypeParamKind::ParamSpec => TypeParam::ParamSpec(TypeParamParamSpec {
|
||||
range: TextRange::default(),
|
||||
node_index: ruff_python_ast::AtomicNodeIndex::NONE,
|
||||
name: Identifier::new(*name, TextRange::default()),
|
||||
default: None,
|
||||
default,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
|
@@ -318,8 +320,8 @@ pub(crate) fn expr_name_to_type_var<'a>(
         .first()
         .is_some_and(Expr::is_string_literal_expr)
     {
-        // TODO(brent) `default` was added in PEP 696 and Python 3.13 but can't be used in
-        // generic type parameters before that
+        // `default` was added in PEP 696 and Python 3.13. We now support converting
+        // TypeVars with defaults to PEP 695 type parameters.
         //
         // ```python
         // T = TypeVar("T", default=Any, bound=str)
@@ -367,21 +369,22 @@ fn in_nested_context(checker: &Checker) -> bool {
 }

 /// Deduplicate `vars`, returning `None` if `vars` is empty or any duplicates are found.
-fn check_type_vars(vars: Vec<TypeVar<'_>>) -> Option<Vec<TypeVar<'_>>> {
+/// Also returns `None` if any `TypeVar` has a default value and preview mode is not enabled.
+fn check_type_vars<'a>(vars: Vec<TypeVar<'a>>, checker: &Checker) -> Option<Vec<TypeVar<'a>>> {
     if vars.is_empty() {
         return None;
     }

+    // If any type variables have defaults and preview mode is not enabled, skip the rule
+    if vars.iter().any(|tv| tv.default.is_some())
+        && !is_type_var_default_enabled(checker.settings())
+    {
+        return None;
+    }
+
     // If any type variables were not unique, just bail out here. this is a runtime error and we
-    // can't predict what the user wanted. also bail out if any Python 3.13+ default values are
-    // found on the type parameters
-    (vars
-        .iter()
-        .unique_by(|tvar| tvar.name)
-        .filter(|tvar| tvar.default.is_none())
-        .count()
-        == vars.len())
-    .then_some(vars)
+    // can't predict what the user wanted.
+    (vars.iter().unique_by(|tvar| tvar.name).count() == vars.len()).then_some(vars)
 }

 /// Search `class_bases` for a `typing.Generic` base class. Returns the `Generic` expression (if
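To make the preview-gated `default` handling concrete, here is a small hypothetical Python sketch (requires Python 3.13+; the class names are invented): the first class uses the pre-PEP 695 spelling with a PEP 696 `default`, and the second shows the PEP 695 type-parameter form that the preview fix can now produce for it.

```python
from typing import Generic, TypeVar

T = TypeVar("T", default=int)  # PEP 696 default, Python 3.13+


class OldStyleQueue(Generic[T]):
    """Old-style generic class; with preview enabled, the rule can now rewrite this."""


class NewStyleQueue[T = int]:
    """Equivalent PEP 695 class with the default carried over."""
```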
@@ -186,7 +186,7 @@ pub(crate) fn non_pep695_generic_class(checker: &Checker, class_def: &StmtClassD
     //
     // just because we can't confirm that `SomethingElse` is a `TypeVar`
     if !visitor.any_skipped {
-        let Some(type_vars) = check_type_vars(visitor.vars) else {
+        let Some(type_vars) = check_type_vars(visitor.vars, checker) else {
             diagnostic.defuse();
             return;
         };
@@ -154,7 +154,7 @@ pub(crate) fn non_pep695_generic_function(checker: &Checker, function_def: &Stmt
         }
     }

-    let Some(type_vars) = check_type_vars(type_vars) else {
+    let Some(type_vars) = check_type_vars(type_vars, checker) else {
         return;
     };
